I would like to upload a string of text and have that string uploaded to Cloud Storage. I've built it in plain JS, but I'm having issues adapting it into a cloud function.
/**
 * Serializes `exportObj` as JSON and uploads it to the storage bucket.
 *
 * Fixes over the original:
 *  - `putString` belongs to the Firebase *client* Web SDK; a bucket obtained
 *    server-side (Admin SDK / @google-cloud/storage) exposes
 *    `bucket.file(...).save(...)` instead, so the original threw
 *    "putString is not a function".
 *  - The "data:text/json;charset=utf-8," data-URL prefix is only meaningful
 *    in a browser; server-side we upload the raw JSON text.
 *  - The promise is now returned so a Cloud Function can wait for the upload.
 *
 * @param {Object} exportObj - Any JSON-serializable value to upload.
 * @returns {Promise<boolean>} Resolves true on success, false on failure.
 */
function download(exportObj) {
  var databuk = gcs.bucket('******.appspot.com');
  var dataStr = JSON.stringify(exportObj);
  return databuk
    .file('toctest.json')
    .save(dataStr, { contentType: 'application/json' })
    .then(function () {
      console.log('Uploaded a data_url string!');
      return true;
    })
    .catch(function (err) {
      console.log("error", err);
      return false;
    });
}
I have some code above! The string is "exportObj"
You'll want to use the Admin SDK for this. It'll be something along the lines of:
// Initialize the Admin SDK once per process; with no arguments it picks up
// the default credentials of the Cloud Functions environment.
const admin = require('firebase-admin');
admin.initializeApp();
// ... then later, in your function
// file() only creates a local reference; nothing is read or written yet.
const file = admin.storage().bucket().file('path/to/your/file.txt');
// save() uploads the string contents. Returning the promise from the
// function body lets Cloud Functions keep the instance alive until the
// upload actually finishes.
return file.save('This will get stored in my storage bucket.', {
gzip: true,
contentType: 'text/plain'
}).then(() => {
console.log('all done!');
});
The specific "save" method is documented here.
Related
I have the following index.js that's triggered when a thumbnail is generated in cloud storage. It seems to work fine.
I'd like to replace the console.log line with code that adds a field like {"thumbnail_done":true} to the firebase document docid found in the script. I'm not clear on how to do that.
exports.thumbComplete = (event, context) => {
const fname = event.name;
let suffix = "_200x200.jpeg"
if (fname.endsWith(suffix)) {
let docid = fname.substring(0, fname.length - suffix.length);
console.log(`thumbnail processing complete: ${docid}`);
}
};
Thanks!
Got it working with the following:
// The Cloud Functions for Firebase SDK to create Cloud Functions and set up triggers.
const functions = require('firebase-functions');
// The Firebase Admin SDK to access Firestore.
const admin = require('firebase-admin');
admin.initializeApp();
exports.generateThumbnail = functions.storage.object().onFinalize(async (object) => {
const fname = object.name;
let suffix = "_200x200.jpeg"
if (fname.endsWith(suffix)) {
let docid = fname.substring(0, fname.length - suffix.length);
await admin.firestore().doc(`photo/${docid}`).update({ 'uploaded': true });
}
});
I wrote two functions (~ 07/20/21) using the FB admin SDK to upload a file to FB Storage. It worked for 300+ uploads. I came back to it yesterday (08/11) and it no longer works, and has had no code changes (that I know of).
The FB functions Logs are saying app ReferenceError: bucket is not defined at Object.writeFileToFirebase (/workspace/uploadUtils.js:12:18) The storage bucket is defined in my ./firebaseConfig.js in accordance with the documentation.
const firebase = require('firebase')
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const firebaseConfig = require('./firebaseConfig.js')
firebase.initializeApp(firebaseConfig)
admin.initializeApp(firebaseConfig)
const bucket = admin.storage().bucket()
And my two functions are exported from uploadUtils.js
/**
 * Writes `filebuffer` to the Cloud Storage object `filename` with the
 * given content type.
 *
 * Fixes over the original: `stream.end()` returns the stream, not a
 * promise, so `await filestream.end(filebuffer).catch(...)` threw a
 * TypeError — and `err` was not in scope, so `functions.logger.log(err)`
 * itself raised a ReferenceError. The write stream is now wrapped in a
 * promise that settles on the 'finish'/'error' events.
 *
 * @param {string} filename - Destination object path in the bucket.
 * @param {string} mimetype - Content type stored in the object metadata.
 * @param {Buffer} filebuffer - File contents to upload.
 * @returns {Promise<void>} Resolves when the upload has completed.
 */
async function writeFileToFirebase(filename, mimetype, filebuffer) {
  const file = bucket.file(filename)
  const filestream = file.createWriteStream({
    metadata: {
      contentType: mimetype
    }
  })
  return new Promise((resolve, reject) => {
    filestream.on('finish', resolve)
    filestream.on('error', (err) => {
      functions.logger.log(err)
      reject(err)
    })
    filestream.end(filebuffer)
  })
}
/**
 * Resizes `filebuffer` to a 240x240 JPEG thumbnail via ImageMagick (gm)
 * and uploads it to the Cloud Storage object `newthumbname`.
 *
 * Fixes over the original:
 *  - `thumbstream.end(...).catch(console.log(err))` threw: `end()` returns
 *    the stream (no `.catch`), and `console.log(err)` was *called* rather
 *    than passed as a handler.
 *  - The function returned before the resize/upload finished; it now
 *    returns a promise that settles when the upload completes, so callers
 *    (and the Cloud Functions runtime) can await it.
 *  - A gm resize error is now propagated instead of silently producing an
 *    undefined buffer.
 *
 * @param {string} newthumbname - Destination object path for the thumbnail.
 * @param {string} mimetype - Content type stored in the object metadata.
 * @param {Buffer} filebuffer - Source image contents.
 * @returns {Promise<void>} Resolves when the thumbnail upload completes.
 */
async function createThumbnail(newthumbname, mimetype, filebuffer) {
  const file = bucket.file(newthumbname)
  const thumbstream = file.createWriteStream({
    metadata: {
      contentType: mimetype
    }
  })
  const gm = require('gm').subClass({
    imageMagick: true
  })
  return new Promise((resolve, reject) => {
    gm(filebuffer)
      .resize(240, 240)
      .toBuffer('jpg', (err, thumbbuffer) => {
        if (err) {
          reject(err)
          return
        }
        thumbstream.on('finish', resolve)
        thumbstream.on('error', reject)
        thumbstream.end(thumbbuffer)
      })
  })
}
// Re-export the upload helpers so the Cloud Functions entry point can use them.
exports.writeFileToFirebase = writeFileToFirebase
exports.createThumbnail = createThumbnail
Line 12 from the err msg is the const file = admin.storage().bucket()
My declaration of bucket is at a higher scope than the function call. And I'm using the default bucket I've specified in the firebaseConfig.js.
Can anyone tell me what's wrong with my bucket declaration? Or is my problem elsewhere?
It looks like I needed to move these two lines into the function declaration. They're not inherited... I have no idea why this used to work before this change.
const admin = require('firebase-admin')
const bucket = admin.storage().bucket()
I'm using cloud functions to resize and generate thumbnails for uploaded images in Firebase Storage. On the first upload the thumbnails are generated but i also want to be able to edit those images while keeping the same name.
This is how i'm doing it :
I upload an image with this function on the client :
uploadImage (imageFile, folderName, imageName){
const storageRef = firebase.storage().ref();
// need to prefix image name with "slot_"
storageRef.child(`${folderName}/slot_${imageName}`).put(imageFile)
}
Thumbnails are then generated with this cloud function :
// Storage-triggered thumbnail generator (temp-file variant).
// NOTE(review): the closing `});` of this handler is missing from the paste;
// `gcs`, `dirname`, `join`, `tmpdir`, `fileSystem` (fs-extra) and `sharp` are
// assumed to be imported elsewhere in the file.
export const generateThumbs = functions.storage.object().onFinalize(async
object => {
const bucket = gcs.bucket(object.bucket)
const filePath = object.name;
const fileName = filePath.split('/').pop();
const bucketDir = dirname(filePath);
// Thumbnail edge sizes (px) to generate for each uploaded image.
const slotSizes = [64,250]
// Per-image scratch dir under /tmp; 'source.png' is the downloaded original.
const temporaryDirectory = join(tmpdir(), `thumbs_${fileName}`);
const temporaryFilePath = join(temporaryDirectory, 'source.png');
// avoid loop includes only images
// (generated thumbnails re-trigger this function; skip them and non-images)
if (fileName.includes('thumb_') ||
!object.contentType.includes('image')) {
return false;
}
await fileSystem.ensureDir(temporaryDirectory);
// original file in temp directory
await bucket.file(filePath).download({
destination: temporaryFilePath
});
// One resize+upload promise per target size, run in parallel below.
const slotUploadPromises = slotSizes.map(async size => {
const thumbName = `thumb_${size}_${fileName}`;
const thumbPath = join(temporaryDirectory, thumbName);
await sharp(temporaryFilePath).resize(size, size).toFile(thumbPath);
return bucket.upload(thumbPath, {
destination: join(bucketDir, thumbName),
metadata: {
contentType: 'image/jpeg',
}
})
});
await Promise.all(slotUploadPromises)
// removes temp directory
// NOTE(review): if this cleanup ever fails silently, a stale /tmp copy could
// be re-used on a warm instance — suspected cause of the stale-thumbnail bug.
return fileSystem.remove(temporaryDirectory);
So if i call uploadImage(appleImage, 'MyImages', 'test') i'll have in my storage folder 3 images (naming IS important):
slot_test
thumb_250_slot_test
thumb_64_slot_test
At this point if i call again uploadImage(avocadoImage, 'MyImages', 'test') i'd expect to have in the storage the same "naming structure" but with the updated image in place of the old ones, so the new thumbnails should just overwrite the old ones. What actually happens is that the "base" image gets updated while both thumbnails don't. Ending up with :
slot_test (displaying the UPDATED image)
thumb_250_slot_test (displaying the thumbnail of the OLD image)
thumb_64_slot_test (displaying the thumbnail of the OLD image)
I've logged the cloud function extensively, no errors are thrown from the function during execution, thumbnails are created normally and the firebase console also updates the creation date of the thumbnails but i still get the old thumbnails image. I've tried removing the temporary directory using fs-extra emptyDir(), i've tried to remove every single thumbnail first (via client) and then uploading again with no luck.
EDIT : Found a solution to my problem by NOT creating any temporary folder or temporary files and using sharp pipeline instead. That said i'm still missing the underlying problem in the code above. I'm quite convinced that, for whatever reason, the function didn't remove the temporary folder and that was generating problems whenever i tried to overwrite the images. This function works :
// Streaming variant: pipes the source object through sharp straight into an
// upload stream, avoiding /tmp entirely (this fixed the stale-thumbnail bug).
// NOTE(review): `slotSizes` is not defined in this snippet (it was local to
// the previous version) and the handler's closing `});` is missing from the
// paste — both presumably exist in the real file; confirm.
export const generateThumbs = functions.storage.object().onFinalize(async object => {
const bucket = gcs.bucket(object.bucket)
const filePath = object.name;
const fileName = filePath.split('/').pop();
const bucketDir = dirname(filePath);
// metadata file
const metadata = {
contentType: 'image/jpeg',
}
// Skip generated thumbnails (avoid retrigger loop) and non-image objects.
if (fileName.includes('thumb_') || !object.contentType.includes('image')) {
return false;
}
// Only "slot_" base images get thumbnails (see client uploadImage()).
if (fileName.includes('slot')) {
// array of promises
const slotUploadPromises = slotSizes.map(async size => {
const thumbName = `thumb_${size}_${fileName}`;
const thumbPath = join(path.dirname(filePath), thumbName);
const thumbnailUploadStream = bucket.file(thumbPath).createWriteStream({ metadata });
// sharp() with no input acts as a transform stream:
// bucket read stream -> resize pipeline -> bucket write stream.
const pipeline = sharp();
pipeline.resize(size, Math.floor((size * 9) / 16)).max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
// Resolve/reject only when the upload stream finishes or errors.
return new Promise((resolve, reject) =>
thumbnailUploadStream
.on('finish', resolve)
.on('error', reject));
});
await Promise.all(slotUploadPromises)
}
I have to send a file to an API, therefore I have to use fs.readFileSync(). After uploading the picture to the storage, I am calling my function to execute the API call. But I cannot get the file from the storage. This is a section of the code, which always gets null in the result. I also tried .getFiles() without a parameter and then I got all files, but I don't want to filter them by iteration.
exports.stripe_uploadIDs = functions.https //.region("europe-west1")
.onCall((data, context) => {
const authID = context.auth.uid;
console.log("request is authentificated? :" + authID);
if (!authID) {
throw new functions.https.HttpsError("not authorized", "not authorized");
}
let accountID;
let result_fileUpload;
let tempFile = path.join(os.tmpdir(), "id_front.jpg");
const options_id_front_jpeg = {
prefix: "/user/" + authID + "/id_front.jpg"
};
const storageRef = admin
.storage()
.bucket()
.getFiles(options_id_front)
.then(results => {
console.log("JPG" + JSON.stringify(results));
// need to write this file to tempFile
return results;
});
const paymentRef = storageRef.then(() => {
return admin
.database()
.ref("Payment/" + authID)
.child("accountID")
.once("value");
});
const setAccountID = paymentRef.then(snap => {
accountID = snap.val();
return accountID;
});
const fileUpload = setAccountID.then(() => {
return Stripe.fileUploads.create(
{
purpose: "identity_document",
file: {
data: tempFile, // Documentation says I should use fs.readFileSync("filepath")
name: "id_front.jpg",
type: "application/octet-stream"
}
},
{ stripe_account: accountID }
);
});
const fileResult = fileUpload.then(result => {
result_fileUpload = result;
console.log(JSON.stringify(result_fileUpload));
return result_fileUpload;
});
return fileResult;
});
Result is:
JPG[[]]
You need to download your file from a bucket to your local function context env.
After your Firebase function start executing you can call the below:
More or less the below should work, just tweak to your needs. Call this within you .onCall context, you get the idea
import admin from 'firebase-admin';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';
admin.initializeApp();
const { log } = console;
/**
 * Downloads `filePath` from `fileBucket` into the function's local /tmp,
 * optionally re-uploads it, then deletes the local copy.
 *
 * NOTE(review): 'MyFile.ext' and 'DONT_FORGET_CONTEN_TYPE' are template
 * placeholders — replace them with real values for your use case.
 *
 * @param fileBucket - Name of the Storage bucket to read from.
 * @param filePath - Object path inside the bucket.
 */
async function tempFile(fileBucket: string, filePath: string) {
const bucket = admin.storage().bucket(fileBucket);
const fileName = 'MyFile.ext';
const tempFilePath = path.join(os.tmpdir(), fileName);
const metadata = {
contentType: 'DONT_FORGET_CONTEN_TYPE'
};
// Download the file to a local temp file.
// Do whatever you need with it.
await bucket.file(filePath).download({ destination: tempFilePath });
log('File downloaded to', tempFilePath);
// After you're done, and if you need to upload the modified file back to
// your bucket, then upload it.
// This is optional.
await bucket.upload(tempFilePath, {
destination: filePath,
metadata: metadata
});
// Free up disk space by releasing the temp file. Cloud Functions' /tmp is
// an in-memory filesystem, so leftover files keep consuming billed memory.
return fs.unlinkSync(tempFilePath);
}
I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
name: abc123,
raw: download url abc123
}
-
firebase thumb generator example (storage triggered):
// Storage-triggered thumbnail generator (Firebase sample).
// NOTE(review): `onChange`/`event.data` are the pre-v1.0 functions API; newer
// SDKs use functions.storage.object().onFinalize(object). `gcs`, `sharp`,
// `path`, THUMB_MAX_WIDTH and THUMB_MAX_HEIGHT must be defined elsewhere.
exports.generateThumbnail = functions.storage.object().onChange(event => {
const object = event.data; // The Storage object.
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
return;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a thumbnail.
// (Prevents an infinite retrigger loop when the thumbnail is uploaded.)
if (fileName.startsWith('thumb_')) {
console.log('Already a Thumbnail.');
return;
}
// Exit if this is a move or deletion event.
if (resourceState === 'not_exists') {
console.log('This is a deletion event.');
return;
}
// Exit if file exists but is not new and is only being triggered
// because of a metadata change.
if (resourceState === 'exists' && metageneration > 1) {
console.log('This is a metadata change event.');
return;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const metadata = {
contentType: contentType
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
// sharp() with no input acts as a transform stream between the two pipes.
const pipeline = sharp();
pipeline
.resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
// Settle only when the upload stream finishes (or errors).
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
});
});
-
How I want it to work (database triggered):
The database trigger event does not have a storage object. How can I access the storage object?
When a thumbnail is created successfully I want to add the downloadUrl of the thumb image to the database
// Database-triggered sketch (pre-v1.0 functions API): fires when a new
// image/name value is created under /data/{dataID}/childData/{childDataID}.
// NOTE(review): `streamAsPromise` comes from the elided "thumb generator"
// code; this snippet is intentionally incomplete.
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
const object = event.data; // This Storage object does not work when using a database trigger so how do I access the same storage object, I have the image name?
//thumb generator code...
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
//add thumb image download url back to the database
});
});
-
image: {
name: abc123,
raw: download url abc123
thumb: download url thumb_abc123 //added after thumb is created
}
This is how I got it to work.
Add cloud storage to your project.
If you want to retrieve the image signed url like I do, add projectId and keyFilename to you cloud storage reference
// Fix: the scoped package name is "@google-cloud/storage" — the "#" in the
// original was a markdown mangling and makes require() fail. keyFilename
// points at a service-account key so getSignedUrl() can sign URLs.
const gcs = require('@google-cloud/storage')({
  projectId: 'name-1234',
  keyFilename: './keyfile.json'
});
generateThumbnail function
// Database-triggered thumbnail generator (pre-v1.0 functions API).
// NOTE(review): inside *single quotes* `${dataID}` is a literal "$"-string,
// and Firebase path wildcards are written `{dataID}` (no `$`) — this trigger
// path is almost certainly a typo for '/data/{dataID}/childData/{childDataID}/image/name'; confirm.
exports.generateThumbnail = functions.database.ref('/data/${dataID}/childData/${childDataID/image/name').onCreate(image => {
const dataID = image.params.dataID
const childDataID = image.params.childDataID
const fileName = image.data.val()
//get your project storage bucket id
const storageBucket = functions.config().firebase.storageBucket
//path to image
// NOTE(review): this path does not include `fileName` — verify it matches
// how the client actually names uploaded objects.
const imagePath = `${dataID}/${childDataID}`
//open bucket
const bucket = gcs.bucket(storageBucket)
//location of the image in the bucket
const object = bucket.file(imagePath)
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const metadata = {
contentType: 'image/jpeg'
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(400, 400)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
// Build an expiry date ~5 years out for the signed URL (MM-DD-YYYY).
var today = new Date();
var dd = today.getDate();
var mm = today.getMonth() + 1; //January is 0!
const yyyy = today.getFullYear() + 5; // add a few years
if (dd < 10) {
dd = '0' + dd
}
if (mm < 10) {
mm = '0' + mm
}
today = mm + '-' + dd + '-' + yyyy;
// NOTE(review): this callback-style getSignedUrl is not awaited, so the
// function may terminate before the database update completes — consider
// returning the promise form instead.
bucket.file(filePath).getSignedUrl({
action: 'read',
expires: today
}, function(err, url) {
if (err) {
console.error(err);
return;
}
//add thumb image url to message (in database)
// NOTE(review): this path lacks the leading 'data/' segment used by the
// trigger path above — verify which one matches your schema.
return admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
thumb: url
})
});
});
})