Firebase cloud functions bucket upload no such file or directory

I'm writing a Cloud Function that triggers every time a file is uploaded to my default Firebase Storage bucket and, if it's an image, converts it to JPEG.
This is the official firebase example:
// Requires and constants from the official sample's preamble (omitted in the original post):
const functions = require('firebase-functions');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const os = require('os');
const fs = require('fs');
const mkdirp = require('mkdirp-promise');
const spawn = require('child-process-promise').spawn;
const JPEG_EXTENSION = '.jpg';

exports.processTripImage = functions.storage.object().onFinalize((object) => {
  const filePath = object.name;
  const baseFileName = path.basename(filePath, path.extname(filePath));
  const fileDir = path.dirname(filePath);
  const JPEGFilePath = path.normalize(path.format({ dir: fileDir, name: baseFileName, ext: JPEG_EXTENSION }));
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  const tempLocalDir = path.dirname(tempLocalFile);
  const tempLocalJPEGFile = path.join(os.tmpdir(), JPEGFilePath);
  // Exit if this is triggered on a file that is not an image.
  if (!object.contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }
  // Exit if the image is already a JPEG.
  if (object.contentType.startsWith('image/jpeg')) {
    console.log('Already a JPEG.');
    return null;
  }
  const bucket = gcs.bucket(object.bucket);
  // Create the temp directory where the storage file will be downloaded.
  return mkdirp(tempLocalDir).then(() => {
    // Download file from bucket.
    return bucket.file(filePath).download({ destination: tempLocalFile });
  }).then(() => {
    console.log('The file has been downloaded to', tempLocalFile);
    // Convert the image to JPEG using ImageMagick.
    return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
  }).then(() => {
    console.log('JPEG image created at', tempLocalJPEGFile);
    // Uploading the JPEG image.
    return bucket.upload(tempLocalJPEGFile, { destination: JPEGFilePath });
  }).then(() => {
    console.log('JPEG image uploaded to Storage at', JPEGFilePath);
    // Once the image has been converted delete the local files to free up disk space.
    fs.unlinkSync(tempLocalJPEGFile);
    fs.unlinkSync(tempLocalFile);
    return null;
  });
});
The problem is that it downloads and converts the file correctly, but when it tries to upload the JPEG back to the bucket it can't find the file in the tmp directory it just created.
My Cloud Function log shows a "no such file or directory" error on the upload step.
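A common cause of exactly this symptom (an editorial debugging sketch, not part of the original post): if spawn comes from Node's built-in child_process rather than from child-process-promise, it returns a ChildProcess instead of a promise, so the chain moves on to the upload before the conversion has finished and the JPEG does not exist yet. Reusing the snippet's variables, the conversion step can be made explicit about this:

// Debugging sketch: make sure the conversion is really awaited and that the
// output file exists before handing it to bucket.upload().
const spawn = require('child-process-promise').spawn; // NOT require('child_process').spawn

return spawn('convert', [tempLocalFile, tempLocalJPEGFile]).then(() => {
  if (!fs.existsSync(tempLocalJPEGFile)) {
    throw new Error('convert produced no output at ' + tempLocalJPEGFile);
  }
  return bucket.upload(tempLocalJPEGFile, { destination: JPEGFilePath });
});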

Related

How to create a folder in Firebase Storage using Admin API

Aim: to upload a file into a folder within Firebase Storage
E.g.
default_bucket/folder1/file1
default_bucket/folder1/file2
default_bucket/folder2/file3
Using Firebase client-side I am able to upload a file to a folder within Firebase Storage like this:
const storageRef = firebase.storage().ref();
const fileRef = storageRef.child(`${folder}/${filename}`);
const metadata = {
  contentType: file.type,
  customMetadata: { }
};
return fileRef.put(file, metadata);
If the folder does not exist, it gets created.
However I have not managed to do the same server-side using the Admin SDK.
The code below uploads the file into the default bucket.
But, I want to upload the file into a named folder within the default bucket.
The client side makes a POST request to the GCF, sending the file and a folder name.
Busboy is used to extract the folder name and file and pass them to the upload function, which uploads the file and then returns a download link for it.
index.js
const task = require('./tasks/upload-file-to-storage');
app.post('/upload', (req, res, next) => {
  try {
    let uploadedFilename;
    let folder;
    if (req.method === 'OPTIONS') {
      optionsHelper.doOptions(res);
    } else if (req.method === 'POST') {
      res.set('Access-Control-Allow-Origin', '*');
      const busboy = new Busboy({ headers: req.headers });
      const uploads = [];
      busboy.on('file', (fieldname, file, filename, encoding, mimetype) => {
        uploadedFilename = `${folder}^${filename}`;
        const filepath = path.join(os.tmpdir(), uploadedFilename);
        uploads.push({ file: filepath, filename: filename, folder: folder });
        file.pipe(fs.createWriteStream(filepath));
      });
      busboy.on('field', (fieldname, val) => {
        if (fieldname === 'folder') {
          folder = val;
        }
      });
      busboy.on('finish', () => {
        if (uploads.length === 0) {
          res.end('no files found');
        }
        for (let i = 0; i < uploads.length; i++) {
          const upload = uploads[i];
          const file = upload.file;
          task.uploadFile(helpers.fbAdmin, upload.folder, upload.file, uploadedFilename).then(downloadLink => {
            res.write(`${downloadLink}\n`);
            fs.unlinkSync(file);
            res.end();
          });
        }
      });
      busboy.end(req.rawBody);
    } else {
      // Client error - only support POST
      res.status(405).end();
    }
  } catch (e) {
    console.error(e);
    res.sendStatus(500);
  }
});

const api = functions.https.onRequest(app);

module.exports = {
  api
};
upload-file-to-storage.js
exports.uploadFile = (fbAdmin, folder, filepath, filename) => {
  // get the bucket to upload to
  const bucket = fbAdmin.storage().bucket(); // `venture-spec-sheet.appspot.com/${folder}`
  const downloadToken = uuid(); // renamed from the original `const uuid = uuid()`, which shadows the uuid function and throws
  // Uploads a local file to the bucket
  return bucket
    .upload(filepath, {
      gzip: true,
      metadata: {
        //destination: `/${folder}/${filename}`,
        cacheControl: 'public, max-age=31536000',
        firebaseStorageDownloadTokens: downloadToken
      }
    })
    .then(() => {
      const d = new Date();
      const expires = d.setFullYear(d.getFullYear() + 50);
      // get file from the bucket
      const myFile = fbAdmin
        .storage()
        .bucket()
        .file(filename);
      // generate a download link and return it
      return myFile.getSignedUrl({ action: 'read', expires: expires }).then(urls => {
        const signedUrl = urls[0];
        return signedUrl;
      });
    });
};
I've tried a few things:
Setting the bucket name to the default plus a folder. This resulted in a server error.
const bucket = fbAdmin.storage().bucket(`${defaultName}/${folder}`);
Setting the bucket name to the folder. This also resulted in a server error.
const bucket = fbAdmin.storage().bucket(folder);
And I've also tried using the destination property of uploadOptions, but this still puts the file in the default bucket:
.upload(filepath, {
  gzip: true,
  metadata: {
    destination: `${folder}/${filename}`, // and /${folder}/${filename}
  }
})
Is it possible to upload to a folder using the Admin SDK?
E.g. I want to upload a file so that it is placed in a named "folder".
I.e. so I can reference the file at the path: bucket/folder/file.jpg
In the example below, each "folder" is named with a firebase key.
Found the problem.
I stupidly declared the destination option in the wrong place.
Instead of in the metadata object:
return bucket
  .upload(filepath, {
    gzip: true,
    metadata: {
      destination: `${folder}/${filename}`,
      cacheControl: 'public, max-age=31536000',
      firebaseStorageDownloadTokens: uuid
    }
  })
It should have been on the options object:
return bucket
  .upload(filepath, {
    gzip: true,
    destination: `${folder}/${filename}`,
    metadata: {
      cacheControl: 'public, max-age=31536000',
      firebaseStorageDownloadTokens: uuid
    }
  })
With this change made, the file now gets uploaded into a named "folder".
There is also a Create Folder option beside the Upload File button for a bucket in the Storage console, so you can create folders in a bucket and upload files to them from the console. To create such "folders" in a bucket using the Admin API, add the folder path before the file reference, e.g.:
const blob = bucket.file('folder1/folder2/' + req.file.originalname);
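For reference, a minimal self-contained sketch of the same idea (the bucket and names are placeholders): Cloud Storage has no real directories, so a "folder" is just a slash-separated prefix on the object name, and the console merely renders that prefix as a folder.

const admin = require('firebase-admin');
admin.initializeApp();

// Hypothetical helper: upload a local file so it appears under folder/filename.
// The "folder" exists as soon as one object name carries the prefix.
async function uploadIntoFolder(localPath, folder, filename) {
  const bucket = admin.storage().bucket(); // default bucket
  await bucket.upload(localPath, { destination: `${folder}/${filename}` });
  return bucket.file(`${folder}/${filename}`);
}

uploadIntoFolder('/tmp/file1.jpg', 'folder1', 'file1.jpg')
  .then(file => console.log('Uploaded as', file.name));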

Generate thumbnail cloud function error with GCS

I have an app in which I want to generate a thumbnail for every image uploaded to Storage. I'm trying to use the generate-thumbnail Cloud Function, but when an image is uploaded to Storage the Cloud Function logs the following error in Firebase:
TypeError: gcs(...).bucket is not a function
at exports.generateThumbnail.functions.storage.object.onFinalize (/user_code/index.js:77:73)
at cloudFunctionNewSignature (/user_code/node_modules/firebase-functions/lib/cloud-functions.js:105:23)
at cloudFunction (/user_code/node_modules/firebase-functions/lib/cloud-functions.js:135:20)
at /var/tmp/worker/worker.js:768:24
at process._tickDomainCallback (internal/process/next_tick.js:135:7)
Here is my index.js file (only the GCS-related requires are shown):
const functions = require("firebase-functions");
const gcs = require("@google-cloud/storage");
// (other requires such as admin, path, os, fs, mkdirp and spawn are not shown in the post)
admin.initializeApp();
const THUMB_MAX_HEIGHT = 200;
const THUMB_MAX_WIDTH = 200;
// Thumbnail prefix added to file names.
const THUMB_PREFIX = 'thumb_';

exports.generateThumbnail = functions.storage.object().onFinalize((object) => {
  // File and directory paths.
  const filePath = object.name;
  const contentType = object.contentType; // This is the image MIME type
  const fileDir = path.dirname(filePath);
  const fileName = path.basename(filePath);
  const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`));
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  const tempLocalDir = path.dirname(tempLocalFile);
  const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
  // Exit if this is triggered on a file that is not an image.
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }
  // Exit if the image is already a thumbnail.
  if (fileName.startsWith(THUMB_PREFIX)) {
    console.log('Already a Thumbnail.');
    return null;
  }
  // Cloud Storage files.
  const bucket = gcs({keyFilename: 'service-account-credentials.json'}).bucket(object.bucket);
  const file = bucket.file(filePath);
  const thumbFile = bucket.file(thumbFilePath);
  const metadata = {
    contentType: contentType,
    // To enable Client-side caching you can set the Cache-Control headers here. Uncomment below.
    // 'Cache-Control': 'public,max-age=3600',
  };
  // Create the temp directory where the storage file will be downloaded.
  return mkdirp(tempLocalDir).then(() => {
    // Download file from bucket.
    return file.download({destination: tempLocalFile});
  }).then(() => {
    console.log('The file has been downloaded to', tempLocalFile);
    // Generate a thumbnail using ImageMagick.
    return spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile], {capture: ['stdout', 'stderr']});
  }).then(() => {
    console.log('Thumbnail created at', tempLocalThumbFile);
    // Uploading the Thumbnail.
    return bucket.upload(tempLocalThumbFile, {destination: thumbFilePath, metadata: metadata});
  }).then(() => {
    console.log('Thumbnail uploaded to Storage at', thumbFilePath);
    // Once the image has been uploaded delete the local files to free up disk space.
    fs.unlinkSync(tempLocalFile);
    fs.unlinkSync(tempLocalThumbFile);
    // Get the Signed URLs for the thumbnail and original image.
    const config = {
      action: 'read',
      expires: '03-01-2500',
    };
    return Promise.all([
      thumbFile.getSignedUrl(config),
      file.getSignedUrl(config),
    ]);
  }).then((results) => {
    console.log('Got Signed URLs.');
    const thumbResult = results[0];
    const originalResult = results[1];
    const thumbFileUrl = thumbResult[0];
    const fileUrl = originalResult[0];
    console.log('Got Signed URLs. ' + thumbFileUrl);
    return result; // note: 'result' is undefined here; presumably left over from the original sample's database write
  }).then(() => console.log('Thumbnail URLs saved to database.'));
});
I'm unable to understand what the issue is.
I made changes according to the suggestion in an answer, and now I get:
require(...) is not a function
at Object.<anonymous> (D:\mercury_two\mercury\functions\index.js:20:45)
at Module._compile (internal/modules/cjs/loader.js:678:30)
at Object.Module._extensions..js (internal/modules/cjs/loader.js:689:10)
at Module.load (internal/modules/cjs/loader.js:589:32)
at tryModuleLoad (internal/modules/cjs/loader.js:528:12)
at Function.Module._load (internal/modules/cjs/loader.js:520:3)
at Module.require (internal/modules/cjs/loader.js:626:17)
at require (internal/modules/cjs/helpers.js:20:18)
at C:\Users\Harsha\AppData\Roaming\npm\node_modules\firebase-tools\lib\triggerParser.js:15:15
at Object.<anonymous> (C:\Users\Harsha\AppData\Roaming\npm\node_modules\firebase-tools\lib\triggerParser.js:53:3)
After that I can't even deploy the function; firebase deploy throws the above error.
You should not do
const bucket = gcs({keyFilename: 'service-account-credentials.json'}).bucket(object.bucket);
but only
const bucket = gcs.bucket(object.bucket);
You need to use the service-account-credentials.json (i.e. the Service Account Key JSON file) only when you require the gcs module, at the top of your Cloud Function, as follows:
const gcs = require('@google-cloud/storage')({keyFilename: 'service-account-credentials.json'});
See the official Firebase sample that shows that in detail: https://github.com/firebase/functions-samples/blob/master/generate-thumbnail/functions/index.js
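Note that the require(...) is not a function error in the follow-up above is what newer releases of the library throw for this call style: from @google-cloud/storage 2.x onward the package exports a Storage class instead of a callable function. A sketch of the equivalent setup for the newer API:

// @google-cloud/storage >= 2.x exports a class, not a function.
const { Storage } = require('@google-cloud/storage');
const gcs = new Storage({ keyFilename: 'service-account-credentials.json' });
// Usage is then the same as in the answer above:
// const bucket = gcs.bucket(object.bucket);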

How to increase the max http request size limit for HTTP triggers in Cloud Functions

I'm trying to invoke a Google Cloud Function by sending images larger than 50 MB. The purpose of the cloud function is to resize the images and upload them to Google Cloud Storage.
However, when I send the HTTP post to my cloud function I get the following error: 413 Request Entity Too Large
Does anyone have any workaround to this error? Can I increase the http request size limit?
The limit for HTTP trigger upload and download payload size is documented at 10MB. There is no way to get this limit increased, but you can always file a feature request explaining why it should be increased.
You can let the client upload directly to Storage, authenticated into its own user folder, with security rules limiting the file size to whatever you wish, into a temp folder.
Then have a storage-triggered Cloud Function resize the image.
And delete the original image when finished.
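The client-side half of that workaround might look like this minimal sketch (the temp/{uid} path is an assumption, and it presumes the v8 web SDK is already initialized; the size cap itself would live in your Storage security rules):

// Client-side sketch: upload straight to Cloud Storage so the large file never
// passes through an HTTP-triggered function; a storage trigger then resizes it.
async function uploadOriginal(file) {
  const uid = firebase.auth().currentUser.uid;
  const ref = firebase.storage().ref(`temp/${uid}/${file.name}`);
  await ref.put(file); // security rules can reject request.resource.size above your limit
  return ref.fullPath;
}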
I'm attaching a code example of mine; you should add a delete of the original file after the conversion...
/**
 * When an image is uploaded to the Storage bucket, we generate a thumbnail automatically
 * using ImageMagick.
 * After the thumbnail has been generated and uploaded to Cloud Storage,
 * we write the public URL to the Firebase Realtime Database.
 */
exports.generateThumbnail = functions.storage.object().onFinalize((object) => {
  console.log('Generated Started');
  // File and directory paths.
  const filePath = object.name;
  const contentType = object.contentType; // This is the image MIME type
  const fileDir = path.dirname(filePath);
  const fileName = path.basename(filePath);
  const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`));
  const tempLocalFile = path.join(os.tmpdir(), filePath);
  const tempLocalDir = path.dirname(tempLocalFile);
  const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
  // Exit if this is triggered on a file that is not an image.
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    deleteImage(fileName); // deleteImage is the author's own helper (not shown); the original called it with an undefined 'filename'
    return null;
  }
  // Exit if the image is already a thumbnail.
  if (fileName.startsWith(THUMB_PREFIX)) {
    console.log('Already a Thumbnail.');
    deleteImage(fileName);
    return null;
  }
  // Cloud Storage files.
  const bucket = gcs.bucket(object.bucket);
  const file = bucket.file(filePath);
  const thumbFile = bucket.file(thumbFilePath);
  const metadata = {
    contentType: contentType,
    // Setting Cache-Control headers here enables client-side caching.
    'Cache-Control': 'public,max-age=3600',
  };
  // Create the temp directory where the storage file will be downloaded.
  return mkdirp(tempLocalDir).then(() => {
    console.log('DL Started');
    // Download file from bucket.
    return file.download({
      destination: tempLocalFile
    });
  }).then(() => {
    console.log('The file has been downloaded to', tempLocalFile);
    // Generate a thumbnail using ImageMagick.
    return spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile], {
      capture: ['stdout', 'stderr']
    });
  }).then(() => {
    console.log('Thumbnail created at', tempLocalThumbFile);
    // Uploading the Thumbnail.
    return bucket.upload(tempLocalThumbFile, {
      destination: thumbFilePath,
      metadata: metadata
    });
  }).then(() => {
    console.log('Thumbnail uploaded to Storage at', thumbFilePath);
    // Once the image has been uploaded delete the local files to free up disk space.
    fs.unlinkSync(tempLocalFile);
    fs.unlinkSync(tempLocalThumbFile);
    // Get the Signed URLs for the thumbnail and original image.
    const config = {
      action: 'read',
      expires: '03-01-2500',
    };
    return Promise.all([
      thumbFile.getSignedUrl(config),
      // file.getSignedUrl(config),
    ]);
  }).then((results) => {
    console.log('Got Signed URLs.');
    const thumbResult = results[0];
    // const originalResult = results[1];
    const thumbFileUrl = thumbResult[0];
    // const fileUrl = originalResult[0];
    // Add the URLs to the Database
    const uid = getUidFromFilePath(fileDir); // getUidFromFilePath is the author's own helper (not shown)
    if (!uid) return null;
    return Promise.all([
      admin.auth().updateUser(uid, {
        photoURL: thumbFileUrl
      }),
      admin.database().ref(`/users/${uid}/profile/photoURL`).set(thumbFileUrl)
    ]);
  }).then(() => console.log('Thumbnail URLs saved to database.'));
});
As of 2022, the request size limit for second-generation Cloud Functions is 32MB.

Firebase Storage - Is there any way to resize the original Image using cloud functions without renaming the file, without infinite loop in trigger

I have analyzed the thumbnail creation code that uses a Firebase Storage trigger.
I need to resize the current image without renaming the file, and it should not cause an infinite loop.
Basically, whenever I upload an image to Firebase Storage, it should be resized to a specific size, but other properties like the download URL and name should not change.
Below is the code for generating a thumbnail; I need to make it resize the current image instead. Please help.
As written, the infinite loop only stops when the filename has a thumb_ prefix; I need to stop it using other properties, like metadata or anything else.
exports.generateThumbnail = functions.storage.object().onChange((event) => {
  const object = event.data;
  const fileBucket = object.bucket;
  const filePath = object.name;
  const contentType = object.contentType;
  const resourceState = object.resourceState;
  const metageneration = object.metageneration;
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return null;
  }
  const fileName = path.basename(filePath);
  if (fileName.startsWith('thumb_')) {
    console.log('Already a Thumbnail.');
    return null;
    // Here, only a thumb_ prefix on the filename stops the infinite loop;
    // I need to stop it using other properties, like metadata or anything else.
  }
  if (resourceState === 'not_exists') {
    console.log('This is a deletion event.');
    return null;
  }
  if (resourceState === 'exists' && metageneration > 1) {
    console.log('This is a metadata change event.');
    return null;
  }
  const bucket = gcs.bucket(fileBucket);
  const tempFilePath = path.join(os.tmpdir(), fileName);
  const metadata = {
    contentType: contentType,
  };
  return bucket.file(filePath).download({
    destination: tempFilePath,
  }).then(() => {
    console.log('Image downloaded locally to', tempFilePath);
    return spawn('convert', [tempFilePath, '-thumbnail', '200x200>', tempFilePath]);
  }).then(() => {
    console.log('Thumbnail created at', tempFilePath);
    const thumbFileName = `thumb_${fileName}`;
    const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
    return bucket.upload(tempFilePath, {
      destination: thumbFilePath,
      metadata: metadata,
    });
  }).then(() => fs.unlinkSync(tempFilePath));
});
Pass custom metadata to the upload function:
return bucket.upload(tempFilePath, {
  destination: filePath,
  metadata: {
    contentType,
    metadata: {
      isThumb: 'true',
    }
  },
})
When you replace the file, the cloud function will be triggered again. To break the loop check the custom metadata:
/**
 * File metadata.
 */
const meta = object.metadata;
/**
 * Exit if the image is already a thumbnail.
 */
if (meta && meta.isThumb == 'true') {
  console.log('Already a Thumbnail.');
  return null;
}
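Putting both halves together, a minimal sketch of the loop-safe in-place resize (the pre-2.x gcs call style and the isThumb flag follow the samples above; the function name is made up):

const functions = require('firebase-functions');
const gcs = require('@google-cloud/storage')();
const path = require('path');
const os = require('os');
const fs = require('fs');
const spawn = require('child-process-promise').spawn;

exports.resizeInPlace = functions.storage.object().onFinalize((object) => {
  // Custom metadata set at upload time comes back on object.metadata.
  if (object.metadata && object.metadata.isThumb === 'true') {
    console.log('Already resized; skipping to break the trigger loop.');
    return null;
  }
  if (!object.contentType.startsWith('image/')) return null;
  const filePath = object.name;
  const tempFilePath = path.join(os.tmpdir(), path.basename(filePath));
  const bucket = gcs.bucket(object.bucket);
  return bucket.file(filePath).download({ destination: tempFilePath })
    .then(() => spawn('convert', [tempFilePath, '-thumbnail', '200x200>', tempFilePath]))
    .then(() => bucket.upload(tempFilePath, {
      destination: filePath, // same object name, so the path does not change
      metadata: {
        contentType: object.contentType,
        metadata: { isThumb: 'true' }, // flag checked on the next invocation
      },
    }))
    .then(() => fs.unlinkSync(tempFilePath));
});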

Get Firebase storage Object on a database-triggered cloud function

I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples, but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
  name: abc123,
  raw: download url abc123
}
-
firebase thumb generator example (storage triggered):
exports.generateThumbnail = functions.storage.object().onChange(event => {
  const object = event.data; // The Storage object.
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const contentType = object.contentType; // File content type.
  const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
  const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
  // Exit if this is triggered on a file that is not an image.
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return;
  }
  // Get the file name.
  const fileName = path.basename(filePath);
  // Exit if the image is already a thumbnail.
  if (fileName.startsWith('thumb_')) {
    console.log('Already a Thumbnail.');
    return;
  }
  // Exit if this is a move or deletion event.
  if (resourceState === 'not_exists') {
    console.log('This is a deletion event.');
    return;
  }
  // Exit if file exists but is not new and is only being triggered
  // because of a metadata change.
  if (resourceState === 'exists' && metageneration > 1) {
    console.log('This is a metadata change event.');
    return;
  }
  // Download file from bucket.
  const bucket = gcs.bucket(fileBucket);
  const metadata = {
    contentType: contentType
  };
  // We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);
  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
  });
});
-
How I want it to work (database triggered):
The database trigger event does not have a storage object, so how can I access the storage object?
When a thumbnail is created successfully, I want to add the downloadUrl of the thumb image to the database:
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
  const object = event.data; // This Storage object does not work when using a database trigger, so how do I access the same storage object? I have the image name.
  //thumb generator code...
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
    //add thumb image download url back to the database
  });
});
-
image: {
  name: abc123,
  raw: download url abc123,
  thumb: download url thumb_abc123 // added after thumb is created
}
This is how I got it to work.
Add Cloud Storage to your project.
If you want to retrieve the image's signed URL like I do, add projectId and keyFilename to your Cloud Storage reference:
const gcs = require('@google-cloud/storage')({
  projectId: 'name-1234',
  keyFilename: './keyfile.json'
});
generateThumbnail function
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(image => {
  const dataID = image.params.dataID
  const childDataID = image.params.childDataID
  const fileName = image.data.val()
  // get your project storage bucket id
  const storageBucket = functions.config().firebase.storageBucket
  // path to image
  const imagePath = `${dataID}/${childDataID}`
  // open bucket
  const bucket = gcs.bucket(storageBucket)
  // location of the image in the bucket
  const object = bucket.file(imagePath)
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const metadata = {
    contentType: 'image/jpeg'
  };
  // We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(400, 400)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);
  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));
  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
    var today = new Date();
    var dd = today.getDate();
    var mm = today.getMonth() + 1; // January is 0!
    const yyyy = today.getFullYear() + 5; // add a few years
    if (dd < 10) {
      dd = '0' + dd
    }
    if (mm < 10) {
      mm = '0' + mm
    }
    today = mm + '-' + dd + '-' + yyyy;
    bucket.file(filePath).getSignedUrl({
      action: 'read',
      expires: today
    }, function(err, url) {
      if (err) {
        console.error(err);
        return;
      }
      // add thumb image url to message (in database)
      return admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
        thumb: url
      })
    });
  });
})
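As a side note, getSignedUrl passes whatever it receives in expires to new Date(), so the manual mm-dd-yyyy string building above can be replaced with a Date object (a small simplification sketch, reusing the surrounding variables):

// Equivalent expiry five years out, without formatting a date string by hand.
const expires = new Date();
expires.setFullYear(expires.getFullYear() + 5);
bucket.file(filePath).getSignedUrl({ action: 'read', expires: expires }, (err, url) => {
  if (err) return console.error(err);
  console.log('Signed URL:', url);
});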
