This question already has an answer here:
Google Cloud Function environment timing out on every function
(1 answer)
Closed 4 years ago.
I have a Firebase cloud function that is loosely based on this example
https://github.com/firebase/functions-samples/tree/master/image-sharp
Today, when deploying some small changes it gave me this warning
$ firebase deploy --only functions
⚠ functions: package.json indicates an outdated version of firebase-functions.
Please upgrade using npm install --save firebase-functions@latest in your functions directory.
so I did the upgrade, which upgraded firebase-functions from ^1.0.3 to ^2.0.0
since then have been getting this when running the function
Function execution took 60002 ms, finished with status: 'timeout'
instead of the usual
Function execution took 10 ms, finished with status: 'ok'
I started stripping down my function but even going down to bare bones it was still getting the error.
I then started a new project used the example function as is and it behaves exactly the same way. With firebase-functions ^2.0.0 it gives the timeout error but with ^1.0.0 it works fine.
Is this a known issue?
Thanks
Here is the example code
exports.generateThumbnail = functions.storage.object().onFinalize((object) => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
return null;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a thumbnail.
if (fileName.startsWith('thumb_')) {
console.log('Already a Thumbnail.');
return null;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const metadata = {
contentType: contentType,
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
return null;
});
I was going to comment, but that requires 50+ reputation.
Anyway, I am experiencing the same problem:
exports.sendNotificationForMessage = functions.firestore.document('chatrooms/{chatroomId}/messages/{messageId}').onCreate((snap, context) => {
const newMessage = snap.data();
const messageContent = newMessage.text;
const senderName = newMessage.senderDisplayName;
const senderId = newMessage.senderId;
const chatroomId = context.params.chatroomId;
console.log(newMessage)
return true;
});
It finished with status timeout.
If it's a problem with firebase-function 2.0, what is the command to downgrade it back to version 1.x? Googled about it but no luck.
Try calling resolve and reject with ():
exports.generateThumbnail = functions.storage.object().onFinalize((object) => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
return null;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a thumbnail.
if (fileName.startsWith('thumb_')) {
console.log('Already a Thumbnail.');
return null;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const metadata = {
contentType: contentType,
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline.resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT).max().pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve()).on('error', reject()));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
return null;
});
});
I'm writing a cloud function that triggers every time a file is uploaded on my default firebase storage bucket and if it's an image it converts it to JPG.
This is the official firebase example:
exports.processTripImage = functions.storage.object().onFinalize((object) => {
const filePath = object.name;
const baseFileName = path.basename(filePath, path.extname(filePath));
const fileDir = path.dirname(filePath);
const JPEGFilePath = path.normalize(path.format({ dir: fileDir, name: baseFileName, ext: JPEG_EXTENSION }));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalJPEGFile = path.join(os.tmpdir(), JPEGFilePath);
// Exit if this is triggered on a file that is not an image.
if (!object.contentType.startsWith('image/')) {
console.log('This is not an image.');
return null;
}
// Exit if the image is already a JPEG.
if (object.contentType.startsWith('image/jpeg')) {
console.log('Already a JPEG.');
return null;
}
const bucket = gcs.bucket(object.bucket);
// Create the temp directory where the storage file will be downloaded.
return mkdirp(tempLocalDir).then(() => {
// Download file from bucket.
return bucket.file(filePath).download({ destination: tempLocalFile });
}).then(() => {
console.log('The file has been downloaded to', tempLocalFile);
// Convert the image to JPEG using ImageMagick.
return spawn('convert', [tempLocalFile, tempLocalJPEGFile]);
}).then(() => {
console.log('JPEG image created at', tempLocalJPEGFile);
// Uploading the JPEG image.
return bucket.upload(tempLocalJPEGFile, { destination: JPEGFilePath });
}).then(() => {
console.log('JPEG image uploaded to Storage at', JPEGFilePath);
// Once the image has been converted delete the local files to free up disk space.
fs.unlinkSync(tempLocalJPEGFile);
fs.unlinkSync(tempLocalFile);
return;
});
})
The problem is that it downloads and converts the file correctly, but when it tries to upload the result back to the bucket it can't find the file in the tmp directory it just created.
My cloud function log:
I'm trying to invoke a google cloud function sending images larger than 50Mb. The purpose of the cloud function is to resize the images and upload them to google cloud storage.
However, when I send the HTTP post to my cloud function I get the following error: 413 Request Entity Too Large
Does anyone have any workaround to this error? Can I increase the http request size limit?
The limit for HTTP trigger upload and download payload size is documented at 10MB. There is no way to get this limit increased, but you can always file a feature request explaining why it should be increased.
You can let the client upload directly to Storage — authenticated into their own user folder, into a temp location, with security rules limiting the file size to whatever you wish.
Then have a cloud function trigger started resizing the image.
And Delete the original image when finished.
I'm attaching a code example of mine -
you should add a delete of the file after conversion...
/**
* When an image is uploaded in the Storage bucket We generate a thumbnail automatically using
* ImageMagick.
* After the thumbnail has been generated and uploaded to Cloud Storage,
* we write the public URL to the Firebase Realtime Database.
*/
exports.generateThumbnail = functions.storage.object().onFinalize((object) => {
console.log('Generated Started');
// File and directory paths.
const filePath = object.name;
const contentType = object.contentType; // This is the image MIME type
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
deleteImage(filename);
return null;
}
// Exit if the image is already a thumbnail.
if (fileName.startsWith(THUMB_PREFIX)) {
console.log('Already a Thumbnail.');
deleteImage(filename);
return null;
}
// Cloud Storage files.
const bucket = gcs.bucket(object.bucket);
const file = bucket.file(filePath);
const thumbFile = bucket.file(thumbFilePath);
const metadata = {
contentType: contentType,
// To enable Client-side caching you can set the Cache-Control headers here. Uncomment below.
'Cache-Control': 'public,max-age=3600',
};
// Create the temp directory where the storage file will be downloaded.
return mkdirp(tempLocalDir).then(() => {
console.log('DL Started');
// Download file from bucket.
return file.download({
destination: tempLocalFile
});
}).then(() => {
console.log('The file has been downloaded to', tempLocalFile);
// Generate a thumbnail using ImageMagick.
return spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile], {
capture: ['stdout', 'stderr']
});
}).then(() => {
console.log('Thumbnail created at', tempLocalThumbFile);
// Uploading the Thumbnail.
return bucket.upload(tempLocalThumbFile, {
destination: thumbFilePath,
metadata: metadata
});
}).then(() => {
console.log('Thumbnail uploaded to Storage at', thumbFilePath);
// Once the image has been uploaded delete the local files to free up disk space.
fs.unlinkSync(tempLocalFile);
fs.unlinkSync(tempLocalThumbFile);
// Get the Signed URLs for the thumbnail and original image.
const config = {
action: 'read',
expires: '03-01-2500',
};
return Promise.all([
thumbFile.getSignedUrl(config),
// file.getSignedUrl(config),
]);
}).then((results) => {
console.log('Got Signed URLs.');
const thumbResult = results[0];
// const originalResult = results[1];
const thumbFileUrl = thumbResult[0];
// const fileUrl = originalResult[0];
// Add the URLs to the Database
const uid = getUidFromFilePath(fileDir);
if (!uid) return null;
return Promise.all([
admin.auth().updateUser(uid, {
photoURL: thumbFileUrl
}),
admin.database().ref(`/users/${uid}/profile/photoURL`).set(thumbFileUrl)
]);
}).then(() => console.log('Thumbnail URLs saved to database.'));
});
As of 2022, the quota limit for the second generation of cloud functions is 32MB.
I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
name: abc123,
raw: download url abc123
}
-
firebase thumb generator example (storage triggered):
exports.generateThumbnail = functions.storage.object().onChange(event => {
const object = event.data; // The Storage object.
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const contentType = object.contentType; // File content type.
const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is triggered on a file that is not an image.
if (!contentType.startsWith('image/')) {
console.log('This is not an image.');
return;
}
// Get the file name.
const fileName = path.basename(filePath);
// Exit if the image is already a thumbnail.
if (fileName.startsWith('thumb_')) {
console.log('Already a Thumbnail.');
return;
}
// Exit if this is a move or deletion event.
if (resourceState === 'not_exists') {
console.log('This is a deletion event.');
return;
}
// Exit if file exists but is not new and is only being triggered
// because of a metadata change.
if (resourceState === 'exists' && metageneration > 1) {
console.log('This is a metadata change event.');
return;
}
// Download file from bucket.
const bucket = gcs.bucket(fileBucket);
const metadata = {
contentType: contentType
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
});
});
-
How I want it to work (database triggered):
The database trigger event does not have a storage object. How can I access the storage object?
When a thumbnail is created successfully I want to add the donwloadUrl of the thumb image to the database
// Sketch (not runnable as-is): desired database-triggered thumbnail generator.
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
const object = event.data; // NOTE(review): for a database trigger, event.data is a database snapshot, not a Storage object — the open question is how to reach the Storage file given only the image name.
//thumb generator code... (placeholder: the Sharp streaming pipeline from the storage-triggered example goes here; `streamAsPromise` below comes from that elided code)
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
//add thumb image download url back to the database
});
});
-
image: {
name: abc123,
raw: download url abc123
thumb: download url thumb_abc123 //added after thumb is created
}
This is how I got it to work.
Add cloud storage to your project.
If you want to retrieve the image signed url like I do, add projectId and keyFilename to your cloud storage reference
// Cloud Storage client with explicit credentials (needed for getSignedUrl).
// BUG FIX: the package name is "@google-cloud/storage" — Stack Overflow's
// editor renders a leading "@" as "#" in code blocks.
const gcs = require('@google-cloud/storage')({
  projectId: 'name-1234',
  keyFilename: './keyfile.json'
});
generateThumbnail function
exports.generateThumbnail = functions.database.ref('/data/${dataID}/childData/${childDataID/image/name').onCreate(image => {
const dataID = image.params.dataID
const childDataID = image.params.childDataID
const fileName = image.data.val()
//get your project storage bucket id
const storageBucket = functions.config().firebase.storageBucket
//path to image
const imagePath = `${dataID}/${childDataID}`
//open bucket
const bucket = gcs.bucket(storageBucket)
//location of the image in the bucket
const object = bucket.file(imagePath)
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePath = object.name; // File path in the bucket.
const metadata = {
contentType: 'image/jpeg'
};
// We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
const thumbFileName = `thumb_${fileName}`;
const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);
// Create write stream for uploading thumbnail
const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});
// Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
const pipeline = sharp();
pipeline
.resize(400, 400)
.max()
.pipe(thumbnailUploadStream);
bucket.file(filePath).createReadStream().pipe(pipeline);
const streamAsPromise = new Promise((resolve, reject) =>
thumbnailUploadStream.on('finish', resolve).on('error', reject));
return streamAsPromise.then(() => {
console.log('Thumbnail created successfully');
var today = new Date();
var dd = today.getDate();
var mm = today.getMonth() + 1; //January is 0!
const yyyy = today.getFullYear() + 5; // add a few years
if (dd < 10) {
dd = '0' + dd
}
if (mm < 10) {
mm = '0' + mm
}
today = mm + '-' + dd + '-' + yyyy;
bucket.file(filePath).getSignedUrl({
action: 'read',
expires: today
}, function(err, url) {
if (err) {
console.error(err);
return;
}
//add thumb image url to message (in database)
return admin.database().ref(`/${dataID}/childData/${childDataID}`).child('image').update({
thumb: url
})
});
});
})
The code I currently have:
// Excerpt (elided with "..."): storage onChange handler that, after the
// download step, shells out to ImageMagick `convert` for both videos and images.
exports.generateThumbnail = functions.storage.object().onChange(event => {
...
.then(() => {
console.log('File downloaded locally to', tempFilePath);
// Generate a thumbnail using ImageMagick.
if (contentType.startsWith('video/')) {
// "[0]" asks convert for the first frame only. NOTE(review): the runtime
// error quoted below is "spawn ffmpeg ENOENT" — ffmpeg is not pre-installed
// on the Cloud Functions container, so a bundled binary is required.
return spawn('convert', [tempFilePath + '[0]', '-quiet', `${tempFilePath}.jpg`]);
} else if (contentType.startsWith('image/')){
return spawn('convert', [tempFilePath, '-thumbnail', '200x200', tempFilePath]);
The error I get in the console:
Failed AGAIN! { Error: spawn ffmpeg ENOENT
at exports._errnoException (util.js:1026:11)
at Process.ChildProcess._handle.onexit (internal/child_process.js:193:32)
at onErrorNT (internal/child_process.js:359:16)
at _combinedTickCallback (internal/process/next_tick.js:74:11)
at process._tickDomainCallback (internal/process/next_tick.js:122:9)
code: 'ENOENT',
errno: 'ENOENT',
syscall: 'spawn ffmpeg',
path: 'ffmpeg',
spawnargs: [ '-t', '1', '-i', '/tmp/myVideo.m4v', 'theThumbs.jpg' ] }
I also tried Imagemagick:
return spawn('convert', [tempFilePath + '[0]', '-quiet',`${tempFilePath}.jpg`]);
Also without any success.
Can anyone point me to the right direction here?
@andrew-robinson's post was a good start.
The following will generate a thumbnail for both images and videos.
Add the following to your npm packages:
@ffmpeg-installer/ffmpeg
@google-cloud/storage
child-process-promise
mkdirp
mkdirp-promise
Use the following to generate a thumbnail from a larger image:
/**
 * Downloads `file` to a local temp path, renders a constant-width thumbnail
 * with ImageMagick (height follows to keep the aspect ratio), then removes
 * the downloaded original.
 */
function generateFromImage(file, tempLocalThumbFile, fileName) {
  const tempLocalFile = path.join(os.tmpdir(), fileName);
  return file
    .download({destination: tempLocalFile})
    .then(() => {
      console.info('The file has been downloaded to', tempLocalFile);
      const convertArgs = [tempLocalFile, '-thumbnail', THUMB_MAX_WIDTH, tempLocalThumbFile];
      return spawn('convert', convertArgs, {capture: ['stdout', 'stderr']});
    })
    .then(() => {
      fs.unlinkSync(tempLocalFile);
      return Promise.resolve();
    });
}
Use the following to generate a thumbnail from a video:
/**
 * Generates a thumbnail for a video without downloading it: fetches a signed
 * read URL for the file and has ffmpeg grab a single scaled frame from it.
 */
function generateFromVideo(file, tempLocalThumbFile) {
  const urlOptions = {action: 'read', expires: '05-24-2999'};
  return file.getSignedUrl(urlOptions).then((signedUrl) => {
    const fileUrl = signedUrl[0];
    const ffmpegArgs = [
      '-ss', '0',
      '-i', fileUrl,
      '-f', 'image2',
      '-vframes', '1',
      '-vf', `scale=${THUMB_MAX_WIDTH}:-1`,
      tempLocalThumbFile,
    ];
    const promise = spawn(ffmpegPath, ffmpegArgs);
    // Uncomment to stream ffmpeg output into the function logs:
    // promise.childProcess.stdout.on('data', (data) => console.info('[spawn] stdout: ', data.toString()));
    // promise.childProcess.stderr.on('data', (data) => console.info('[spawn] stderr: ', data.toString()));
    return promise;
  });
}
The following will execute when a video or image is uploaded to storage.
It determines the file type, generates the thumbnail to a temp file, uploads the thumbnail to storage, then call 'updateDatabase()' which should be a promise that updates your database (if necessary):
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
// BUG FIX: scoped package names start with "@" — Stack Overflow's editor
// renders "@scope/pkg" as "#scope/pkg" inside code blocks.
const gcs = require('@google-cloud/storage');
const admin = require('firebase-admin');
const spawn = require('child-process-promise').spawn;
const ffmpegPath = require('@ffmpeg-installer/ffmpeg').path;
const path = require('path');
const os = require('os');
const fs = require('fs');

// NOTE(review): admin.firestore() requires admin.initializeApp() to have run
// first — confirm that happens elsewhere in this module.
const db = admin.firestore();

// Max width of the thumbnail in pixels (height keeps the aspect ratio).
const THUMB_MAX_WIDTH = 384;
const SERVICE_ACCOUNT = '<your firebase credentials file>.json';
// Firebase populates FIREBASE_CONFIG with project settings (bucket name etc.).
const adminConfig = JSON.parse(process.env.FIREBASE_CONFIG);
/**
 * Storage onFinalize handler: generates a thumbnail for every uploaded image
 * or video, uploads it next to the original with a ".thumbnail." marker (plus
 * its measured dimensions) in the name, then calls updateDatabase().
 * Relies on parseName() and updateDatabase(), defined elsewhere in this file.
 */
module.exports = functions.storage.bucket(adminConfig.storageBucket).object().onFinalize(object => {
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePathInBucket = object.name;
// NOTE(review): resourceState/metageneration belonged to the legacy onChange
// event payload — confirm they are still populated on onFinalize objects; if
// not, the first two guards below never fire.
const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
const contentType = object.contentType; // This is the image MIME type
const isImage = contentType.startsWith('image/');
const isVideo = contentType.startsWith('video/');
// Exit if this is a move or deletion event.
if (resourceState === 'not_exists') {
return Promise.resolve();
}
// Exit if file exists but is not new and is only being triggered
// because of a metadata change.
else if (resourceState === 'exists' && metageneration > 1) {
return Promise.resolve();
}
// Exit if the image is already a thumbnail (prevents a retrigger loop:
// the upload below fires this function again).
else if (filePathInBucket.indexOf('.thumbnail.') !== -1) {
return Promise.resolve();
}
// Exit if this is triggered on a file that is not an image or video.
else if (!(isImage || isVideo)) {
return Promise.resolve();
}
// Derive the thumbnail's bucket path and the matching local temp paths.
const fileDir = path.dirname(filePathInBucket);
const fileName = path.basename(filePathInBucket);
const fileInfo = parseName(fileName);
// Video thumbs are always JPEG frames; image thumbs keep the source extension.
const thumbFileExt = isVideo ? 'jpg' : fileInfo.ext;
let thumbFilePath = path.normalize(path.join(fileDir, `${fileInfo.name}_${fileInfo.timestamp}.thumbnail.${thumbFileExt}`));
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
const tempLocalDir = path.join(os.tmpdir(), fileDir);
// Pick the generator matching the media type (both defined above).
const generateOperation = isVideo ? generateFromVideo : generateFromImage;
// Cloud Storage files.
const bucket = gcs({keyFilename: SERVICE_ACCOUNT}).bucket(fileBucket);
const file = bucket.file(filePathInBucket);
const metadata = {
contentType: isVideo ? 'image/jpeg' : contentType,
// To enable Client-side caching you can set the Cache-Control headers here. Uncomment below.
// 'Cache-Control': 'public,max-age=3600',
};
// Create the temp directory where the storage file will be downloaded.
return mkdirp(tempLocalDir).then(() => {
return generateOperation(file, tempLocalThumbFile, fileName);
}).then(() => {
console.info('Thumbnail created at', tempLocalThumbFile);
// Get the thumbnail dimensions with ImageMagick's `identify`.
return spawn('identify', ['-ping', '-format', '%wx%h', tempLocalThumbFile], {capture: ['stdout', 'stderr']});
}).then((result) => {
const dim = result.stdout.toString();
// Insert the dimensions before the first "." of the thumb path.
// NOTE(review): indexOf('.') assumes no dot appears earlier in the directory
// part of the path — verify against your naming scheme.
const idx = thumbFilePath.indexOf('.');
thumbFilePath = `${thumbFilePath.substring(0,idx)}_${dim}${thumbFilePath.substring(idx)}`;
console.info('Thumbnail dimensions:', dim);
// Uploading the Thumbnail.
return bucket.upload(tempLocalThumbFile, {destination: thumbFilePath, metadata: metadata});
}).then(() => {
console.info('Thumbnail uploaded to Storage at', thumbFilePath);
const thumbFilename = path.basename(thumbFilePath);
return updateDatabase(fileDir, fileName, thumbFilename);
}).then(() => {
console.info('Thumbnail generated.');
fs.unlinkSync(tempLocalThumbFile);
return Promise.resolve();
})
});
parseName() should parse your filename format. At the very least it should return the file's basename and extension.
updateDatabase() should return a promise that updates your database with the newly generated thumbnail (if necessary).
Note that #ffmpeg-installer/ffmpeg removes the need of directly including a ffmpeg binary in your cloud function.
To use ffmpeg or any other system command-line tool that is not pre-installed on the firebase cloud function container, you can add a pre-compiled binary to the functions folder (alongside index.js) and it will upload it along with your cloud function code in the deploy step. You can then execute the binary using child-process-promise spawn as you were doing with ImageMagick (which is already installed).
You can get the ffmpeg binary here https://johnvansickle.com/ffmpeg/
I used the x86_64 build https://johnvansickle.com/ffmpeg/builds/ffmpeg-git-64bit-static.tar.xz
Untar with
tar -xf ffmpeg-git-64bit-static.tar.xz
and just add the one ffmpeg file to the functions folder.
This link explains how you can extract the thumbnail from the video with just the url so there is no need to download the file fully.
https://wistia.com/blog/faster-thumbnail-extraction-ffmpeg
The command to extract the thumbnail with width 512px and keeping the aspect ratio is
const spawn = require('child-process-promise').spawn;
// Grab the first frame of the video at `fileUrl` with the bundled ffmpeg
// binary, scaled to 512px wide (height keeps the aspect ratio), writing it
// to `tempThumbnailFilePath`.
const extractThumbnailFromVideoUrl = (fileUrl, tempThumbnailFilePath) => {
  const args = ['-ss', '0', '-i', fileUrl, '-f', 'image2', '-vframes', '1', '-vf', 'scale=512:-1', tempThumbnailFilePath];
  return spawn('./ffmpeg', args);
};
Note the ./ in ./ffmpeg
For more details on the scale arguments you can see here https://trac.ffmpeg.org/wiki/Scaling%20(resizing)%20with%20ffmpeg
If the spawn command fails then as you have seen you will not get a very helpful error output. To get better output you can listen to the stdout and stderr event streams on the ChildProcess
// Same extraction as above, but wires the child process's stdout/stderr into
// the function logs so ffmpeg failures are actually debuggable.
const extractThumbnailFromVideoUrl = (fileUrl, tempThumbnailFilePath) => {
  const args = ['-ss', '0', '-i', fileUrl, '-f', 'image2', '-vframes', '1', '-vf', 'scale=512:-1', tempThumbnailFilePath];
  const promise = spawn('./ffmpeg', args);
  promise.childProcess.stdout.on('data', (data: any) => console.log('[spawn] stdout: ', data.toString()));
  promise.childProcess.stderr.on('data', (data: any) => console.log('[spawn] stderr: ', data.toString()));
  return promise;
};
The output of the ffmpeg call will then be displayed in your cloud function logs like they would if you ran the command locally from the terminal. For more info on that you can see https://www.npmjs.com/package/child-process-promise
http://node.readthedocs.io/en/latest/api/child_process/
The following is a complete version of the cloud function assuming only video files. If you want to handle images or other files as well then you can add the code to exit early or call different methods as you were doing. This makes calls to create temp directories and cleans those directories up at the end of the method but I've omitted the details of those functions.
import * as functions from 'firebase-functions';
import * as gcs from '#google-cloud/storage';
import {cleanupFiles, makeTempDirectories} from '../services/system-utils';
const spawn = require('child-process-promise').spawn;
const storageProjectId = `${functions.config().project_id}.appspot.com`;
/**
 * Storage onChange handler (legacy API): extracts a JPEG thumbnail from every
 * uploaded file via ffmpeg and a signed read URL (no full download), then
 * uploads it to a "thumbnail/" subfolder next to the original.
 * NOTE(review): assumes every upload is a video — there is no contentType
 * guard here; add one if the bucket also receives images or other files.
 * Relies on makeTempDirectories/cleanupFiles from system-utils.
 */
export const videoFileThumbnailGenerator = functions.storage.bucket(storageProjectId).object().onChange(event => {
const object = event.data;
const fileBucket = object.bucket; // The Storage bucket that contains the file.
const filePathInBucket = object.name; // File path in the bucket.
const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.
// Exit if this is a move or deletion event.
if (resourceState === 'not_exists') {
console.log('This is a deletion event.');
return Promise.resolve();
}
// Exit if file exists but is not new and is only being triggered
// because of a metadata change.
if (resourceState === 'exists' && metageneration > 1) {
console.log('This is a metadata change event.');
return Promise.resolve();
}
// Authenticated storage client (credentials path comes from functions config).
const bucket = gcs({keyFilename: `${functions.config().firebase_admin_credentials}`}).bucket(fileBucket);
// Split "dir/sub/name.ext" into directory, base name and extension.
const filePathSplit = filePathInBucket.split('/');
const filename = filePathSplit.pop();
const filenameSplit = filename.split('.');
const fileExtension = filenameSplit.pop();
const baseFilename = filenameSplit.join('.');
const fileDir = filePathSplit.join('/') + (filePathSplit.length > 0 ? '/' : '');
const file = bucket.file(filePathInBucket);
// Local temp target and final bucket destination for the extracted frame.
const tempThumbnailDir = '/tmp/thumbnail/';
const jpgFilename = `${baseFilename}.jpg`;
const tempThumbnailFilePath = `${tempThumbnailDir}${jpgFilename}`;
const thumbnailFilePath = `${fileDir}thumbnail/${jpgFilename}`;
// The signed URL lets ffmpeg read the video remotely instead of downloading it.
return makeTempDirectories([tempThumbnailDir])
.then(() => file.getSignedUrl({action: 'read', expires: '05-24-2999'}))
.then(signedUrl => signedUrl[0])
.then(fileUrl => extractThumbnailFromVideoUrl(fileUrl, tempThumbnailFilePath))
.then(() => bucket.upload(tempThumbnailFilePath, {destination: thumbnailFilePath}))
// NOTE(review): cleanupFiles is handed the thumbnail FILE path under a
// `directoryName` key — confirm that is what system-utils expects.
.then(() => cleanupFiles([
{directoryName: tempThumbnailFilePath},
]))
.catch(err => console.error('Video upload error: ', err));
});
// Have the bundled ffmpeg binary ("./ffmpeg", shipped next to index.js) pull
// a single 512px-wide frame (height keeps the aspect ratio) from the video
// at `fileUrl` into `tempThumbnailFilePath`.
const extractThumbnailFromVideoUrl = (fileUrl, tempThumbnailFilePath) => {
  const args = ['-ss', '0', '-i', fileUrl, '-f', 'image2', '-vframes', '1', '-vf', 'scale=512:-1', tempThumbnailFilePath];
  return spawn('./ffmpeg', args);
};