google.storage.object.finalize not triggering firebase cloud function

I have a Firebase app where, when a user uploads a photo to Storage, it triggers a generateThumbnail Cloud Function. All standard code; it worked fine when I deployed it on Feb 24th 2019.
Now when I upload a photo, nothing happens. I look in Storage and the photo is there, but when I look at the logs for Firebase Cloud Functions, the generateThumbnail function hasn't been called. How can I debug / fix this? I was thinking of just redeploying my code, or perhaps upgrading my libraries etc. in case there have been breaking changes.
Here's my code:
import * as functions from 'firebase-functions';
// import { Storage } from '@google-cloud/storage';
// const gcs = new Storage();
import * as admin from 'firebase-admin';
import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';

// The Admin SDK must be initialized before grabbing its services
admin.initializeApp();
const gcs = admin.storage();
const firestore = admin.firestore();

export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const parts = filePath.split('/');
    const fileName = parts.pop();
    const propertyID = parts.pop();
    // console.log(`got property id ${propertyID}`)
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, fileName);

    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [256];
    let thumbLocation = '';

    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb@${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(256, 171)
        .toFile(thumbPath);

      thumbLocation = join(bucketDir, thumbName);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: thumbLocation
      });
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove the tmp/thumbs from the filesystem
    await fs.remove(workingDir);

    let photoURL = '';
    const hour = 1000 * 60 * 60;
    const year = hour * 24 * 365;
    const EXP = Date.now() + year * 10; // signed URLs valid for ~10 years

    await bucket.file(filePath).getSignedUrl({
      action: 'read',
      expires: EXP
    }).then(signedUrls => {
      photoURL = signedUrls[0];
    });

    let thumbURL = '';
    await bucket.file(thumbLocation).getSignedUrl({
      action: 'read',
      expires: EXP
    }).then(signedUrls => {
      thumbURL = signedUrls[0];
    });

    if (!(photoURL && thumbURL)) {
      return Promise.resolve('Error no thumbs');
    }

    const propertyRef = firestore.collection('properties').doc(propertyID);
    return firestore.runTransaction(t => {
      return t.get(propertyRef)
        .then(doc => {
          if (!doc.exists) {
            console.log(`doc does not exist ${propertyID}`);
            return;
          }
          const photos = doc.data().photos || [];
          photos.push({
            big: photoURL,
            small: thumbURL,
          });
          t.update(propertyRef, { photos: photos });
        });
    });
  });

Until a month or so ago, Cloud Functions would get deactivated by the system if they'd been inactive for 30 days or more. This behavior has since been changed, as it was quite unintuitive to most developers. But you will need to redeploy your Cloud Functions once more to opt in to the new behavior.
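In practice, a redeploy with the Firebase CLI is enough to opt in; a minimal sketch, assuming your project is already configured locally:

```bash
# Redeploy all functions so they pick up the new (no auto-deactivation) behavior
firebase deploy --only functions
```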

Related

Uploading to Firebase Storage from a Google Cloud Function

I'm trying to create a Firebase Function that allows me to pass in an array of image URLs to generate a montage, upload the file to Firebase Storage, and then return the generated download URL. This will be called from my app, so I'm using functions.https.onCall.
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({imageMagick: true});

admin.initializeApp();

exports.createMontage = functions.https.onCall((data, context) => {
  var storageRef = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var createdMontage = storageRef.file('createdMontage.jpg');

  function generateMontage(list) {
    let g = gm();
    list.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .write(createdMontage, function (err) {
        if (!err) console.log("Written montage image.");
      });
    return true;
  }

  generateMontage(data);
  return createdMontage.getDownloadURL();
});
The function generateMontage() works locally on Node.js (with a local write destination).
Thank you.
Have a look at this example from the docs:
https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-code-sample
2021-01-11 Update
Here's a working example. I'm using regular Cloud Functions, and it's limited in that the srcObject, dstObject and bucketName are constants, but it does create montages, which is your goal.
PROJECT=[[YOUR-PROJECT]]
BILLING=[[YOUR-BILLING]]
REGION=[[YOUR-REGION]]
FUNCTION=[[YOUR-FUNCTION]]
BUCKET=[[YOUR-BUCKET]]
OBJECT=[[YOUR-OBJECT]] # Path from ${BUCKET} root

gcloud projects create ${PROJECT}

gcloud beta billing projects link ${PROJECT} \
  --billing-account=${BILLING}

gcloud services enable cloudfunctions.googleapis.com \
  --project=${PROJECT}

gcloud services enable cloudbuild.googleapis.com \
  --project=${PROJECT}

gcloud functions deploy ${FUNCTION} \
  --memory=4096MB \
  --max-instances=1 \
  --allow-unauthenticated \
  --entry-point=montager \
  --set-env-vars=BUCKET=${BUCKET},OBJECT=${OBJECT} \
  --runtime=nodejs12 \
  --trigger-http \
  --project=${PROJECT} \
  --region=${REGION}

ENDPOINT=$(\
  gcloud functions describe ${FUNCTION} \
  --project=${PROJECT} \
  --region=${REGION} \
  --format="value(httpsTrigger.url)")

curl \
  --request GET \
  ${ENDPOINT}
`package.json`:
```JSON
{
  "name": "montage",
  "version": "0.0.1",
  "dependencies": {
    "@google-cloud/storage": "5.7.1",
    "gm": "^1.23.1"
  }
}
```
And `index.js`:
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
const gm = require('gm').subClass({ imageMagick: true });

const bucketName = process.env["BUCKET"];
const srcObject = process.env["OBJECT"];
const dstObject = "montage.png";

// Creates 2x2 montage
const list = [
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`
];

const montager = async (req, res) => {
  // Download GCS `srcObject` to `/tmp`
  await storage
    .bucket(bucketName)
    .file(srcObject)
    .download({
      destination: `/tmp/${srcObject}`
    });

  // Create a GCS write stream for the montage
  const obj = storage
    .bucket(bucketName)
    .file(dstObject)
    .createWriteStream();

  let g = gm();
  list.forEach(f => {
    g.montage(f);
  });

  console.log(`Returning`);
  g
    .geometry('+81+81')
    .density(5000, 5000)
    .stream()
    .pipe(obj)
    .on(`finish`, () => {
      console.log(`finish`);
      res.status(200).send(`ok`);
    })
    .on(`error`, (err) => {
      console.log(`error: ${err}`);
      res.status(500).send(`uhoh!`);
    });
};

exports.montager = montager;
I have never used 'gm', but according to its npm page it has a toBuffer function.
So maybe something like this could work:
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gm = require('gm').subClass({ imageMagick: true });

admin.initializeApp();

exports.createMontage = functions.https.onCall((data, _context) => {
  const bucketName = 'xyz-zyx'; // not sure, I've always used the default bucket
  const bucket = admin.storage().bucket(bucketName);
  const storagePath = 'createdMontage.jpg';
  const fileRef = bucket.file(storagePath);

  const generateMontage = async (list) => {
    const g = gm();
    list.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000);
    return new Promise(resolve => {
      g.toBuffer('JPG', (_err, buffer) => {
        const saveTask = fileRef.save(buffer, { contentType: 'image/jpeg' });
        const baseStorageUrl = `https://firebasestorage.googleapis.com/v0/b/${bucket.name}/o/`;
        const encodedPath = encodeURIComponent(storagePath);
        const postfix = '?alt=media'; // see stackoverflow.com/a/58443247/6002078
        const publicUrl = baseStorageUrl + encodedPath + postfix;
        saveTask.then(() => resolve(publicUrl));
      });
    });
  };

  return generateMontage(data);
});
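As a usage sketch, calling it from the app could look something like the following; this assumes the v8-style Firebase JS SDK with the Functions module loaded on the client, and the file paths are hypothetical (they must be meaningful to the function's environment):

```js
// Client side: invoke the callable and read the resulting public URL.
const createMontage = firebase.functions().httpsCallable('createMontage');
createMontage(['/tmp/a.jpg', '/tmp/b.jpg'])
  .then((result) => console.log('Montage URL:', result.data));
```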
But it seems it can be done more easily. As Methkal Khalawi commented:
Here is a full example of how to use ImageMagick with Functions. They are using it for blurring an image, but the idea is the same. And here is a tutorial from the documentation.
I think you can pipe the output stream from the gm module to a Firebase Storage object write stream.
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({imageMagick: true});

admin.initializeApp();

exports.createMontage = functions.https.onCall(async (data, context) => {
  var storage = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var downloadURL = await new Promise((resolve, reject) => {
    let g = gm();
    data.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .stream((err, stdout, stderr) => {
        if (err) {
          reject(err);
          return;
        }
        stdout.pipe(
          storage.file('generatedMontage.png').createWriteStream({
            metadata: {
              contentType: 'image/png',
            },
          })
        ).on('finish', () => {
          storage
            .file('generatedMontage.png')
            .getSignedUrl({
              action: 'read',
              expires: '03-09-2491', // Non-expiring public URL
            })
            .then(([url]) => {
              resolve(url);
            });
        });
      });
  });
  return downloadURL;
});
FYI, the Firebase Admin SDK storage object does not have a getDownloadURL() function.
You should generate a non-expiring public signed URL from the storage object instead.
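For illustration, a minimal standalone sketch of the same idea using the promise form of getSignedUrl, inside an async function (the file name here is a placeholder):

```js
// Assumes admin.initializeApp() has already run.
const file = admin.storage().bucket().file('generatedMontage.png');
const [url] = await file.getSignedUrl({
  action: 'read',
  expires: '03-09-2491', // effectively non-expiring
});
```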
In addition, signed URLs generated this way can stop working after some period of time, according to this issue.
To keep that from happening, you should initialize the Firebase app with a permanent service account:
const admin = require('firebase-admin');
const serviceAccount = require('../your-service-account.json');

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  projectId: JSON.parse(process.env.FIREBASE_CONFIG).projectId,
  databaseURL: JSON.parse(process.env.FIREBASE_CONFIG).databaseURL,
  storageBucket: JSON.parse(process.env.FIREBASE_CONFIG).storageBucket,
});

Error: ****@appspot.gserviceaccount.com does not have storage.objects.get access

I have a simple Firebase function that triggers on a file being uploaded to Firebase Storage. It was working on the non-main bucket, but once I changed it to listen to the main bucket I began receiving these error messages:
Error: *****@appspot.gserviceaccount.com does not have storage.objects.get access to *****.appspot.com/ff-icon-01.png.
The function is in the same project as the storage bucket.
import * as functions from 'firebase-functions';
import * as admin from 'firebase-admin';
import { Storage } from '@google-cloud/storage';
import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';

admin.initializeApp();
const gcs = new Storage();

export const makeThumbnail = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');

    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 128, 256];

    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb@${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName)
      });
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove the tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  });
They have the same rules. Not sure what's up.
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write: if request.auth != null;
    }
  }
}
For someone else who runs into this issue:
The problem for me was that I was using the wrong project in my gcloud setup when deploying my functions. I was using one project in the Firebase CLI while using another project in the gcloud CLI.
It worked for me when I deleted all the functions, changed the gcloud CLI project to the right one, and then deployed the functions again.
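A quick way to check for this mismatch, assuming both CLIs are installed, is to compare the active project of each tool and point them at the same project before redeploying (the project ID below is a placeholder):

```bash
# Show the project each CLI is currently using
gcloud config get-value project
firebase use

# Point both at the same project
gcloud config set project my-project-id
firebase use my-project-id
```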

firebase cloud function using getSignedUrl for thumbnail is expiring after about a week, no clear reason why?

I'm able to successfully generate a signedUrl for the thumbnails I am creating, but after about a week they no longer work.
I am not getting any errors or information as to why they are expiring; they just are.
I have been using Firebase to develop my app, and now all of a sudden I have to deal with all these Google Cloud Storage permissions and whatnot. I really have no idea what's going on... It's got to be some sort of permission issue?
I have tried generating a new service account from the Firebase console but no luck. I am tired of waiting weeks to see if they are going to expire again or not. I hope someone can guide me to a solution for this; it seems like it's a problem for many people. We can't afford to go live and have gray thumbnails all over the app because they expire.
Here is how we are generating the signedUrl with firebase cloud functions:
export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const fileBucket = object.bucket; // The Storage bucket that contains the file.
    const filePath = object.name; // File path in the bucket.
    const fileName = filePath.split('/').pop();
    const userUid = filePath.split('/')[2];
    const sizes = [150, 256];
    const bucketDir = dirname(filePath);

    if (!filePath.startsWith('categories/')) {
      console.log('This is not in the categories directory.');
      return false;
    }
    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    const bucket = gcs.bucket(fileBucket);
    const tempFilePath = path.join(tmpdir(), fileName);
    return bucket.file(filePath).download({
      destination: tempFilePath
    }).then(() => {
      // Return a promise per size so the function waits for every upload
      // and database write before terminating.
      return Promise.all(sizes.map(size => {
        const newFileName = `thumb@${size}_${fileName}`;
        const newFileTemp = path.join(tmpdir(), newFileName);
        const newFilePath = `thumbs/${newFileName}`;
        return sharp(tempFilePath)
          .resize(size, size)
          .toFile(newFileTemp)
          .then(() => bucket.upload(newFileTemp, {
            destination: join(bucketDir, newFilePath),
            metadata: {
              contentType: 'image/jpeg'
            }
          }))
          .then((data) => {
            const file = data[0];
            return file.getSignedUrl({
              action: 'read',
              expires: '03-17-2100'
            }).then(([url]) => {
              if (size === 150) {
                return admin.database().ref('profileThumbs').child(userUid).child(fileName).set({ thumb: url });
              } else if (size === 256) {
                return admin.database().ref('categories').child(fileName).child('thumb').set(url)
                  .then(() => admin.database().ref('categories').child(fileName).child('tempThumb').remove());
              }
              return null;
            });
          });
      }));
    }).catch(error => {
      console.log(error);
    });
  });
After setting the expiration date to 03-17-2100, we don't expect this type of behaviour, but like I said, I feel like it's something to do with GCS permissions. I tried to contact them, but after about a week I am still waiting for their response.
I appreciate all the feedback!

Firebase Functions get files from storage

I have to send a file to an API, therefore I have to use fs.readFileSync(). After uploading the picture to Storage, I am calling my function to execute the API call. But I cannot get the file from Storage. This is a section of the code, which always gets null in the result. I also tried .getFiles() without a parameter, and then I got all files, but I don't want to filter them by iteration.
exports.stripe_uploadIDs = functions.https //.region("europe-west1")
  .onCall((data, context) => {
    const authID = context.auth.uid;
    console.log("request is authenticated? :" + authID);
    if (!authID) {
      // HttpsError requires one of the predefined status codes
      throw new functions.https.HttpsError("unauthenticated", "not authorized");
    }
    let accountID;
    let result_fileUpload;
    let tempFile = path.join(os.tmpdir(), "id_front.jpg");

    const options_id_front = {
      prefix: "/user/" + authID + "/id_front.jpg"
    };

    const storageRef = admin
      .storage()
      .bucket()
      .getFiles(options_id_front)
      .then(results => {
        console.log("JPG" + JSON.stringify(results));
        // need to write this file to tempFile
        return results;
      });

    const paymentRef = storageRef.then(() => {
      return admin
        .database()
        .ref("Payment/" + authID)
        .child("accountID")
        .once("value");
    });

    const setAccountID = paymentRef.then(snap => {
      accountID = snap.val();
      return accountID;
    });

    const fileUpload = setAccountID.then(() => {
      return Stripe.fileUploads.create(
        {
          purpose: "identity_document",
          file: {
            data: tempFile, // Documentation says I should use fs.readFileSync("filepath")
            name: "id_front.jpg",
            type: "application/octet-stream"
          }
        },
        { stripe_account: accountID }
      );
    });

    const fileResult = fileUpload.then(result => {
      result_fileUpload = result;
      console.log(JSON.stringify(result_fileUpload));
      return result_fileUpload;
    });

    return fileResult;
  });
Result is:
JPG[[]]
You need to download your file from the bucket to your function's local context first.
Once your Firebase function starts executing, you can call something like the below.
More or less the below should work; just tweak it to your needs. Call this within your .onCall context.
import admin from 'firebase-admin';
import * as path from 'path';
import * as os from 'os';
import * as fs from 'fs';

admin.initializeApp();

const { log } = console;

async function tempFile(fileBucket: string, filePath: string) {
  const bucket = admin.storage().bucket(fileBucket);
  const fileName = 'MyFile.ext';
  const tempFilePath = path.join(os.tmpdir(), fileName);
  const metadata = {
    contentType: 'DONT_FORGET_CONTENT_TYPE'
  };

  // Download the file to a local temp file.
  // Do whatever you need with it.
  await bucket.file(filePath).download({ destination: tempFilePath });
  log('File downloaded to', tempFilePath);

  // After you're done, and if you need to upload the modified file back to
  // your bucket, upload it. This is optional.
  await bucket.upload(tempFilePath, {
    destination: filePath,
    metadata: metadata
  });

  // Free up disk space by releasing the file.
  // Otherwise you might be charged extra for keeping memory space.
  return fs.unlinkSync(tempFilePath);
}
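A hypothetical usage sketch, calling the helper from a storage trigger; this assumes `import * as functions from 'firebase-functions'` alongside the imports above, and the trigger name is a placeholder:

```ts
// Round-trip every finalized upload through /tmp via the helper above.
export const processUpload = functions.storage
  .object()
  .onFinalize(async object => {
    if (!object.name) return;
    await tempFile(object.bucket, object.name);
  });
```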

Get Firebase storage Object on a database-triggered cloud function

I want to generate a thumb image when a record is added to the database.
On the Firebase website they have some examples, but they are all based on a storage trigger instead of a database trigger:
Image database object:
image: {
  name: abc123,
  raw: download url abc123
}
-
firebase thumb generator example (storage triggered):
exports.generateThumbnail = functions.storage.object().onChange(event => {
  const object = event.data; // The Storage object.
  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const contentType = object.contentType; // File content type.
  const resourceState = object.resourceState; // The resourceState is 'exists' or 'not_exists' (for file/folder deletions).
  const metageneration = object.metageneration; // Number of times metadata has been generated. New objects have a value of 1.

  // Exit if this is triggered on a file that is not an image.
  if (!contentType.startsWith('image/')) {
    console.log('This is not an image.');
    return;
  }

  // Get the file name.
  const fileName = path.basename(filePath);
  // Exit if the image is already a thumbnail.
  if (fileName.startsWith('thumb_')) {
    console.log('Already a Thumbnail.');
    return;
  }

  // Exit if this is a move or deletion event.
  if (resourceState === 'not_exists') {
    console.log('This is a deletion event.');
    return;
  }

  // Exit if file exists but is not new and is only being triggered
  // because of a metadata change.
  if (resourceState === 'exists' && metageneration > 1) {
    console.log('This is a metadata change event.');
    return;
  }

  // Download file from bucket.
  const bucket = gcs.bucket(fileBucket);
  const metadata = {
    contentType: contentType
  };

  // We add a 'thumb_' prefix to thumbnails file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);

  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});

  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(THUMB_MAX_WIDTH, THUMB_MAX_HEIGHT)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);

  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));

  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
  });
});
-
How I want it to work (database triggered):
The database trigger event does not have a storage object, so how can I access the storage object?
When a thumbnail is created successfully, I want to add the downloadUrl of the thumb image to the database:
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(event => {
  const object = event.data; // This Storage object does not work when using a database trigger, so how do I access the same storage object? I have the image name.

  //thumb generator code...

  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');
    //add thumb image download url back to the database
  });
});
-
image: {
  name: abc123,
  raw: download url abc123,
  thumb: download url thumb_abc123 //added after thumb is created
}
This is how I got it to work.
Add Cloud Storage to your project.
If you want to retrieve the image's signed URL like I do, add projectId and keyFilename to your Cloud Storage reference:
const gcs = require('@google-cloud/storage')({
  projectId: 'name-1234',
  keyFilename: './keyfile.json'
});
generateThumbnail function
exports.generateThumbnail = functions.database.ref('/data/{dataID}/childData/{childDataID}/image/name').onCreate(image => {
  const dataID = image.params.dataID;
  const childDataID = image.params.childDataID;
  const fileName = image.data.val();

  // get your project storage bucket id
  const storageBucket = functions.config().firebase.storageBucket;
  // path to image
  const imagePath = `${dataID}/${childDataID}`;
  // open bucket
  const bucket = gcs.bucket(storageBucket);
  // location of the image in the bucket
  const object = bucket.file(imagePath);

  const fileBucket = object.bucket; // The Storage bucket that contains the file.
  const filePath = object.name; // File path in the bucket.
  const metadata = {
    contentType: 'image/jpeg'
  };

  // We add a 'thumb_' prefix to the thumbnail's file name. That's where we'll upload the thumbnail.
  const thumbFileName = `thumb_${fileName}`;
  const thumbFilePath = path.join(path.dirname(filePath), thumbFileName);

  // Create write stream for uploading thumbnail
  const thumbnailUploadStream = bucket.file(thumbFilePath).createWriteStream({metadata});

  // Create Sharp pipeline for resizing the image and use pipe to read from bucket read stream
  const pipeline = sharp();
  pipeline
    .resize(400, 400)
    .max()
    .pipe(thumbnailUploadStream);
  bucket.file(filePath).createReadStream().pipe(pipeline);

  const streamAsPromise = new Promise((resolve, reject) =>
    thumbnailUploadStream.on('finish', resolve).on('error', reject));

  return streamAsPromise.then(() => {
    console.log('Thumbnail created successfully');

    var today = new Date();
    var dd = today.getDate();
    var mm = today.getMonth() + 1; // January is 0!
    const yyyy = today.getFullYear() + 5; // add a few years
    if (dd < 10) {
      dd = '0' + dd;
    }
    if (mm < 10) {
      mm = '0' + mm;
    }
    today = mm + '-' + dd + '-' + yyyy;

    bucket.file(filePath).getSignedUrl({
      action: 'read',
      expires: today
    }, function (err, url) {
      if (err) {
        console.error(err);
        return;
      }
      // add thumb image url to message (in database)
      return admin.database().ref(`/data/${dataID}/childData/${childDataID}`).child('image').update({
        thumb: url
      });
    });
  });
});
