Firebase google cloud functions - Download File - Not Found - firebase

I'm trying to do the thumb generator example (generate an image thumbnail when one is uploaded). This example was adapted from a previous version of the API.
const functions = require('firebase-functions');
const { Storage } = require('#google-cloud/storage');
const os = require('os');
const path = require('path');
const sharp = require('sharp');
const fs = require('fs-extra');
exports.generateThumbs = functions.storage
.object()
.onFinalize(async (object) => {
const storage = new Storage();
const bucket = await storage.bucket(object.name);
const filePath = object.name;
const fileName = filePath.split('/').pop();
const bucketDir = path.dirname(filePath);
const workingDir = path.join(os.tmpdir(), 'thumbs');
const tmpFilePath = path.join(workingDir, 'source.png');
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
// 1. Ensure thumbnail dir exists
await fs.ensureDir(workingDir);
// 2. Download Source File
const file = await bucket.file(filePath);
await file.download({
destination: tmpFilePath
});
// 3. Resize the images and define an array of upload promises
const sizes = [64, 128, 256];
const uploadPromises = sizes.map(async size => {
const thumbName = `thumb#${size}_${fileName}`;
const thumbPath = path.join(workingDir, thumbName);
// Resize source image
await sharp(tmpFilePath)
.resize(size, size)
.toFile(thumbPath);
// Upload to GCS
return bucket.upload(thumbPath, {
destination: path.join(bucketDir, thumbName)
});
});
// 4. Run the upload operations
await Promise.all(uploadPromises);
// 5. Cleanup remove the tmp/thumbs from the filesystem
return fs.remove(workingDir);
});
But I get the following error:
Error: Not Found
at new ApiError (/srv/node_modules/#google-cloud/common/build/src/util.js:58:28)
at Util.parseHttpRespMessage (/srv/node_modules/#google-cloud/common/build/src/util.js:159:41)
at Util.handleResp (/srv/node_modules/#google-cloud/common/build/src/util.js:136:74)
at Duplexify.requestStream.on.on.res (/srv/node_modules/#google-cloud/storage/build/src/file.js:392:31)
at emitOne (events.js:116:13)
at Duplexify.emit (events.js:211:7)
at emitOne (events.js:116:13)
at DestroyableTransform.emit (events.js:211:7)
at onResponse (/srv/node_modules/retry-request/index.js:194:19)
at Request.<anonymous> (/srv/node_modules/retry-request/index.js:149:11)
at bucket.file(...).download()
API 2.X.X introduced some changes and I can't seem to make this work. Can anyone give me a hand?
Thank you.

Got it working with the following code:
const functions = require('firebase-functions');
const { Storage } = require('#google-cloud/storage');
const os = require('os');
const path = require('path');
const sharp = require('sharp');
const fs = require('fs-extra');
exports.generateThumbs = functions.storage
.object()
.onFinalize(async (object) => {
const storage = new Storage();
const bucket = storage.bucket(object.bucket);
const filePath = object.name;
const fileName = filePath.split('/').pop();
const bucketDir = path.dirname(filePath);
const workingDir = path.join(os.tmpdir(), 'thumbs');
const tmpFilePath = path.join(workingDir, 'source.png');
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
// 1. Ensure thumbnail dir exists
await fs.ensureDir(workingDir);
// 2. Download Source File
await bucket.file(filePath).download({
destination: tmpFilePath
});
// 3. Resize the images and define an array of upload promises
const sizes = [64, 128, 256];
const uploadPromises = sizes.map(async size => {
const thumbName = `thumb#${size}_${fileName}`;
const thumbPath = path.join(workingDir, thumbName);
// Resize source image
await sharp(tmpFilePath)
.resize(size, size)
.toFile(thumbPath);
// Upload to GCS
return bucket.upload(thumbPath, {
destination: path.join(bucketDir, thumbName)
});
});
// 4. Run the upload operations
await Promise.all(uploadPromises);
// 5. Cleanup remove the tmp/thumbs from the filesystem
return fs.remove(workingDir);
});

Related

Error: ****#appspot.gserviceaccount.com does not have storage.objects.get access

I have a simple firebase function that triggers on a file being uploaded to Firebase Storage. It was working on the non-main bucket, but once I changed it to listen to the main bucket I began receiving these error messages....
Error: *****#appspot.gserviceaccount.com does not have storage.objects.get access to *****.appspot.com/ff-icon-01.png.
The function is in the same project as the storage bucket.
// Firebase Admin SDK — initialized with default (environment-provided) credentials.
const admin = require('firebase-admin');
admin.initializeApp();
const functions = require('firebase-functions');
const {Storage} = require('#google-cloud/storage');
// Shared Cloud Storage client, reused across function invocations.
const gcs = new Storage();
// NOTE(review): mixing CommonJS require() with ES `import` statements only
// compiles under a transpiler (e.g. TypeScript) — verify the build setup.
import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';
// Create 64/128/256px thumbnails whenever an image lands in the bucket.
export const makeThumbnail = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const sourcePath = object.name;
    const sourceName = sourcePath.split('/').pop();

    // Skip our own output and anything that is not an image.
    if (sourceName.includes('thumb#') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    const targetDir = dirname(sourcePath);
    const scratchDir = join(tmpdir(), 'thumbs');
    const localSource = join(scratchDir, 'source.png');

    // 1. Scratch space for the resize work.
    await fs.ensureDir(scratchDir);

    // 2. Fetch the original from the bucket.
    await bucket.file(sourcePath).download({ destination: localSource });

    // 3. One resize + upload per target size, all kicked off together.
    const uploads = [64, 128, 256].map(async size => {
      const thumbName = `thumb#${size}_${sourceName}`;
      const localThumb = join(scratchDir, thumbName);
      await sharp(localSource).resize(size, size).toFile(localThumb);
      return bucket.upload(localThumb, {
        destination: join(targetDir, thumbName)
      });
    });

    // 4. Wait for them all.
    await Promise.all(uploads);

    // 5. Tidy up the tmpfs.
    return fs.remove(scratchDir);
  });
They have the same rules. Not sure what's up.
rules_version = '2';
service firebase.storage {
match /b/{bucket}/o {
match /{allPaths=**} {
allow read, write: if request.auth != null;
}
}
}
For someone else who runs into this issue.
The problem for me was that I was using the wrong project in my gcloud setup when uploading my functions. So I used one project in the firebase cli while using another project in the gcloud cli.
It worked for me when I deleted all the functions, changed the gcloud cli project to the right one, and then uploaded the functions again.

google.storage.object.finalize not triggering firebase cloud function

I have a firebase app, when user uploads photo to storage, it triggers a generatethumbnail cloud function. All standard code, it worked fine, i deployed on Feb 24th 2019.
Now when I upload a photo, nothing happens. I look in storage and the photo is there, but when I look at the logs for firebase cloud functions, the generateThumbnail function hasn't been called. How can I debug / fix this? I was thinking of just redeploying my code, or perhaps upgrading my libraries etc in case there have been breaking changes?
Here's my code:
import * as functions from 'firebase-functions';
// import * as Storage from '#google-cloud/storage';
// const gcs = new Storage();
import * as admin from 'firebase-admin';
// Storage and Firestore handles obtained through the Admin SDK.
// NOTE(review): admin.initializeApp() is not called anywhere in this snippet —
// presumably done elsewhere before these module-level calls run; verify.
const gcs = admin.storage()
const firestore = admin.firestore();
import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';
export const generateThumbs = functions.storage
.object()
.onFinalize(async object => {
const bucket = gcs.bucket(object.bucket);
const filePath = object.name;
const parts = filePath.split('/');
const fileName = parts.pop();
const propertyID = parts.pop();
// console.log(`got property id ${propertyID}`)
const bucketDir = dirname(filePath);
const workingDir = join(tmpdir(), 'thumbs');
const tmpFilePath = join(workingDir, fileName);
if (fileName.includes('thumb#') || !object.contentType.includes('image')) {
console.log('exiting function');
return false;
}
// 1. Ensure thumbnail dir exists
await fs.ensureDir(workingDir);
// 2. Download Source File
await bucket.file(filePath).download({
destination: tmpFilePath
});
// 3. Resize the images and define an array of upload promises
const sizes = [256];
let thumbLocation = '';
const uploadPromises = sizes.map(async size => {
const thumbName = `thumb#${size}_${fileName}`;
const thumbPath = join(workingDir, thumbName);
// Resize source image
await sharp(tmpFilePath)
.resize(256, 171)
.toFile(thumbPath);
thumbLocation = join(bucketDir, thumbName);
// Upload to GCS
return bucket.upload(thumbPath, {
destination: thumbLocation
});
});
// 4. Run the upload operations
await Promise.all(uploadPromises);
// 5. Cleanup remove the tmp/thumbs from the filesystem
await fs.remove(workingDir);
let photoURL = ''
const hour = 1000 * 60 * 60;
const year = hour * 24 * 365;
const EXP = Date.now() + year * 10;
await bucket.file(filePath).getSignedUrl({
action: 'read',
expires: EXP
}).then(signedUrls => {
photoURL = signedUrls[0];
});
let thumbURL = '';
await bucket.file(thumbLocation).getSignedUrl({
action: 'read',
expires: EXP
}).then(signedUrls => {
thumbURL = signedUrls[0];
});
if (!(photoURL && thumbURL)) {
return Promise.resolve('Error no thumbs');
}
const propertyRef = firestore.collection('properties').doc(propertyID);
return firestore.runTransaction(t => {
return t.get(propertyRef)
.then(doc => {
if (!doc.exists) {
console.log(`doc does not exist ${propertyID}`)
return;
}
let photos = doc.data().photos;
photos = photos || [];
photos.push({
big: photoURL,
small: thumbURL,
});
t.update(propertyRef, { photos: photos });
});
});
});
All standard code, it worked fine, i deployed on Feb 24th 2019.
Until a month or so ago Cloud Functions would get deactivated by the system if they'd been inactive for 30 days or more. This behavior has since been changed, since it was quite unintuitive to most developers. But you will need to redeploy your Cloud Functions once more to opt in to the new behavior.

Upload a file, from an URL to Google Storage, in a cloud function

I try to find a way to upload a PDF file, generated by a php/MySQL server to my Google Storage bucket. The URL is simple : www.my_domain.com/file.pdf . I tried with the code below , but I'm having some issues to make it work. The error is : path (fs.createWriteStream(destination)) must be a string or Buffer. Thanks in advance for your help !
const http = require('http');
const fs = require('fs');
const {Storage} = require('#google-cloud/storage')
const gcs = new Storage({
keyFilename: 'my_keyfile.json'
})
const bucket = gcs.bucket('my_bucket.appspot.com');
// BUG: `destination` is a GCS File object, but fs.createWriteStream() below
// requires a *local filesystem path* (string/Buffer) — this is the source of
// the "path must be a string or Buffer" error. The file must be written to a
// local temp path first and then uploaded with bucket.upload().
const destination = bucket.file('file.pdf');
var theURL = 'https://www.my_domain.com/file.pdf';
// NOTE(review): theURL is https:// but http.get() is used — Node's http
// module rejects the 'https:' protocol; the https module is needed here.
var download = function() {
var file = fs.createWriteStream(destination);
var request = http.get(theURL, function(response) {
response.pipe(file);
file.on('finish', function() {
console.log("File uploaded to Storage")
file.close();
});
});
}
I finally found a solution :
const http = require('http');
// FIX: the source URL is https:// — Node's http.get() rejects the
// 'https:' protocol, so the https module must be used for the request.
const https = require('https');
const fs = require('fs');
// FIX: os and path were used below but never required (ReferenceError).
const os = require('os');
const path = require('path');
// FIX: the package scope is '@google-cloud', not '#google-cloud'.
const {Storage} = require('@google-cloud/storage')
const gcs = new Storage({
keyFilename: 'my_keyfile.json'
})
const bucket = gcs.bucket('my_bucket.appspot.com');
// Local tmpfs paths the PDF is streamed into before the GCS upload.
const destination = os.tmpdir() + "/file.pdf";
const destinationStorage = path.join(os.tmpdir(), "file.pdf");
var theURL = 'https://www.my_domain.com/file.pdf';

// Download the remote PDF to tmpfs, then upload it to the bucket.
var download = function () {
  var request = https.get(theURL, function (response) {
    if (response.statusCode === 200) {
      var file = fs.createWriteStream(destination);
      response.pipe(file);
      file.on('finish', function () {
        console.log('Pipe OK');
        bucket.upload(destinationStorage, {
          destination: "file.pdf"
        }, (err, file) => {
          console.log('File OK on Storage');
        });
        file.close();
      });
    }
  });
}
firebase-admin, as of v7.0.0, uses google-cloud/storage v2.3.0, which can no longer accept file URLs on bucket.upload.
I figured I would share my solution as well.
// Streams the remote file straight into the bucket via a write stream —
// avoids any local temp file.
// NOTE(review): the `request` package is deprecated; a maintained HTTP
// client (or Node's built-in https) would be preferable for new code.
const rq = require('request');
// filePath = File location on google storage bucket
// fileUrl = URL of the remote file
const bucketFile = bucket.file(filePath);
const fileWriteStream = bucketFile.createWriteStream();
// NOTE(review): no 'error' handler is attached to either stream — a failed
// download or upload would go unnoticed here.
let rqPipe = rq(fileUrl).pipe(fileWriteStream);
// And if you want the file to be publicly readable
rqPipe.on('finish', async () => {
await bucketFile.makePublic();
});

How can I upload a uint8 pixel data to Storage on Firebase Functions?

bucket.upload only accepts file paths, and it seems that in functions the method put can not be used. With put, I think I could be able to upload with new Uint8Array(data). But it does not work with the upload method.
// Storage onChange trigger (pre-v1.0 functions API): downloads an icon and a
// profile image, composites them with sharp, and tries to upload the result.
exports.modificarImagen = functions.storage.object().onChange(event => {
const THUMB_PREFIX = 'thumb_';
const object = event.data;
const fileBucket = object.bucket;
const filePath = object.name;
const contentType = object.contentType;
const resourceState = object.resourceState;
const metageneration = object.metageneration;
const SIZES = [64];
const bucket = gcs.bucket(fileBucket);
const fileName = filePath.split('/').pop();
const tempIconoPath = path.join(os.tmpdir(), 'icono-amarillo.png');
const tempPerfilPath = path.join(os.tmpdir(), 'perfil64.jpg');
return bucket.file('images/icono-amarillo.png').download({
destination: tempIconoPath
}).then(() => {
// NOTE(review): this inner promise is not returned, so the outer chain
// resolves before the download/resize/upload finish — the function runtime
// may terminate the work early.
bucket.file('images/perfil64.jpg').download({
destination: tempPerfilPath
}).then(() => {
_.each(SIZES, (size) => {
let newFileName = 'nueva_imagen.png';
let newFileTemp = path.join(os.tmpdir(), newFileName);
// NOTE(review): the trailing `;` is *inside* the template string, making
// the destination path literally end in ';' — looks like a typo; verify.
let newFilePath = `images/${newFileName};`
sharp(tempIconoPath)
.flatten()
.background('#ff6600')
.overlayWith(tempPerfilPath, { gravity: sharp.gravity.southeast } )
.sharpen()
.withMetadata()
.raw()
.toBuffer().then(function(outputBuffer) {
//here is the problem. outputBuffer is a raw file uint8array and
// storage only allows file path.
bucket.upload(outputBuffer, {
destination: newFilePath
}).then(() => { console.log("do another thing"); });
});
})//each
})
})
})
You can use Google Cloud Storage and Sharp along with streams. The following should work:
// Stream filePath out of the bucket, composite/sharpen it with sharp, and
// stream the result back into the bucket at newFilePath — no temp files.
function transform(fileBucket, tempPerfilPath, filePath, newFilePath) {
  const uploadStream = fileBucket.file(newFilePath).createWriteStream();

  // sharp() with no input acts as a duplex transform between the two
  // GCS streams: bucket read stream -> sharp -> bucket write stream.
  const resizer = sharp()
    .flatten()
    .background('#ff6600')
    .overlayWith(tempPerfilPath, { gravity: sharp.gravity.southeast } )
    .sharpen()
    .withMetadata()
    .raw();
  resizer.pipe(uploadStream);
  fileBucket.file(filePath).createReadStream().pipe(resizer);

  // Resolve once the upload stream finishes; reject on stream errors.
  return new Promise((resolve, reject) => {
    uploadStream.on('finish', resolve).on('error', reject);
  }).then(() => {
    return console.log('Image created and uploaded successfully');
  });
}

How to use cloud functions to delete a storage image by signed download url?

i am using a firebase cloud function to generate thumbnails and to store the signed image urls in firestore:
'use strict';
const functions = require('firebase-functions');
const mkdirp = require('mkdirp-promise');
// NOTE(review): calling the required module as a function is the pre-2.0
// google-cloud/storage API; v2+ exports a Storage class instead.
const gcs = require('#google-cloud/storage')({keyFilename: 'service-account-credentials.json'});
const admin = require('firebase-admin');
admin.initializeApp(functions.config().firebase);
// Used to shell out to ImageMagick's `convert` for the actual resizing.
const spawn = require('child-process-promise').spawn;
const path = require('path');
const os = require('os');
const fs = require('fs');
// Thumbnail geometry and naming convention shared by the functions below.
const THUMB_MAX_HEIGHT = 200;
const THUMB_MAX_WIDTH = 200;
const THUMB_PREFIX = 'thumb_';
// Storage onChange trigger (pre-v1.0 API): creates a thumb_ copy of every
// uploaded image via ImageMagick, signs long-lived URLs for both files, and
// records them in Firestore according to the upload's custom `type` metadata.
exports.onUploadImage = functions.storage.object().onChange(async event => {
const filePath = event.data.name;
const contentType = event.data.contentType;
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`));
const tempLocalFile = path.join(os.tmpdir(), filePath);
const tempLocalDir = path.dirname(tempLocalFile);
const tempLocalThumbFile = path.join(os.tmpdir(), thumbFilePath);
// Only process images.
if (!contentType.startsWith('image/')) {
return null;
}
// Don't re-process thumbnails we created ourselves.
if (fileName.startsWith(THUMB_PREFIX)) {
return null;
}
// onChange also fires on deletion; skip those events.
if (event.data.resourceState === 'not_exists') {
return null;
}
// Custom metadata set by the uploading client drives the routing below.
const tankId = event.data.metadata.tankId;
const userId = event.data.metadata.userId;
const imageType = event.data.metadata.type;
const bucket = gcs.bucket(event.data.bucket);
const file = bucket.file(filePath);
const thumbFile = bucket.file(thumbFilePath);
const metadata = {
contentType: contentType,
customMetadata: {
'type': imageType
}
};
try {
await mkdirp(tempLocalDir);
await file.download({destination: tempLocalFile});
// Resize with ImageMagick; '>' only shrinks images larger than the box.
await spawn('convert', [tempLocalFile, '-thumbnail', `${THUMB_MAX_WIDTH}x${THUMB_MAX_HEIGHT}>`, tempLocalThumbFile], {capture: ['stdout', 'stderr']});
await bucket.upload(tempLocalThumbFile, { destination: thumbFilePath, metadata: metadata });
// NOTE(review): unlinkSync is synchronous — the awaits here are no-ops.
await fs.unlinkSync(tempLocalFile);
await fs.unlinkSync(tempLocalThumbFile);
// Effectively non-expiring signed read URLs.
const config = {
action: 'read',
expires: '03-01-2500'
};
const results = await Promise.all([
thumbFile.getSignedUrl(config),
file.getSignedUrl(config)
]);
const thumbResult = results[0];
const originalResult = results[1];
const thumbFileUrl = thumbResult[0];
const fileUrl = originalResult[0];
// Route the URLs into Firestore based on the image type.
const tankRef = admin.firestore().collection('tanks').doc(tankId);
switch(imageType) {
case 'gallery':
await tankRef
.collection('gallery')
.add({
url: fileUrl,
thumbnail: thumbFileUrl,
createdAt: new Date()
});
// NOTE(review): this read-then-update of galleryCount is not transactional;
// concurrent uploads could drop increments.
const tankSnapshot = await tankRef.get();
const tankData = await tankSnapshot.data();
let galleryCount = tankData.galleryCount || 0;
galleryCount += 1;
if (galleryCount < 0) galleryCount = 0;
return await tankRef.update({ galleryCount }, { merge: true });
case 'tankImage':
await tankRef.set({ image: fileUrl, image_thumb: thumbFileUrl }, { merge: true });
return null;
case 'profileImage':
await admin.auth().updateUser(userId, { photoURL: thumbFileUrl });
await admin.firestore()
.collection('users')
.doc(userId)
.set({image: fileUrl});
return null;
default:
return null
}
}
catch(err) {
// NOTE(review): errors are logged but swallowed — the function reports
// success to the caller even when processing failed.
console.log(err);
}
});
Now i am trying to write another cloud function that deletes the stored files from the bucket when the firestore db entry was deleted:
// Attempt to delete the Storage files backing a removed gallery document.
exports.onGalleryImageDelete = functions.firestore
.document('/tanks/{tankId}/gallery/{docId}')
.onDelete(async event => {
const deletedDoc = event.data.previous.data();
const bucket = admin.storage().bucket();
// BUG: deletedDoc.url / deletedDoc.thumbnail hold *signed https URLs*, but
// bucket.file() expects an object path within the bucket — hence the API
// error. The object path must be stored in the document instead.
await bucket.file(deletedDoc.url).delete(); // this is wrong... no idea how to solve this
await bucket.file(deletedDoc.thumbnail).delete();
return await updateTankDocumentCounter(event, 'galleryCount', 'onDelete');
});
This code actually does not work and returns some API error. How could i delete those images from the given signed download urls?
Based on the comment from Doug Stevenson, i stored the path to the db and ended up with this cloud function:
exports.onGalleryImageDelete = functions.firestore
.document('/tanks/{tankId}/gallery/{docId}')
.onDelete(async event => {
const deletedDoc = event.data.previous.data();
const filePath = deletedDoc.path;
const fileDir = path.dirname(filePath);
const fileName = path.basename(filePath);
const thumbFilePath = path.normalize(path.join(fileDir, `${THUMB_PREFIX}${fileName}`));
const bucket = admin.storage().bucket();
return await Promise.all([
await bucket.file(filePath).delete(),
await bucket.file(thumbFilePath).delete(),
updateTankDocumentCounter(event, 'galleryCount', 'onDelete')
]);
});
I ran into this issue too. The solution is to store a document that keeps all the needed (hidden) information, such as the object path.
Some things about Firebase are good; others are quite bad.

Resources