Uploading to Firebase Storage from a Google Cloud Function

I'm trying to create a Firebase Function that lets me pass in an array of image URLs, generate a montage, upload the file to Firebase Storage, and then return the generated download URL. This will be called from my app, so I'm using functions.https.onCall.
```JavaScript
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({ imageMagick: true });

admin.initializeApp();

exports.createMontage = functions.https.onCall((data, context) => {
  var storageRef = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var createdMontage = storageRef.file('createdMontage.jpg');

  function generateMontage(list) {
    let g = gm();
    list.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .write(createdMontage, function (err) {
        if (!err) console.log("Written montage image.");
      });
    return true;
  }

  generateMontage(data);
  return createdMontage.getDownloadURL();
});
```
The function generateMontage() works locally in Node.js (with a local write destination).
Thank you.

Have a look at this example from the docs:
https://cloud.google.com/storage/docs/uploading-objects#storage-upload-object-code-sample
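In case that link moves, the core of the documented sample is a single `bucket().upload()` call (a minimal sketch; the function name and parameters here are illustrative, not from the original answer):

```JavaScript
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();

// bucketName, filePath, and destFileName are placeholders
async function uploadFile(bucketName, filePath, destFileName) {
  // Uploads a local file to the bucket under the given destination name
  await storage.bucket(bucketName).upload(filePath, {
    destination: destFileName,
  });
  console.log(`${filePath} uploaded to ${bucketName}/${destFileName}`);
}
```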
2021-01-11 Update
Here's a working example. I'm using regular Cloud Functions, and it's limited in that srcObject, dstObject, and bucketName are constants, but it does create montages, which is your goal.
```bash
PROJECT=[[YOUR-PROJECT]]
BILLING=[[YOUR-BILLING]]
REGION=[[YOUR-REGION]]
FUNCTION=[[YOUR-FUNCTION]]
BUCKET=[[YOUR-BUCKET]]
OBJECT=[[YOUR-OBJECT]] # Path from ${BUCKET} root

gcloud projects create ${PROJECT}

gcloud beta billing projects link ${PROJECT} \
  --billing-account=${BILLING}

gcloud services enable cloudfunctions.googleapis.com \
  --project=${PROJECT}

gcloud services enable cloudbuild.googleapis.com \
  --project=${PROJECT}

gcloud functions deploy ${FUNCTION} \
  --memory=4096MB \
  --max-instances=1 \
  --allow-unauthenticated \
  --entry-point=montager \
  --set-env-vars=BUCKET=${BUCKET},OBJECT=${OBJECT} \
  --runtime=nodejs12 \
  --trigger-http \
  --project=${PROJECT} \
  --region=${REGION}

ENDPOINT=$(\
  gcloud functions describe ${FUNCTION} \
    --project=${PROJECT} \
    --region=${REGION} \
    --format="value(httpsTrigger.url)")

curl \
  --request GET \
  ${ENDPOINT}
```
`package.json`:
```JSON
{
  "name": "montage",
  "version": "0.0.1",
  "dependencies": {
    "@google-cloud/storage": "5.7.1",
    "gm": "^1.23.1"
  }
}
```
And `index.js`:
```JavaScript
const { Storage } = require('@google-cloud/storage');
const storage = new Storage();
const gm = require('gm').subClass({ imageMagick: true });

const bucketName = process.env["BUCKET"];
const srcObject = process.env["OBJECT"];
const dstObject = "montage.png";

// Creates a 2x2 montage from four copies of the source object
const list = [
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`,
  `/tmp/${srcObject}`
];

const montager = async (req, res) => {
  // Download GCS `srcObject` to `/tmp`
  await storage
    .bucket(bucketName)
    .file(srcObject)
    .download({
      destination: `/tmp/${srcObject}`
    });

  // Create a GCS write stream for the montage
  // (createWriteStream returns a stream, not a Promise, so no await)
  const obj = storage
    .bucket(bucketName)
    .file(dstObject)
    .createWriteStream();

  let g = gm();
  list.forEach(f => {
    g.montage(f);
  });

  g
    .geometry('+81+81')
    .density(5000, 5000)
    .stream()
    .pipe(obj)
    .on('finish', () => {
      console.log('finish');
      res.status(200).send('ok');
    })
    .on('error', (err) => {
      console.log(`error: ${err}`);
      res.status(500).send('uhoh!');
    });
};

exports.montager = montager;
```

I have never used gm, but according to its npm page it has a toBuffer function.
So maybe something like this could work:
```JavaScript
const functions = require('firebase-functions');
const admin = require('firebase-admin');
const gm = require('gm').subClass({ imageMagick: true });

admin.initializeApp();

exports.createMontage = functions.https.onCall((data, _context) => {
  const bucketName = 'xyz-zyx'; // not sure, I've always used the default bucket
  const bucket = admin.storage().bucket(bucketName);
  const storagePath = 'createdMontage.jpg';
  const fileRef = bucket.file(storagePath);

  const generateMontage = async (list) => {
    const g = gm();
    list.forEach(function (p) {
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000);

    return new Promise((resolve, reject) => {
      g.toBuffer('JPG', (err, buffer) => {
        if (err) return reject(err);
        const saveTask = fileRef.save(buffer, { contentType: 'image/jpeg' });
        const baseStorageUrl = `https://firebasestorage.googleapis.com/v0/b/${bucket.name}/o/`;
        const encodedPath = encodeURIComponent(storagePath);
        const postfix = '?alt=media'; // see stackoverflow.com/a/58443247/6002078
        const publicUrl = baseStorageUrl + encodedPath + postfix;
        saveTask.then(() => resolve(publicUrl)).catch(reject);
      });
    });
  };

  return generateMontage(data);
});
```
But it seems it can be done more easily. As Methkal Khalawi commented:
here is a full example on how to use ImageMagick with Functions. They use it for blurring an image, but the idea is the same. And here is a tutorial from the documentation.
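For completeness, invoking the callable from a web client might look like this (a sketch, assuming the Firebase JS SDK v8 API; `imagePaths` is a placeholder for your array of images, not a name from the original code):

```JavaScript
// Hypothetical client-side call, Firebase JS SDK v8 style
const createMontage = firebase.functions().httpsCallable('createMontage');

createMontage(imagePaths)
  .then((result) => {
    console.log('Montage URL:', result.data); // the URL the function resolves with
  })
  .catch(console.error);
```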

I think you can pipe the output stream from the gm module into a Firebase Storage object write stream.
```JavaScript
const functions = require("firebase-functions");
const admin = require('firebase-admin');
var gm = require('gm').subClass({ imageMagick: true });

admin.initializeApp();

exports.createMontage = functions.https.onCall(async (data, context) => {
  var storage = admin.storage().bucket('gs://xyz-zyx.appspot.com');
  var downloadURL = await new Promise((resolve, reject) => {
    let g = gm();
    data.forEach(function (p) { // `data` is the list passed to the callable
      g.montage(p);
    });
    g.geometry('+81+81');
    g.density(5000, 5000)
      .stream((err, stdout, stderr) => {
        if (err) {
          reject(err);
          return;
        }
        stdout.pipe(
          storage.file('generatedMontage.png').createWriteStream({
            metadata: {
              contentType: 'image/png',
            },
          })
        ).on('finish', () => {
          storage
            .file('generatedMontage.png')
            .getSignedUrl({
              action: 'read',
              expires: '03-09-2491', // Non-expiring public URL
            })
            .then((url) => {
              resolve(url);
            });
        });
      });
  });
  return downloadURL;
});
```
FYI, the Firebase Admin SDK storage object does not have a getDownloadURL() function.
You should generate a non-expiring public signed URL from the storage object instead.
In addition, signed URLs generated with the default credentials can stop working after some period of time, according to this issue.
To keep this from happening, you should initialize the Firebase app with a permanent service account:
```JavaScript
const admin = require('firebase-admin');
const serviceAccount = require('../your-service-account.json');

admin.initializeApp({
  credential: admin.credential.cert(serviceAccount),
  projectId: JSON.parse(process.env.FIREBASE_CONFIG).projectId,
  databaseURL: JSON.parse(process.env.FIREBASE_CONFIG).databaseURL,
  storageBucket: JSON.parse(process.env.FIREBASE_CONFIG).storageBucket,
});
```

Related

How to use the app object in cloud functions without passing it as parameter?

I am building cloud functions for the backend of my app, but I couldn't figure out a way to use the app or db variables without passing them into my functions as parameters. I tried initializing the app separately inside each function, but multiple initializations of the same app are not allowed, and I want to use only one app.
So the question is: is there a way to implement the code below without passing the app/db parameter into every function?
PS: I would also appreciate a few tips on improving the file structure and how I import/export functions.
index.js:
```JavaScript
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const cors = require("cors")({ credentials: true, origin: true });

const app = admin.initializeApp();
const db = app.firestore();

const { addVehicle } = require("./src/vehicles/addVehicle");
const { getVehicles } = require("./src/vehicles/getVehicles");

exports.addVehicle = functions.https.onRequest(async (req, res) => {
  cors(req, res, async () => {
    const result = await addVehicle(req, res, db);
    res.json(result);
  });
});

exports.getVehicles = functions.https.onRequest(async (req, res) => {
  cors(req, res, async () => {
    res.json(await getVehicles(req, res, db));
  });
});
```
addVehicle.js:
```JavaScript
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const Vehicle = require("../../models/Vehicle");

exports.addVehicle = async (req, res, db) => {
  try {
    const vehicleInfo = new Vehicle(req.body);
    const addedVehicle = await db.collection("vehicles").add(vehicleInfo);
    console.log(addedVehicle);
    res.json({ data: "Successfully added vehicle" });
  } catch (err) {
    if (err) {
      res.json(err);
    }
  }
};
```
getVehicles.js:
```JavaScript
const functions = require("firebase-functions");
const admin = require("firebase-admin");
const Vehicle = require("../../models/Vehicle");

exports.getVehicles = async (req, res, db) => {
  try {
    const vehiclesSnapshot = await db.collection("vehicles").get();
    const vehicles = [];
    vehiclesSnapshot.forEach(doc => {
      vehicles.push(doc.data());
    });
    res.json({ data: vehicles });
  } catch (err) {
    if (err) {
      res.json(err);
    }
  }
};
```
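One way to avoid threading db through every call (a sketch, not from the original thread): firebase-admin tracks the default app globally, so after a single admin.initializeApp() in index.js, each module can require firebase-admin itself and call admin.firestore() directly:

```JavaScript
// addVehicle.js - a hypothetical restructuring
const admin = require("firebase-admin");
const Vehicle = require("../../models/Vehicle");

// admin.firestore() resolves the default app initialized once in index.js.
// This requires that initializeApp() runs before this module is loaded,
// which holds in the index.js above (init comes before the requires).
const db = admin.firestore();

exports.addVehicle = async (req, res) => {
  try {
    const vehicleInfo = new Vehicle(req.body);
    await db.collection("vehicles").add(vehicleInfo);
    res.json({ data: "Successfully added vehicle" });
  } catch (err) {
    res.json(err);
  }
};
```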

Firebase storage - Download directory as ".zip"

If you use Firebase Storage, you may have noticed that it's not possible to download a folder (as a zip) directly from the Firebase UI or the gcloud UI. This makes it hard to create backups of your Firebase Storage bucket; moreover, if you use Firestore, you can export collections into Firebase Storage.
I created a Node.js script based on firebase-admin and jszip that takes two args: first, the source path in Firebase Storage; second, the path where the zip file will be written.
package.json:
```JSON
{
  "dependencies": {
    "firebase-admin": "^9.6.0",
    "jszip": "^3.6.0"
  }
}
```
index.js:
```JavaScript
const fs = require("fs");
const JSZip = require('jszip');
const admin = require('firebase-admin');
const serviceAccount = require("./service-account-key.json");

async function main() {
  try {
    admin.initializeApp({
      credential: admin.credential.cert(serviceAccount),
      storageBucket: "stackd-56e96.appspot.com",
    });
    const bucket = admin.storage().bucket();

    const src_storage_path = process.argv[2];
    let dest_storage_path = process.argv[3];
    // ":" is not a valid filename character on some systems
    if (dest_storage_path.includes(":"))
      dest_storage_path = dest_storage_path.replace(":", "_");

    const jszip = new JSZip();

    // List every object under the source prefix, then download them all
    const files = (await bucket.getFiles({
      prefix: `${src_storage_path}/`
    }))[0];
    const filesContent = await Promise.all(
      files.map(file => file.download())
    );
    filesContent.forEach((content, i) => {
      jszip.file(files[i].name, content[0]);
    });

    const content = await jszip.generateAsync({ type: 'nodebuffer' });
    await fs.promises.writeFile(dest_storage_path, content);
  } catch (error) {
    console.error(error);
  }
}

main();
```
Command-line example:
```bash
node index.js 2021-04-16T11:47:46_54052 backup.zip
```
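Note that this script buffers every file and the final archive in memory; for large folders, a streaming variant can reduce memory pressure (a sketch using jszip's generateNodeStream, swapped in for the generateAsync/writeFile step above):

```JavaScript
// Sketch: stream the zip to disk instead of building one large buffer
jszip
  .generateNodeStream({ type: 'nodebuffer', streamFiles: true })
  .pipe(fs.createWriteStream(dest_storage_path))
  .on('finish', () => console.log(`${dest_storage_path} written`));
```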

Unable to deploy functions in firebase

I am trying to integrate Razorpay in my nuxt app.
For that, I installed razorpay dependency using npm i razorpay
My index.js file starts with:
```JavaScript
const functions = require('firebase-functions')
const Razorpay = require('razorpay')
const admin = require('firebase-admin')
const crypto = require('crypto')
```
But after writing a basic helloWorld function, deploying gave me an "unable to deploy function" error.
When I commented out the line below, the helloWorld function deployed successfully:
```JavaScript
// const Razorpay = require('razorpay')
```
When I uncommented that line again, it still gave the same deployment error.
Version info
Node v12.18.3
Firebase v8.16.2
My dependencies:
```JSON
"dependencies": {
  "@nuxtjs/axios": "^5.12.2",
  "@nuxtjs/pwa": "^3.0.2",
  "cookieparser": "^0.1.0",
  "core-js": "^3.6.5",
  "firebase": "^8.2.0",
  "js-cookie": "^2.2.1",
  "jwt-decode": "^3.1.2",
  "nuxt": "^2.14.6",
  "nuxt-buefy": "^0.4.3",
  "razorpay": "^2.0.6",
  "uuid": "^8.3.2",
  "vuexfire": "^3.2.5"
},
```
Please note that you will need to put the following in your Firebase Cloud Function to integrate Razorpay:
```JavaScript
const Razorpay = require("razorpay");

var key_id = "YOUR_RAZORPAY_KEY_ID";
var key_secret = "YOUR_RAZORPAY_KEY_SECRET";

var instance = new Razorpay({
  key_id: key_id,
  key_secret: key_secret
});
```
You need to follow these steps to integrate it:
1. Sign up for Razorpay and grab your Key_Id and Key_Secret from Razorpay.
2. Integrate the checkout modal from Razorpay in the front end to accept payment details from the user.
3. Implement the Order API in the backend.
4. Capture authorized payments.
Please have a look at the following Medium tutorial for a better understanding, and at this GitHub repository for a code example.
********** UPDATE **********
Regarding the CORS error, please make sure of the following:
Import cors:
```JavaScript
const cors = require('cors')({ origin: true });
```
Call the cors module at the top of each function, as follows:
```JavaScript
exports.createPayment = functions.https.onRequest(async (req, res) => {
  cors(req, res, () => {
    // your function body here - use the provided req and res from cors
  });
});
```
Putting it together:
```JavaScript
const functions = require('firebase-functions')
const Razorpay = require('razorpay')
const admin = require('firebase-admin')
const crypto = require('crypto')
const cors = require('cors')({ origin: true })

admin.initializeApp()

// Function to create a payment
exports.createPayment = functions.https.onRequest(async (req, res) => {
  cors(req, res, () => {
    admin
      .firestore()
      .collection('payments')
      .add(req.body.data)
      .then((payment) => {
        var instance = new Razorpay({
          key_id: 'rzp_test_my_key',
          key_secret: 'my_secret_key',
        })
        var options = {
          amount: req.body.data.amount * 100, // amount in paise
          currency: 'INR',
          receipt: payment.id,
          payment_capture: 1,
        }
        instance.orders.create(options, function (err, order) {
          res.status(201).send({ data: order })
        })
      })
  })
})

// Function to verify a payment
exports.verifyPayment = functions.https.onRequest(async (req, res) => {
  cors(req, res, () => {
    const order = req.body.data
    const text = order.razorpay_order_id + '|' + order.razorpay_payment_id
    const secret_key = 'my_secret_key' // your Razorpay key secret (undefined in the original)
    var signature = crypto
      .createHmac('sha256', secret_key)
      .update(text)
      .digest('hex')
    if (signature === order.razorpay_signature) {
      res.status(201).send({ data: { message: 'Successful payment' } })
    } else {
      res.status(400).send({ data: { message: 'Signature mismatch' } })
    }
  })
})
```
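For the front-end side of step 2, the checkout modal is typically opened like this (a sketch based on Razorpay's documented checkout.js flow; the handler wiring and `order` variable are assumptions, not code from this answer):

```JavaScript
// Assumes https://checkout.razorpay.com/v1/checkout.js is loaded on the page
// and `order` is the object returned by the createPayment function above
const rzp = new Razorpay({
  key: 'rzp_test_my_key',   // your public key id
  amount: order.amount,
  currency: 'INR',
  order_id: order.id,       // ties the checkout to the backend order
  handler: function (response) {
    // Send these fields to verifyPayment for signature verification
    console.log(response.razorpay_payment_id,
                response.razorpay_order_id,
                response.razorpay_signature);
  },
});
rzp.open();
```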

Error: ****#appspot.gserviceaccount.com does not have storage.objects.get access

I have a simple Firebase function that triggers when a file is uploaded to Firebase Storage. It was working on a non-main bucket, but once I changed it to listen to the main bucket I began receiving these error messages:
Error: *****#appspot.gserviceaccount.com does not have storage.objects.get access to *****.appspot.com/ff-icon-01.png.
The function is in the same project as the storage bucket.
```TypeScript
const admin = require('firebase-admin');
admin.initializeApp();
const functions = require('firebase-functions');
const { Storage } = require('@google-cloud/storage');
const gcs = new Storage();

import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';

export const makeThumbnail = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const fileName = filePath.split('/').pop();
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, 'source.png');

    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [64, 128, 256];
    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb@${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(size, size)
        .toFile(thumbPath);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: join(bucketDir, thumbName)
      });
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove tmp/thumbs from the filesystem
    return fs.remove(workingDir);
  });
```
They have the same rules. Not sure what's up.
```
rules_version = '2';
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write: if request.auth != null;
    }
  }
}
```
For anyone else who runs into this issue:
The problem for me was that I was using the wrong project in my gcloud setup when deploying my functions. I used one project in the Firebase CLI while using another project in the gcloud CLI.
It worked for me when I deleted all the functions, changed the gcloud CLI project to the right one, and deployed the functions again.
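To check that both CLIs point at the same project, these standard commands should help (YOUR_PROJECT_ID is a placeholder):

```bash
# Show which project each CLI is currently using
gcloud config get-value project
firebase projects:list

# Align both CLIs on the same project
gcloud config set project YOUR_PROJECT_ID
firebase use YOUR_PROJECT_ID
```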

google.storage.object.finalize not triggering firebase cloud function

I have a Firebase app; when a user uploads a photo to storage, it triggers a generateThumbnail cloud function. All standard code, it worked fine; I deployed it on Feb 24th, 2019.
Now when I upload a photo, nothing happens. I look in storage and the photo is there, but when I look at the logs for Cloud Functions, the generateThumbnail function hasn't been called. How can I debug or fix this? I was thinking of just redeploying my code, or perhaps upgrading my libraries in case there have been breaking changes.
Here's my code:
```TypeScript
import * as functions from 'firebase-functions';
// import * as Storage from '@google-cloud/storage';
// const gcs = new Storage();
import * as admin from 'firebase-admin';
admin.initializeApp(); // (not shown in the original; must run before storage()/firestore())
const gcs = admin.storage();
const firestore = admin.firestore();

import { tmpdir } from 'os';
import { join, dirname } from 'path';
import * as sharp from 'sharp';
import * as fs from 'fs-extra';

export const generateThumbs = functions.storage
  .object()
  .onFinalize(async object => {
    const bucket = gcs.bucket(object.bucket);
    const filePath = object.name;
    const parts = filePath.split('/');
    const fileName = parts.pop();
    const propertyID = parts.pop();
    // console.log(`got property id ${propertyID}`)
    const bucketDir = dirname(filePath);

    const workingDir = join(tmpdir(), 'thumbs');
    const tmpFilePath = join(workingDir, fileName);

    if (fileName.includes('thumb@') || !object.contentType.includes('image')) {
      console.log('exiting function');
      return false;
    }

    // 1. Ensure thumbnail dir exists
    await fs.ensureDir(workingDir);

    // 2. Download source file
    await bucket.file(filePath).download({
      destination: tmpFilePath
    });

    // 3. Resize the images and define an array of upload promises
    const sizes = [256];
    let thumbLocation = '';
    const uploadPromises = sizes.map(async size => {
      const thumbName = `thumb@${size}_${fileName}`;
      const thumbPath = join(workingDir, thumbName);

      // Resize source image
      await sharp(tmpFilePath)
        .resize(256, 171)
        .toFile(thumbPath);

      thumbLocation = join(bucketDir, thumbName);

      // Upload to GCS
      return bucket.upload(thumbPath, {
        destination: thumbLocation
      });
    });

    // 4. Run the upload operations
    await Promise.all(uploadPromises);

    // 5. Cleanup: remove tmp/thumbs from the filesystem
    await fs.remove(workingDir);

    // Generate long-lived signed URLs for the photo and its thumbnail
    let photoURL = '';
    const hour = 1000 * 60 * 60;
    const year = hour * 24 * 365;
    const EXP = Date.now() + year * 10;

    await bucket.file(filePath).getSignedUrl({
      action: 'read',
      expires: EXP
    }).then(signedUrls => {
      photoURL = signedUrls[0];
    });

    let thumbURL = '';
    await bucket.file(thumbLocation).getSignedUrl({
      action: 'read',
      expires: EXP
    }).then(signedUrls => {
      thumbURL = signedUrls[0];
    });

    if (!(photoURL && thumbURL)) {
      return Promise.resolve('Error no thumbs');
    }

    // Append the new URLs to the property's photos array in a transaction
    const propertyRef = firestore.collection('properties').doc(propertyID);
    return firestore.runTransaction(t => {
      return t.get(propertyRef)
        .then(doc => {
          if (!doc.exists) {
            console.log(`doc does not exist ${propertyID}`);
            return;
          }
          let photos = doc.data().photos;
          photos = photos || [];
          photos.push({
            big: photoURL,
            small: thumbURL,
          });
          t.update(propertyRef, { photos: photos });
        });
    });
  });
```
> All standard code, it worked fine; I deployed it on Feb 24th, 2019.

Until a month or so ago, Cloud Functions would get deactivated by the system if they'd been inactive for 30 days or more. This behavior has since been changed, since it was quite unintuitive to most developers, but you will need to redeploy your Cloud Functions once more to opt in to the new behavior.
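Redeploying is a one-liner with the Firebase CLI (assuming your functions live in the default functions directory of your project):

```bash
firebase deploy --only functions
```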
