Amazon Lambda to Firebase

I get "Cannot find module 'firebase'" when I try to run this in Lambda (Node.js 4.3):
var Firebase = require('firebase');
The same thing happens when I try to upload a zipped package that includes node_modules/firebase.
Does anybody have a working 'write from Lambda to Firebase' implementation?

To safely use the firebase npm package (version 3.3.0) in AWS Lambda (Node.js 4.3), do the following:
'use strict';
var firebase = require("firebase");

exports.handler = (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false; // <--- Important: don't keep the invocation alive for Firebase's open sockets
    var config = {
        apiKey: "<<apikey>>",
        authDomain: "<<app_id>>.firebaseapp.com",
        databaseURL: "https://<<app_id>>.firebaseio.com",
        storageBucket: "<<app_id>>.appspot.com",
    };
    if (firebase.apps.length === 0) { // <--- Important: a reused Lambda container would otherwise initialize twice
        firebase.initializeApp(config);
    }
    ...
    <Your Logic here...>
    ...
};

I solved my problem by using the Firebase REST API:
var https = require('https');

exports.handler = function(event, context, callback) {
    var body = JSON.stringify({
        foo: "bar"
    });
    var options = {
        host: 'project-XXXXX.firebaseio.com',
        port: 443,
        path: '/.json',
        method: 'POST'
    };
    var req = https.request(options, function(res) {
        console.log(res.statusCode);
        res.on('data', function(d) {
            process.stdout.write(d);
        });
        res.on('end', function() {
            // Complete the invocation only once the response has finished
            callback(null, "some success message");
        });
    });
    req.on('error', function(e) {
        console.error(e);
        callback(e);
    });
    req.end(body);
};

This is late, but in case someone else is looking:
Zipping your project folder instead of the contents of the project folder can cause this. The extracted zip should not contain a folder with the lambda files in it; the index.js file and the node_modules folder should be at the root level.
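For example, packaging from inside the project folder rather than zipping the folder itself (paths are illustrative):
cd my-function
# index.js and node_modules/ end up at the root of the archive
zip -r ../my-function.zip index.js node_modules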
A working example of a lambda function (using the latest shiny Firebase stuff, *sigh*):
var firebase = require('firebase');

// Your service account details
var credentials = {
    "type": "service_account",
    "project_id": "project-123451234512345123",
    "private_key_id": "my1private2key3id",
    "private_key": "-----BEGIN PRIVATE KEY-----InsertKeyHere-----END PRIVATE KEY-----\n",
    "client_email": "projectname@project-123451234512345123.iam.gserviceaccount.com",
    "client_id": "1111222223333344444",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://accounts.google.com/o/oauth2/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/projectname%40project-123451234512345123.iam.gserviceaccount.com"
};

firebase.initializeApp({
    serviceAccount: credentials,
    databaseURL: "https://project-123451234512345123.firebaseio.com"
});

exports.handler = function (event, context, callback) {
    // I use some data passed in from AWS API Gateway:
    if (!event.firebaseUid) {
        return callback('Missing param for id');
    }
    // newTokens is assumed to arrive alongside firebaseUid from earlier logic
    firebase.database().ref().child('users').child(event.firebaseUid).child('at').set(newTokens.access_token).then(function (data) {
        console.log('Firebase data: ', data);
        firebase.database().goOffline();
        callback(null, 'Firebase data: ' + data);
    }).catch(function (error) {
        callback('Database set error ' + error);
    });
};
Now for the caveat. I have experienced this causing the Lambda function to time out even after the Firebase callback has happened, i.e. the set function seems to create a listener that holds the Lambda function open despite the correct data being returned.
Update: calling firebase.database().goOffline() fixes the Lambda function timeout issue I was experiencing.
The usual cautions about security not being verified or appropriate, and the possibility of halting space and time by using this, apply.

2017-03-22 edit: Google just announced Firebase Cloud Functions, which is a much better way to do this. Cloud Functions work just like Lambda, and can trigger from Firebase events.
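For comparison, a minimal sketch of such a trigger (using the current firebase-functions database API; the ref path and function name here are illustrative, not from the original answer):
const functions = require('firebase-functions');

// Fires on every write under /foo/bar; no Lambda-to-Firebase bridging needed
exports.onFooBarWrite = functions.database.ref('/foo/bar').onWrite((change, context) => {
    console.log('New value:', change.after.val());
    return null; // nothing left to wait on
});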
Here's my solution using the REST API (so you don't need to install anything):
var https = require('https');
var firebaseHost = "yourapp.firebaseio.com";

function fbGet(key) {
    return new Promise((resolve, reject) => {
        var options = {
            hostname: firebaseHost,
            port: 443,
            path: key + ".json",
            method: 'GET'
        };
        var req = https.request(options, function (res) {
            res.setEncoding('utf8');
            var body = '';
            res.on('data', function(chunk) {
                body += chunk;
            });
            res.on('end', function() {
                resolve(JSON.parse(body));
            });
        });
        req.on('error', reject);
        req.end();
    });
}

function fbPut(key, value) {
    return new Promise((resolve, reject) => {
        var options = {
            hostname: firebaseHost,
            port: 443,
            path: key + ".json",
            method: 'PUT'
        };
        var req = https.request(options, function (res) {
            console.log("request made");
            res.setEncoding('utf8');
            var body = '';
            res.on('data', function(chunk) {
                body += chunk;
            });
            res.on('end', function() {
                resolve(body);
            });
        });
        req.on('error', reject);
        req.end(JSON.stringify(value));
    });
}
You can use it like this:
fbPut("/foo/bar", "lol").then(res => {
console.log("wrote data")
})
And then:
fbGet("/foo/bar").then(data => {
console.log(data); // prints "lol"
}).catch(e => {
console.log("error saving to firebase: ");
console.log(e);
})

Another alternative, if you're using a node-based development setup, is the node-lambda package from here. Essentially it provides wrappers to set up, test, and deploy to Lambda. node-lambda deploy will package up any modules you've installed (e.g. with npm i --save firebase) and make sure they're available on Lambda itself. I've found it really helpful for managing external modules.
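A typical flow looks like this (command names are from the node-lambda README; the generated stubs need your own values):
npm install -g node-lambda
node-lambda setup    # generates .env and event.json stubs
node-lambda run      # invoke the handler locally with event.json
node-lambda deploy   # zips the project, node_modules included, and ships it to AWS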

For me, firebase-admin did the trick:
https://firebase.google.com/docs/admin/setup
Thanks to Josiah Choi for suggesting context.callbackWaitsForEmptyEventLoop, so Lambda doesn't need to initialize Firebase every time. My first run was really slow.
var firebase = require('firebase-admin');

module.exports.Test = (event, context, callback) => {
    context.callbackWaitsForEmptyEventLoop = false; // <--- Important
    if (firebase.apps.length === 0) { // <--- Important: a reused Lambda container would otherwise initialize twice
        firebase.initializeApp({
            credential: firebase.credential.cert("serviceAccount.json"),
            databaseURL: <YOUR FIREBASE URL>
        });
    }
    firebase.database().ref('conversation').once('value').then(function(snapshot) {
        console.log(snapshot.val());
        var bodyReturn = {
            input: snapshot.val()
        };
        callback(null, bodyReturn); // the callback alone completes the invocation; context.succeed() is not needed as well
    });
};

After trying a few things, this seems to work for me (v3.10.8):
for (var i = 0; i < 5; i++) {
    var firebase = require('firebase');
    var config = {
        apiKey: "",
        authDomain: "",
        databaseURL: "",
        storageBucket: "",
        messagingSenderId: ""
    };
    if (firebase.apps && firebase.apps.length === 0) {
        firebase.initializeApp(config);
    }
    firebase.database().ref().child("test").once('value')
        .then(function(snapshot) {
            console.log(snapshot.val());
        });
}

Related

Mock function in Firebase local emulator

As described here, I'm using the local emulator (online) to run tests on my cloud functions.
index.js:
var functions = require('firebase-functions');
var AWS = require('aws-sdk');
var status = 200;

exports.saveAndSendMail = functions.https.onCall(async (req, res) => {
    try {
        let jsons = req.body;
        await saveInfirestore(jsons);
        await sendMail("Data saved", jsons);
    } finally {
        closeConnection(res, status);
    }

    async function saveInfirestore(json) {
        // execute business logic and save in firestore (irrelevant for this question)
    }

    function closeConnection(res, status) {
        res.sendStatus(status);
        res.end();
    }

    async function sendMail(title, message) {
        AWS.config.loadFromPath('./config_mail.json');
        // Create sendEmail params
        var params = {
            Destination: {
                ToAddresses: [
                    'mymail@gmail.com'
                ]
            },
            Message: { /* required */
                Body: { /* required */
                    Html: {
                        Charset: "UTF-8",
                        Data: JSON.stringify(message)
                    }
                },
                Subject: {
                    Charset: 'UTF-8',
                    Data: title
                }
            },
            Source: '"Origin" <origin@gmail.com>',
            ReplyToAddresses: [
                'origin@gmail.com'
            ]
        };
        // Create the promise and SES service object
        var sendPromise = new AWS.SES({ apiVersion: '2010-12-01' }).sendEmail(params).promise();
        // Handle promise's fulfilled/rejected states
        return sendPromise.then(function(data) {
            console.log(data.MessageId);
        }).catch(function(err) {
            console.error(err, err.stack);
        });
    }
});
index.test.js:
const { expect } = require("chai");
const admin = require("firebase-admin");
const test = require("firebase-functions-test")({
    projectId: process.env.GCLOUD_PROJECT,
});
const myFunctions = require("../index");
const db = admin.firestore(); // the emulated Firestore instance used in the assertions below

describe("Unit tests", () => {
    after(() => {
        test.cleanup();
    });

    it("test if save is correct", async () => {
        const wrapped = test.wrap(myFunctions.saveAndSendMail);
        const req = {
            body: [{
                value: 5,
                name: 'mario'
            }]
        };
        const result = await wrapped(req);
        let snap = await db.collection("collection_data").get();
        expect(snap.size).to.eq(1);
        snap.forEach(doc => {
            let data = doc.data();
            expect(data.value).to.eql(5);
            expect(data.name).to.eql('mario');
        });
    });
});
I execute it with: firebase emulators:exec "npm run test"
I have 2 problems.
1 - When I execute it, I receive the error TypeError: res.sendStatus is not a function. If I comment out the closeConnection call in the finally block (index.js), the code runs perfectly and all tests and "expect"s pass. But the correct way is to mock this method or mock the 'res' calls. I tried mocking with something like this:
const res = {
    sendStatus: (status) => {
    },
    end: () => {
    }
}
const result = await wrapped(req, res);
But, I receive this error:
Error: Options object {} has invalid key "sendStatus"
at /home/linuxuser/my-project/firebase/functions/myfolder/node_modules/firebase-functions-test/lib/main.js:99:19
at Array.forEach (<anonymous>)
at _checkOptionValidity (node_modules/firebase-functions-test/lib/main.js:97:26)
at wrapped (node_modules/firebase-functions-test/lib/main.js:57:13)
at Context.<anonymous> (test/index.test.js:50:26)
at processImmediate (node:internal/timers:464:21)
Problem 2:
I don't want to receive an e-mail each time the tests execute. How do I mock the sendMail function?
Something very important to point out is that you are currently trying to use a Firebase callable function, as shown by the function heading functions.https.onCall(() => {});. Since you want to work with requests and response codes, the correct type of function to use is an HTTP function. You would only need to change the heading in your index.js:
exports.saveAndSendMail = functions.https.onRequest(async (req, res) => {
    // function body
});
Now, your first problem can be solved by correctly mocking the res object that is passed to the function (inside index.test.js). When testing HTTP functions, you must not use test.wrap() when calling the function, nor expect a result as you were doing with const result = await wrapped(req);, since wrap is only supported for testing onCall functions. You can see another snippet of how to call an HTTP function for testing in the documentation.
it("test if save is correct", async () => {
const req = {
body: [{
value: 5,
name: 'mario'
}]
};
// mocking the response object that is returned from the function:
const res = {
sendStatus: (code) => {
expect(code).to.eql(200); // asserting that we get 200 back as the response code
},
end: () => {
}
};
const result = await myFunctions.saveAndSendMail(req, res); // mocking a call to an HTTP function, without test.wrap()
// rest of the function…
For your second problem, I haven’t used AWS SES before, but it seems this library offers ways to mock the functions so that you won’t have to actually send emails during your tests.
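For instance, a sketch using the aws-sdk-mock package (my suggestion, not necessarily the library linked above; any stubbing approach would do) so that sendMail never reaches the network. It works here because the posted code constructs new AWS.SES(...) at call time, after the mock is installed:
const AWSMock = require('aws-sdk-mock');

beforeEach(() => {
    // Intercept every SES.sendEmail call and answer with a fake message id
    AWSMock.mock('SES', 'sendEmail', (params, callback) => {
        callback(null, { MessageId: 'test-message-id' });
    });
});

afterEach(() => {
    AWSMock.restore('SES'); // put the real client back
});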

Unimplemented API error when running admin.storage().bucket(..).file(..).move() in an emulated Firebase function

"firebase": "9.0.1" - local
"firebase-tools": 9.17.0 - global
"node": v14.17.4
I'm calling .move() using the Firebase admin SDK in an emulated function. Firebase returns the following error:
ApiError: file#copy failed with an error - Not Implemented
at new ApiError (.../functions/node_modules/@google-cloud/common/build/src/util.js:73:15)
statusCode: 501
request: {
agent: [Agent],
headers: [Object],
href: 'http://localhost:9199/b/public-8s9ch/o/91ff38b1-b521-4a23-8844-b12cffa0ee98%2Fscreenshot.png/rewriteTo/b/private-8s9ch/o/91ff38b1-b521-4a23-8844-b12cffa0ee98%2Fcd665e29-edc8-4832-9bc7-110c21f1e560?'
},
body: 'Not Implemented',
The exact same code, with the same tests and installed packages, works perfectly when deployed to Firebase instead of being run in the emulator.
The same issue occurs with any call to the storage API, regardless of whether I use the Admin SDK or the client SDK, e.g. .copy, .move, .delete, etc.
Here's some code to help with reproducing the error:
export const firebaseFunction = functions
.runWith({ memory: "128MB" })
.region("us-central1")
.firestore.document("documents/{docId}")
.onCreate(async (snap, context) => {
try {
const { srcId } = snap.data();
const [files] = await admin
.storage()
.bucket('public')
.getFiles({
prefix: `${srcId}/`,
delimiter: "/",
autoPaginate: false,
});
const promises = files.map((file) => {
return new Promise<void>(async (resolve, reject) => {
try {
const dst = admin
.storage()
.bucket('private')
.file(`${srcId}/${uuidv4()}`);
await file.move(dst);
resolve();
} catch (err) {
reject(err);
}
});
});
await Promise.all(promises);
} catch (error) {
return console.log(error);
}
});

Saving a buffer in Google Cloud Bucket

I'm trying to find a solution that will let me stream an in-memory-created zip to a Google Cloud bucket (I'm using Firebase, but this seems to be beyond it, so I need to handle it through GCB).
I have nailed down the file-creation part (code below), and when it's working locally on my machine it saves the archive in the main folder where the server files reside. So far so good.
Now I found this link that shows streamed transfers, but I'm not sure how to connect the two. Should it come after the zip is created? Instead of saving it? Any suggestions are welcome! (An untested sketch of what I mean follows the code below.)
const express = require('express')
var router = express.Router()
var archiver = require('archiver')
var admin = require("firebase-admin");
var serviceAccount = require("../servicekey.json")

admin.initializeApp({
    credential: admin.credential.cert(serviceAccount),
    databaseURL: "https://myName.firebaseio.com",
    storageBucket: "myName.appspot.com"
})

var bucket = admin.storage().bucket()

const {
    convertCSVtoJSON,
    generateDocuments,
    generateDocx,
    isCorrectTemplateFileType
} = require('./generateServices')

router.post('/', async (req, res) => {
    try {
        if (!isCorrectTemplateFileType(req.files.template))
            return res.status(403).send({
                message: 'Wrong file type. Please provide .docx file.'
            })

        const template = req.files.template.data
        const data = await convertCSVtoJSON(req.files.data1)

        let zip = archiver('zip')
        zip.on('warning', function(err) {
            console.log(err)
        });
        zip.on('error', function(err) {
            res.status(500).send({ error: err.message })
        });
        zip.on('entry', function(ars) {
            // console.log(ars)
        });
        zip.on('end', function() {
            console.log('Archive wrote %d bytes', zip.pointer())
        });

        // res.attachment('archive-name.zip')
        // zip.pipe(output)
        // zip.pipe(res)

        data.forEach((docData, index) => {
            let buff = generateDocx(template, docData, 'title')
            zip.append(buff, { name: `${index}.docx` })
        })
        zip.finalize()
        console.log(zip)

        const file = bucket.file("pliki.zip") // name to be changed
        file.save(zip, (err) => {
            if (!err) {
                console.log("cool");
            } else {
                console.log("error " + err);
            }
        });
        res.sendStatus(201)
    } catch (error) {
        console.log(error)
        res.send(error)
    }
})

module.exports = router
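For reference, here's the direction I'm considering for connecting the two (untested sketch; createWriteStream is the @google-cloud/storage API underneath the admin bucket): pipe the archive into the file's write stream instead of handing the archiver object to file.save.
// Sketch only: would replace the file.save(...) block above
const file = bucket.file("pliki.zip")
const gcsStream = file.createWriteStream({ metadata: { contentType: "application/zip" } })

gcsStream.on('error', (err) => res.status(500).send({ error: err.message }))
gcsStream.on('finish', () => res.sendStatus(201)) // upload finished

zip.pipe(gcsStream)  // connect archiver output to the bucket file
zip.finalize()       // then flush the archive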

Firebase functions--how to handle needing to wait for an async process to end before the express app is ready to be exported

I'm trying to get Apollo Gateway to run in Google's Cloud Functions (particularly through Firebase). Running Apollo Server is super easy on Cloud Functions with the following code:
const functions = require("firebase-functions");
const { ApolloServer } = require("apollo-server-express");
const { buildFederatedSchema } = require("@apollo/federation");
var app = require("express")();

const server = new ApolloServer({
    schema: buildFederatedSchema([{ typeDefs, resolvers }]) // typeDefs and resolvers defined elsewhere
});
server.applyMiddleware({ app });

exports.apollo = functions.https.onRequest(app);
That all works nice and dandy because there are no async functions to wait on. But with Apollo Gateway, we do have an async function to wait on before the server can start listening.
The way it works with just Express is this:
const { ApolloServer } = require("apollo-server-express");
const { ApolloGateway } = require("@apollo/gateway");
var app = require("express")();

const gateway = new ApolloGateway({
    serviceList: [{ name: "clients", url: "http://localhost:5000/apollo/graphql" }]
});

(async () => {
    const { schema, executor } = await gateway.load();
    const server = new ApolloServer({ schema, executor });
    server.listen().then(({ url }) => {
        console.log(`🚀 Server ready at ${url}`);
    });
})();
So now the question becomes: how do I execute that in a cloud function? One idea I had was to pass the express app to the firebase functions, and THEN do all the async stuff to initialize the listening for federation. But that doesn't work :(. Any ideas?
const functions = require("firebase-functions");
const { ApolloServer } = require("apollo-server-express");
const { ApolloGateway } = require("@apollo/gateway");
var app = require("express")();

const gateway = new ApolloGateway({
    serviceList: [{ name: "clients", url: "http://localhost:5000/apollo/graphql" }]
});

exports.apollo = functions.https.onRequest(app);

(async () => {
    const { schema, executor } = await gateway.load();
    const server = new ApolloServer({ schema, executor });
    server.applyMiddleware({ app });
})();
I got it to work! The trick was to put the logic of setting up the federation INSIDE the onRequest piece.
Here's the solution:
const functions = require("firebase-functions");
const { ApolloServer } = require("apollo-server-express");
const { ApolloGateway } = require("@apollo/gateway");
var app = require("express")();
var server;

var start_gateway = async () => {
    if (server !== undefined) return; // already initialized on a previous request
    const gateway = new ApolloGateway({
        serviceList: [{ name: "clients", url: "http://localhost:4000/graphql" }]
    });
    const { schema, executor } = await gateway.load();
    server = new ApolloServer({ schema, executor });
    server.applyMiddleware({ app });
    console.log("Made FEDERATED GRAPH");
    return;
};

exports.apollo = functions.https.onRequest(async (req, res) => {
    await start_gateway();
    return await app(req, res);
});

Using IBM Watson Text-to-Speech with Firebase Cloud Functions?

I'm trying to set up a Firebase Cloud Function to access IBM Watson Text-to-Speech. The problem is writing the returned audio file to my Firestore database.
This test to return the list of voices worked, logging the response to the Functions log:
exports.test = functions.firestore.document('IBM_Watson_Token/Test_Value').onUpdate((change, context) => {
    var textToSpeech = new TextToSpeechV1({
        username: 'groucho',
        password: 'swordfish'
    });
    return textToSpeech.listVoices(null, function(error, voices) {
        if (error) {
            console.log(error);
        } else {
            console.log(JSON.stringify(voices, null, 2));
        }
    });
});
Here is the documentation example Node code for returning an audiofile and writing it to the server:
var TextToSpeechV1 = require('watson-developer-cloud/text-to-speech/v1');
var fs = require('fs');

var textToSpeech = new TextToSpeechV1({
    username: '{username}',
    password: '{password}'
});

var synthesizeParams = {
    text: 'Hello world',
    accept: 'audio/wav',
    voice: 'en-US_AllisonVoice'
};

// Pipe the synthesized text to a file.
textToSpeech.synthesize(synthesizeParams).on('error', function(error) {
    console.log(error);
}).pipe(fs.createWriteStream('hello_world.wav'));
Firebase doesn't allow writing files to the server using fs; you have to write to a Firestore database instead. I changed the last line of the example code to write to Firestore, using a promise:
exports.test = functions.firestore.document('IBM_Watson_Token/Test_Value').onUpdate((change, context) => {
    var textToSpeech = new TextToSpeechV1({
        username: 'groucho',
        password: 'swordfish'
    });
    var synthesizeParams = {
        text: 'Hello world',
        accept: 'audio/wav',
        voice: 'en-US_AllisonVoice'
    };
    return textToSpeech.synthesize(synthesizeParams).on('error', function(error) {
        console.log(error);
    }).then(function (audiofile) {
        admin.firestore().collection('IBM_Watson_Token').doc('hello_world').set({
            'audiofile': audiofile
        })
    })
    .catch(function (error) {
        console.log(error);
    });
});
The error message was
TypeError: textToSpeech.synthesize(...).on(...).then is not a function
How do I save the audiofile that comes back from Watson to Firestore?
That would be because the synthesize method does not return a promise. You will need to use a callback construct that looks like this:
textToSpeech.synthesize(params, function (err, body, response) {
    if (err) {
        ...
    } else {
        // body is the audio
        ...
    }
});
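Putting that together with your trigger might look like this (untested sketch, assuming the functions and admin handles from your code above; body arrives as a Buffer, which the Admin SDK stores as Firestore Bytes, and Firestore's 1 MB document limit applies to the audio):
exports.test = functions.firestore.document('IBM_Watson_Token/Test_Value').onUpdate((change, context) => {
    var textToSpeech = new TextToSpeechV1({
        username: 'groucho',
        password: 'swordfish'
    });
    var synthesizeParams = {
        text: 'Hello world',
        accept: 'audio/wav',
        voice: 'en-US_AllisonVoice'
    };
    // Wrap the callback in a promise so the function waits for the Firestore write
    return new Promise(function (resolve, reject) {
        textToSpeech.synthesize(synthesizeParams, function (err, body) {
            if (err) return reject(err);
            resolve(admin.firestore().collection('IBM_Watson_Token').doc('hello_world').set({
                audiofile: body // the audio as a Buffer
            }));
        });
    });
});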
