Firebase Cloud Functions: deploy async function with Node 8 - firebase

My functions package.json:
...
"dependencies": {
"firebase-admin": "~5.13.0",
"firebase-functions": "^2.0.0",
"puppeteer": "^1.6.2"
},
"engines": {
"node": "8"
}
My old non-async functions deploy fine, but my new async one does not:
exports.screenshot = async (req, res) => {
  const url = "https://google.de"; // req.query.url;
  if (!url) {
    return res.send('Please provide URL as GET parameter, for example: ?url=https://example.com');
  }
  const browser = await puppeteer.launch({
    args: ["--no-sandbox"]
  });
  const page = await browser.newPage();
  await page.goto(url);
  const imageBuffer = await page.screenshot();
  await browser.close();
  res.set("Content-Type", "image/png");
  res.send(imageBuffer);
};
When running firebase deploy --only functions:screenshot I get the error:
The following functions are found in your project but do not exist in your local source code:
screenshot(us-central1)
What can I do?

Wrapping the handler in functions.https.onRequest() did it for me:
exports.screenshot = functions.https.onRequest(async (req, res) => {
  ...
});
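For completeness, a minimal sketch of the whole handler wrapped in onRequest, assuming the same puppeteer dependency as in the question (the hard-coded fallback URL is kept from the original):

const functions = require("firebase-functions");
const puppeteer = require("puppeteer");

exports.screenshot = functions.https.onRequest(async (req, res) => {
  // Take the target URL from the query string, falling back to the hard-coded test URL
  const url = req.query.url || "https://google.de";
  if (!url) {
    return res.send("Please provide URL as GET parameter, for example: ?url=https://example.com");
  }
  const browser = await puppeteer.launch({ args: ["--no-sandbox"] });
  const page = await browser.newPage();
  await page.goto(url);
  const imageBuffer = await page.screenshot();
  await browser.close();
  res.set("Content-Type", "image/png");
  res.send(imageBuffer);
});

Puppeteer may also need more memory than the default Cloud Functions allocation; if screenshots fail at runtime, raising the function's memory is worth trying.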

Related

Firebase storage-resize-images not triggered by bucket upload

I'm using a Firebase function to upload images to Storage.
I installed the extension firebase/storage-resize-images#0.1.29
When I upload an image directly within the dashboard, the resize happens.
However, images uploaded with my functions do not trigger the resize.
What am I missing here?
async function migrateImageFromURL (folder, url, name, callback) {
  const {filePath, fileName} = await downloadRemoteUrlImage(url, name.split('.')[0])
  const bucket = admin.storage().bucket();
  const destination = `dev/${folder}/${fileName}`;
  try {
    await bucket.upload(filePath, {
      destination: destination,
      gzip: true,
      metadata: {
        fileName,
        contentType: `image/${fileName.split('.')[1]}`,
        cacheControl: 'public, max-age=31536000',
      },
    });
    callback(destination)
  }
  catch (e) {
    throw new Error("uploadLocalFileToStorage failed: " + e);
  }
  return ''
}
Create a Cloud Function with Busboy
Create a sample Firebase function
Copy the code for package.json
Run npm install to install the node modules
Copy the code for index.js
To generate a private key file for your service account:
In the Firebase console, open Settings > Service Accounts.
Click Generate New Private Key, then confirm by clicking Generate Key.
Choose Node.js
Securely store the JSON file containing the key.
Copy the firebase-admin-sdk.json file into the function directory
Deploy the function to Firebase:
firebase deploy --only functions
Get the HTTP endpoint from the Firebase console
index.js
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn;
//To enable Cross AXIS
//Change to False to avoid DDOS
const cors = require("cors")({ origin: true });
//Parse Files
const Busboy = require("busboy");
//Files System
const fs = require("fs");
var gcconfig = {
// Get the project ID from firebaserc
projectId: "<project_id>",
// Write the name of the file in the root director which contains the private key of firebase-admin-sdk
keyFilename: "firebase-admin-sdk.json"
};
// const gcs = require("#google-cloud/storage")(gcconfig);
const {Storage} = require('#google-cloud/storage');
const gcs = new Storage(gcconfig);
exports.uploadFile = functions.https.onRequest((req, res) => {
//Allowing CROSS SITE
cors(req, res, () => {
if (req.method !== "POST") {
return res.status(500).json({
message: "Not allowed"
});
}
console.log("Starting BusBOY");
const busboy = Busboy({ headers: req.headers});
let uploadData = null;
//File parsing
busboy.on("file", (fieldname, file, filename, encoding, mimetype) => {
// Firebase cloudfunction will have some tmpdirectory tmpdir
// It will be cleaned up after execution
console.log("File function reached ");
console.log("Temp folder is "+os.tmpdir());
console.log("File name is "+filename.filename);
const filepath = path.join(os.tmpdir(),filename.filename);
console.log("Location of file is "+filepath);
uploadData = { file: filepath, type: mimetype };
console.log("Writing to temp file storage");
//Writing file to storage
file.pipe(fs.createWriteStream(filepath));
//Extra Details such as limit error
file.on('limit', () => {
console.log("Reached size limit");
debugLog(options, `Size limit reached for ${field}->${filename.filename}, bytes:${getFilesizeInBytes(filename)}`);
});
file.on('end', () => {
const size = getFilesizeInBytes(filename.filename);
console.log("File size is "+size+" bytes");
});
file.on('error', (err) => {
console.log("File format error");
});
});
//For Form data Listener
// busboy.on("field",()=>{
// });
// Finishes the whole process, only upload after that
busboy.on("finish", () => {
// Firebase storage, Inside the console itself
// Copy the folder location
// gs://<Project_id>.appspot.com
// Remove the gs String
console.log("Finished BusBoy");
var your_project_id="<your_project_id>.appspot.com";
const bucket = gcs.bucket(your_project_id);
console.log("Uploading Image to firebase");
bucket
.upload(uploadData.file, {
uploadType: "media",
metadata: {
metadata: {
contentType: uploadData.type
}
}
})
.then(() => {
// Success
console.log("Uploaded Successfully");
res.status(200).json({
message: "It worked!"
});
})
.catch(err => {
// Error
console.log("Error while uploading");
res.status(500).json({
error: err
});
});
});
//End the parsing
console.log("End Parsing");
busboy.end(req.rawBody);
});
});
//Finding the file size from the filename
function getFilesizeInBytes(filename) {
var stats = fs.statSync(filename);
var fileSizeInBytes = stats.size;
return fileSizeInBytes;
}
package.json
{
  "name": "functions",
  "description": "Cloud Functions for Firebase",
  "scripts": {
    "serve": "firebase emulators:start --only functions",
    "shell": "firebase functions:shell",
    "start": "npm run shell",
    "deploy": "firebase deploy --only functions:uploadFile",
    "logs": "firebase functions:log"
  },
  "engines": {
    "node": "14"
  },
  "main": "index.js",
  "dependencies": {
    "firebase-admin": "^9.8.0",
    "firebase-functions": "^3.14.1",
    "@google-cloud/storage": "^6.0.1",
    "busboy": "^1.6.0",
    "child-process-promise": "^2.2.1",
    "cors": "^2.8.5"
  },
  "devDependencies": {
    "firebase-functions-test": "^0.2.0"
  },
  "private": true
}
For more details, check out https://github.com/katmakhan/firebase-course/tree/master/Firebase%20Cloud%20Function/Image%20Uploader
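Once deployed, the endpoint can be exercised with a simple multipart POST. A minimal client-side sketch in Node (assuming Node 18+ for the global fetch, FormData and Blob; the URL below is a placeholder for the endpoint shown in the Firebase console):

// upload-test.js - quick check of the uploadFile function
const fs = require("fs");
const path = require("path");

async function sendFile(endpoint, filePath) {
  const form = new FormData();
  const data = fs.readFileSync(filePath);
  // The field name is arbitrary; the Busboy handler accepts any file field
  form.append("file", new Blob([data], { type: "image/png" }), path.basename(filePath));
  const res = await fetch(endpoint, { method: "POST", body: form });
  console.log(res.status, await res.json());
}

// Placeholder URL - replace with your deployed function's endpoint
sendFile("https://us-central1-<project_id>.cloudfunctions.net/uploadFile", "./photo.png");

Any HTTP client that can send multipart/form-data to the endpoint will work the same way.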

How to upload files to firebase storage using rest api in unity

I want to upload files to Firebase Storage using the REST API from Unity. The flow is like clicking a browse button, browsing local storage, selecting files, and then uploading.
How do I do this?
The Busboy-based Cloud Function from the previous answer works here as well: deploy the uploadFile function and send a multipart/form-data POST to its HTTP endpoint from Unity (for example with UnityWebRequest), just like the Node test sketch above.

Unimplemented API error when running admin.storage().bucket(..).file(..).move() in an emulated Firebase function

"firebase": "9.0.1" - local
"firebase-tools": 9.17.0 - global
"node": v14.17.4
I'm calling .move() using the Firebase admin SDK in an emulated function. Firebase returns the following error:
ApiError: file#copy failed with an error - Not Implemented
at new ApiError (.../functions/node_modules/#google-cloud/common/build/src/util.js:73:15)
statusCode: 501
request: {
agent: [Agent],
headers: [Object],
href: 'http://localhost:9199/b/public-8s9ch/o/91ff38b1-b521-4a23-8844-b12cffa0ee98%2Fscreenshot.png/rewriteTo/b/private-8s9ch/o/91ff38b1-b521-4a23-8844-b12cffa0ee98%2Fcd665e29-edc8-4832-9bc7-110c21f1e560?'
},
body: 'Not Implemented',
The exact same code, with the same tests and installed packages, works perfectly when deployed to Firebase instead of run in the emulator.
The same issue occurs with any call to the Storage API, regardless of whether the Admin SDK or the client SDK is used, e.g. .copy, .move, .delete, etc.
Here's some code to help with reproducing the error:
export const firebaseFunction = functions
  .runWith({ memory: "128MB" })
  .region("us-central1")
  .firestore.document("documents/{docId}")
  .onCreate(async (snap, context) => {
    try {
      const { srcId } = snap.data();
      const [files] = await admin
        .storage()
        .bucket('public')
        .getFiles({
          prefix: `${srcId}/`,
          delimiter: "/",
          autoPaginate: false,
        });
      const promises = files.map(async (file) => {
        const dst = admin
          .storage()
          .bucket('private')
          .file(`${srcId}/${uuidv4()}`);
        await file.move(dst);
      });
      await Promise.all(promises);
    } catch (error) {
      return console.log(error);
    }
  });

Error message in `Firebase` function log:

I am trying to read data from a Cloud Firestore database in Firebase from Dialogflow, but I am not able to connect to the database and I get the following error in the log:
Warning, estimating Firebase Config based on GCLOUD_PROJECT.
Initializing firebase-admin may fail
The code is in the welcome intent fulfillment - Inline Editor (powered by Cloud Functions for Firebase).
Collection name in Cloud Firestore - users
Documents - 10 and 11 (2 in total)
Fields - book_id and text
Can someone please help me resolve this, or has anyone faced a similar issue?
I have tried the different solutions provided for this error by others, but none of them has solved mine.
Tried changing the Node version to 8 in package.json
Tried initializeApp as below:
const admin = require('firebase-admin');
const app = dialogflow({debug: true});
admin.initializeApp();
It's not going inside app.intent('welcome') itself.
index.js
// See https://github.com/dialogflow/dialogflow-fulfillment-nodejs
// for Dialogflow fulfillment library docs, samples, and to report issues
'use strict';

const axios = require('axios');
const {dialogflow} = require('actions-on-google');
const functions = require('firebase-functions');
const {WebhookClient} = require('dialogflow-fulfillment');
const {Card, Suggestion} = require('dialogflow-fulfillment');
const admin = require('firebase-admin');

const app = dialogflow({debug: true});
admin.initializeApp();
const db = admin.firestore();
db.settings({timestampsInSnapshots: true});
const collectionRef = db.collection('users');

process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });
  console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
  console.log('Dialogflow Request body: ' + JSON.stringify(request.body));

  /*function welcome(agent) {
    agent.add(`Welcome to my agent!`);
  }*/

  function fallback(agent) {
    agent.add(`I didn't understand`);
    agent.add(`I'm sorry, can you try again?`);
  }

  app.intent('welcome', (conv) => {
    console.log("welcome agent invoked");
    const book = (agent.parameters.book).toString();
    const termRef = collectionRef.doc('10');
    return termRef.get()
      .then((snapshot) => {
        const {text, book_id} = snapshot.data();
        conv.ask(`Here you go, ${text}, ${book_id}. ` +
          `What else do you want to know?`);
        console.log('text:', +text);
        console.log('book_id:', +book_id);
      }).catch((e) => {
        console.log('error:', e);
        conv.close('Sorry, try again');
      });
  });
});
package.json
{
  "name": "dialogflowFirebaseFulfillment",
  "description": "This is the default fulfillment for a Dialogflow agents using Cloud Functions for Firebase",
  "version": "0.0.1",
  "private": true,
  "license": "Apache Version 2.0",
  "author": "Google Inc.",
  "engines": {
    "node": "8"
  },
  "scripts": {
    "start": "firebase serve --only functions:dialogflowFirebaseFulfillment",
    "deploy": "firebase deploy --only functions:dialogflowFirebaseFulfillment"
  },
  "dependencies": {
    "actions-on-google": "^2.2.0",
    "firebase-admin": "^5.13.1",
    "firebase-functions": "^2.0.2",
    "dialogflow": "^0.6.0",
    "dialogflow-fulfillment": "^0.5.0"
  }
}
I am trying to get the book_id and text values from the users collection in Cloud Firestore.

Firestore automatic backup using cloud functions?

The Firebase docs recommend deploying an App Engine application to handle automatic Firestore exports:
https://firebase.google.com/docs/firestore/solutions/schedule-export
app.js
const axios = require('axios');
const dateformat = require('dateformat');
const express = require('express');
const { google } = require('googleapis');

const app = express();

// Trigger a backup
app.get('/cloud-firestore-export', async (req, res) => {
  const auth = await google.auth.getClient({
    scopes: ['https://www.googleapis.com/auth/datastore']
  });

  const accessTokenResponse = await auth.getAccessToken();
  const accessToken = accessTokenResponse.token;

  const headers = {
    'Content-Type': 'application/json',
    Authorization: 'Bearer ' + accessToken
  };

  const outputUriPrefix = req.param('outputUriPrefix');
  if (!(outputUriPrefix && outputUriPrefix.indexOf('gs://') == 0)) {
    return res.status(500).send(`Malformed outputUriPrefix: ${outputUriPrefix}`);
  }

  // Construct a backup path folder based on the timestamp
  const timestamp = dateformat(Date.now(), 'yyyy-mm-dd-HH-MM-ss');
  let path = outputUriPrefix;
  if (path.endsWith('/')) {
    path += timestamp;
  } else {
    path += '/' + timestamp;
  }

  const body = {
    outputUriPrefix: path
  };

  // If specified, mark specific collections for backup
  const collectionParam = req.param('collections');
  if (collectionParam) {
    body.collectionIds = collectionParam.split(',');
  }

  const projectId = process.env.GOOGLE_CLOUD_PROJECT;
  const url = `https://firestore.googleapis.com/v1beta1/projects/${projectId}/databases/(default):exportDocuments`;

  try {
    const response = await axios.post(url, body, { headers: headers });
    res
      .status(200)
      .send(response.data)
      .end();
  } catch (e) {
    if (e.response) {
      console.warn(e.response.data);
    }
    res
      .status(500)
      .send('Could not start backup: ' + e)
      .end();
  }
});

// Index page, just to make it easy to see if the app is working.
app.get('/', (req, res) => {
  res
    .status(200)
    .send('[scheduled-backups]: Hello, world!')
    .end();
});

// Start the server
const PORT = process.env.PORT || 6060;
app.listen(PORT, () => {
  console.log(`App listening on port ${PORT}`);
  console.log('Press Ctrl+C to quit.');
});
package.json
{
  "name": "solution-scheduled-backups",
  "version": "1.0.0",
  "description": "Scheduled Cloud Firestore backups via AppEngine cron",
  "main": "app.js",
  "engines": {
    "node": "8.x.x"
  },
  "scripts": {
    "deploy": "gcloud app deploy --quiet app.yaml cron.yaml",
    "start": "node app.js"
  },
  "author": "Google, Inc.",
  "license": "Apache-2.0",
  "dependencies": {
    "axios": "^0.18.0",
    "dateformat": "^3.0.3",
    "express": "^4.16.4",
    "googleapis": "^38.0.0"
  },
  "devDependencies": {
    "prettier": "^1.16.4"
  }
}
cron.yaml
cron:
- description: "Daily Cloud Firestore Export"
  url: /cloud-firestore-export?outputUriPrefix=gs://BUCKET_NAME[/PATH]&collections=test1,test2
  target: cloud-firestore-admin
  schedule: every 24 hours
QUESTION
But I was wondering: is it possible to achieve the same thing with an HTTP Cloud Function and Cloud Scheduler?
Is there anything in this App Engine code that I couldn't reproduce or access with an HTTP Cloud Function? I mean, is the App Engine project really necessary here?
NOTE: This is not an opinion-based question, nor is it too broad. I want to know whether I need App Engine to achieve this behavior or not, and why.
I wouldn't need to set it up as an Express server, obviously - just a normal HTTP Cloud Function that, when called, does the exporting, with a cron job like the one above registered in Cloud Scheduler.
You will not need App Engine at all if you use Cloud Scheduler to trigger a function that does the backup.
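As a sketch of what that can look like, following the pattern from the Firebase scheduled-export solution (the bucket name is a placeholder and the function name is arbitrary), a scheduled function lets Cloud Scheduler be provisioned for you:

const functions = require('firebase-functions');
const firestore = require('@google-cloud/firestore');
const client = new firestore.v1.FirestoreAdminClient();

// Placeholder: replace with your backup bucket
const bucket = 'gs://BUCKET_NAME';

exports.scheduledFirestoreExport = functions.pubsub
  .schedule('every 24 hours')
  .onRun(async () => {
    const projectId = process.env.GCP_PROJECT || process.env.GCLOUD_PROJECT;
    const databaseName = client.databasePath(projectId, '(default)');
    const [operation] = await client.exportDocuments({
      name: databaseName,
      outputUriPrefix: bucket,
      collectionIds: [] // an empty list exports all collections
    });
    console.log(`Started export operation: ${operation.name}`);
  });

Alternatively, a plain HTTP function can make the same export call and be hit by a Cloud Scheduler HTTP job; the scheduled-function variant just keeps everything inside the Firebase project.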
