I'm using a Firebase function to upload images to Storage.
I installed the extension firebase/storage-resize-images#0.1.29.
When I upload an image directly from the Firebase console, the resize happens.
However, images uploaded by my function do not trigger the resize.
What am I missing here?
async function migrateImageFromURL (folder, url, name, callback) {
  const {filePath, fileName} = await downloadRemoteUrlImage(url, name.split('.')[0])
  const bucket = admin.storage().bucket();
  const destination = `dev/${folder}/${fileName}`;
  try {
    await bucket.upload(filePath, {
      destination: destination,
      gzip: true,
      metadata: {
        fileName,
        contentType: `image/${fileName.split('.')[1]}`,
        cacheControl: 'public, max-age=31536000',
      },
    });
    callback(destination)
  }
  catch (e) {
    throw new Error("uploadLocalFileToStorage failed: " + e);
  }
  return ''
};
Create a Cloud Function with Busboy:
Create a sample Firebase function.
Copy the code for package.json below.
Run the command to install the node modules:
npm install
Copy the code for index.js below.
To generate a private key file for your service account:
In the Firebase console, open Settings > Service Accounts.
Click Generate New Private Key, then confirm by clicking Generate Key.
Choose Node.js.
Securely store the JSON file containing the key.
Copy the downloaded key into the functions directory as firebase-admin-sdk.json.
Deploy the function to Firebase:
firebase deploy --only functions
Get the HTTP endpoint from the Firebase console (a sample client call is sketched after the code listings below).
index.js
const functions = require("firebase-functions");
const os = require("os");
const path = require("path");
const spawn = require("child-process-promise").spawn; // not used in this sample
// To allow cross-origin requests from the browser
const cors = require("cors")({ origin: true });
// Parse multipart/form-data uploads
const Busboy = require("busboy");
// File system access for the temp file
const fs = require("fs");

const gcconfig = {
  // Get the project ID from .firebaserc
  projectId: "<project_id>",
  // Name of the file in the functions directory that contains the firebase-admin-sdk private key
  keyFilename: "firebase-admin-sdk.json"
};

// Older library versions: const gcs = require("@google-cloud/storage")(gcconfig);
const { Storage } = require("@google-cloud/storage");
const gcs = new Storage(gcconfig);

exports.uploadFile = functions.https.onRequest((req, res) => {
  // Allow cross-origin requests
  cors(req, res, () => {
    if (req.method !== "POST") {
      return res.status(405).json({
        message: "Not allowed"
      });
    }
    console.log("Starting Busboy");
    const busboy = Busboy({ headers: req.headers });
    let uploadData = null;

    // File parsing (Busboy 1.x passes the file details in a single info object)
    busboy.on("file", (fieldname, file, info) => {
      const { filename, mimeType } = info;
      // Cloud Functions provides a writable temp directory (os.tmpdir()),
      // which is cleaned up after execution
      console.log("File handler reached");
      console.log("Temp folder is " + os.tmpdir());
      console.log("File name is " + filename);
      const filepath = path.join(os.tmpdir(), filename);
      console.log("Location of file is " + filepath);
      uploadData = { file: filepath, type: mimeType };
      console.log("Writing to temp file storage");
      // Write the incoming file to the temp directory
      file.pipe(fs.createWriteStream(filepath));

      // Extra details such as size-limit errors
      file.on("limit", () => {
        console.log("Reached size limit for " + filename);
      });
      file.on("end", () => {
        const size = getFilesizeInBytes(filepath);
        console.log("File size is " + size + " bytes");
      });
      file.on("error", (err) => {
        console.log("File stream error: " + err);
      });
    });

    // Form-field listener, if you also send plain fields
    // busboy.on("field", (fieldname, value) => {
    // });

    // "finish" fires once the whole request has been parsed; only upload after that
    busboy.on("finish", () => {
      // In the Firebase console, copy the Storage bucket location,
      // e.g. gs://<project_id>.appspot.com, and drop the gs:// prefix
      console.log("Finished Busboy");
      const bucketName = "<your_project_id>.appspot.com";
      const bucket = gcs.bucket(bucketName);
      console.log("Uploading image to Firebase Storage");
      bucket
        .upload(uploadData.file, {
          metadata: {
            contentType: uploadData.type
          }
        })
        .then(() => {
          // Success
          console.log("Uploaded successfully");
          res.status(200).json({
            message: "It worked!"
          });
        })
        .catch((err) => {
          // Error
          console.log("Error while uploading");
          res.status(500).json({
            error: err
          });
        });
    });

    // End the parsing: feed Busboy the raw body provided by Cloud Functions
    console.log("End parsing");
    busboy.end(req.rawBody);
  });
});

// Find the file size in bytes from the temp file path
function getFilesizeInBytes(filepath) {
  const stats = fs.statSync(filepath);
  return stats.size;
}
package.json
{
  "name": "functions",
  "description": "Cloud Functions for Firebase",
  "scripts": {
    "serve": "firebase emulators:start --only functions",
    "shell": "firebase functions:shell",
    "start": "npm run shell",
    "deploy": "firebase deploy --only functions:uploadFile",
    "logs": "firebase functions:log"
  },
  "engines": {
    "node": "14"
  },
  "main": "index.js",
  "dependencies": {
    "firebase-admin": "^9.8.0",
    "firebase-functions": "^3.14.1",
    "@google-cloud/storage": "^6.0.1",
    "busboy": "^1.6.0",
    "child-process-promise": "^2.2.1",
    "cors": "^2.8.5"
  },
  "devDependencies": {
    "firebase-functions-test": "^0.2.0"
  },
  "private": true
}
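Once deployed, you can exercise the endpoint from any HTTP client. The following is only a rough sketch of a Node script (it is not part of the original answer) that POSTs a local image as multipart/form-data; it assumes Node 18+ (global fetch, FormData and Blob), and the region, project ID and file name are placeholders you replace with your own.

// Hypothetical client sketch: upload a local image to the deployed uploadFile function.
// <project-id> and the region are placeholders for your own deployment.
const fs = require("fs");
const path = require("path");

async function uploadImage(localPath) {
  const endpoint = "https://us-central1-<project-id>.cloudfunctions.net/uploadFile";

  const form = new FormData();
  // The field name is arbitrary; the function only reads the file stream and its info.
  // Set the Blob type to match your file (image/png, image/jpeg, ...).
  form.append(
    "image",
    new Blob([fs.readFileSync(localPath)], { type: "image/png" }),
    path.basename(localPath)
  );

  const res = await fetch(endpoint, { method: "POST", body: form });
  console.log(res.status, await res.json());
}

uploadImage("./photo.png").catch(console.error);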
For more details, check out https://github.com/katmakhan/firebase-course/tree/master/Firebase%20Cloud%20Function/Image%20Uploader
Related
I want to upload files to Firebase Storage from Unity using the REST API: the flow is like clicking a browse button, picking files from local storage, and then uploading them.
How do I do this?
You can use the same Busboy Cloud Function shown in the answer above: deploy the uploadFile function, then POST the file to its HTTP endpoint.
I'm trying to use Cloud Functions for Firebase to have a Dialogflow intent result in an MQTT message being published. I've managed to get the data I need; what I still can't accomplish is:
Establishing a connection to the MQTT broker;
Publishing to said broker.
As the second requires the first, it is not my concern yet.
The way I see it, either my code is wrong or MQTT traffic counts as what Firebase calls the "external network". I'm here mainly to make sure I don't upgrade to a paid plan for nothing.
index.js:
'use strict';

const functions = require('firebase-functions');
const { WebhookClient, Card, Suggestion } = require('dialogflow-fulfillment');
var mqtt = require('mqtt');

process.env.DEBUG = 'dialogflow:debug';

const HOST = 'broker.mqttdashboard.com';
const PORT = 1883;
const TOPIC = 'topic/voice_recog';

exports.dialogflowFirebaseFulfillment = functions.region('europe-west1').https.onRequest((request, response) => {
  const agent = new WebhookClient({ request, response });
  try {
    function publishParameter(agent) {
      let message = agent.parameters.param_test;
      return publishToMqtt(message).then((output) => {
        agent.add(output);
      }).catch(error => {
        agent.add('error from publishToMqtt');
      });
    }
    let intentMap = new Map();
    intentMap.set('Repeat parameter', publishParameter);
    agent.handleRequest(intentMap);
  }
  catch (err) {
    console.error(err);
    agent.add(err.message);
    agent.send_();
  }
});

function publishToMqtt(message) {
  console.log("------------------------------");
  console.log("Topic: \"" + TOPIC + "\"");
  console.log("Message: \"" + message + "\"");
  console.log("------------------------------");
  return new Promise((resolve, reject) => {
    var options = {
      port: PORT,
      host: HOST,
      clientId: 'mqttjs_' + Math.random().toString(16).substr(2, 8),
      keepalive: 60,
      reconnectPeriod: 1000,
      protocolId: 'MQIsdp',
      protocolVersion: 3,
      clean: true,
      encoding: 'utf8'
    };
    // note: `options` above is built but never passed to mqtt.connect below
    var client = mqtt.connect("mqtt://" + HOST, { port: PORT });
    // is not executed
    client.on('connect', function () {
      console.log('client connected');
    });
    // is not executed
    client.publish(TOPIC, message, {}, function (err) {
      console.log("Tried publishing \"" + message + "\" to \"" + TOPIC + "\".");
      if (err) {
        console.log("But the future refused to change:" + err);
        reject();
      }
      else {
        resolve(message);
        client.end();
        clearTimeout(noResp);
      }
    });
    let noResp = setTimeout(() => {
      console.log("No connection"); // always gives this result
      reject();
      client.end();
    }, 5000);
  });
}
package.json:
{
  "name": "assistant-to-mqtt",
  "description": "publishes intent parameter to MQTT broker",
  "engines": {
    "node": "8"
  },
  "version": "0.0.1",
  "private": true,
  "scripts": {
    "start": "firebase serve --only functions:publish_mqtt",
    "deploy": "firebase deploy --only functions:publish_mqtt"
  },
  "dependencies": {
    "actions-on-google": "^2.1.3",
    "dialogflow-fulfillment": "^0.4.1",
    "firebase-admin": "^5.12.1",
    "firebase-functions": "^2.2.1",
    "mqtt": "^2.13.0"
  }
}
All help is appreciated. Thank you.
EDIT: The log.
You are actually using an MQTT broker, which Firebase treats as external network access. Try enabling billing as described here.
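Independent of the billing question, here is a minimal sketch (my own illustration, not code from the answer) of sequencing connect-then-publish explicitly with mqtt.js; it reuses the HOST, PORT and TOPIC constants from the question, and once outbound networking is allowed it makes failures easier to attribute.

// Sketch only: connect first, publish inside the 'connect' handler.
const mqtt = require('mqtt');

function publishToMqtt(message) {
  return new Promise((resolve, reject) => {
    // HOST, PORT and TOPIC come from the question's index.js.
    const client = mqtt.connect(`mqtt://${HOST}`, { port: PORT, keepalive: 60 });

    client.on('connect', () => {
      client.publish(TOPIC, message, {}, (err) => {
        client.end();
        if (err) return reject(err);
        resolve(message);
      });
    });

    // Surface connection failures instead of waiting on a timeout.
    client.on('error', (err) => {
      client.end();
      reject(err);
    });
  });
}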
I receive an error message while deploying.
Firebase Functions shows the warning: estimating Firebase Config based on GCLOUD_PROJECT. Initializing firebase-admin may fail.
I have already defined the apiKey etc., but the database is not linked to Dialogflow.
Please advise.
const functions = require('firebase-functions');

var config = {
  apiKey: "xxxxx",
  authDomain: "xxxxx",
  databaseURL: "xxxxx",
  projectId: "xxxxx",
  storageBucket: "xxxxx",
  messagingSenderId: "xxxxx"
}; // your config object could differ

const admin = require('firebase-admin');
admin.initializeApp(config);

process.env.DEBUG = 'dialogflow:debug'; // enables lib debugging statements

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
  console.log('Dialogflow Request headers: ' + JSON.stringify(request.headers));
  console.log('Dialogflow Request body: ' + JSON.stringify(request.body));
  const db = admin.database();
  const action = request.body.queryResult.action;
  if (action === 'product_description') {
    const product = request.body.queryResult.parameters.Products.trim();
    const ref = db.ref(`products/${product.toLowerCase()}/description`);
    ref.once('value').then((snapshot) => {
      const result = snapshot.val();
      if (result === null) {
        response.json({
          fulfillmentText: `Product does not exist in inventory`
        });
        return;
      }
      response.json({
        fulfillmentText: `Here is the description of ${product}: ${result}`,
        source: action
      });
    }).catch((err) => {
      response.json({
        fulfillmentText: `I don't know what that is`
      });
    });
  } else if (action === 'product_quantity') {
    const product = request.body.queryResult.parameters.Products.trim();
    const ref = db.ref(`products/${product.toLowerCase()}`);
    ref.once('value').then((snapshot) => {
      const result = snapshot.val();
      if (result === null) {
        response.json({
          fulfillmentText: `Product does not exist in inventory`
        });
        return;
      }
      if (!result.stock) {
        response.json({
          fulfillmentText: `Currently ${product} is out of stock`,
          source: action
        });
      } else {
        response.json({
          fulfillmentText: `We have ${result.stock} ${product} in stock`,
          source: action
        });
      }
    }).catch((err) => {
      response.json({
        fulfillmentText: `I don't know what that is`
      });
    });
  } else {
    response.json({
      fulfillmentText: `I don't know what that is`
    });
  }
});
This is my package.json:
{
  "name": "dialogflowFirebaseFulfillment",
  "description": "This is the default fulfillment for a Dialogflow agents using Cloud Functions for Firebase",
  "version": "0.0.1",
  "private": true,
  "license": "Apache Version 2.0",
  "author": "Google Inc.",
  "engines": {
    "node": "8"
  },
  "scripts": {
    "start": "firebase serve --only functions:dialogflowFirebaseFulfillment",
    "deploy": "firebase deploy --only functions:dialogflowFirebaseFulfillment"
  },
  "dependencies": {
    "actions-on-google": "^2.12.0",
    "firebase-admin": "^8.8.0",
    "firebase-functions": "^3.3.0",
    "dialogflow": "^0.12.1",
    "dialogflow-fulfillment": "^0.6.1"
  }
}
The Firebase docs recommend deploying an App Engine application to handle automatic Firestore exports.
https://firebase.google.com/docs/firestore/solutions/schedule-export
app.js
const axios = require('axios');
const dateformat = require('dateformat');
const express = require('express');
const { google } = require('googleapis');

const app = express();

// Trigger a backup
app.get('/cloud-firestore-export', async (req, res) => {
  const auth = await google.auth.getClient({
    scopes: ['https://www.googleapis.com/auth/datastore']
  });

  const accessTokenResponse = await auth.getAccessToken();
  const accessToken = accessTokenResponse.token;

  const headers = {
    'Content-Type': 'application/json',
    Authorization: 'Bearer ' + accessToken
  };

  const outputUriPrefix = req.param('outputUriPrefix');
  if (!(outputUriPrefix && outputUriPrefix.indexOf('gs://') == 0)) {
    return res.status(500).send(`Malformed outputUriPrefix: ${outputUriPrefix}`);
  }

  // Construct a backup path folder based on the timestamp
  const timestamp = dateformat(Date.now(), 'yyyy-mm-dd-HH-MM-ss');
  let path = outputUriPrefix;
  if (path.endsWith('/')) {
    path += timestamp;
  } else {
    path += '/' + timestamp;
  }

  const body = {
    outputUriPrefix: path
  };

  // If specified, mark specific collections for backup
  const collectionParam = req.param('collections');
  if (collectionParam) {
    body.collectionIds = collectionParam.split(',');
  }

  const projectId = process.env.GOOGLE_CLOUD_PROJECT;
  const url = `https://firestore.googleapis.com/v1beta1/projects/${projectId}/databases/(default):exportDocuments`;

  try {
    const response = await axios.post(url, body, { headers: headers });
    res
      .status(200)
      .send(response.data)
      .end();
  } catch (e) {
    if (e.response) {
      console.warn(e.response.data);
    }
    res
      .status(500)
      .send('Could not start backup: ' + e)
      .end();
  }
});

// Index page, just to make it easy to see if the app is working.
app.get('/', (req, res) => {
  res
    .status(200)
    .send('[scheduled-backups]: Hello, world!')
    .end();
});

// Start the server
const PORT = process.env.PORT || 6060;
app.listen(PORT, () => {
  console.log(`App listening on port ${PORT}`);
  console.log('Press Ctrl+C to quit.');
});
package.json
{
  "name": "solution-scheduled-backups",
  "version": "1.0.0",
  "description": "Scheduled Cloud Firestore backups via AppEngine cron",
  "main": "app.js",
  "engines": {
    "node": "8.x.x"
  },
  "scripts": {
    "deploy": "gcloud app deploy --quiet app.yaml cron.yaml",
    "start": "node app.js"
  },
  "author": "Google, Inc.",
  "license": "Apache-2.0",
  "dependencies": {
    "axios": "^0.18.0",
    "dateformat": "^3.0.3",
    "express": "^4.16.4",
    "googleapis": "^38.0.0"
  },
  "devDependencies": {
    "prettier": "^1.16.4"
  }
}
cron.yaml
cron:
- description: "Daily Cloud Firestore Export"
  url: /cloud-firestore-export?outputUriPrefix=gs://BUCKET_NAME[/PATH]&collections=test1,test2
  target: cloud-firestore-admin
  schedule: every 24 hours
QUESTION
But I was wondering: is it possible to achieve the same thing with an HTTP Cloud Function and Cloud Scheduler?
Is there anything in this App Engine code that I couldn't reproduce or access with an HTTP Cloud Function? In other words, is the App Engine project really necessary here?
NOTE: This is not an opinion-based question, nor is it too broad. I want to know whether I need App Engine to achieve this behavior or not, and why.
I wouldn't need to set it up as an Express server, obviously, just a plain HTTP Cloud Function that does the exporting when called, plus a Cloud Scheduler job along the lines of the cron entry above.
You will not need App Engine at all if you use Cloud Scheduler to trigger a function that does the backup.
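As a rough sketch of that approach (my own illustration, not code from the answer), a scheduled function along the lines of the Firebase scheduled-export docs could look like the following. BUCKET_NAME is a placeholder, the export call goes through the @google-cloud/firestore admin client, and the function's service account needs the Datastore Import/Export permission.

const functions = require('firebase-functions');
const firestore = require('@google-cloud/firestore');

const client = new firestore.v1.FirestoreAdminClient();
// Placeholder: replace with your own backup bucket.
const bucket = 'gs://BUCKET_NAME';

exports.scheduledFirestoreExport = functions.pubsub
  .schedule('every 24 hours')
  .onRun((context) => {
    const projectId = process.env.GCP_PROJECT || process.env.GCLOUD_PROJECT;
    const databaseName = client.databasePath(projectId, '(default)');

    return client
      .exportDocuments({
        name: databaseName,
        outputUriPrefix: bucket,
        // An empty array exports all collections; list IDs to restrict it.
        collectionIds: []
      })
      .then(([response]) => {
        console.log(`Export operation: ${response.name}`);
      })
      .catch((err) => {
        console.error(err);
        throw new Error('Export operation failed');
      });
  });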
My functions package.json:
...
"dependencies": {
  "firebase-admin": "~5.13.0",
  "firebase-functions": "^2.0.0",
  "puppeteer": "^1.6.2"
},
"engines": {
  "node": "8"
}
My old non-async functions get deployed, but not my new async one:
exports.screenshot = async (req, res) => {
  const url = "https://google.de"; // req.query.url;
  if (!url) {
    return res.send('Please provide URL as GET parameter, for example: ?url=https://example.com');
  }
  const browser = await puppeteer.launch({
    args: ["--no-sandbox"]
  });
  const page = await browser.newPage();
  await page.goto(url);
  const imageBuffer = await page.screenshot();
  await browser.close();
  res.set("Content-Type", "image/png");
  res.send(imageBuffer);
};
When running: firebase deploy --only functions:screenshot
I get the error:
The following functions are found in your project but do not exist in your local source code:
screenshot(us-central1)
What can I do?
exports.screenshot = functions.https.onRequest(async (req, res) => {
  ...
});
did it for me.
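For completeness, here is a sketch of what the wrapped handler ends up looking like, combining the answer's onRequest wrapper with the body from the question; the firebase-functions and puppeteer requires are assumed to sit at the top of index.js.

const functions = require("firebase-functions");
const puppeteer = require("puppeteer");

// Same body as the question's handler, now wrapped so the Firebase CLI
// recognizes it as an HTTPS function and deploys it.
exports.screenshot = functions.https.onRequest(async (req, res) => {
  const url = "https://google.de"; // req.query.url;
  if (!url) {
    return res.send("Please provide URL as GET parameter, for example: ?url=https://example.com");
  }
  const browser = await puppeteer.launch({ args: ["--no-sandbox"] });
  const page = await browser.newPage();
  await page.goto(url);
  const imageBuffer = await page.screenshot();
  await browser.close();
  res.set("Content-Type", "image/png");
  res.send(imageBuffer);
});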