Hello, I am getting an error when trying to deploy a function to Firebase. It is bothering me because the same code deployed fine in the past, and now it fails with the error shown below.
Can someone have a look? I checked the documentation, thinking that something might have changed and the attribute names might no longer match, but the function looks 100% sound to me based on the documentation.
Kind regards and kudos to everyone. Much respect if someone manages to give me a hint. I will add the log files as well.
Code:
const functions = require("firebase-functions");
const axios = require("axios");
const admin = require("firebase-admin");
admin.initializeApp();
const database = admin.firestore();

const page = 1;
const fiat = "RON";
const tradeType = "BUY";
const asset = "USDT";
const payTypes = ["ING"];

let finalData = [];
let tempDataBeforeProccessing = [];

const baseObj = {
  page,
  rows: 20,
  publisherType: null,
  asset,
  tradeType,
  fiat,
  payTypes,
};
const stringData = JSON.stringify(baseObj);

const getTheData = async function() {
  tempDataBeforeProccessing = [];
  await axios.post("https://p2p.binance.com/bapi/c2c/v2/friendly/c2c/adv/search", baseObj, {
    hostname: "p2p.binance.com",
    port: 443,
    path: "/bapi/c2c/v2/friendly/c2c/adv/search",
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Content-Length": stringData.length,
    },
  }).then((res) => {
    tempDataBeforeProccessing = res.data.data;
  });
};
const processData = function() {
  finalData = [];
  let obj = [];
  for (let i = 0; i < tempDataBeforeProccessing.length; i++) {
    let payTypesz = "";
    for (let y = 0; y < tempDataBeforeProccessing[i]["adv"]["tradeMethods"].length; y++) {
      payTypesz += tempDataBeforeProccessing[i]["adv"]["tradeMethods"][y]["identifier"];
      if (y < tempDataBeforeProccessing[i]["adv"]["tradeMethods"].length - 1) {
        payTypesz += ", ";
      }
    }
    obj = {
      tradeType: tempDataBeforeProccessing[i]["adv"]["tradeType"],
      asset: tempDataBeforeProccessing[i]["adv"]["asset"],
      fiatUnit: tempDataBeforeProccessing[i]["adv"]["fiatUnit"],
      price: tempDataBeforeProccessing[i]["adv"]["price"],
      surplusAmount: tempDataBeforeProccessing[i]["adv"]["surplusAmount"],
      maxSingleTransAmount: tempDataBeforeProccessing[i]["adv"]["maxSingleTransAmount"],
      minSingleTransAmount: tempDataBeforeProccessing[i]["adv"]["minSingleTransAmount"],
      nickName: tempDataBeforeProccessing[i]["advertiser"]["nickName"],
      monthOrderCount: tempDataBeforeProccessing[i]["advertiser"]["monthOrderCount"],
      monthFinishRate: tempDataBeforeProccessing[i]["advertiser"]["monthFinishRate"],
      payTypes: payTypesz,
    };
    finalData.push(obj);
  }
  console.log(finalData);
};
const entireCall = async function() {
  await getTheData();
  processData();
};

exports.scheduledFunction = functions.pubsub
  .schedule("every 1 minutes")
  .onRun(async (context) => {
    await database.collection("SebiBinanceSale").doc("BCR Bank").delete();
    await entireCall();
    for (let i = 0; i < finalData.length; i++) {
      await database.collection("SebiBinanceSale").doc("BCR Bank")
        .collection("1").doc(i.toString())
        .set({
          "tradeType": finalData[i]["tradeType"],
          "asset": finalData[i]["asset"],
          "fiatUnit": finalData[i]["fiatUnit"],
          "price": finalData[i]["price"],
          "surplusAmount": finalData[i]["surplusAmount"],
          "maxSingleTransAmount": finalData[i]["maxSingleTransAmount"],
          "minSingleTransAmount": finalData[i]["minSingleTransAmount"],
          "nickName": finalData[i]["nickName"],
          "monthOrderCount": finalData[i]["monthOrderCount"],
          "monthFinishRate": finalData[i]["monthFinishRate"],
          "payTypes": finalData[i]["payTypes"],
        });
    }
    return console.log("Success upload of the data");
  });
Error:
Function failed on loading user code. This is likely due to a bug in the user code. Error message: Error: please examine your function logs to see the error cause: https://cloud.google.com/functions/docs/monitoring/logging#viewing_logs. Additional troubleshooting documentation can be found at https://cloud.google.com/functions/docs/troubleshooting#logging. Please visit https://cloud.google.com/functions/docs/troubleshooting for in-depth troubleshooting documentation.
Functions deploy had errors with the following functions:
scheduledFunction(us-central1)
i functions: cleaning up build files...
Error: There was an error deploying functions
ivanoiualexandrupaul@Ivanoius-MacBook-Pro functions %
Log file:
[debug] [2022-10-29T17:40:16.776Z] Error: Failed to update function scheduledFunction in region us-central1
at /usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:41:11
at processTicksAndRejections (internal/process/task_queues.js:95:5)
at async Fabricator.updateV1Function (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:305:32)
at async Fabricator.updateEndpoint (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:140:13)
at async handle (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:78:17)
[error]
[error] Error: There was an error deploying functions
When you use scheduled functions in Firebase, an App Engine instance is created, which Cloud Scheduler needs in order to work. You can read about it here. It uses the location that has been set as the default for resources. I think you are getting this error because there is a mismatch between the default GCP resource location you specified and the region of your scheduled Cloud Function. Check your Cloud Scheduler job details and see which region it has been deployed to. By default, functions run in the us-central1 region. Check this link to see how to change the region of a function.
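For example, here is a minimal sketch of pinning the scheduled function to an explicit region with the v1 API (the region name here is just a placeholder; use the one that matches your default resource location):
const functions = require("firebase-functions");

exports.scheduledFunction = functions
  .region("europe-west1") // placeholder: match your default GCP resource location
  .pubsub.schedule("every 1 minutes")
  .onRun(async (context) => {
    // ... same body as in the question
    return null;
  });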
You can also try reinstalling the CLI using the command
npm install -g firebase-tools
Also check whether any lock files have been generated; delete them, run firebase deploy --only functions again, and see if that works.
Related
My project uses @nativescript/firebase (https://github.com/EddyVerbruggen/nativescript-plugin-firebase), which ignores the methods of firebase.firestore.Timestamp and returns undefined for its properties.
Below is a minimal reproduction.
app.js
import Vue from "nativescript-vue";
import Home from "./components/Home";
var firebase = require("#nativescript/firebase").firebase;
firebase
.init({})
.then(
function () {
console.log("firebase.init done");
},
function (error) {
console.log("firebase.init error: " + error);
}
);
new Vue({
render: (h) => h("frame", [h(Home)]),
}).$start();
Home.vue
import { firebase } from "#nativescript/firebase";
export default {
computed: {
async message() {
const Ref = firebase.firestore
.collection("comments")
.doc("07bhQeWDf3u1j0B4vNwG");
const doc = await Ref.get();
const hoge = doc.data();
console.log("hoge.commented_at", hoge.commented_at); // CONSOLE LOG: hoge.commented_at Sat Oct 23 2021 22:44:48 GMT+0900 (JST)
console.log("hoge.commented_at.seconds", hoge.commented_at.seconds); // CONSOLE LOG: hoge.commented_at.seconds undefined
const hogeToDate = hoge.toDate();
console.log("hogeToDate", hogeToDate); // no console.log appear
return hogeToDate; // simulator shows "object Promise"
},
},
};
I also tried const hogeTimestampNow = firebase.firestore.Timestamp.now(); and again no console.log appeared...
Environment
vue.js
Node.js v14.17.6
nativescript v8.1.2
nativescript-vue v2.9.0
@nativescript/firebase v11.1.3
If you dive into the source of @nativescript/firebase, in particular looking at /src/firebase-common.ts, you can see that firebase is a custom implementation and not the object/namespace normally exported by the ordinary Firebase Web SDK.
It uses a custom implementation so that it can be transformed depending on the platform the code is running on as shown in /src/firebase.android.ts and /src/firebase.ios.ts.
Of particular importance, is that Firestore's Timestamp objects are internally converted to JavaScript Date objects when exposed to your code as each platform has its own version of a Timestamp object. Because the exposed JavaScript Date object doesn't have a seconds property, you get undefined when attempting to access hoge.commented_at.seconds.
The equivalent of Timestamp#seconds would be Math.floor(hoge.commented_at / 1000) (you could also be more explicit with Math.floor(hoge.commented_at.getTime() / 1000) if you don't like relying on JavaScript's type coercion).
function getSeconds(dt: Date) {
return Math.floor(dt.getTime() / 1000)
}
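For example, applied to the Date pulled from Firestore in the question (hoge is the document data from the reproduction above, so this is just an illustration):
const commentedAtSeconds = getSeconds(hoge.commented_at);
console.log(commentedAtSeconds); // 1634996688 for the example date above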
While you can import the Timestamp object from the Modular Web SDK (v9+), when passed into the NativeScript plugin, it would be turned into an ordinary object (i.e. { seconds: number, nanoseconds: number } rather than a Timestamp).
import { Timestamp } from 'firebase/firestore/lite';
const commentedAtTS = Timestamp.fromDate(hoge.commented_at);
docRef.set({ commentedAt: commentedAtTS.toDate() }) // must turn back to Date object before writing!
firebase.firestore.timestamp does not work via @nativescript/firebase, as @samthecodingman said (https://stackoverflow.com/a/69853638/15966408).
Just use ordinary JavaScript methods instead.
I tried
getting the timestamp from Firestore and converting it to seconds
getting a date with new Date() and converting it to seconds
and the same number of seconds was logged.
via firestore
const Ref = firebase.firestore.collection("comments").doc("07bhQeWDf3u1j0B4vNwG");
const doc = await Ref.get();
const hoge = doc.data();
console.log("hoge.commented_at in milliseconds: ", Math.floor(hoge.commented_at / 1000));
// CONSOLE LOG: hoge.commented_at in milliseconds: 1634996688
via javascript methods
const getNewDate = new Date("October 23, 2021, 22:44:48 GMT+0900");
// same as hoge.commented_at
console.log("getNewDate in milliseconds: ", getNewDate.getTime() / 1000);
// CONSOLE LOG: getNewDate in milliseconds: 1634996688
I am trying to make a call from a Firebase Function to a locally managed server. I am not very familiar with Node as a development environment, so I am not sure what the issue is.
const functions = require('firebase-functions');
const https = require('http');
exports.testPost = functions.https.onRequest((req, res) => {
var options = {
host: 'localdevserver.edu',
port: 80,
path: '/my/endpoint'
};
let data = '';
http.get(options, function(resp){
resp.on('data', function(chunk){
//do something with chunk
data += chunk;
resp.on('end', console.log("dones"));
});
}).on("error", function(e){
console.log("Got error: " + e.message);
});
});
When I look in the Firebase Functions Log, it says either timeout or no reject defined.
With HTTP type functions, you need to send a response to the client in order to terminate the function. Otherwise it will time out.
res.send("OK");
Please read the documentation for more details.
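As a minimal sketch, here is the asker's handler with the response terminated on completion (note the original snippet assigns require('http') to a variable named https but then calls http.get; the variable is simply named http here):
const functions = require('firebase-functions');
const http = require('http');

exports.testPost = functions.https.onRequest((req, res) => {
  const options = {
    host: 'localdevserver.edu',
    port: 80,
    path: '/my/endpoint'
  };
  http.get(options, function(resp) {
    let data = '';
    resp.on('data', function(chunk) {
      data += chunk;
    });
    resp.on('end', function() {
      res.send(data); // terminates the function
    });
  }).on('error', function(e) {
    console.log('Got error: ' + e.message);
    res.status(500).send(e.message); // terminate on errors too
  });
});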
You can use SYNC-REQUEST
npm install sync-request
var request = require('sync-request');
var res = request('GET', 'http://google.com');
console.log(res.body.toString('utf-8'));
the function would be something like this:
exports.testPost = functions.https.onRequest((req, res) => {
  var request = require('sync-request');
  var response = request('GET', 'http://google.com');
  var body = response.body.toString('utf-8');
  console.log("done");
  res.send(body); // send the fetched body back and terminate the function
});
TL;DR;
Does anyone know if it's possible to use console.log in a Firebase/Google Cloud Function to log entries to Stack Driver using the jsonPayload property so my logs are searchable (currently anything I pass to console.log gets stringified into textPayload).
I have a multi-module project with some code running on Firebase Cloud Functions, and some running in other environments like Google Compute Engine. Simplifying things a little, I essentially have a 'core' module, and then I deploy the 'cloud-functions' module to Cloud Functions, 'backend-service' to GCE, which all depend on 'core' etc.
I'm using bunyan for logging throughout my 'core' module, and when deployed to GCE the logger is configured using '#google-cloud/logging-bunyan' so my logs go to Stack Driver.
Aside: Using this configuration in Google Cloud Functions is causing issues with Error: Endpoint read failed which I think is due to functions not going cold and trying to reuse dead connections, but I'm not 100% sure what the real cause is.
So now I'm trying to log using console.log(arg) where arg is an object, not a string. I want this object to appear in Stack Driver under the jsonPayload but it's being stringified and put into the textPayload field.
It took me a while, but I finally came across this example in the firebase functions samples repository. In the end I settled on something a bit like this:
const { Logging } = require('@google-cloud/logging');
const logging = new Logging();
const log = logging.log('my-func-logger');
const logMetadata = {
resource: {
type: 'cloud_function',
labels: {
function_name: process.env.FUNCTION_NAME ,
project: process.env.GCLOUD_PROJECT,
region: process.env.FUNCTION_REGION
},
},
};
const logData = { id: 1, score: 100 };
const entry = log.entry(logMetadata, logData);
log.write(entry);
You can add a string severity property value to logMetadata (e.g. "INFO" or "ERROR"). Here is the list of possible values.
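For instance, a sketch of the same metadata from above with a severity attached:
const logMetadata = {
  severity: 'ERROR', // or 'INFO', 'WARNING', etc.
  resource: {
    type: 'cloud_function',
    labels: {
      function_name: process.env.FUNCTION_NAME,
      project: process.env.GCLOUD_PROJECT,
      region: process.env.FUNCTION_REGION
    },
  },
};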
Update for available node 10 env vars. These seem to do the trick:
labels: {
function_name: process.env.FUNCTION_TARGET,
project: process.env.GCP_PROJECT,
region: JSON.parse(process.env.FIREBASE_CONFIG).locationId
}
UPDATE: Looks like for Node 10 runtimes they want you to set env values explicitly during deploy. I guess there has been a grace period in place because my deployed functions are still working.
I ran into the same problem and, as stated in the comments on @wtk's answer, I would like to add a snippet that replicates all of the default Cloud Function logging behavior I could find, including execution_id.
At least for Cloud Functions using the HTTP trigger option, the following produced correct logs for me. I have not tested it with Firebase Cloud Functions.
// global
const { Logging } = require("@google-cloud/logging");
const logging = new Logging();
const Log = logging.log("cloudfunctions.googleapis.com%2Fcloud-functions");
const LogMetadata = {
severity: "INFO",
type: "cloud_function",
labels: {
function_name: process.env.FUNCTION_NAME,
project: process.env.GCLOUD_PROJECT,
region: process.env.FUNCTION_REGION
}
};
// per request
const data = { foo: "bar" };
const traceId = req.get("x-cloud-trace-context").split("/")[0];
const metadata = {
...LogMetadata,
severity: 'INFO',
trace: `projects/${process.env.GCLOUD_PROJECT}/traces/${traceId}`,
labels: {
execution_id: req.get("function-execution-id")
}
};
Log.write(Log.entry(metadata, data));
The GitHub link in @wtk's answer should be updated to:
https://github.com/firebase/functions-samples/blob/2f678fb933e416fed9be93e290ae79f5ea463a2b/stripe/functions/index.js#L103
As it refers to the repository as of when the question was answered, and has the following function in it:
// To keep on top of errors, we should raise a verbose error report with Stackdriver rather
// than simply relying on console.error. This will calculate users affected + send you email
// alerts, if you've opted into receiving them.
// [START reporterror]
function reportError(err, context = {}) {
// This is the name of the StackDriver log stream that will receive the log
// entry. This name can be any valid log stream name, but must contain "err"
// in order for the error to be picked up by StackDriver Error Reporting.
const logName = 'errors';
const log = logging.log(logName);
// https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/MonitoredResource
const metadata = {
resource: {
type: 'cloud_function',
labels: {function_name: process.env.FUNCTION_NAME},
},
};
// https://cloud.google.com/error-reporting/reference/rest/v1beta1/ErrorEvent
const errorEvent = {
message: err.stack,
serviceContext: {
service: process.env.FUNCTION_NAME,
resourceType: 'cloud_function',
},
context: context,
};
// Write the error log entry
return new Promise((resolve, reject) => {
log.write(log.entry(metadata, errorEvent), (error) => {
if (error) {
return reject(error);
}
resolve();
});
});
}
// [END reporterror]
Within the same Firebase project, and using a Cloud Function (written in Node.js), I first download an FTP file (using the npm ftp module) and then try to upload it into Firebase Storage.
Every attempt has failed so far, and the documentation doesn't help... any expert advice/tips would be greatly appreciated.
The following code uses two different approaches: fs.createWriteStream() and bucket.file().createWriteStream(). Both failed, but for different reasons (see the error messages in the code).
'use strict'
// [START import]
let admin = require('firebase-admin')
let functions = require('firebase-functions')
const gcpStorage = require('@google-cloud/storage')()
admin.initializeApp(functions.config().firebase)
var FtpClient = require('ftp')
var fs = require('fs')
// [END import]
// [START Configs]
// Firebase Storage is configured with the following rules and grants read write access to everyone
/*
service firebase.storage {
match /b/{bucket}/o {
match /{allPaths=**} {
allow read, write;
}
}
}
*/
// Replace this with your project id; it will be used by: const bucket = gcpStorage.bucket(firebaseProjectID)
const firebaseProjectID = 'your_project_id'
// Public FTP server, uploaded files are removed after 48 hours! Upload new ones when needed for testing
const CONFIG = {
test_ftp: {
source_path: '/48_hour',
ftp: {
host: 'ftp.uconn.edu'
}
}
}
const SOURCE_FTP = CONFIG.test_ftp
// [END Configs]
// [START saveFTPFileWithFSCreateWriteStream]
function saveFTPFileWithFSCreateWriteStream(file_name) {
const ftpSource = new FtpClient()
ftpSource.on('ready', function() {
ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
if (err) throw err
stream.once('close', function() { ftpSource.end() })
stream.pipe(fs.createWriteStream(file_name))
console.log('File downloaded: ', file_name)
})
})
ftpSource.connect(SOURCE_FTP.ftp)
}
// This fails with the following error in firebase console:
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
// [END saveFTPFileWithFSCreateWriteStream]
// [START saveFTPFileWithBucketUpload]
function saveFTPFileWithBucketUpload(file_name) {
const bucket = gcpStorage.bucket(firebaseProjectID)
const file = bucket.file(file_name)
const ftpSource = new FtpClient()
ftpSource.on('ready', function() {
ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
if (err) throw err
stream.once('close', function() { ftpSource.end() })
stream.pipe(file.createWriteStream())
console.log('File downloaded: ', file_name)
})
})
ftpSource.connect(SOURCE_FTP.ftp)
}
// [END saveFTPFileWithBucketUpload]
// [START database triggers]
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream
// function to save the file in the default project storage bucket
exports.dbTriggersFSCreateWriteStream = functions.database
.ref('/ftp_fs_triggers/{pushId}')
.onWrite(event => {
const trigger = event.data.val()
const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
return saveFTPFileWithFSCreateWriteStream(trigger.file_name)
// This fails with the following error in firebase console:
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
})
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload
// function to save the file in the default project storage bucket
exports.dbTriggersBucketUpload = functions.database
.ref('/ftp_bucket_triggers/{pushId}')
.onWrite(event => {
const trigger = event.data.val()
const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
return saveFTPFileWithBucketUpload(trigger.file_name)
// This fails with the following error in firebase console:
/*
Error: Uncaught, unspecified "error" event. ([object Object])
at Pumpify.emit (events.js:163:17)
at Pumpify.onerror (_stream_readable.js:579:12)
at emitOne (events.js:96:13)
at Pumpify.emit (events.js:188:7)
at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15)
at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10
at _combinedTickCallback (internal/process/next_tick.js:67:7)
at process._tickDomainCallback (internal/process/next_tick.js:122:9)
*/
})
// [END database triggers]
I've finally found the correct way to implement this.
1) Make sure the bucket is correctly referenced. Initially I just used my project_id, without the '.appspot.com' at the end:
const bucket = gcpStorage.bucket('<project_id>.appspot.com')
2) Create the bucket write stream first, then pipe the stream from the FTP get call into bucketWriteStream. Note that fileName will be the name of the saved file (the file does not have to exist beforehand).
ftpSource.get(filePath, function(err, stream) {
if (err) throw err
stream.once('close', function() { ftpSource.end() })
// This didn't work !
//stream.pipe(fs.createWriteStream(fileName))
// This works...
let bucketWriteStream = bucket.file(fileName).createWriteStream()
stream.pipe(bucketWriteStream)
})
Et voilà, works like a charm...
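Putting both fixes together, here is a minimal sketch of the working path (the project id placeholder and the test FTP host are the ones used earlier in the question):
const gcpStorage = require('@google-cloud/storage')()
var FtpClient = require('ftp')

function saveFTPFileToBucket(filePath, fileName) {
  // Note the '.appspot.com' suffix on the bucket name
  const bucket = gcpStorage.bucket('<project_id>.appspot.com')
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(filePath, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      // Pipe the FTP read stream straight into the bucket write stream
      stream.pipe(bucket.file(fileName).createWriteStream())
    })
  })
  ftpSource.connect({ host: 'ftp.uconn.edu' })
}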
I developed a Firebase Cloud function that processes several manipulations on uploaded images.
My code is based on this documentation article and this Cloud Function example. Hence, it is using the Google Cloud Storage package.
It is working fine almost all the time, but sometimes I am getting this error when uploading to or deleting from Storage :
Error: read ECONNRESET
at exports._errnoException (util.js:1026:11)
at TLSWrap.onread (net.js:569:26)
I am using the default bucket of my application, referenced by event.data.bucket.
Let me know if you need additional information or code snippets, even if my code is really close to the Function example I linked before.
I found this GitHub issue, but I checked that I am returning a promise every time. For example, here is the deletion part that triggers the error:
index.js
exports.exampleFunction = functions.storage.object().onChange(event => {
return f_thumbnails.exampleFunction(event);
});
example_function.js
module.exports = exports = function (_admin, _config) {
admin = _admin;
config = _config;
return {
"exampleFunction": function (event) {
return exampleFunction(event);
}
};
};
const exampleFunction = function (event) {
const gcsSourceFilePath = event.data.name;
const gcsSourceFilePathSplit = gcsSourceFilePath.split('/');
const gcsBaseFolder = gcsSourceFilePathSplit.length > 0 ? gcsSourceFilePathSplit[0] : '';
const gcsSourceFileName = gcsSourceFilePathSplit.pop();
const gceSourceFileDir = gcsSourceFilePathSplit.join('/') + (gcsSourceFilePathSplit.length > 0 ? '/' : '');
// Not an image
if (!event.data.contentType.startsWith('image/')) {
console.log('Not an image !');
return;
}
// Thumbnail
if (gcsSourceFileName.startsWith(config.IMAGES_THUMBNAIL_PREFIX)) {
console.log('Thumbnail !');
return;
}
const bucket = gcs.bucket(event.data.bucket);
const gcsThumbnailFilePath = gceSourceFileDir + config.IMAGES_THUMBNAIL_PREFIX + gcsSourceFileName;
// File deletion
if (event.data.resourceState === 'not_exists') {
console.log('Thumbnail deletion : ' + gcsThumbnailFilePath);
return bucket.file(gcsThumbnailFilePath).delete().then(() => {
console.log('Deleted thumbnail ' + gcsThumbnailFilePath);
});
}
...
This seems to be related to the google-cloud-node library's handling of sockets and the default socket timeout in the Cloud Functions environment.
One solution verified by a user is to modify the way the library invokes requests so that it does not keep the socket open forever, by specifying forever: false, e.g.:
var request = require('request').defaults({
timeout: 60000,
gzip: true,
forever: false,
pool: {
maxSockets: Infinity
}
});
This is hardcoded in packages/common/src/utils.js, so you'll need to vendor a copy of the modified library into your project rather than include it as an NPM dependency. See the related public issue for more details on the issue and a link to a fork with the patch applied.