In my Next.js blog app I have been trying to set up a global API call:
import { createClient } from "contentful";
const client = createClient({
  space: process.env.CONTENTFUL_SPACE_ID,
  accessToken: process.env.CONTENTFUL_ACCESS_KEY,
});

const auth_data = await client.getEntries({ content_type: "author" });

export function getAuthors() {
  var authors = [];
  var auth_len = auth_data.items.length;
  for (var i = 0; i < auth_len; i++) {
    authors.push({
      authorSlug: auth_data.items[i].fields.name
        .toString()
        .replace(/ /g, "-")
        .toLowerCase(),
      authorContent: auth_data.items[i].fields.description,
      authorFrontMatter: {
        title: auth_data.items[i].fields.name,
        image: "https:" + auth_data.items[i].fields.image.fields.file.url,
      },
    });
  }
  return authors;
}
I keep getting TypeError: Expected parameter accessToken because the environment variables are not available from the /lib folder where this getAuthors() function is located. If I prefix the variables with NEXT_PUBLIC_ I can reach them from /lib, but that would also expose them to the browser.
Is there a way to reach the environment variables from /lib WITHOUT exposing them to the browser?
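For context, server-only environment variables (without the NEXT_PUBLIC_ prefix) are readable from code in /lib as long as that code only runs on the server, for example during getStaticProps. A minimal sketch, assuming the variables live in .env.local and the snippet above sits in a hypothetical lib/authors.js:

// pages/authors.js (hypothetical page): getAuthors() runs only on the server,
// so CONTENTFUL_SPACE_ID / CONTENTFUL_ACCESS_KEY never reach the browser bundle.
import { getAuthors } from "../lib/authors";

export async function getStaticProps() {
  const authors = await getAuthors();
  return { props: { authors } };
}

export default function AuthorsPage({ authors }) {
  return (
    <ul>
      {authors.map((author) => (
        <li key={author.authorSlug}>{author.authorFrontMatter.title}</li>
      ))}
    </ul>
  );
}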
Hello, I am getting an error while trying to deploy a function on Firebase, and it is bothering me because this exact code deployed fine in the past; now the same code gives me the error below.
Can someone have a look? I checked the documentation thinking that something might have changed (attribute names and so on), but the function seems 100% sound based on the documentation.
Kind regards and kudos to everyone.
Much respect if someone manages to give me a hint. I will add the log files as well.
Code :
const functions = require("firebase-functions");
const axios = require("axios");
const admin = require("firebase-admin");
admin.initializeApp();
const database = admin.firestore();
const page = 1;
const fiat = "RON";
const tradeType = "BUY";
const asset = "USDT";
const payTypes = ["ING"];
let finalData = [];
let tempDataBeforeProccessing = [];
const baseObj = {
  page,
  rows: 20,
  publisherType: null,
  asset,
  tradeType,
  fiat,
  payTypes,
};
const stringData = JSON.stringify(baseObj);
const getTheData = async function() {
  tempDataBeforeProccessing = [];
  await axios.post("https://p2p.binance.com/bapi/c2c/v2/friendly/c2c/adv/search", baseObj, {
    hostname: "p2p.binance.com",
    port: 443,
    path: "/bapi/c2c/v2/friendly/c2c/adv/search",
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Content-Length": stringData.length,
    },
  }).then((res) => {
    tempDataBeforeProccessing = res.data.data;
  });
};
const processData = function() {
  finalData = [];
  let obj = [];
  for (let i = 0; i < tempDataBeforeProccessing.length; i++) {
    let payTypesz = "";
    for (let y = 0; y < tempDataBeforeProccessing[i]["adv"]["tradeMethods"].length; y++) {
      payTypesz += tempDataBeforeProccessing[i]["adv"]["tradeMethods"][y]["identifier"];
      if (y < tempDataBeforeProccessing[i]["adv"]["tradeMethods"].length - 1) {
        payTypesz += ", ";
      }
    }
    obj = {
      tradeType: tempDataBeforeProccessing[i]["adv"]["tradeType"],
      asset: tempDataBeforeProccessing[i]["adv"]["asset"],
      fiatUnit: tempDataBeforeProccessing[i]["adv"]["fiatUnit"],
      price: tempDataBeforeProccessing[i]["adv"]["price"],
      surplusAmount: tempDataBeforeProccessing[i]["adv"]["surplusAmount"],
      maxSingleTransAmount: tempDataBeforeProccessing[i]["adv"]["maxSingleTransAmount"],
      minSingleTransAmount: tempDataBeforeProccessing[i]["adv"]["minSingleTransAmount"],
      nickName: tempDataBeforeProccessing[i]["advertiser"]["nickName"],
      monthOrderCount: tempDataBeforeProccessing[i]["advertiser"]["monthOrderCount"],
      monthFinishRate: tempDataBeforeProccessing[i]["advertiser"]["monthFinishRate"],
      payTypes: payTypesz,
    };
    finalData.push(obj);
  }
  console.log(finalData);
};
const entireCall = async function() {
  await getTheData();
  processData();
};

exports.scheduledFunction = functions.pubsub
  .schedule("every 1 minutes")
  .onRun(async (context) => {
    await database.collection("SebiBinanceSale").doc("BCR Bank").delete();
    await entireCall();
    for (let i = 0; i < finalData.length; i++) {
      await database.collection("SebiBinanceSale").doc("BCR Bank")
        .collection("1").doc(i.toString())
        .set({
          "tradeType": finalData[i]["tradeType"],
          "asset": finalData[i]["asset"],
          "fiatUnit": finalData[i]["fiatUnit"],
          "price": finalData[i]["price"],
          "surplusAmount": finalData[i]["surplusAmount"],
          "maxSingleTransAmount": finalData[i]["maxSingleTransAmount"],
          "minSingleTransAmount": finalData[i]["minSingleTransAmount"],
          "nickName": finalData[i]["nickName"],
          "monthOrderCount": finalData[i]["monthOrderCount"],
          "monthFinishRate": finalData[i]["monthFinishRate"],
          "payTypes": finalData[i]["payTypes"],
        });
    }
    return console.log("Successful upload of the data");
  });
error:
Function failed on loading user code. This is likely due to a bug in the user code. Error message: Error: please examine your function logs to see the error cause: https://cloud.google.com/functions/docs/monitoring/logging#viewing_logs. Additional troubleshooting documentation can be found at https://cloud.google.com/functions/docs/troubleshooting#logging. Please visit https://cloud.google.com/functions/docs/troubleshooting for in-depth troubleshooting documentation.
Functions deploy had errors with the following functions:
scheduledFunction(us-central1)
i functions: cleaning up build files...
Error: There was an error deploying functions
ivanoiualexandrupaul#Ivanoius-MacBook-Pro functions %
log file :
[debug] [2022-10-29T17:40:16.776Z] Error: Failed to update function scheduledFunction in region us-central1
at /usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:41:11
at processTicksAndRejections (internal/process/task_queues.js:95:5)
at async Fabricator.updateV1Function (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:305:32)
at async Fabricator.updateEndpoint (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:140:13)
at async handle (/usr/local/lib/node_modules/firebase-tools/lib/deploy/functions/release/fabricator.js:78:17)
[error]
[error] Error: There was an error deploying functions
When you use scheduled functions in Firebase, an App Engine instance is created because Cloud Scheduler needs it to work (you can read about it here). It uses the default location that has been set for resources. I think you are getting this error because there is a mismatch between the default GCP resource location you specified and the region of your scheduled Cloud Function. Check your Cloud Scheduler function details to see which region it has been deployed to; by default, functions run in the us-central1 region. Check this link to see how to change the region of the function.
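If that mismatch is the cause, one option is to pin the scheduled function to an explicit region so that it matches your default resource location. A minimal sketch, assuming (hypothetically) that your resources live in europe-west1:

// Hypothetical region: replace "europe-west1" with your default GCP resource location.
exports.scheduledFunction = functions
    .region("europe-west1")
    .pubsub.schedule("every 1 minutes")
    .onRun(async (context) => {
      // ...same body as in the question...
      return null;
    });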
You can also try reinstalling the Firebase CLI with the command
npm install -g firebase-tools
Also check whether any lock files have been generated; delete them, run firebase deploy --only functions again, and see if that works.
I am trying to pass some authentication headers to fetch a third-party API, but I am getting the following error when running a /server/api/walmart.js file in Nuxt 3:
[nuxt] [request error] this[D].init is not a function
at new Sign (https://nuxt-starter-jzgdht.w.staticblitz.com/blitz.331c80ffab288536319518a60349d00207075dad.js:6:1135208)
at Object.createSign (https://nuxt-starter-jzgdht.w.staticblitz.com/blitz.331c80ffab288536319518a60349d00207075dad.js:6:808197)
at Scheme.sign (./node_modules/node-rsa/src/schemes/pkcs1.js:152:32)
at RSAKey.module.exports.Key.RSAKey.sign (./node_modules/node-rsa/src/libs/rsa.js:264:40)
at NodeRSA.module.exports.NodeRSA.sign (./node_modules/node-rsa/src/NodeRSA.js:318:32)
at generateWalmartHeaders (./.nuxt/dev/index.mjs:446:28)
at eval (./.nuxt/dev/index.mjs:458:14)
at eval (./node_modules/h3/dist/index.mjs:364:14)
at Object.eval [as handler] (./node_modules/h3/dist/index.mjs:564:12)
at eval (./node_modules/h3/dist/index.mjs:475:31)
(The same stack trace is printed twice in the console.)
import NodeRSA from 'node-rsa';
const keyData = {
consumerId: '<consumer id removed for this post>',
privateKey: `-----BEGIN RSA PRIVATE KEY-----
MIIEowIBAAKCAQEAiDffvTlnBcHfDhPjYlJhSk+atPFE6HpFaf4mm/aYBXmOn89A
MMulkkmpu+RGj0SaPrpX/ockoSeMuuEPPd3AQ5uZAnJw9TBnE2/kJrPsHaKyVFGV
hZneksIK/KdP/kpGSuVAkocPdit5zKwliMnc6/GVLpDFvU8K2QPnvYu2Jp8XBDwg
gTu2tzHQkrm0cdCYZklKKqN9NdVLJy+6AL50+vX19nHwDdCYCvnPtH/hXxMwRnIJ
3qibO8owPWh+q/xMld2K2OoUNWpbsxBj/r9Jxu60d429+XcIUu1hyPHG1lDKiK/n
Z4L+7WE4Ez8sEVD0YrE9hRRbLl5Dsvi4XZrG9QIDAQABAoIBAQCFNN5+1JuCbcwK
oDhD9fteB+pp92ZDUQ6AUCDBc6vF7tEiRjGnhf4ryA1LyDeX8qZDoUZbiRyw21Qr
i9qzzR4u/wHp+q+rleG2iDy7/EZx1KA/BGkLdSTKrya/W35GVavXocg7gggErw80
r0MlOQHpWR1hpAE61wjsn30HRpChwow1YZE/6cMIQN2nCJ+JGXuZDoGazYk4HwFD
Fmrtag/FjShYUVgr4QlrPzYzcTCKX1UKQkZ9AED9Q2prKSvoD8ZxOJSaVF0FP1ty
/i9L1I/eJEusSBPXc9v5xJjs7q8RRKZhn3TuvBHos3LDuaQKwPBe+w0vHr8ZVw5t
tuZPiq8BAoGBAOYILkVnnrskg2qzqewHukiKnZHLi+WkJQPQzTHvVMMCEUaxF+6p
Y58tLlaEd0uNH1ntDPya5s2y01/1eZ/8n1U1/SlqKoQ5apkC77eNBklMxixJ0xGP
zu1bj5COba6pXdY+YcW2z7W0ubmPD3YCSD/VUD/IKXwTEAHm2J11ffM9AoGBAJeY
hRbiSQW6GXWHu41qqIYsWfdJpy/A0qhpXjmySq1XFH/ThHDuFmn5RkZVvn7D/Pql
GfO8E55QWjK+SLO7LBRazKP0GNmrKinVMKyo7WUgwrZy3fwEY5wcNaNFB/YL9J4M
OSRp6eV3pnUwQI2NhzTzuHAyJgd/r+I6zMeSTn4ZAoGADGhejpHTRwbmK8g7Hycf
jjAj5axUBHQBJx6JIutk6AvhgK2mu9HZNMnMGRCWGrYm/cPCkpGMZ4YAzsk/4ThQ
I9mAqU43suAh9tTotz7dGvEQM21b/DOEltr8eHCmS+iIzjiZL3/33jY8Wlz0GYpv
+Tl5VadnTXD9yQx5nKysuYUCgYA6PNy8Kth0u8a2ERvrOxNc4EL7ri7tOH11N218
atMnfnGgnciefcjck2f880nId1CDldO/f/xlcGcGYXWanohTlYJSZh752DjNc1pM
qmTw2cITx1MiUylVOr0caROi4XrrELUPGSVDA1FOaegSuVE89XhgmdNkRBh0p7Qt
4zYGWQKBgFGuZgbPl76K3fj5POY5OlPusXdCCqJMHKPug+4e5mG82T1KvLFCj1PQ
Fm0y9PoS3A8SXf+aEeLhRqVrpaU4w5RK8PqeF6IB2hpDrHJ8b+3ERU3J7/KuU8Vw
pmwDIbjWypH4dNJgRMti+RKDb9llup6xP5Q4PQRzvUQdklSCp3D8
-----END RSA PRIVATE KEY-----`,
keyVer: 1,
};
const generateWalmartHeaders = () => {
  const { privateKey, consumerId, keyVer } = keyData;

  const hashList = {
    'WM_CONSUMER.ID': consumerId,
    'WM_CONSUMER.INTIMESTAMP': Date.now().toString(),
    'WM_SEC.KEY_VERSION': keyVer,
  };

  const sortedHashString = `${hashList['WM_CONSUMER.ID']}\n${hashList['WM_CONSUMER.INTIMESTAMP']}\n${hashList['WM_SEC.KEY_VERSION']}\n`;

  const signer = new NodeRSA(privateKey, 'pkcs1');
  const signature = signer.sign(sortedHashString);
  const signatureEnc = signature.toString('base64');

  return {
    'WM_SEC.AUTH_SIGNATURE': signatureEnc,
    'WM_CONSUMER.INTIMESTAMP': hashList['WM_CONSUMER.INTIMESTAMP'],
    'WM_CONSUMER.ID': hashList['WM_CONSUMER.ID'],
    'WM_SEC.KEY_VERSION': hashList['WM_SEC.KEY_VERSION'],
  };
};

export default defineEventHandler(() => {
  const options = {
    method: 'GET',
    headers: generateWalmartHeaders(), // <--- the error seems to originate from this

  return {
    api: 'works',
    options: options,
  };
});
Am I not using the defineEventHandler() correctly there?
@redshift,
1. The const options in the defineEventHandler() function has no closing brace (it is unterminated); a corrected version is sketched after the example below.
2. Keep in mind we are working server-side here; check the functions used inside generateWalmartHeaders(). Do they all run in the backend?
3. Fire this code with hard-coded headers and see if it runs / debug it from there.
4. Check the docs: the documentation was changed around the 20th of June, some time after you posted this question.
5. Check this example:
export default defineEventHandler((event) => {
  return {
    api: 'works'
  }
})
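Applied to your file, the corrected sketch mentioned in point 1 would look roughly like this (it assumes generateWalmartHeaders() from your snippet is defined in the same server route and runs only on the server):

export default defineEventHandler(() => {
  // Close the options object before returning it.
  const options = {
    method: 'GET',
    headers: generateWalmartHeaders(),
  };

  return {
    api: 'works',
    options: options,
  };
});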
Check my working example, maybe it will help you get on your way:
const config = useRuntimeConfig()
let environment = process.env.NODE_ENV;
let $endpoint = environment == 'development' ? 'http://dev.endpointisdev/' : 'http://prod.production/';
console.log('ENVIRONMENT =', process.env.NODE_ENV)
console.log('$ENDPOINTS =', $endpoint)
export default defineEventHandler(async (event) => {
  const productsRequest = await $fetch(`${$endpoint}` + 'somelistrequest', {
    method: 'POST',
    headers: {
      'Authorization': 'Basic ' + btoa(`${config.auth_user}:${config.auth_pw}`),
      'guiId': '7552662'
    },
    body: {
      reference: "blablabla",
      productListRequest: {
        securityKey: `${config._key}`
      }
    }
  });
  return productsRequest.list
})
IMPORTANT:
I fire this from my store with something like this:
let submitOrder = await $fetch("/api/submitOrder", {method: 'POST', body: orderPayload});
One thing I encountered today: deployment to production made this API call fail because it needed to be "camelCase".
I'm having trouble using the Meteor Slingshot package with its "S3 with temporary AWS credentials" directive. I keep getting the error Exception while invoking method 'slingshot/uploadRequest' InvalidClientTokenId: The security token included in the request is invalid.
I have absolutely no idea what I'm doing wrong. If I use Slingshot normally, without temporary credentials, it works fine.
import { Meteor } from 'meteor/meteor';
import moment from 'moment';
const cryptoRandomString = require('crypto-random-string');
var AWS = require('aws-sdk');
var sts = new AWS.STS();
Slingshot.createDirective('UserProfileResumeUpload', Slingshot.S3Storage.TempCredentials, {
  bucket: 'mybuckname', // change this to your s3's bucket name
  region: 'ap-southeast-2',
  acl: 'private',
  temporaryCredentials: Meteor.wrapAsync(function (expire, callback) {
    // AWS dictates that the minimum duration must be 900 seconds:
    var duration = Math.max(Math.round(expire / 1000), 900);
    sts.getSessionToken({
      DurationSeconds: duration
    }, function (error, result) {
      callback(error, result && result.Credentials);
    });
  }),
  authorize: function () {
    // Deny uploads if user is not logged in.
    if (!this.userId) {
      const message = 'Please login before posting files';
      throw new Meteor.Error('Login Required', message);
    }
    return true;
  },
  key: function () {
    return 'mydirectory' + '/' + cryptoRandomString(10) + moment().valueOf();
  }
});
Path: Settings.json
{
  "AWSAccessKeyId": "myAWSKEYID",
  "AWSSecretAccessKey": "MyAWSSeceretAccessKey"
}
I've done it on the server side like this:
Slingshot.createDirective("UserProfileResumeUpload", Slingshot.S3Storage, {
AWSAccessKeyId: Meteor.settings.AWS.AccessKeyId,
AWSSecretAccessKey: Meteor.settings.AWS.SecretAccessKey,
bucket: 'mybuckname', // change this to your s3's bucket name
region: 'ap-southeast-2',
acl: 'private',
...
}
and in settings.json
{
  "AWS": {
    "AccessKeyId": "myAWSKEYID",
    "SecretAccessKey": "MyAWSSeceretAccessKey"
  }
}
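If you do want to keep the temporary-credentials directive instead, the InvalidClientTokenId error usually means the STS client itself is not signing requests with valid credentials. A hedged sketch (assuming the keys stay at the top level of settings.json, as in the question) would be to configure the AWS SDK explicitly before creating the STS client:

var AWS = require('aws-sdk');

// Hypothetical setup: load the IAM keys from Meteor settings so that
// sts.getSessionToken() is signed with valid long-term credentials.
AWS.config.update({
  accessKeyId: Meteor.settings.AWSAccessKeyId,
  secretAccessKey: Meteor.settings.AWSSecretAccessKey,
  region: 'ap-southeast-2',
});

var sts = new AWS.STS();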
Within the same Firebase project, and using a Cloud Function (written in Node.js), I first download a file over FTP (using the npm ftp module) and then try to upload it into Firebase Storage.
Every attempt has failed so far, and the documentation doesn't help... any expert advice/tips would be greatly appreciated.
The following code uses two different approaches: fs.createWriteStream() and bucket.file().createWriteStream(). Both failed, but for different reasons (see the error messages in the code).
'use strict'
// [START import]
let admin = require('firebase-admin')
let functions = require('firebase-functions')
const gcpStorage = require('@google-cloud/storage')()
admin.initializeApp(functions.config().firebase)
var FtpClient = require('ftp')
var fs = require('fs')
// [END import]
// [START Configs]
// Firebase Storage is configured with the following rules and grants read write access to everyone
/*
service firebase.storage {
  match /b/{bucket}/o {
    match /{allPaths=**} {
      allow read, write;
    }
  }
}
*/
// Replace this with your project id; it will be used by: const bucket = gcpStorage.bucket(firebaseProjectID)
const firebaseProjectID = 'your_project_id'
// Public FTP server, uploaded files are removed after 48 hours ! Upload new ones when needed for testing
const CONFIG = {
  test_ftp: {
    source_path: '/48_hour',
    ftp: {
      host: 'ftp.uconn.edu'
    }
  }
}
const SOURCE_FTP = CONFIG.test_ftp
// [END Configs]
// [START saveFTPFileWithFSCreateWriteStream]
function saveFTPFileWithFSCreateWriteStream(file_name) {
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      stream.pipe(fs.createWriteStream(file_name))
      console.log('File downloaded: ', file_name)
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}
// This fails with the following error in firebase console:
// Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
// [END saveFTPFileWithFSCreateWriteStream]
// [START saveFTPFileWithBucketUpload]
function saveFTPFileWithBucketUpload(file_name) {
  const bucket = gcpStorage.bucket(firebaseProjectID)
  const file = bucket.file(file_name)
  const ftpSource = new FtpClient()
  ftpSource.on('ready', function() {
    ftpSource.get(SOURCE_FTP.source_path + '/' + file_name, function(err, stream) {
      if (err) throw err
      stream.once('close', function() { ftpSource.end() })
      stream.pipe(file.createWriteStream())
      console.log('File downloaded: ', file_name)
    })
  })
  ftpSource.connect(SOURCE_FTP.ftp)
}
// [END saveFTPFileWithBucketUpload]
// [START database triggers]
// Listens for new triggers added to /ftp_fs_triggers/:pushId and calls the saveFTPFileWithFSCreateWriteStream
// function to save the file in the default project storage bucket
exports.dbTriggersFSCreateWriteStream = functions.database
  .ref('/ftp_fs_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithFSCreateWriteStream(trigger.file_name)
    // This fails with the following error in firebase console:
    // Error: EROFS: read-only file system, open '20170601.tar.gz' at Error (native)
  })
// Listens for new triggers added to /ftp_bucket_triggers/:pushId and calls the saveFTPFileWithBucketUpload
// function to save the file in the default project storage bucket
exports.dbTriggersBucketUpload = functions.database
  .ref('/ftp_bucket_triggers/{pushId}')
  .onWrite(event => {
    const trigger = event.data.val()
    const fileName = trigger.file_name // i.e. : trigger.file_name = '20170601.tar.gz'
    return saveFTPFileWithBucketUpload(trigger.file_name)
    // This fails with the following error in firebase console:
    /*
    Error: Uncaught, unspecified "error" event. ([object Object])
      at Pumpify.emit (events.js:163:17)
      at Pumpify.onerror (_stream_readable.js:579:12)
      at emitOne (events.js:96:13)
      at Pumpify.emit (events.js:188:7)
      at Pumpify.Duplexify._destroy (/user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:184:15)
      at /user_code/node_modules/@google-cloud/storage/node_modules/duplexify/index.js:175:10
      at _combinedTickCallback (internal/process/next_tick.js:67:7)
      at process._tickDomainCallback (internal/process/next_tick.js:122:9)
    */
  })
// [END database triggers]
I've finally found the correct way to implement this.
1) Make sure the bucket is correctly referenced. Initially I just used my project_id without the '.appspot.com' at the end; the correct reference is:
const bucket = gsc.bucket('<project_id>.appspot.com')
2) Create a bucket write stream first, then pipe the stream from the FTP get call to the bucketWriteStream. Note that fileName will be the name of the saved file (this file does not have to exist beforehand).
ftpSource.get(filePath, function(err, stream) {
  if (err) throw err
  stream.once('close', function() { ftpSource.end() })

  // This didn't work !
  // stream.pipe(fs.createWriteStream(fileName))

  // This works...
  let bucketWriteStream = bucket.file(fileName).createWriteStream()
  stream.pipe(bucketWriteStream)
})
Et voilà, works like a charm...
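For completeness, here is a sketch of the upload function with both fixes applied and wrapped in a Promise, so the Cloud Function waits for the bucket write to finish before terminating. It reuses the gcpStorage, FtpClient and SOURCE_FTP names from the question's code; the helper name itself is hypothetical.

// Hypothetical helper combining the two fixes above.
function saveFTPFileToBucket(fileName) {
  return new Promise((resolve, reject) => {
    const bucket = gcpStorage.bucket(firebaseProjectID + '.appspot.com')
    const ftpSource = new FtpClient()
    ftpSource.on('ready', function() {
      ftpSource.get(SOURCE_FTP.source_path + '/' + fileName, function(err, stream) {
        if (err) return reject(err)
        stream.once('close', function() { ftpSource.end() })
        // Pipe the FTP read stream straight into the bucket write stream.
        stream.pipe(bucket.file(fileName).createWriteStream())
          .on('error', reject)
          .on('finish', function() {
            console.log('File uploaded: ', fileName)
            resolve()
          })
      })
    })
    ftpSource.connect(SOURCE_FTP.ftp)
  })
}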
I developed a Firebase Cloud function that processes several manipulations on uploaded images.
My code is based on this documentation article and this Cloud Function example. Hence, it is using Google Cloud Storage package.
It is working fine almost all the time, but sometimes I am getting this error when uploading to or deleting from Storage :
Error: read ECONNRESET
at exports._errnoException (util.js:1026:11)
at TLSWrap.onread (net.js:569:26)
I am using the default bucket of my application, referenced by event.data.bucket.
Let me know if you need additional information or code snippets, even if my code is really close to the Function example I linked before.
I found this GitHub issue, but I checked that I am returning a promise every time. For example, here is the deletion part that triggers the error:
index.js
exports.exampleFunction = functions.storage.object().onChange(event => {
  return f_thumbnails.exampleFunction(event);
});
example_function.js
module.exports = exports = function (_admin, _config) {
  admin = _admin;
  config = _config;
  return {
    "exampleFunction": function (event) {
      return exampleFunction(event);
    }
  };
};
const exampleFunction = function (event) {
  const gcsSourceFilePath = event.data.name;
  const gcsSourceFilePathSplit = gcsSourceFilePath.split('/');
  const gcsBaseFolder = gcsSourceFilePathSplit.length > 0 ? gcsSourceFilePathSplit[0] : '';
  const gcsSourceFileName = gcsSourceFilePathSplit.pop();
  const gceSourceFileDir = gcsSourceFilePathSplit.join('/') + (gcsSourceFilePathSplit.length > 0 ? '/' : '');

  // Not an image
  if (!event.data.contentType.startsWith('image/')) {
    console.log('Not an image !');
    return;
  }

  // Thumbnail
  if (gcsSourceFileName.startsWith(config.IMAGES_THUMBNAIL_PREFIX)) {
    console.log('Thumbnail !');
    return;
  }

  const bucket = gcs.bucket(event.data.bucket);
  const gcsThumbnailFilePath = gceSourceFileDir + config.IMAGES_THUMBNAIL_PREFIX + gcsSourceFileName;

  // File deletion
  if (event.data.resourceState === 'not_exists') {
    console.log('Thumbnail deletion : ' + gcsThumbnailFilePath);
    return bucket.file(gcsThumbnailFilePath).delete().then(() => {
      console.log('Deleted thumbnail ' + gcsThumbnailFilePath);
    });
  }
  ...
This seems to be related to the google-cloud-node library's handling of sockets, and the default socket timeout in the Cloud Functions environment.
One solution verified by a user is to modify the way the library invokes requests so that it does not keep the socket open forever, by specifying forever: false, e.g.:
var request = require('request').defaults({
  timeout: 60000,
  gzip: true,
  forever: false,
  pool: {
    maxSockets: Infinity
  }
});
This is hardcoded in packages/common/src/utils.js, so you'll need to vendor a copy of the modified library into your project rather than include it as an NPM dependency. See the related public issue for more details on the issue and a link to a fork with the patch applied.