const pool = pooledMap(20, feeds, (url) => {
return getWithXidel(url, '//item/link');
});
const pool2 = pooledMap(20, feedEntries, (url) => {
return getWithXidel(url, "//entry/link[#rel='alternate']/#href");
});
for await (const data of pool) {
console.log('data: ', data);
console.log('found links: ', data?.links?.length);
for (const link of data?.links) {
const url = link.trim();
results.push(url);
}
}
Is it possible to combine these two pools into one? I tried Array.concat, but it breaks: it says it's not iterable.
The Deno standard library's pooledMap returns an AsyncIterableIterator which, like an AsyncGenerator, can be iterated with for await...of and is not synchronously iterable (which is why concatenating two of them into an array fails).
The standard library also provides a MuxAsyncIterator, which "multiplexes multiple async iterators into a single stream" and which you can use to "combine" multiple pools:
import {
pooledMap,
MuxAsyncIterator,
} from "https://deno.land/std@0.116.0/async/mod.ts";
const pool = pooledMap(20, feeds, (url) => {
return getWithXidel(url, '//item/link');
});
const pool2 = pooledMap(20, feedEntries, (url) => {
return getWithXidel(url, "//entry/link[@rel='alternate']/@href");
});
const mux = new MuxAsyncIterator();
mux.add(pool);
mux.add(pool2);
for await (const data of mux) {
console.log('data: ', data);
console.log('found links: ', data?.links?.length);
for (const link of data?.links) {
const url = link.trim();
results.push(url);
}
}
I am building an image upload feature with Cloudinary. I'm providing an array which may contain base64-encoded images or already-uploaded images (URLs):
[
"https://res.cloudinary.com/\[userName\]/image/upload/v167xxxx4/luxxxfsgasxxxxxx7t9.jpg", "https://res.cloudinary.com/doeejabc9/image/upload/v1675361225/rf6adyht6jfx10vuzjva.jpg",
"data:image/jpeg;base64,/9j/4AAUSkZJRgABAQEBLAEsAA.......", "data:image/jpeg;base64,/9j/4AAUSkZJRgABAQEBLAEsAA......."
]
I'm using this function to upload the not-yet-uploaded entries; it returns the array with everything uploaded:
export async function uploadImage(el: string[]) {
const partition = el.reduce(
(result: string[][], element: string) => {
element.includes("data:image/")
? result[0].push(element)
: result[1].push(element);
return result;
},
[[], []]
);
for (let i = 0; i < partition[0].length; i++) {
const data = new FormData();
data.append("file", partition[0][i]);
data.append("upload_preset", "my_preset_name");
const res = await fetch(
"https://api.cloudinary.com/v1_1/userName/image/upload",
{
method: "POST",
body: data,
}
);
const file = await res.json();
partition[1].push(file.secure_url);
console.log(partition[1]);
}
return partition[1];
}
Then I use the return value to update the state and call the API to update the database:
const uploaded = await uploadImage(el[1])
console.log(uploaded);
setFinalVersionDoc({
...chosenDocument,
[chosenDocument[el[0]]]: uploaded,
});
However, it always updates the useState state before the console.log(uploaded) runs. I thought async/await would make sure the value is updated before moving on.
The GitHub repo is linked for a better picture. The fragment is under EditModal in the 'component/document' folder:
https://github.com/anthonychan1211/cms
Thanks a lot!
I am hoping to make the upload happen before updating the state.
The function is correct, but you are awaiting the promise inside the callback of a forEach, and await inside forEach doesn't work.
This doesn't work:
async function handleEdit() {
const entries = Object.entries(chosenDocument);
entries.forEach(async (el) => { // <------ the problem
if (Array.isArray(el[1])) {
const uploaded = await uploadImage(el[1]);
el[1].splice(0, el[1].length, uploaded);
}
});
[...]
}
If you want the entries processed one after another, you can use a for...of loop instead.
This works (sequentially)
(execution order guaranteed)
async function handleEdit() {
const entries = Object.entries(chosenDocument);
for (const el of entries) {
// await the promises 1,2,...,n in sequence
if (Array.isArray(el[1])) {
const uploaded = await uploadImage(el[1]);
el[1].splice(0, el[1].length, uploaded);
}
}
}
This also works (in parallel)
(execution order not guaranteed)
async function handleEdit() {
const entries = Object.entries(chosenDocument);
await Promise.all(entries.map(async (el) => {
// map starts all the async callbacks immediately and returns an array of promises; await Promise.all() waits for all of them to finish
if (Array.isArray(el[1])) {
const uploaded = await uploadImage(el[1]);
el[1].splice(0, el[1].length, uploaded);
}
}));
[...]
}
If the order in which your files are uploaded doesn't matter, picking the parallel method will be faster/better.
So, I have my route which console.logs 'undefined':
router.get("/validate-pin", async (req, res) => {
// restrict when done
try {
const { userId, pin } = req.query;
const isActivePin = await pinsDB.compareActivePin(userId, pin);
console.log(isActivePin)
return res.status(200).json(isActivePin);
} catch (error) {
console.log(error);
res.status(500).json({ error: "db error: ", error });
}
});
I have my compareActivePin method, which logs out the 'res' parameter, but for some reason doesn't return it:
async function compareActivePin(userId, received_pin) {
const active_pin = await db("account_pins").where({ userId, isActive: true });
const pinIsValidated = bcrypt.compareSync(
received_pin,
active_pin[0].account_pin
);
if (pinIsValidated) {
let skLocation = await db("sks").where({ userId }).select("url");
await readKey(skLocation[0].url, (res) => {
// console.log(res);
return res;
});
} else return false;
}
And I have my readKey method, which actually grabs the data I want my compareActivePin to return. This works like a charm.
const readKey = async (key, callback) => {
const aws = require("aws-sdk");
aws.config.update({
secretAccessKey: process.env.AWS_SECRET_ACCESS_KEY,
accessKeyId: process.env.AWS_ACCESS_KEY_ID,
region: "us-east-2",
});
const s3 = new aws.S3();
const getParams = { Bucket: process.env.SK_BUCKET, Key: `${key}.txt` };
await s3.getObject(getParams, (err, data) => {
if (err) return err;
return callback(data.Body.toString());
});
};
So, just to recap: when I hit my endpoint, I pass in a userId and pin (strings). This calls the compareActivePin method, which validates the pin and, if the pin is valid, calls readKey, which grabs the file from S3 and returns the text within the file.
Like I said, I'm able to log it out to the console from within the readKey callback, but when I try to log it out as the returned value from the route, it comes back undefined.
Hoping someone could point me in the right direction.
Thanks...
I ended up answering my own question. I don't think it's possible to get a return value from the callback, so I ended up paring down the call from the database and sending the response from the readKey function using the router response object, like so:
//CompareActivePin Function
async function compareActivePin(userId, received_pin) {
const active_pin = await db("account_pins").where({ userId, isActive: true });
const pinIsValidated = bcrypt.compareSync(
received_pin,
active_pin[0].account_pin
);
return pinIsValidated;
}
//Router Call
router.get("/validate-pin", async (req, res) => {
// restrict when done
try {
const { userId, pin } = req.query;
const isActivePin = await pinsDB.compareActivePin(userId, pin);
if (isActivePin) {
let skLocation = await skDB.findUrl(userId);
readKeyFunc(skLocation[0].url, (result) => {
return res.status(200).json({ confirmed: isActivePin, key: result });
});
} else return res.status(401).json({ confirmed: isActivePin, key: null });
} catch (error) {
res.status(500).json({ error: "db error: ", error });
}
});
This also goes a long way toward keeping my database methods pure and separating my concerns.
Thanks, StackOverflow!
As per the documentation, we can add App Check as below:
exports.yourCallableFunction = functions.https.onCall((data, context) => {
// context.app will be undefined if the request doesn't include a valid
// App Check token.
if (context.app == undefined) {
throw new functions.https.HttpsError(
'failed-precondition',
'The function must be called from an App Check verified app.')
}
});
My question is: how do I add App Check for the scenario below?
exports.date = functions.https.onRequest((req, res) => {
});
In the client, get an App Check token from Firebase. Send it in a header to your function. Get the token from the req object's headers. Verify the token with firebase-admin. I'll include the documentation for the client below, then the gist of how I implemented it client-side with Apollo Client (GraphQL). Then I'll include the documentation for the backend, then the gist of how I implemented the backend, again with Apollo.
client (from the documentation):
const { initializeAppCheck, getToken } = require('firebase/app-check');
const appCheck = initializeAppCheck(
app,
{ provider: provider } // ReCaptchaV3Provider or CustomProvider
);
const callApiWithAppCheckExample = async () => {
let appCheckTokenResponse;
try {
appCheckTokenResponse = await getToken(appCheck, /* forceRefresh= */ false);
} catch (err) {
// Handle any errors if the token was not retrieved.
return;
}
// Include the App Check token with requests to your server.
const apiResponse = await fetch('https://yourbackend.example.com/yourApiEndpoint', {
headers: {
'X-Firebase-AppCheck': appCheckTokenResponse.token,
}
});
// Handle response from your backend.
};
client (gist from my implementation)
import { setContext } from "#apollo/client/link/context";
import { app } from '../firebase/setup';
import { initializeAppCheck, ReCaptchaV3Provider, getToken } from "firebase/app-check"
let appCheck
let appCheckTokenResponse
const getAppCheckToken = async () => {
const appCheckTokenResponsePromise = await getToken(appCheck, /* forceRefresh= */ false)
appCheckTokenResponse = appCheckTokenResponsePromise
}
const authLink = setContext(async (_, { headers }) => {
if (typeof window !== "undefined" && process.env.NEXT_PUBLIC_ENV === 'production') {
appCheck = initializeAppCheck(app, {
provider: new ReCaptchaV3Provider('my_public_key_from_recaptcha_V3'),
isTokenAutoRefreshEnabled: true
})
await getAppCheckToken()
}
return {
headers: {
...headers,
'X-Firebase-AppCheck': appCheckTokenResponse?.token,
},
}
})
backend / server (from the documentation)
const express = require('express');
const app = express();
const firebaseAdmin = require('firebase-admin');
const firebaseApp = firebaseAdmin.initializeApp();
const appCheckVerification = async (req, res, next) => {
const appCheckToken = req.header('X-Firebase-AppCheck');
if (!appCheckToken) {
res.status(401);
return next('Unauthorized');
}
try {
const appCheckClaims = await firebaseAdmin.appCheck().verifyToken(appCheckToken);
// If verifyToken() succeeds, continue with the next middleware
// function in the stack.
return next();
} catch (err) {
res.status(401);
return next('Unauthorized');
}
}
app.get('/yourApiEndpoint', [appCheckVerification], (req, res) => {
// Handle request.
});
backend / server (gist from my implementation)
import { https } from 'firebase-functions'
import gqlServer from './graphql/server'
const functions = require('firebase-functions')
const env = process.env.ENV || functions.config().config.env
const server = gqlServer()
const api = https.onRequest((req, res) => {
server(req, res)
})
export { api }
. . .
import * as admin from 'firebase-admin';
const functions = require('firebase-functions');
const env = process.env.ENV || functions.config().config.env
admin.initializeApp()
const appCheckVerification = async (req: any, res: any) => {
const appCheckToken = req.header('X-Firebase-AppCheck')
if (!appCheckToken) {
return false
}
try {
const appCheckClaims = await admin.appCheck().verifyToken(appCheckToken);
return true
} catch (error) {
console.error(error)
return false
}
}
. . .
const apolloServer = new ApolloServer({
introspection: isDevelopment,
typeDefs: schema,
resolvers,
context: async ({ req, res }) => {
if (!isDevelopment && !isTest) {
// use a different name so we don't shadow the appCheckVerification function above
const verified = await appCheckVerification(req, res)
if (!verified) throw Error('Something went wrong with verification')
}
return { req, res }
}
})
If you enforce App Check in Cloud Functions, it will only allow calls from apps that are registered in your project.
I'm not sure that is sufficient for your use case though, as I doubt most apps where you can provide a webhook will have implemented app attestation, which is how App Check recognizes valid requests.
You can generate an App Check token in the client and verify the token on the server using the Firebase Admin SDK; see the Firebase documentation for this flow.
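To tie those pieces together for the onRequest case in the question, a minimal sketch might look like the following (assuming firebase-functions, an initialized firebase-admin, and the X-Firebase-AppCheck header name used in the documentation snippets above; the exports.date name just mirrors the function in the question):
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();

exports.date = functions.https.onRequest(async (req, res) => {
  // The client sends the App Check token it obtained via getToken().
  const appCheckToken = req.header('X-Firebase-AppCheck');
  if (!appCheckToken) {
    res.status(401).send('Unauthorized');
    return;
  }
  try {
    // verifyToken() throws if the token is invalid or expired.
    await admin.appCheck().verifyToken(appCheckToken);
  } catch (err) {
    res.status(401).send('Unauthorized');
    return;
  }
  // Token verified: handle the request as usual.
  res.status(200).send(new Date().toISOString());
});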
The Firebase "enable App Check enforcement" documentation teaches you that, to validate the caller from your function, you just need to check context.app, and it gives you an example like this:
exports.EXAMPLE = functions.https.onCall((data, context) => {});
https://firebase.google.com/docs/app-check/cloud-functions?authuser=0
But when you deploy your function from the Google Cloud dashboard, you select HTTP FUNCTION -> Node.js 14, and then you are directed to code like this:
/**
* Responds to any HTTP request.
*
* @param {!express:Request} req HTTP request context.
* @param {!express:Response} res HTTP response context.
*/
exports.helloWorld = (req, res) => {
let message = req.query.message || req.body.message || 'Hello World!';
res.status(200).send(message);
};
My question when I saw this was: "How am I going to get a context if I only have request/response?"
The answer is simple: YOU MUST SWITCH THE CONSTRUCTORS.
You must rewrite your function so that, instead of dealing with req/res like any Express handler, it deals with data/context.
HTTP functions are different from callable functions (the ones that deal with data/context).
They are similar but not exactly equal, and some modifications will be necessary.
In particular, if your function does async work and has a delayed response, you will need to rewrite quite a few things.
Check this tutorial:
https://firebase.google.com/docs/functions/callable
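To make the switch concrete, here is a rough sketch of the helloWorld example above rewritten as a callable function with the context.app check (names are carried over from the earlier snippets; treat it as an illustration, not a drop-in replacement):
const functions = require('firebase-functions');

// Callable version: the client invokes this through the Firebase SDK
// (httpsCallable) instead of issuing a plain HTTP request.
exports.helloWorld = functions.https.onCall((data, context) => {
  // context.app is undefined when the request has no valid App Check token.
  if (context.app == undefined) {
    throw new functions.https.HttpsError(
      'failed-precondition',
      'The function must be called from an App Check verified app.'
    );
  }
  // data replaces req.query / req.body; the return value replaces res.send().
  return { message: data.message || 'Hello World!' };
});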
I've written a Cloud Task and it works: it triggers the URL I gave without any problems, but it won't stop retrying it.
How can I make it run only once?
What I'm trying to do is run a Firestore function once, in the future, on a document write in a collection. I found this tutorial for it.
So far my task creation code works perfectly and delivers the correct payload to the function it calls. The called function also works correctly the first time it runs and exits with status 200, but on the retries I have to exit with error 500 since there's no data to access anymore.
I can see the 200 and 500 entries in the Firestore function's logs, but the Cloud Tasks log is empty, even though a task has been run 50 times!
This is the full code
import * as functions from 'firebase-functions'
import * as admin from 'firebase-admin'
const { CloudTasksClient } = require('@google-cloud/tasks')
exports.moveActivityFromPlanToRecord = () =>
functions
.region('europe-west1')
.firestore.document('Users/{userId}/Activities/{activityId}')
.onCreate(async snapshot => {
const moveTime = snapshot.data()! as MoveTime
if (!moveTime || !moveTime.dueTime) {
console.log("DueTime is empty or null: \n" + moveTime)
return
}
// Get the project ID from the FIREBASE_CONFIG env var
const project = JSON.parse(process.env.FIREBASE_CONFIG!).projectId
const location = 'europe-west1'
const queue = 'activityDateEventChecker'
// queuePath is a string that uniquely identifies the queue
const tasksClient = new CloudTasksClient()
const queuePath: string =
tasksClient.queuePath(project, location, queue)
// URL to my callback function and the contents of the payload to deliver
const url = `https://${location}-${project}.cloudfunctions.net/activityDateEventCheckerCallback`
const docPath = snapshot.ref.path
const dueTime = moveTime.dueTime
const payload: MoveTaskPayload = { docPath, dueTime }
console.log(payload)
// build up the configuration for the Cloud Task
const task = {
httpRequest: {
httpMethod: 'POST',
url: url,
body: Buffer.from(JSON.stringify(payload)).toString('base64'),
headers: {
'Content-Type': 'application/json',
},
},
scheduleTime: {
seconds: moveTime.dueTime / 1000
}
}
// enqueue the task in the queue
return tasksClient.createTask({ parent: queuePath, task: task })
})
interface MoveTime extends admin.firestore.DocumentData {
dueTime?: number
}
interface MoveTaskPayload {
docPath: string,
dueTime: number
}
exports.activityDateEventCheckerCallback = () =>
functions
.region('europe-west1')
.https.onRequest(async (req, res) => {
const payload = req.body as MoveTaskPayload
try {
// getting the item
const activity = await admin.firestore().doc(payload.docPath).get()
// if time is up for it
if (Date.now() >= payload.dueTime && activity.data() != undefined) {
// getting path to activity to be in record
const pathUser = activity.ref.parent.parent?.path
const pathDocRecord = admin.firestore().doc(`${pathUser}/Record/${activity.id}`)
console.log("RECORD-- ", (await (await pathDocRecord.get()).data())?.subject)
// moving activity into record
await pathDocRecord.set(activity.data()!)
await activity.ref.delete()
// sending notif to user
const fcmPayload = {
notification: {
title: `${activity.data()?.subject}`,
body: " Time for activity. Record how it goes!"
},
data: {
activityId: activity.id
}
}
const user = await admin.firestore().doc(pathUser!).get()
const fcmToken: string = user.data()?.fcmToken
return admin.messaging().sendToDevice(fcmToken, fcmPayload)
}
return null
} catch (error) {
console.error(error)
res.status(500).send(error)
return null
}
})
Cloud Tasks retries a task when it does not get a 2XX response code.
You can configure retries on the Cloud Tasks queue using the maxAttempts parameter.
Details are in the documentation.
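For illustration, a rough sketch of that acknowledgement pattern for a callback like the one in the question (the Firestore-moving logic is elided, the names are carried over from the question, and firebase-admin is assumed to be initialized elsewhere, so treat this as a sketch rather than a drop-in fix): answer with a 2XX whenever the task should not run again, including the "document already moved" case, and keep 5XX only for failures that are genuinely worth retrying. To cap retries as well, lower the queue's maxAttempts retry setting (for example via gcloud tasks queues update; check the gcloud reference for the exact flag name).
import * as functions from 'firebase-functions'
import * as admin from 'firebase-admin'

exports.activityDateEventCheckerCallback = functions
  .region('europe-west1')
  .https.onRequest(async (req, res) => {
    const { docPath } = req.body
    const activity = await admin.firestore().doc(docPath).get()
    if (!activity.exists) {
      // An earlier attempt already moved the document: acknowledge with 2XX
      // so Cloud Tasks marks the task as done instead of retrying it.
      res.status(200).send('already handled')
      return
    }
    try {
      // ... move the activity into Record and notify the user, as in the question ...
      res.status(200).send('done')
    } catch (error) {
      console.error(error)
      // A 5XX tells Cloud Tasks the attempt failed and it may retry.
      res.status(500).send(String(error))
    }
  })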
I would like to call an asynchronous function outside the Lambda handler with the following code:
var client;
(async () => {
var result = await initSecrets("MyWebApi");
var secret = JSON.parse(result.Payload);
client= new MyWebApiClient(secret.API_KEY, secret.API_SECRET);
});
async function initSecrets(secretName) {
var input = {
"secretName" : secretName
};
var result = await lambda.invoke({
FunctionName: 'getSecrets',
InvocationType: "RequestResponse",
Payload: JSON.stringify(input)
}).promise();
return result;
}
exports.handler = async function (event, context) {
var myReq = await client('Request');
console.log(myReq);
};
The 'client' does not get initialized. The same code works perfectly if executed within the handler.
initSecrets contains a lambda invocation of getSecrets() which calls the AWS SecretsManager
Does anyone have an idea how asynchronous functions can properly be called for initialization purposes outside the handler?
Thank you very much for your support.
I ran into a similar issue trying to get Next.js to work with aws-serverless-express.
I fixed it by doing the below (using TypeScript, so just ignore the :any type bits):
const appModule = require('./App');
let server: any = undefined;
appModule.then((expressApp: any) => {
server = createServer(expressApp, null, binaryMimeTypes);
});
function waitForServer(event: any, context: any){
setImmediate(() => {
if(!server){
waitForServer(event, context);
}else{
proxy(server, event, context);
}
});
}
exports.handler = (event: any, context: any) => {
if(server){
proxy(server, event, context);
}else{
waitForServer(event, context);
}
}
So for your code, maybe something like:
var client = undefined;
initSecrets("MyWebApi").then(result => {
var secret = JSON.parse(result.Payload);
client= new MyWebApiClient(secret.API_KEY, secret.API_SECRET)
})
function waitForClient(){
setImmediate(() => {
if(!client ){
waitForClient();
}else{
client('Request')
}
});
}
exports.handler = async function (event, context) {
if(client){
client('Request')
}else{
waitForClient(event, context);
}
};
client is being called before it has been initialised; the client var is "exported" (and called) before the async function would have completed, so when you call await client(), client is still undefined.
Edit: try something like this:
var client = async which => {
var result = await initSecrets("MyWebApi");
var secret = JSON.parse(result.Payload);
let api = new MyWebApiClient(secret.API_KEY, secret.API_SECRET);
return api(which) // assuming api class is returning a promise
}
async function initSecrets(secretName) {
var input = {
"secretName" : secretName
};
var result = await lambda.invoke({
FunctionName: 'getSecrets',
InvocationType: "RequestResponse",
Payload: JSON.stringify(input)
}).promise();
return result;
}
exports.handler = async function (event, context) {
var myReq = await client('Request');
console.log(myReq);
};
This can also be solved with async/await, given Node v8+.
You can load your configuration in a module like so...
const fetch = require('node-fetch');
module.exports = async () => {
const config = await fetch('https://cdn.jsdelivr.net/gh/GEOLYTIX/public/z2.json');
return await config.json();
}
Then declare a _config outside the handler by requiring and executing the config module. Your handler must be an async function. _config will be a promise at first, which you must await to resolve into the configuration object.
const _config = require('./config')();
module.exports = async (req, res) => {
const config = await _config;
res.send(config);
}
Ideally you want your initialization code to run during the initialization phase of the Lambda, not the invocation phase, to minimize cold start times. Synchronous code at module level runs at initialization time, and AWS recently added top-level await support in Node 14 and newer Lambdas: https://aws.amazon.com/blogs/compute/using-node-js-es-modules-and-top-level-await-in-aws-lambda/ . Using this, you can make the init phase wait for your async initialization code with top-level await, like so:
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
console.log("start init");
await sleep(1000);
console.log("end init");
export const handler = async (event) => {
return {
statusCode: 200,
body: JSON.stringify('Hello from Lambda!'),
};
};
This works great if you are using ES modules. If for some reason you are stuck with CommonJS (e.g. because tooling like jest or ts-node doesn't yet fully support ES modules), you can make your CommonJS module look like an ES module by exporting a Promise that waits on your initialization rather than exporting an object, like so:
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
const main = async () => {
console.log("start init");
await sleep(1000);
console.log("end init");
const handler = async (event) => {
return {
statusCode: 200,
body: JSON.stringify('Hello from Lambda!'),
};
};
return { handler };
};
// note we aren't exporting main here, but rather the result
// of calling main(), which is a promise resolving to {handler}:
module.exports = main();