If I do the following:
// Gruntfile demonstrating the problem: grunt.task.run() only QUEUES a task,
// so 'myTask' finishes before the queued 'myAsync' task ever starts.
module.exports = function(grunt) {
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
});
// Async task: this.async() tells Grunt to wait until done() is called
// before moving on to the next queued task.
grunt.registerTask('myAsync','An async test task',function(){
var done = this.async();
setTimeout(function(){
console.log('This is my async task');
done();
},1000);
});
// This task only queues 'myAsync' (it does NOT run it inline), then logs
// immediately -- which produces the "wrong" output order shown below.
grunt.registerTask('myTask','A test task',function(){
grunt.task.run('myAsync');
console.log('This is my task');
});
grunt.registerTask('default', ['myTask']);
};
The output is:
Running "myTask" task
This is my task
Running "myAsync" task
This is my async task
So 'myTask' is not waiting for 'myAsync' to finish. I would like 'myTask' to wait for 'myAsync'. I came up with the following, but I'm not sure if this is the way to do it:
// Workaround attempt: attach the shared async helper directly to the
// grunt instance so it can be run as a task OR called from another task.
module.exports = function(grunt) {
// Project configuration.
// Helper that expects to be bound to a task context (uses this.async())
// and invokes `callback` before signalling completion.
// NOTE(review): calling grunt.myAsync() with no argument (as the
// 'myAsync' task below does) will throw when callback() runs -- verify.
grunt.myAsync = function myAsync(callback){
var done = this.async();
setTimeout(function(){
console.log('This is my async task');
callback();
done();
},1000);
};
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
});
// Task wrapper so grunt can run the helper on its own.
grunt.registerTask('myAsync','An async test task',function(){
grunt.myAsync();
});
// Calls the helper with this task's context so it can wait on it.
grunt.registerTask('myTask','A test task',function(){
var done = this.async();
console.dir(this);
grunt.myAsync.call(this,function(){
console.log('This is my task');
done();
});
});
// Default task(s).
grunt.registerTask('default', ['myTask']);
};
It allows me to either let grunt run 'myAsync' or run it from another task and wait for it.
Is there another way of doing this? I could not find out how to wait for async tasks when calling them from another task.
[UPDATE]
Kyle got me on the right track; I have added 3 tasks: 1) drop db, 2) run tests, 3) run tests and drop db. Since tasks run in a queue, the 3rd task only has to queue the other 2, instead of having the first task (run tests) call the second task (drop db).
It looks something like this:
// Runs the integration tests (simulated here with a timeout).
grunt.registerTask('runtests','Runs the integration tests.',function(){
  var done = this.async();
  setTimeout(function(){
    console.log('ran the tests db');
    done();
  },100);
});
// Drops the database (simulated here with a timeout).
grunt.registerTask('dropdb','Drops the db.',function(){
  var done = this.async();
  setTimeout(function(){
    console.log('droped db');
    done();
  },100);
});
// Composite task: queues 'dropdb' (unless --nodrop is passed) followed by
// 'runtests'. Grunt runs queued tasks sequentially, waiting for each async
// task to call done() before starting the next.
grunt.registerTask('dropandrun','Runs the integration tests.',function(){
  if(!grunt.option('nodrop')){
    // Bug fix: the task above is registered as 'dropdb', but the original
    // queued the non-existent 'dropsdb', which makes Grunt abort with a
    // "Task not found" warning.
    grunt.task.run('dropdb');
  }
  grunt.task.run('runtests');
});
Grunt runs tasks in a queue. So if you queue a task within another task with grunt.task.run(), it will run that task after the current has finished.
Rather than adding methods to the grunt instance itself, you can create your own functions and libraries your tasks can call. Such as like this:
// Recommended pattern: keep reusable async logic in a plain function
// (or a separate module) rather than attaching it to the grunt instance.
module.exports = function(grunt) {
// Shared async helper: invokes `callback` after a 1 second delay.
function myAsync(callback){
setTimeout(function(){
console.log('This is my async task');
callback();
},1000);
}
grunt.initConfig({
pkg: grunt.file.readJSON('package.json'),
});
// Task wrapper: passing this.async() as the callback makes the task
// complete exactly when the helper finishes.
grunt.registerTask('myAsync','An async test task',function(){
myAsync(this.async());
});
// Another task can call the same helper directly and wait on it.
grunt.registerTask('myTask','A test task',function(){
var done = this.async();
myAsync(function() {
console.log('This is my task');
done();
});
});
grunt.registerTask('default', ['myTask']);
};
Later as those functions grow you can move them into their own file and use them like this:
// ./lib/myasync.js
// Standalone module version of the async helper: invokes `callback`
// after a 1 second delay. Required from the Gruntfile as shown below.
module.exports = function(callback) {
setTimeout(function() {
console.log('This is my async function');
callback();
}, 1000);
};
...
// Gruntfile.js
var myAsync = require('./lib/myasync.js');
Related
I'm following this tutorial so that I generate a static file during the vercel build. I've tried moving the files around but it always shows a log like:
23:41:48.432 $ node ./pages/api/build.js 23:41:48.493 Build time file created successfully! 23:41:48.495 Done in 0.09s. 23:41:48.507 Error: A "routes-manifest.json" couldn't be found. This is normally caused by a misconfiguration in your project.
Then it links to this. I've checked the issues and everything seems to be fine.
If I remove the "vercel-build": "node ./pages/api/build.js" line from package.json, the error disappears — but so does the functionality.
my pages/api/index.js file:
// Serverless function that reports when it was built.
// './built-time' is generated at build time by build.js (below) and
// exports a date string.
const BuiltTime = require('./built-time');
// Plain-text response comparing build time with the current time.
module.exports = (req, res) => {
res.setHeader('content-type', 'text/plain');
res.send(`
This Serverless Function was built at ${new Date(BuiltTime)}.
The current time is ${new Date()}
`);
};
my pages/build.js:
// Build-time script: writes a small module that captures the build
// timestamp, so the API handler above can import it at runtime.
const fs = require('fs');
fs.writeFile(
'pages/api/built-time.js',
`module.exports = '${new Date()}'`,
(err) => {
if (err) throw err;
console.log('Build time file created successfully!');
}
);
my package.json:
{
...
"scripts":{
"vercel-build": "node ./pages/api/build.js",
}
}
In the end I didn't use "vercel-build". I just run a script before the build:
// package.json
{
"scripts": {
"make-q": "node ./build.js",
"build": "yarn run make-q && next build"
  }
}
The build file can't use import or call typescript files (at least for now):
// build.js
// Pre-build step (run via the "make-q" script before `next build`):
// generates questionsDB.js so API routes can require static data.
// NOTE: runs under plain node, so no `import` or TypeScript here.
const slugify = require('./utils/slugify');
const fs = require('fs');
const qs = slugify('some string');
fs.writeFile(
'questionsDB.js',
`module.exports = ${JSON.stringify(qs, null, 2)}`,
(err) => {
if (err) throw err;
console.log('file created successfully!');
}
);
Finally, inside the pages/api/test.js:
// pages/api/test.js -- serves the data generated at build time.
const db = require('../../db');
module.exports = (req, res) => {
res.setHeader('content-type', 'text/plain');
res.send(`
working:
${db}
`);
};
Now, if I call url/api/test I get the result based on the build.
Things I tried that failed:
Different node version
Update nextjs
remove yarn.lock and build again
I'm trying to get apollo gateway to run in google's cloud functions (particularly through firebase). Running apollo server is super easy on cloud functions with the following code
// Plain Apollo Server on Cloud Functions: everything is synchronous, so
// the express app is fully configured before it is exported.
// NOTE(review): buildFederatedSchema, typeDefs and resolvers are not
// required in this snippet -- presumably imported elsewhere; confirm.
const functions = require("firebase-functions");
const { ApolloServer } = require("apollo-server-express");
var app = require("express")();
const server = new ApolloServer({
schema: buildFederatedSchema([{ typeDefs, resolvers }])
});
server.applyMiddleware({ app });
exports.apollo = functions.https.onRequest(app);
That all works nice and dandy because there's no async functions to wait on. But with apollo gateway, we do have an async function to wait on before the server can start listening.
The way it works with just expressjs is this:
// Apollo Gateway with plain express: gateway.load() is async, so the
// server can only start listening inside the async IIFE.
// NOTE(review): ApolloGateway is not required in this snippet --
// presumably `require("@apollo/gateway")` elsewhere; confirm.
const { ApolloServer } = require("apollo-server-express");
var app = require("express")();
const gateway = new ApolloGateway({
serviceList: [{ name: "clients", url:"http://localhost:5000/apollo/graphql" }]
});
(async () => {
const { schema, executor } = await gateway.load();
const server = new ApolloServer({ schema, executor });
server.listen().then(({ url }) => {
console.log(`🚀 Server ready at ${url}`);
});
})();
So now the question becomes how do I execute that in a cloud function? One idea I had was to pass the express app to the firebase functions, and THEN do all the async stuff to initialize the listening for federation. But that doesn't work :(. Any ideas?
// Failed attempt (as stated above): the app is handed to onRequest
// BEFORE the async IIFE finishes, so early requests hit an express app
// that has no Apollo middleware attached yet.
const functions = require("firebase-functions");
const { ApolloServer } = require("apollo-server-express");
var app = require("express")();
const gateway = new ApolloGateway({
serviceList: [{ name: "clients", url: "http://localhost:5000/apollo/graphql" }]
});
exports.apollo = functions.https.onRequest(app);
(async () => {
const { schema, executor } = await gateway.load();
const server = new ApolloServer({ schema, executor });
server.applyMiddleware({ app });
})()
I got it to work! So the trick was to put the logic of setting up the federation INSIDE the onRequest piece.
Here's the solution:
// Working solution: defer gateway initialization until the FIRST request,
// and await it inside the request handler before delegating to express.
// NOTE(review): `functions` (firebase-functions) is assumed to be
// required above this snippet; confirm.
var app = require("express")();
// Cached across warm invocations; undefined until the gateway has loaded.
var server;
var start_gateway = async () => {
// Already initialized (warm container) -- nothing to do.
if (server !== undefined) return;
const gateway = new ApolloGateway({
serviceList: [{ name: "clients", url: "http://localhost:4000/graphql"
}]
});
const { schema, executor } = await gateway.load();
server = new ApolloServer({ schema, executor });
server.applyMiddleware({ app });
console.log("Made FEDERATED GRAPH");
return;
};
// Every request awaits initialization first; after the first request this
// is effectively a no-op.
exports.apollo = functions.https.onRequest(async (req, res) => {
await start_gateway();
return await app(req, res);
})
I would like to call an asynchronous function outside the lambda handler with by the following code:
// Question code: tries to initialize `client` outside the handler.
var client;
// NOTE(review): this async arrow is DEFINED but never invoked -- there is
// no trailing () -- so the body never runs and `client` stays undefined.
// Even if invoked, nothing awaits it before the handler uses `client`.
(async () => {
var result = await initSecrets("MyWebApi");
var secret = JSON.parse(result.Payload);
client= new MyWebApiClient(secret.API_KEY, secret.API_SECRET);
});
// Fetches secrets by invoking the 'getSecrets' lambda synchronously
// (RequestResponse) and returns its raw result.
async function initSecrets(secretName) {
var input = {
"secretName" : secretName
};
var result = await lambda.invoke({
FunctionName: 'getSecrets',
InvocationType: "RequestResponse",
Payload: JSON.stringify(input)
}).promise();
return result;
}
// Fails: `client` is still undefined when the handler runs.
exports.handler = async function (event, context) {
var myReq = await client('Request');
console.log(myReq);
};
The 'client' does not get initialized. The same code works perfectly if executed within the handler.
initSecrets contains a lambda invocation of getSecrets() which calls the AWS SecretsManager
Has anyone an idea how asynchronous functions can be properly called for initialization purpose outside the handler?
Thank you very much for your support.
I ran into a similar issue trying to get next-js to work with aws-serverless-express.
I fixed it by doing the below (using typescript so just ignore the :any type bits)
// Workaround used with next-js + aws-serverless-express: the app module
// resolves asynchronously, so poll until the server is ready.
// NOTE(review): createServer, proxy and binaryMimeTypes are presumably
// imported from aws-serverless-express elsewhere -- confirm.
const appModule = require('./App');
let server: any = undefined;
// './App' exports a promise; cache the created server once it resolves.
appModule.then((expressApp: any) => {
server = createServer(expressApp, null, binaryMimeTypes);
});
// Busy-wait (via setImmediate, so the event loop keeps turning) until the
// server exists, then proxy the pending event to it.
function waitForServer(event: any, context: any){
setImmediate(() => {
if(!server){
waitForServer(event, context);
}else{
proxy(server, event, context);
}
});
}
// Fast path once warm; falls back to polling during cold start.
exports.handler = (event: any, context: any) => {
if(server){
proxy(server, event, context);
}else{
waitForServer(event, context);
}
}
So for your code maybe something like
// Same polling pattern applied to the secrets client: kick off the async
// init at module load, and have the handler poll until it completes.
var client = undefined;
initSecrets("MyWebApi").then(result => {
var secret = JSON.parse(result.Payload);
client= new MyWebApiClient(secret.API_KEY, secret.API_SECRET)
})
// Polls via setImmediate until `client` is set, then issues the request.
// NOTE(review): declared with no parameters but called below with
// (event, context); the extra arguments are silently ignored.
function waitForClient(){
setImmediate(() => {
if(!client ){
waitForClient();
}else{
client('Request')
}
});
}
exports.handler = async function (event, context) {
if(client){
client('Request')
}else{
waitForClient(event, context);
}
};
client is being called before it has initialised; the client var is being "exported" (and called) before the async function would have completed. When you are calling await client() the client would still be undefined.
edit, try something like this
// Alternative: make `client` itself an async function that resolves the
// secrets on demand, so the handler can simply await it.
// NOTE(review): as written this re-fetches the secrets on EVERY
// invocation -- no caching across warm invocations; confirm acceptable.
var client = async which => {
var result = await initSecrets("MyWebApi");
var secret = JSON.parse(result.Payload);
let api = new MyWebApiClient(secret.API_KEY, secret.API_SECRET);
return api(which) // assuming api class is returning a promise
}
// Fetches secrets by synchronously invoking the 'getSecrets' lambda.
async function initSecrets(secretName) {
var input = {
"secretName" : secretName
};
var result = await lambda.invoke({
FunctionName: 'getSecrets',
InvocationType: "RequestResponse",
Payload: JSON.stringify(input)
}).promise();
return result;
}
exports.handler = async function (event, context) {
var myReq = await client('Request');
console.log(myReq);
};
This can be also be solved with async/await give Node v8+
You can load your configuration in a module like so...
// Config module: exports an async factory that downloads and parses the
// remote configuration JSON.
const fetch = require('node-fetch');
module.exports = async () => {
const config = await fetch('https://cdn.jsdelivr.net/gh/GEOLYTIX/public/z2.json');
return await config.json();
}
Then declare a _config outside the handler by require / executing the config module. Your handler must be an async function. _config will be a promise at first which you must await to resolve into the configuration object.
// The fetch starts once at module load; `_config` is a promise that every
// invocation awaits (already resolved on warm invocations).
const _config = require('./config')();
module.exports = async (req, res) => {
const config = await _config;
res.send(config);
}
Ideally you want your initialization code to run during the initialization phase and not the invocation phase of the lambda to minimize cold start times. Synchronous code at module level runs at initialization time and AWS recently added top level await support in node14 and newer lambdas: https://aws.amazon.com/blogs/compute/using-node-js-es-modules-and-top-level-await-in-aws-lambda/ . Using this you can make the init phase wait for your async initialization code by using top level await like so:
// ES-module Lambda: top-level await (Node 14+ ESM) runs during the
// function's INIT phase, so the handler is only invoked after this async
// initialization has completed.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))
console.log("start init");
await sleep(1000);
console.log("end init");
export const handler = async (event) => {
return {
statusCode: 200,
body: JSON.stringify('Hello from Lambda!'),
};
};
This works great if you are using ES modules. If for some reason you are stuck using commonjs (e.g. because your tooling like jest or ts-node doesn't yet fully support ES modules) then you can make your commonjs module look like an es module by making it export a Promise that waits on your initialization rather than exporting an object. Like so:
// CommonJS equivalent of the top-level-await pattern: export a promise
// that resolves to { handler } once async initialization has completed.
const sleep = ms => new Promise(resolve => setTimeout(resolve, ms))

// Runs the async initialization, then builds and returns the handler.
const main = async () => {
  console.log("start init");
  await sleep(1000);
  console.log("end init");
  const handler = async (event) => {
    return {
      statusCode: 200,
      body: JSON.stringify('Hello from Lambda!'),
    };
  };
  return { handler };
};
// Bug fix: the two comment lines below used '#', which is not a comment
// marker in JavaScript and made this snippet a syntax error.
// note we aren't exporting main here, but rather the result
// of calling main() which is a promise resolving to {handler}:
module.exports = main();
I did a fresh install of express app. And my
package.json
{
"name": "projects",
"version": "0.0.0",
"private": true,
"scripts": {
"start": "nodemon ./app.js"
},
"dependencies": {
"body-parser": "~1.18.2",
"cookie-parser": "~1.4.3",
"debug": "~2.6.9",
"express": "~4.15.5",
"morgan": "~1.9.0",
"pug": "2.0.0-beta11",
"serve-favicon": "~2.4.5"
},
"devDependencies": {
"browser-sync": "^2.18.13",
"connect-browser-sync": "^2.1.0",
"nodemon": "^1.12.5",
"reload": "^2.2.2"
}
}
app.js
// Express app wired with the `reload` package. `reload(app)` injects the
// server-side endpoint; the client must include /reload/reload.js (see the
// Pug layout below) for the browser to auto-refresh on server restart.
var express = require('express');
var http = require('http');
var reload = require('reload');
var path = require('path');
var favicon = require('serve-favicon');
var logger = require('morgan');
var cookieParser = require('cookie-parser');
var bodyParser = require('body-parser');
var index = require('./routes/index');
var users = require('./routes/users');
var app = express();
app.set('port', process.env.PORT || 3000);
// view engine setup
app.set('views', path.join(__dirname, 'views'));
app.set('view engine', 'pug');
// uncomment after placing your favicon in /public
//app.use(favicon(path.join(__dirname, 'public', 'favicon.ico')));
app.use(logger('dev'));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, 'public')));
app.use('/', index);
app.use('/users', users);
// catch 404 and forward to error handler
app.use(function(req, res, next) {
var err = new Error('Not Found');
err.status = 404;
next(err);
});
// error handler
app.use(function(err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get('env') === 'development' ? err : {};
// render the error page
res.status(err.status || 500);
res.render('error');
});
// Create the HTTP server explicitly so reload can hook into the app
// before it starts listening.
var server = http.createServer(app)
reload(app);
server.listen(app.get('port'), function () {
console.log('Web server listening on port ' + app.get('port'))
});
module.exports = app;
I dont want to use gulp if reload does my job.
When i change the text welcome in index.pug, the chrome is not reloaded. If i refresh i could see the change.
How can i auto reload my page on changes in any of the folder.
Note: nodemon is working fine. Again, it doesn't reload the browser.
The Reload plugin reloads the page only if the Express server is restarted by nodemon. From the docs:
When you restart the server, the client will detect the server being restarted and automatically refresh the page.
Nodemon does not watch Pug templates, so it's not restarted on template change:
By default, nodemon looks for files with the .js, .mjs, .coffee, .litcoffee, and .json extensions.
You can set .pug extension to be monitored by nodemon. But I think it will lead to unnecessary server restarts because Pug templates seem to be evaluated at runtime when the request for the page is happening.
Don't also forget to add reload script to all your pages by modifying the main layout template:
//- Main layout: the reload client script must be included at the end of
//- the body on every page so the browser can reconnect after restarts.
doctype html
html
head
...
body
block content
script(src='/reload/reload.js')
To achieve that, you need to install Livereload to work together with Nodemon. A simple Express app with the necessary changes would look like this:
Complete step by step explained
// Express app using livereload + connect-livereload: the livereload server
// watches for changes, and the connect middleware injects the client
// script into every rendered page automatically.
var createError = require("http-errors");
var express = require("express");
var path = require("path");
var cookieParser = require("cookie-parser");
var logger = require("morgan");
var livereload = require("livereload");
var connectLiveReload = require("connect-livereload");
var indexRouter = require("./routes/index");
var usersRouter = require("./routes/users");
const liveReloadServer = livereload.createServer();
// When the browser reconnects (i.e. after nodemon restarts the server),
// trigger a refresh shortly afterwards so the client picks up changes.
liveReloadServer.server.once("connection", () => {
setTimeout(() => {
liveReloadServer.refresh("/");
}, 100);
});
var app = express();
// Injects the livereload client script into served HTML.
app.use(connectLiveReload());
// view engine setup
app.set("views", path.join(__dirname, "views"));
app.set("view engine", "hbs");
app.use(logger("dev"));
app.use(express.json());
app.use(express.urlencoded({ extended: false }));
app.use(cookieParser());
app.use(express.static(path.join(__dirname, "public")));
app.use("/", indexRouter);
app.use("/users", usersRouter);
// catch 404 and forward to error handler
app.use(function (req, res, next) {
next(createError(404));
});
// error handler
app.use(function (err, req, res, next) {
// set locals, only providing error in development
res.locals.message = err.message;
res.locals.error = req.app.get("env") === "development" ? err : {};
// render the error page
res.status(err.status || 500);
res.render("error");
});
module.exports = app;
I get 'Cannot find module 'firebase' when I try to run this in Lambda (Node.js 4.3)
var Firebase = require('firebase');
Same thing happens when I try to upload a zipped package that includes node_modules/firebase
Does anybody have a working 'write from lambda to firebase' implementation?
To safely use firebase npm package (version 3.3.0) in AWS Lambda (Nodejs 4.3), Please do the following:
// Pattern for safely using the firebase npm package (v3.3.0) in Lambda.
// The body contains placeholder pseudo-code (<Your Logic here...>).
'use strict';
var firebase = require("firebase");
exports.handler = (event, context, callback) => {
// Firebase keeps sockets open; without this flag Lambda would wait for
// the event loop to drain and the invocation would time out.
context.callbackWaitsForEmptyEventLoop = false; //<---Important
var config = {
apiKey: "<<apikey>>",
authDomain: "<<app_id>>.firebaseapp.com",
databaseURL: "https://<<app_id>>.firebaseio.com",
storageBucket: "<<app_id>>.appspot.com",
};
// Warm containers reuse this module; initializing twice throws.
if(firebase.apps.length == 0) { // <---Important!!! In lambda, it will cause double initialization.
firebase.initializeApp(config);
}
...
<Your Logic here...>
...
};
I solved my problem by using firebase REST api
var https = require('https');
exports.handler = function(event, context, callback) {
var body = JSON.stringify({
foo: "bar"
})
var https = require('https');
var options = {
host: 'project-XXXXX.firebaseio.com',
port: 443,
path: '/.json',
method: 'POST'
};
var req = https.request(options, function(res) {
console.log(res.statusCode);
res.on('data', function(d) {
process.stdout.write(d);
});
});
req.end(body);
req.on('error', function(e) {
console.error(e);
});
callback(null, "some success message");
}
This is late, but in case someone else is looking:
Zipping your project folder instead of the contents of the project folder can cause this. The zipped folder, when extracted, should not contain a folder with the lambda files in it, but should have the index.js file and the node_modules folder at root level.
A working example of a lambda function is (using latest shiny firebase stuff *sigh*):
var firebase = require('firebase');
// Your service account details
var credentials = {
"type": "service_account",
"project_id": "project-123451234512345123",
"private_key_id": "my1private2key3id",
"private_key": "-----BEGIN PRIVATE KEY-----InsertKeyHere-----END PRIVATE KEY-----\n",
"client_email": "projectname#project-123451234512345123.iam.gserviceaccount.com",
"client_id": "1111222223333344444",
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
"token_uri": "https://accounts.google.com/o/oauth2/token",
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/projectname%40project-123451234512345123.iam.gserviceaccount.com"
};
firebase.initializeApp({
serviceAccount: credentials,
databaseURL: "https://project-123451234512345123.firebaseio.com"
});
exports.handler = function (event, context, callback) {
// I use some data passed in from AWS API Gateway:
if (!event.firebaseUid) {
callback('Missing param for id');
}
firebase.database().ref().child('users').child(firebaseUid).child('at').set(newTokens.access_token).then(function (data) {
console.log('Firebase data: ', data);
firebase.database().goOffline();
callback(null, 'Firebase data: ', data);
}).catch(function (error) {
callback('Database set error ' + error);
});
};
Now for the caveat. I have experienced this causing the lambda function to timeout even after the firebase callback has happened, ie. the set function seems to create a listener that holds the lambda function open despite return of correct data.
Update: Calling firebase.database().goOffline() fixes the Lambda function timeout issue i was experiencing.
The usual cautions about security not being verified or appropriate, and the possibilities of halting space and time by using this apply.
2017-03-22 edit: google just announced firebase cloud functions, which is a much better way to do this. Cloud functions work just like lambda, and can trigger from firebase events.
Here's my solution using the REST api (so you don't need to require anything):
var https = require('https');
var firebaseHost = "yourapp.firebaseio.com";

/**
 * Fetches the JSON value stored at `key` from the Firebase REST API.
 * @param {string} key - Database path (e.g. "/foo/bar").
 * @returns {Promise<*>} Resolves with the parsed JSON body; rejects on request error.
 */
function fbGet(key){
  return new Promise((resolve, reject) => {
    const request = https.request(
      {
        hostname: firebaseHost,
        port: 443,
        path: key + ".json",
        method: 'GET'
      },
      (response) => {
        response.setEncoding('utf8');
        let payload = '';
        response.on('data', (chunk) => {
          payload += chunk;
        });
        response.on('end', () => {
          resolve(JSON.parse(payload));
        });
      }
    );
    request.end();
    request.on('error', reject);
  });
}
/**
 * Writes `value` at `key` via an HTTP PUT to the Firebase REST API.
 * @param {string} key - Database path (e.g. "/foo/bar").
 * @param {*} value - JSON-serializable value to store.
 * @returns {Promise<string>} Resolves with the raw response body; rejects on request error.
 */
function fbPut(key, value){
  return new Promise((resolve, reject) => {
    const request = https.request(
      {
        hostname: firebaseHost,
        port: 443,
        path: key + ".json",
        method: 'PUT'
      },
      (response) => {
        console.log("request made")
        response.setEncoding('utf8');
        let payload = '';
        response.on('data', (chunk) => {
          payload += chunk;
        });
        response.on('end', () => {
          resolve(payload);
        });
      }
    );
    request.end(JSON.stringify(value));
    request.on('error', reject);
  });
}
You can use it like this:
fbPut("/foo/bar", "lol").then(res => {
console.log("wrote data")
})
And then:
fbGet("/foo/bar").then(data => {
console.log(data); // prints "lol"
}).catch(e => {
console.log("error saving to firebase: ");
console.log(e);
})
Another alternative if you're using a node-based development setup is to use the node-lambda package from here. Essentially it provides wrappers to set up, test and deploy to lambda. node-lambda deploy will package up any modules you've installed (e.g. with npm i --save firebase) and make sure they're available on Lambda itself. I've found it really helpful for managing external modules.
For me firebase-admin should do the trick.
https://firebase.google.com/docs/admin/setup
Thanks for Josiah Choi for suggesting context.callbackWaitsForEmptyEventLoop though. So lambda doesn't need to initializeFirebase everytimes. My first run was really slow.
var firebase = require('firebase-admin');
module.exports.Test = (event, context, callback) => {
context.callbackWaitsForEmptyEventLoop = false; //<---Important
if(firebase.apps.length == 0) { // <---Important!!! In lambda, it will cause double initialization.
firebase.initializeApp({
credential: firebase.credential.cert("serviceAccount.json"),
databaseURL: <YOUR FIREBASE URL>
});
}
firebase.database().ref('conversation').once('value').then(function(snapshot) {
console.log (snapshot.val()) ;
var bodyReturn = {
input: snapshot.val()
} ;
callback(null,bodyReturn);
context.succeed() ;
});
};
After trying a few things, this seems to work for me (v 3.10.8) :
// Stress-check snippet (firebase v3.10.8): requiring and guarding the
// init five times to show the apps.length guard prevents the
// double-initialization error on repeated use.
// NOTE(review): the brace-less nested ifs make everything after
// initializeApp part of the loop body only by accident of formatting --
// the read below runs on every iteration regardless of the guard.
for(var i=0;i<5;i++)
{
var firebase = require('firebase');
var config = {
apiKey: "",
authDomain: "",
databaseURL: "",
storageBucket: "",
messagingSenderId: ""
};
// Only initialize once; subsequent iterations see apps.length > 0.
if(firebase.apps)
if(firebase.apps.length==0)
firebase.initializeApp(config)
firebase.database().ref().child("test").once('value').
then(function(snapshot) {
console.log(snapshot.val());
});
}