I have added a JSHint task to my Gruntfile and created a custom reporter to send the JSHint output out as an email. My custom reporter function is invoked, but no emails come through, and there are no errors in the code.
Relevant versions:
"grunt": "^0.4.5",
"nodemailer": "^1.11.0",
"nodemailer-sendmail-transport": "^1.0.0"
Here is the sample reporter code:
var nodemailer = require('nodemailer');
var smtpTransport = require('nodemailer-smtp-transport');
var async = require('async');

module.exports = {
    reporter: function (errors) {
        var len = errors.length,
            str = '';
        var items = [1];

        errors.forEach(function (r) {
            var file = r.file,
                err = r.error;
            str += file + ": line " + err.line + ", col " +
                err.character + ", " + err.reason + "\n";
        });

        if (str) {
            str += "\n" + len + " error" + ((len === 1) ? "" : "s") + "\n";
        }

        var transporter = nodemailer.createTransport(smtpTransport({
            service: "gmail",
            secureConnection: false, // use SSL
            port: 587, // port for secure SMTP
            auth: {
                user: "<my gmail username>",
                pass: "<gmail account password>"
            },
            tls: {
                ciphers: 'SSLv3'
            },
            logger: true, // log to console
            debug: true // include SMTP traffic in the logs
        }));

        // setup e-mail data with unicode symbols
        var mailOptions = {
            from: '<sender address>',
            to: '<recipient address>',
            subject: 'Hello', // Subject line
            text: "why are you not working"
            /* text: str */ // plaintext body
            /* html: '<b>Hello world</b>' // html body */
        };

        async.eachSeries(items, function (item, next) {
            transporter.sendMail(mailOptions, function (error, response) {
                // THIS CALLBACK IS NOT CALLED AT ALL
                if (error) {
                    console.log(error);
                } else {
                    console.log("Message sent");
                }
                next(null);
            });
        }, function (err) {
            // All tasks are done now
            console.log('All tasks are done now');
        });
    }
};
With async or without async it makes no difference; no emails are sent. I also tried turning on Gmail's "Allow less secure apps" setting by following another Stack Overflow post, but that did not help either.
I would like to know whether this is the correct approach or not. Any help/input is appreciated.
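To isolate the problem from Grunt, a minimal standalone script like the one below (same placeholder credentials and addresses as above) can be run with plain node to check whether the transport itself delivers mail outside of the reporter:

// test-mail.js -- a minimal sketch, run with `node test-mail.js`
// (credentials and addresses are placeholders, as in the reporter above)
var nodemailer = require('nodemailer');
var smtpTransport = require('nodemailer-smtp-transport');

var transporter = nodemailer.createTransport(smtpTransport({
    service: 'gmail',
    auth: {
        user: '<my gmail username>',
        pass: '<gmail account password>'
    }
}));

transporter.sendMail({
    from: '<sender address>',
    to: '<recipient address>',
    subject: 'SMTP test',
    text: 'Standalone transport test'
}, function (error, info) {
    if (error) {
        console.log('sendMail error:', error);
    } else {
        console.log('Message sent:', info);
    }
});

If this script sends mail but the Grunt run does not, the transport configuration is fine and the difference lies in how the reporter runs inside Grunt.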
hapi version: 16.7
joi version: 10.6
Deployed on AWS EC2 behind an nginx reverse proxy.
Server Code in server.js:
'use strict';
//let newrelic = require('newrelic');
let Hapi = require('hapi');
let Routes = require('./Routes');
let Plugins = require('./Plugins');
let Bootstrap = require('./Utils/BootStrap');
process.env.NODE_TLS_REJECT_UNAUTHORIZED = "0";
const Inert = require('inert');
const Vision = require('vision');
const HapiSwagger = require('hapi-swagger');
const Pack = require('./package');
const Config = require('./Config');
const mongoose = require('mongoose');
mongoose.Promise = global.Promise;

const server = new Hapi.Server();
server.connection({
    port: Config.dbConfig.config.PORT,
    routes: { cors: true }
});

let mongoOpts = {
    reconnectTries: 60,
    reconnectInterval: 2000,
    useMongoClient: true
};
mongoose.connect(Config.dbConfig.config.dbURI, mongoOpts, (err, res) => {
    if (err) {
        console.log("DB Error: ", err);
        process.exit(1);
    } else {
        console.log('MongoDB Connected');
    }
});

server.register([
    Inert,
    Vision,
    {
        'register': HapiSwagger,
        'options': {
            info: {
                'title': Config.dbConfig.config.SWAGGERNAME,
                'version': Pack.version,
                description: '',
            },
            //documentationPath: '/',
            tags: [
                {
                    name: 'v2.1.0',
                    description: 'API'
                }
            ],
            grouping: 'tags',
        },
    }], (err) => {
    server.start((err) => {
        if (err) {
            console.log("Server err----------->", err);
        } else {
            console.log('Server running at:', server.info.uri);
        }
    });
});

server.on('response', (request) => {
    console.log('response===>>>:');
    if (request.url.path.startsWith('/panel')) {
    }
    else {
        //console.log('HEADERS===========>:', request.headers);
        /* console for checking the params in the request */
        console.log(request.info.remoteAddress + ': ' + request.method.toUpperCase() +
            ' ' + request.url.path + ' --> ' + request.response.statusCode);
        console.log('Request payload:', request.payload);
    }
});

process.on('uncaughtException', function (err) {
    console.log(err);
});

server.register(Plugins, function (err) {
    if (err) {
        server.error('Error while loading plugins : ' + err)
    } else {
        server.log('info', 'Plugins Loaded')
    }
});

server.register(Inert, function (err) {
    if (err) {
        throw err;
    }
    server.route(Routes);
});

server.route([
    {
        method: 'GET',
        path: '/{param*}', /* show error page */
        handler: function (req, res) {
            res.file('./error.html')
        }
    },
    {
        path: "/panel/{path*}",
        method: "GET",
        handler: { /* open admin panel index from panel path */
            directory: {
                path: ['./_admin'],
                listing: false,
                index: ['index.html']
            }
        }
    }
]);
This is deployed on AWS EC2 behind an nginx reverse proxy. When we hit a POST request with a payload it works fine on localhost and saves the data to MongoDB, but when we hit the same POST method from another domain against the EC2-deployed server, the error below shows up in the pm2 logs. It seems the server is responding with null before the controller runs.
Error in the pm2 logs when we hit the POST method:
TypeError: Cannot read property 'statusCode' of null
at /var/www/html/vhosts/custom_BE_ecommerce/api/server.js:123:65
at Object.internals.handler (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:283:33)
at invoke (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:255:23)
at each (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:259:13)
at Object.exports.parallel (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/items/lib/index.js:70:13)
at Object.internals.emit (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:276:18)
at module.exports.internals.Server.internals.Podium._emit (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:156:15)
at each (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:197:47)
at Object.exports.parallel (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/items/lib/index.js:70:13)
at relay (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:198:15)
at Object.internals.emit (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:202:16)
at internals.emitEmitter (/var/www/html/vhosts/custom_BE_ecommerce/api/node_modules/podium/lib/index.js:303:15)
at processTicksAndRejections (node:internal/process/task_queues:79:21)
Need help on this issue.
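For reference, one way to see where the crash happens is to guard the 'response' listener against a null request.response; this is only a sketch of such a guard (it does not explain why the response is null in the first place):

// sketch: defensive version of the 'response' listener from server.js
server.on('response', (request) => {
    if (request.url.path.startsWith('/panel')) {
        return;
    }
    // request.response can be null (e.g. if the client went away before a
    // response was generated), so only read statusCode when it exists
    const statusCode = request.response ? request.response.statusCode : 'no response';
    console.log(request.info.remoteAddress + ': ' + request.method.toUpperCase() +
        ' ' + request.url.path + ' --> ' + statusCode);
    console.log('Request payload:', request.payload);
});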
I have a table that has more than 25 items, so I wrote a basic script to break them into sub-arrays of 25 items each and then loop through that collection of sub-arrays, running a batch write item command in the AWS DynamoDB client for each one. The issue I am getting is a returned validation error. When I run the same seed file via the AWS CLI it seeds the table perfectly, which makes me think it has something to do with my script. See anything I am missing? Thanks in advance!
var { DynamoDB } = require('aws-sdk');

var db = new DynamoDB.DocumentClient({
    region: 'localhost',
    endpoint: 'http://localhost:8000',
});

const allItems = require('./allItems.json');
const tableName = 'some-table-name';
console.log({ tableName, allItems });

var batches = [];
var currentBatch = [];
var count = 0;
for (let i = 0; i < allItems.length; i++) {
    // push item to the current batch
    count++;
    currentBatch.push(allItems[i]);
    if (count === 25) {
        batches.push(currentBatch);
        currentBatch = [];
    }
}
// if there are still items left in the current batch, add them to the collection of batches
if (currentBatch.length > 0 && currentBatch.length !== 25) {
    batches.push(currentBatch);
}

var completedRequests = 0;
var errors = false;

// request handler for DynamoDB
function requestHandler(err, data) {
    console.log('In the request handler...');
    return function (err, data) {
        completedRequests++;
        errors = errors ? true : err;
        // log error
        if (errors) {
            console.error('Request caused a DB error.');
            console.error('ERROR: ' + err);
            console.error(JSON.stringify(err, null, 2));
        } else {
            var res = {
                statusCode: 200,
                headers: {
                    'Content-Type': 'application/json',
                    'Access-Control-Allow-Methods': 'GET,POST,OPTIONS',
                    'Access-Control-Allow-Origin': '*',
                    'Access-Control-Allow-Credentials': true,
                },
                body: JSON.stringify(data),
                isBase64Encoded: false,
            };
            console.log(`Success: returned ${data}`);
            return res;
        }
        if (completedRequests == batches.length) {
            return errors;
        }
    };
}

// Make request
var params;
for (let j = 0; j < batches.length; j++) {
    // items go in the params.RequestItems array
    // format for the items is {PutRequest : {Item: ITEM_OBJECT}}
    params = '{"RequestItems": {"' + tableName + '": []}}';
    params = JSON.parse(params);
    params.RequestItems[tableName] = batches[j];
    console.log('before db.batchWriteItem: ', params);
    try {
        // send to db
        db.batchWrite(params, requestHandler(params));
    } catch (err) {
        console.error(err);
    }
}
Here is the formatted request object and the error:
before db.batchWriteItem:
{ RequestItems:
{ 'some-table-name': [ [Object], [Object], [Object], [Object] ] }
}
In the request handler...
Request caused a DB error.
ERROR: ValidationException: Invalid attribute value type
{
    "message": "Invalid attribute value type",
    "code": "ValidationException",
    "time": "2020-08-04T10:51:13.751Z",
    "requestId": "dd49628c-6ee9-4275-9349-6edca29636fd",
    "statusCode": 400,
    "retryable": false,
    "retryDelay": 47.94198279972915
}
You are using the DocumentClient in the Node.js code. This automatically converts the data format used by DynamoDB into a more easily consumable format.
e.g.
{
    "id": {
        "S": "A string value"
    }
}
would become
{
    "id": "A string value"
}
The CLI does not perform this data conversion.
You can use the regular DynamoDB client in Node.js to avoid this conversion, e.g. const db = new DynamoDB().
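To make that concrete, here is a minimal sketch of seeding with the low-level client. It assumes allItems.json is an array of DynamoDB-typed items, i.e. the { "S": ... } form the CLI accepts; if the file already contains PutRequest wrappers, skip the map step.

// sketch: seeding with the low-level DynamoDB client instead of the DocumentClient
var { DynamoDB } = require('aws-sdk');

var db = new DynamoDB({
    region: 'localhost',
    endpoint: 'http://localhost:8000',
});

const allItems = require('./allItems.json');
const tableName = 'some-table-name';

// wrap each DynamoDB-typed item in a PutRequest and send in chunks of 25
const putRequests = allItems.map((item) => ({ PutRequest: { Item: item } }));

for (let i = 0; i < putRequests.length; i += 25) {
    const params = {
        RequestItems: {
            [tableName]: putRequests.slice(i, i + 25),
        },
    };
    db.batchWriteItem(params, (err, data) => {
        if (err) {
            console.error('batchWriteItem error:', err);
        } else {
            console.log('UnprocessedItems:', data.UnprocessedItems);
        }
    });
}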
First post from a non-development expert. I have been scouring the libraries and online resources on how to use Dialogflow fulfillment to integrate with the Google Calendar API. I am able to set the start and end times, the summary, the description, and the location successfully in Google Calendar, but I am unable to add attendees successfully. I have tried many variations on the attendees format. Here is the code I am using (I removed the private key for security reasons). One note: when I used the square brackets, as some sites suggested, I always received a response that the time was already booked.
'use strict';

const functions = require('firebase-functions');
const {google} = require('googleapis');
const {WebhookClient} = require('dialogflow-fulfillment');

// Enter your calendar ID below and service account JSON below
const calendarId = 'piec3rnlo2v2p2cemgjdjfctmg@group.calendar.google.com';
const serviceAccount = {
    "type": "service_account",
    "project_id": "whatduewhen2020v1-kgwjyd",
    "private_key_id": "2a2dead3e050ef295cfef9c2c27bd2ac7d2b7471",
    "private_key": "-----BEGIN PRIVATE KEY-----",
    "client_email": "google-calendar@whatduewhen2020v1-kgwjyd.iam.gserviceaccount.com",
    "client_id": "114290887729963225819",
    "auth_uri": "https://accounts.google.com/o/oauth2/auth",
    "token_uri": "https://oauth2.googleapis.com/token",
    "auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
    "client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/google-calendar%40whatduewhen2020v1-kgwjyd.iam.gserviceaccount.com"
}; // Starts with {"type": "service_account",...

// Set up Google Calendar Service account credentials
const serviceAccountAuth = new google.auth.JWT({
    email: serviceAccount.client_email,
    key: serviceAccount.private_key,
    scopes: 'https://www.googleapis.com/auth/calendar'
});

const calendar = google.calendar('v3');
process.env.DEBUG = 'dialogflow:*'; // enables lib debugging statements

const timeZone = 'America/Toronto';
const timeZoneOffset = '-05:00';

exports.dialogflowFirebaseFulfillment = functions.https.onRequest((request, response) => {
    const agent = new WebhookClient({ request, response });
    console.log("Parameters", agent.parameters);
    const appointment_type = agent.parameters.AppointmentType;
    const attendee_email = agent.parameters.email;
    const attendee_phone = agent.parameters.phone_number;
    const attendee_firstname = agent.parameters.given_name;
    const attendee_lastname = agent.parameters.last_name;

    function makeAppointment (agent) {
        // Calculate appointment start and end datetimes (end = +1hr from start)
        //console.log("Parameters", agent.parameters.date);
        const dateTimeStart = new Date(Date.parse(agent.parameters.date.split('T')[0] + 'T' + agent.parameters.time.split('T')[1].split('-')[0] + timeZoneOffset));
        const dateTimeEnd = new Date(new Date(dateTimeStart).setHours(dateTimeStart.getHours() + 1));
        const appointmentTimeString = dateTimeStart.toLocaleString(
            'en-US',
            { month: 'long', day: 'numeric', hour: 'numeric', timeZone: timeZone }
        );

        // Check the availability of the time, and make an appointment if there is time on the calendar
        return createCalendarEvent(dateTimeStart, dateTimeEnd, appointment_type, attendee_email, attendee_phone, attendee_firstname, attendee_lastname).then(() => {
            agent.add(`Excellent, it looks like ${appointmentTimeString} is available and we have reserved the time for you!`);
        }).catch(() => {
            agent.add(`I'm so sorry, it looks like we're already booked on ${appointmentTimeString}. Is there an alternate day or time you are available?`);
        });
    }

    let intentMap = new Map();
    intentMap.set('Schedule Appointment', makeAppointment);
    agent.handleRequest(intentMap);
});

function createCalendarEvent (dateTimeStart, dateTimeEnd, appointment_type, attendee_email, attendee_phone, attendee_firstname, attendee_lastname) {
    return new Promise((resolve, reject) => {
        calendar.events.list({
            auth: serviceAccountAuth, // List events for time period
            calendarId: calendarId,
            timeMin: dateTimeStart.toISOString(),
            timeMax: dateTimeEnd.toISOString()
        }, (err, calendarResponse) => {
            // Check if there is an event already on the Calendar
            if (err || calendarResponse.data.items.length > 0) {
                reject(err || new Error('Requested time conflicts with another appointment'));
            } else {
                // Create event for the requested time period
                calendar.events.insert({
                    auth: serviceAccountAuth,
                    calendarId: calendarId,
                    resource: {
                        summary: ' Membership Discussion' + ' ' + attendee_email + ' ' + attendee_phone,
                        description: ' Membership Discussion' + ' ' + attendee_email + ' ' + attendee_phone,
                        location: 'Call ' + attendee_firstname + ' ' + attendee_lastname + ' at ' + attendee_phone,
                        start: { dateTime: dateTimeStart },
                        end: { dateTime: dateTimeEnd },
                        attendees: { email: 'new@example.com' }
                    },
                }, (err, event) => {
                    err ? reject(err) : resolve(event);
                });
            }
        });
    });
}
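For comparison, the Calendar API's event resource expects attendees to be an array of objects, so the insert call above would look roughly like this with only the attendees shape changed (the second address is a placeholder):

// sketch: same events.insert call as above, with attendees as an array of { email } objects
calendar.events.insert({
    auth: serviceAccountAuth,
    calendarId: calendarId,
    resource: {
        summary: 'Membership Discussion' + ' ' + attendee_email + ' ' + attendee_phone,
        description: 'Membership Discussion' + ' ' + attendee_email + ' ' + attendee_phone,
        location: 'Call ' + attendee_firstname + ' ' + attendee_lastname + ' at ' + attendee_phone,
        start: { dateTime: dateTimeStart },
        end: { dateTime: dateTimeEnd },
        attendees: [
            { email: attendee_email },
            { email: 'new@example.com' } // placeholder
        ]
    }
}, (err, event) => {
    err ? reject(err) : resolve(event);
});

Note that with the code above, any API error is mapped by the catch() in makeAppointment to the "already booked" reply, so a rejection from events.insert (for example a permissions problem when a service account tries to invite attendees) would also show up as the time being unavailable.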
Issue Description
We are using OneSignal as a third-party push service and configured it using parse-server-onesignal-push-adapter, as we are sending pushes from cloud code. Normal pushes are working but scheduled pushes are not: no matter what we set the push_time parameter to on Parse.Push.send(), pushes are sent immediately.
Expected Results
Working scheduled pushes
Actual Outcome
Pushes are sent immediately even when the push_time parameter is set on Parse.Push.send().
How we send pushes
Parse.Push.send({
    where: query,
    data: {
        "alert": "Voting complete. Click here to see the results.",
        "sound": "cheering.caf",
        //"badge": "Increment",
        "content-available": 1,
        "category": "VOTING_COMPLETE",
        "qc": request.object.id
    },
    push_time: pushTime
}, {
    success: function() {
        console.log('##### PUSH OK');
    },
    error: function(error) {
        console.log('##### PUSH ERROR');
    },
    useMasterKey: true
});
Environment Setup
var express = require('express');
var ParseServer = require('parse-server').ParseServer;
var path = require('path');

var databaseUri = process.env.DATABASE_URI || process.env.MONGODB_URI;
if (!databaseUri) {
    console.log('DATABASE_URI not specified, falling back to localhost.');
}

var OneSignalPushAdapter = require('parse-server-onesignal-push-adapter');
var oneSignalPushAdapter = new OneSignalPushAdapter({
    oneSignalAppId: "***************************",
    oneSignalApiKey: "***************************"
});

var api = new ParseServer({
    databaseURI: databaseUri || 'mongodb://localhost:27017/dev',
    cloud: process.env.CLOUD_CODE_MAIN || __dirname + '/cloud/main.js',
    appId: process.env.APP_ID || 'myAppId',
    masterKey: process.env.MASTER_KEY || '',
    fileKey: process.env.FILE_KEY || '******************************',
    serverURL: process.env.SERVER_URL || 'http://localhost:1337/parse',
    verifyUserEmails: true,
    emailVerifyTokenValidityDuration: 2 * 60 * 60,
    preventLoginWithUnverifiedEmail: true,
    publicServerURL: 'http://***************************/parse',
    enableAnonymousUsers: false,
    revokeSessionOnPasswordReset: true,
    appName: '************************',
    emailAdapter: {
        module: 'parse-server-simple-mailgun-adapter',
        options: {
            fromAddress: 'no-reply@***************************.com',
            domain: 'mg.******************************.com',
            apiKey: 'key-******************************',
        }
    },
    oauth: {
        twitter: {
            consumer_key: "***************************",
            consumer_secret: "***************************"
        }
    },
    push: {
        adapter: oneSignalPushAdapter
    }
});

var app = express();
app.use('/public', express.static(path.join(__dirname, '/public')));

var mountPath = process.env.PARSE_MOUNT || '/parse';
app.use(mountPath, api);

app.get('/', function(req, res) {
    res.status(200).send('Make sure to star the parse-server repo on GitHub!');
});

app.get('/test', function(req, res) {
    res.sendFile(path.join(__dirname, '/public/test.html'));
});

var port = process.env.PORT || 1337;
var httpServer = require('http').createServer(app);
httpServer.listen(port, function() {
    console.log('parse-server-example running on port ' + port + '.');
});

ParseServer.createLiveQueryServer(httpServer);
The reason is that scheduling of push messages is not supported (yet) by Parse Server; only parse.com currently supports it.
You can read about it here.
If you really need this feature, I suggest you schedule a job in cloud code that will do it for you. Since scheduling jobs is also not supported out of the box, you can find a temporary solution here.
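A rough sketch of that workaround (the ScheduledPush class, its fields, and the job name are made up for illustration): store the payload and the desired send time in a class, then run a cloud job periodically that sends whatever is due:

// cloud/main.js -- sketch of a "send due pushes" job
// (ScheduledPush and its fields are illustrative, not an existing schema)
Parse.Cloud.job('sendScheduledPushes', function (request, status) {
    var query = new Parse.Query('ScheduledPush');
    query.equalTo('sent', false);
    query.lessThanOrEqualTo('sendAt', new Date());

    query.find({ useMasterKey: true }).then(function (duePushes) {
        var promises = duePushes.map(function (scheduled) {
            return Parse.Push.send({
                where: new Parse.Query(Parse.Installation), // rebuild whatever audience query you need
                data: scheduled.get('data')
            }, { useMasterKey: true }).then(function () {
                scheduled.set('sent', true);
                return scheduled.save(null, { useMasterKey: true });
            });
        });
        return Parse.Promise.when(promises);
    }).then(function () {
        status.success('Due pushes sent.');
    }, function (error) {
        status.error('Failed to send scheduled pushes: ' + error.message);
    });
});

The job itself still has to be triggered from outside (for example a cron task calling the jobs endpoint with the master key), since, as noted above, job scheduling is not built into Parse Server either.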
I am trying to use the SQLitePlugin for Android but it is not working. I will list my steps:
1. I have installed Cordova/PhoneGap and am developing my mobile app with PhoneGap, HTML5, JavaScript, and CSS3, using NetBeans as the IDE.
2. Downloaded the plugin from https://github.com/brodysoft/Cordova-SQLitePlugin.
3. Added SQLitePlugin.js to the js folder of the project.
4. Added com.brodysoft.sqlitePlugin.file=https://github.com/brodysoft/Cordova-SQLitePlugin.git to plugin.properties.
5. I am opening the database on deviceready as follows:
var app = {
    initialize: function () {
        this.bindEvents();
    },
    bindEvents: function () {
        document.addEventListener('deviceready', this.onDeviceReady, false);
    },
    onDeviceReady: function () {
        app.receivedEvent('deviceready');
        var db = window.sqlitePlugin.openDatabase('gdata.db');
        console.log('ready');
        db.transaction(function (tx) {
            tx.executeSql('DROP TABLE IF EXISTS test_table');
            tx.executeSql('CREATE TABLE IF NOT EXISTS test_table (id integer primary key, data text, data_num integer)');
            // demonstrate PRAGMA:
            db.executeSql("pragma table_info (test_table);", [], function (res) {
                console.log("PRAGMA res: " + JSON.stringify(res));
            });
            tx.executeSql("INSERT INTO test_table (data, data_num) VALUES (?,?)", ["test", 100], function (tx, res) {
                console.log("insertId: " + res.insertId + " -- probably 1");
                console.log("rowsAffected: " + res.rowsAffected + " -- should be 1");
                db.transaction(function (tx) {
                    tx.executeSql("select count(id) as cnt from test_table;", [], function (tx, res) {
                        console.log("res.rows.length: " + res.rows.length + " -- should be 1");
                        console.log("res.rows.item(0).cnt: " + res.rows.item(0).cnt + " -- should be 1");
                    });
                });
            }, function (e) {
                console.log("ERROR: " + e.message);
            });
        });
    },
    // Update DOM on a Received Event
    receivedEvent: function (id) {
        var parentElement = document.getElementById(id);
        console.log('Received Event: ' + id);
    }
};

app.initialize();
I am running the build directly on an Android device. It keeps throwing this error:
Uncaught TypeError: Object # has no method 'exec'
(13:52:13:450 | error, javascript)
at SQLitePlugin.open (www/js/libs/SQLitePlugin.js:112:15)
at SQLitePlugin (www/js/libs/SQLitePlugin.js:54:10)
at (anonymous function) (www/js/libs/SQLitePlugin.js:425:14)
at (anonymous function) (www/js/libs/SQLitePlugin.js:30:20)
at createandpopulatedb (www/js/dborarray.js:30:30)
at onDeviceReady3 (www/dborarray.html:96:33)
at onload (www/dborarray.html:16:155)
SQLitePlugin openargs: {"name":"gdata.db"} (13:52:19:609) at
www/js/libs/SQLitePlugin.js:39
Can somebody help?
Try this:
window.sqlitePlugin.openDatabase({name: "gdata.db"});
instead of this
window.sqlitePlugin.openDatabase('gdata.db');
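For completeness, a minimal sketch of the surrounding usage with the object form (the location key is an assumption; some plugin versions require it, others ignore it):

// sketch: object form of openDatabase, run after deviceready
document.addEventListener('deviceready', function () {
    var db = window.sqlitePlugin.openDatabase({ name: 'gdata.db', location: 'default' });
    db.transaction(function (tx) {
        tx.executeSql('CREATE TABLE IF NOT EXISTS test_table (id integer primary key, data text, data_num integer)');
        tx.executeSql('INSERT INTO test_table (data, data_num) VALUES (?,?)', ['test', 100]);
    }, function (e) {
        console.log('transaction error: ' + e.message);
    }, function () {
        console.log('transaction ok');
    });
}, false);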