Insert or update getting "Transaction query already complete" - bookshelf.js

How can I correctly search for a row in the database and INSERT/UPDATE according to the search result (INSERT if not found, UPDATE if found)?
I'm currently doing this:
// NOTE(review): this is the code that triggers the error reported below.
// Two problems are visible (and are what the accepted answer fixes):
//  1. The promise chains created inside the loop are never collected or
//     returned, so the async transaction callback resolves — completing the
//     transaction — before the fetch/save chains finish. Queries that arrive
//     afterwards hit "Transaction query already complete".
//  2. The initial fetch() does not pass { transacting: t }, so it runs
//     outside the transaction context.
bookshelf.transaction(async function (t) {
    for (var x = 0; x < 10; x++) {
        let row = pmsParser.getRow(x);
        if (_.isEmpty(row)) {
            break;
        }
        // Map the parsed row onto the model's column names.
        let data = {
            lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
            mvs: row.version,
            color: row.color,
            location: row.location,
            status: row.status
        };
        new Vehicle({ chassi: row.chassi })
            .fetch({ require: true }) // NOTE(review): missing { transacting: t }
            .then(model => {
                return new Vehicle(model)
                    .save(data, { transacting: t, patch: true });
            })
            .catch(Vehicle.NotFoundError, err => {
                // Bluebird filtered catch: row absent -> insert instead of update.
                new Vehicle(data)
                    .save('chassi', row.chassi, { transacting: t })
                    .then(() => {
                        console.log(`Inserted... ${row.chassi}`);
                    });
            })
            .catch(err => {
                console.log(err.message);
            });
    }
})
    .catch(function (err) {
        console.error(err);
        return res.json({ status: false, count: 0, error: err.message });
    });
And I receive this error:
Transaction query already complete, run with DEBUG=knex:tx for more info
Unhandled rejection Error: Transaction query already complete, run with DEBUG=knex:tx for more info
at completedError (/home/node/app/node_modules/knex/lib/transaction.js:297:9)
at /home/node/app/node_modules/knex/lib/transaction.js:266:22
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Function.Promise.attempt.Promise.try (/home/node/app/node_modules/bluebird/js/release/method.js:39:29)
at Client_SQLite3.trxClient.query (/home/node/app/node_modules/knex/lib/transaction.js:264:34)
at Runner.<anonymous> (/home/node/app/node_modules/knex/lib/runner.js:138:36)
at Runner.tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Runner.query (/home/node/app/node_modules/bluebird/js/release/method.js:15:34)
at /home/node/app/node_modules/knex/lib/runner.js:61:21
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at /home/node/app/node_modules/bluebird/js/release/using.js:185:26
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Promise._settlePromiseFromHandler (/home/node/app/node_modules/bluebird/js/release/promise.js:512:31)
at Promise._settlePromise (/home/node/app/node_modules/bluebird/js/release/promise.js:569:18)
at Promise._settlePromise0 (/home/node/app/node_modules/bluebird/js/release/promise.js:614:10)
at Promise._settlePromises (/home/node/app/node_modules/bluebird/js/release/promise.js:693:18)
Knex debug output
knex:tx trx1: Starting top level transaction +0ms
knex:tx trx1: releasing connection +28ms
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +15ms
Transaction query already complete, run with DEBUG=knex:tx for more info
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +8ms
Transaction query already complete, run with DEBUG=knex:tx for more info

When under a transaction ALL related database accesses must be within the context of the transaction.
//...
new Vehicle({ chassi: row.chassi })
.fetch({ require: true, transacting: t })
.then(model => {
//...
Your iterations are not being correctly promisified, which lets your queries escape the transaction context and causes the 'Transaction query already complete' error. When creating promises within a loop it is always advisable to collect them and hand them to a promise-collection handler such as Promise.all(). This prevents the transaction context from completing before all promises are resolved.
Those changes may lead to a code as below (untested):
bookshelf.transaction(async function (t) {
    // Collect every per-row promise so the transaction callback only resolves
    // once ALL fetch/save chains have settled; otherwise the queries escape
    // the transaction and knex reports "Transaction query already complete".
    let promises = [];
    for (var x = 0; x < 10; x++) {
        let row = pmsParser.getRow(x);
        if (_.isEmpty(row)) {
            break;
        }
        // Map the parsed row onto the model's column names.
        let data = {
            lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
            mvs: row.version,
            color: row.color,
            location: row.location,
            status: row.status
        };
        promises.push(
            new Vehicle({ chassi: row.chassi })
                .fetch({ require: true, transacting: t }) // fetch inside the transaction too
                .then(model => {
                    return model // no need to use 'new Vehicle()' here
                        .save(data, { transacting: t, patch: true });
                })
                .catch(Vehicle.NotFoundError, err => {
                    return new Vehicle(data) // missing 'return'
                        .save('chassi', row.chassi, { transacting: t })
                        .then(() => {
                            console.log(`Inserted... ${row.chassi}`);
                        });
                })
                .catch(err => {
                    console.log(err.message);
                    // throw err; // should rethrow it!
                })
        );
    }
    return Promise.all(promises)
        .catch(function (err) {
            console.error(err);
            return res.json({ status: false, count: 0, error: err.message });
        });
}); // BUG FIX: was `};` — the bookshelf.transaction(...) call was never closed

Related

react-native-sqlite-storage How to open the specified directory file?

my db in: android\app\src\main\assets\app.db
The way i tried:
open() {
    SQLiteStorage.DEBUG(true);
    // NOTE(review): this is the failing attempt — passing a file:// URI as the
    // database `name` does not open the bundled asset; the working version
    // below passes just the bare file name 'app.db'.
    SQLiteStorage.openDatabase({
        name: 'file:///android_asset/app.db',
    })
        .then(() => {
            console.info('');
        })
        .catch(err => {
            console.warn(err);
        });
}
But error:
How can i do this?
This version runs OK:
open() {
SQLiteStorage.DEBUG(true);
SQLiteStorage.openDatabase({
name: 'app.db', // android/app/src/main/assets/app.db
})
.then(() => {
console.info('');
})
.catch(err => {
console.warn(err);
});
}
In react-native-cli:
1- It is convenient first of all to make sure that the database exists in the documents directory, with rn-fetch-blob you can list the documents that are in a directory like this:
import RNFetchBlob from 'rn-fetch-blob';
// Handles to the app's well-known directories.
let dirs = RNFetchBlob.fs.dirs;
const documentPath = dirs.DocumentDir;
const externalZipPath = dirs.DCIMDir; // NOTE(review): declared but unused in this snippet
// List the files in the documents directory to confirm the database exists there.
RNFetchBlob.fs.ls (documentPath) .then ((files) => {
console.log (files)
})
If you do not carry out this step, you may find that a basic empty database is created and opened instead, because no existing database with that name was found.
You can also open the database from android studio:
When Launch succeeded:
In Device File Explorer> data> data> com.nameofyourapp> databases
You can also click on the bottom tab of android studio 'Database inspector' to see the database changes in real time.
2- Once you are sure that a database already exists in that directory:
To open the database in directories inside the cell phone but outside your project:
"If your folder is not in app bundle but in app sandbox i.e. downloaded from some remote location"
// Opens a pre-existing SQLite database from the device's document/sandbox
// storage (not the app bundle) and dumps the first 10 rows of a table.
let openDbExample = () => {
    // BUG FIX: `db` was assigned below without any declaration, creating an
    // implicit global; declare it so the callbacks close over a local binding.
    let db;
    let errorCB = (err) => {
        console.log ("SQL Error:" + err);
    };
    let successCB = () => {
        // Runs only after openDatabase succeeds, so `db` is set by then.
        db.transaction((tx) => {
            tx.executeSql(
                `SELECT * FROM name_column_table LIMIT 10`, [], (tx, results) => {
                    var len = results.rows.length;
                    for (let i = 0; i < len; i++) {
                        let row = results.rows.item(i);
                        console.log(row);
                    }
                });
        });
    };
    if (Platform.OS === 'ios') {
        // iOS: database lives in the app's Documents directory.
        db = SQLite.openDatabase({ name: "example_data_base.db", location: 'Documents' }, successCB, errorCB);
    } else {
        // Android: open read-only, copied from the app directory (createFromLocation: 2).
        db = SQLite.openDatabase({ name: "example_data_base.db", readOnly: true, location: "default", createFromLocation: 2 }, successCB, errorCB);
    }
};

Cloud Functions bucket.upload() is not running at all

here is what I am trying to do using Firebase:
create a backup file from realtime database
upload to firebase storage
do this every morning
but I am having a problem with number 2; after the backup-file-creation success log, no other log appears — not even a failure message.
no log after file creation
Even worse is that it sometimes works, which makes me doubtful about the consistency of the functionality.
my code:
// Writes `jsonBackup` to /tmp/<fileName>. Resolves with a success message,
// rejects with a descriptive failure message when fs.writeFile errors.
var promiseFileCreation = function(fileName, jsonBackup){
    console.log("promiseFileCreation starting");
    return new Promise(function (resolve, reject){
        fs.writeFile('/tmp/'+fileName, jsonBackup, function(fs_err){
            if(!fs_err){
                resolve("File "+fileName+" creation success");
            } else {
                reject("File "+fileName+" creation failure: "+fs_err);
            }
        })
    });
    // BUG FIX: the original chained `.catch(function(error){ reject("FileCreation Error"); })`
    // here, but `reject` is not in scope inside that callback, so any write
    // failure crashed with a ReferenceError and masked the real error. The
    // executor above already rejects with a descriptive message, so the broken
    // catch is removed.
}
// Uploads /tmp/<fileName> to the storage bucket at `fileDest`. Resolves with
// a success message, rejects with a descriptive message on upload error.
var promiseBucketUpload = function(fileName, fileDest){
    console.log("promiseBucketUpload starting")
    return new Promise(function (resolve, reject){
        console.log("promiseBucketUpload promise starting")
        bucket.upload('/tmp/'+fileName, { destination: fileDest }, function(upload_err){
            if(!upload_err){
                resolve("File "+fileName+" upload to "+fileDest+" success");
            } else {
                reject("File "+fileName+" upload to "+fileDest+" failure: "+upload_err);
            }
        })
    });
    // BUG FIX: the original chained `.catch(function(error){ reject("BucketUpload Error: "+error); })`
    // here, but `reject` is not in scope inside that callback, so any upload
    // failure crashed with a ReferenceError instead of propagating the
    // rejection. The executor above already rejects with a descriptive message.
}
// NOTE(review): likely cause of "bucket.upload() is not running at all" in a
// Cloud Function — the inner promiseFileCreation/promiseBucketUpload chains
// below are never `return`ed into the outer chain, so the function's promise
// can resolve (and the instance be frozen/torn down) before the upload runs.
// That would also explain why it only "sometimes works".
Promise.all([promiseText, promiseDate, promiseTitle, promiseLikedCount, promiseViewCount, promiseComments]).then(function (values){
    var jsonPostObj = {
        post: [],
        counter: []
    }
    jsonPostObj.post.push({
        date: values[1],
        text: values[0],
        title: values[2]
    })
    jsonPostObj.counter.push({
        likedCount: values[3],
        viewCount: values[4]
    })
    var jsonCommentsObj = JSON.parse(values[5]);
    const jsonArchiveObj = {...jsonPostObj, ...jsonCommentsObj}
    var jsonArchive = JSON.stringify(jsonArchiveObj);
    // Archive path: history/YYYY/MM/archive_<yesterday>
    const yesterday = getYesterdayDateFull();
    var fileName = "archive_"+yesterday;
    var fileDest = "history/"+yesterday.substring(0,4)+"/"+yesterday.substring(4,6)+"/"+fileName;
    console.log("Archive file name: "+fileName);
    console.log("Archive destination: "+fileDest);
    // NOTE(review): this chain is not returned (see note above).
    promiseFileCreation(fileName, jsonArchive).then(function(resultSuccessFs){
        console.log(resultSuccessFs);
        // BUCKETUPLOAD here
        // NOTE(review): also not returned.
        promiseBucketUpload(fileName, fileDest).then(function(resultSuccessBucket){
            console.log(resultSuccessBucket);
            return promiseBackupResult(true);
        }, function(resultFailureBucket){
            console.log(resultFailureBucket);
            // NOTE(review): `promiseBucketResult` — probably a typo for
            // `promiseBackupResult`, used everywhere else in this chain.
            return promiseBucketResult(false);
        })
    }, function(resultFailureFs){
        console.log(resultFailureFs);
        return promiseBackupResult(false);
    });
}).catch(function(errPromiseAll){
    console.log("Promise.all error: "+errPromiseAll);
    return promiseBackupResult(false);
})
}
I removed unnecessary codes, like other promises. The file creation seems to work fine.
Does anyone see why bucket.upload() is not called at all? Thanks in advance.

alexa sdk: can't get persistentAttributes

i'm trying to add persistent attributes to my lambda function.
i created a dynamoDB table and added it to the triggers of my lambda function.
i copied a sample code from github, but when i try to launch the skill i get an error. The console log shows:
{
"errorMessage": "Could not read item (amzn1.ask.account.AGIIYNRXWDLBD6XEPW72QS2BHGXNP7NWYBEWSH2XLSXZP64X3NCYEMVK233VFDWH77ZB6DAK6YJ53SZLNUFVQ56CYOVCILS7QFZI4CIRDWC3PAHS4QG27YUY5PTT6QEIK46YFNTJT54YAKNGOWV2UO66XZACFDQ5SEXKJYOBNFNIZNUXKNTIAAYZG4R5ZU4FMLPDZZN64KLINNA) from table (Spiele): The provided key element does not match the schema",
"errorType": "AskSdk.DynamoDbPersistenceAdapter Error",
"stackTrace": [
"Object.createAskSdkError (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/utils/AskSdkUtils.js:22:17)",
"DynamoDbPersistenceAdapter.<anonymous> (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:123:49)",
"step (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:44:23)",
"Object.throw (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:25:53)",
"rejected (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:17:65)",
"<anonymous>",
"process._tickDomainCallback (internal/process/next_tick.js:228:7)"
]
}
the table contains a primary key "name" and sort key "UserId". is that wrong?
here is my index.js:
const Alexa = require('ask-sdk');
// Define the skill features
let skill;
/**
* If this is the first start of the skill, grab the user's data from Dynamo and
* set the session attributes to the persistent data.
*/
/**
 * Request interceptor: on the first LaunchRequest of a session, load the
 * user's persistent attributes from DynamoDB and copy them into the session.
 */
const GetUserDataInterceptor = {
    process(handlerInput) {
        let attributes = handlerInput.attributesManager.getSessionAttributes();
        if (handlerInput.requestEnvelope.request.type === 'LaunchRequest' && !attributes['isInitialized']) {
            // NOTE(review): this is where the question's error surfaces —
            // getPersistentAttributes() rejects with "Could not read item ...
            // does not match the schema" when the table's key schema doesn't
            // match what the persistence adapter expects (see answer below).
            return new Promise((resolve, reject) => {
                handlerInput.attributesManager.getPersistentAttributes()
                    .then((attributes) => { // shadows the session `attributes` above
                        attributes['isInitialized'] = true;
                        saveUser(handlerInput, attributes, 'session');
                        resolve();
                    })
                    .catch((error) => {
                        reject(error);
                    })
            });
        }
        // Non-launch requests (or already-initialized sessions) fall through
        // and return undefined — no persistent load is performed.
    }
};
/**
 * Persist `attributes` either to the current session or to DynamoDB.
 *
 * mode === 'session'    -> synchronous setSessionAttributes; returns undefined.
 * mode === 'persistent' -> returns a Promise that settles once
 *                          savePersistentAttributes() completes.
 * Any other mode returns undefined and does nothing.
 */
function saveUser(handlerInput, attributes, mode) {
    if(mode === 'session'){
        handlerInput.attributesManager.setSessionAttributes(attributes);
    } else if(mode === 'persistent') {
        console.info("Saving to Dynamo: ",attributes);
        return new Promise((resolve, reject) => {
            handlerInput.attributesManager.getPersistentAttributes()
                .then((persistent) => {
                    // NOTE(review): `persistent` is fetched but never used —
                    // the stored attributes are overwritten wholesale.
                    // 'isInitialized' is session-only state, so strip it
                    // before persisting.
                    delete attributes['isInitialized'];
                    handlerInput.attributesManager.setPersistentAttributes(attributes);
                    resolve(handlerInput.attributesManager.savePersistentAttributes());
                })
                .catch((error) => {
                    reject(error);
                });
        });
    }
}
// Handles LaunchRequest: stamps a test attribute and persists it, then greets.
const LaunchHandler = {
    canHandle(handlerInput) {
        return handlerInput.requestEnvelope.request.type === 'LaunchRequest';
    },
    handle(handlerInput) {
        console.info("LaunchRequest");
        let attributes = handlerInput.attributesManager.getSessionAttributes();
        console.info("Test the load: " + attributes['isInitialized']);
        attributes['FOO'] = "BAR";
        // NOTE(review): the Promise returned by saveUser(..., 'persistent') is
        // neither awaited nor returned, so the response below can be sent
        // before the DynamoDB write finishes, and any save error is unhandled.
        saveUser(handlerInput, attributes, 'persistent');
        return handlerInput.responseBuilder
            .speak('Hello')
            .reprompt('Hello')
            .getResponse();
    }
}
// Wire up the skill; `standard()` includes the DynamoDB persistence adapter
// configured by the .withTableName/.withDynamoDbClient calls below.
exports.handler = Alexa.SkillBuilders.standard()
    .addRequestHandlers(
        LaunchHandler
    )
    .addRequestInterceptors(GetUserDataInterceptor)
    .withTableName('Spiele')
    .withAutoCreateTable(true)
    // NOTE(review): the question's "does not match the schema" error is
    // consistent with the 'Spiele' table's key schema (partition "name",
    // sort "UserId") differing from what the adapter expects — confirm the
    // table's partition key against the adapter's defaults (see answer).
    .withDynamoDbClient()
    .lambda();
can anyone tell me what i'm doing wrong?
please confirm the partition key is 'userId' not 'UserId' (notice the uppercase U).
Also I would suggest using 'this' object.
Let me know if that helps.
Cheers
Below code is for python lambda function
# NOTE(review): this fragment appears incomplete as posted:
#  - `SkillBuilder` is used below but never imported (only CustomSkillBuilder is);
#  - the first `sb` assignment is immediately overwritten (dead code);
#  - `dynamodb_adapter` is referenced but never constructed from DynamoDbAdapter.
from ask_sdk_core.skill_builder import CustomSkillBuilder
from ask_sdk_dynamodb.adapter import DynamoDbAdapter
sb = SkillBuilder()
sb = CustomSkillBuilder(persistence_adapter = dynamodb_adapter)

Redux observable retry on timeout

I'd like to handle ajax timeouts using redux-observable so that if a timeout occurs (after say 10 seconds) it will retry the request another two times (firing a SAVE_RETRYING action every time so the UI can notify the user that it's retrying).
For any other type of error or if we've already retried twice it should just fail and fire a SAVE_FAILURE action.
I can make it work if I trigger the SAVE_RETRYING action using store.dispatch but getting deprecation warnings about this and I'm a bit stuck figuring out how to do it the proper way (adding SAVE_RETRYING to the stream that is returned by the epic).
Here's what I have (simplified):
// NOTE(review): this is the question's version — it works, but dispatching
// SAVE_RETRYING via store.dispatch inside retryWhen triggers the
// redux-observable deprecation warning; the action should instead be emitted
// into the epic's output stream (see the answer below).
function saveEpic(action$, store) {
    return action$.ofType('SAVE_CLICKED')
        .mergeMap(action => (
            ajax({
                url: '/a-long-request',
            })
                .timeout(10000) // treat >10s as a TimeoutError
                .map(() => ({ type: 'SAVE_SUCCESS' }))
                .retryWhen(errors => (
                    errors.scan((count, e) => {
                        // Give up after two retries, or immediately for
                        // non-timeout errors, by rethrowing.
                        if (count >= 2 || e.name !== 'TimeoutError') {
                            throw e;
                        } else {
                            store.dispatch({ type: 'SAVE_RETRYING', count }); // deprecated pattern
                            return count + 1;
                        }
                    }, 0)))
                .startWith({ type: 'SAVE_STARTED' })
                .catch(() =>
                    Observable.of({ type: 'SAVE_FAILURE' }))
        ));
}
How can I get that SAVE_RETRYING action up to the main stream? Thx.
This is not ideal, but you could use catch and undocumented second argument (which is the source observable) to resubscribe. The downside I don't like is you have to count retries in the mergeMap callback closure.
// Retry-on-timeout epic using `catch`'s second argument: `source` is the
// original observable, so returning `source.startWith(...)` resubscribes —
// re-issuing the ajax request — while first emitting SAVE_RETRYING into the
// epic's own output stream (no store.dispatch needed).
function saveEpic(action$, store) {
    return action$.ofType('SAVE_CLICKED')
        .mergeMap(action => {
            // Retry counter lives in the mergeMap closure — the acknowledged
            // downside of this approach.
            let retries = 0;
            return ajax({
                url: '/a-long-request',
            })
                .timeout(10000) // treat >10s as a TimeoutError
                .map(() => ({ type: 'SAVE_SUCCESS' }))
                .catch((error, source) => {
                    retries += 1;
                    // Fail outright after two attempts, or immediately for
                    // any non-timeout error.
                    if (retries >= 2 || error.name !== 'TimeoutError') {
                        return Observable.of({ type: 'SAVE_FAILURE' });
                    }
                    return source.startWith({ type: 'SAVE_RETRYING', count: retries });
                })
                .startWith({ type: 'SAVE_STARTED' });
        });
}

Error when trying to use async.concat to retrieve data from redis

I was following an example posted by the async author here but I'm getting an error.
redis-2.2.12
node v0.4.11-pre
Here's my code:
var async = require('async');
var redis = require('redis');
var keys = ['key1', 'key2', 'key3'];
var client = redis.createClient();
var multi = client.multi();
// NOTE(review): `for .. in` over an Array iterates the INDICES ("0","1","2"),
// not the values, so this queues HMSET on hashes named "0".."2" rather than
// 'key1'..'key3' — the "mysterious failure" the answer below points out.
for (var key in keys) {
    multi.hmset(key, {'some': 'value'});
}
multi.exec(function(err, res) {
    if (err) throw err;
    console.dir(res);
    var myCallback = function(err, res) {
        console.log('in myCallback');
        console.dir(res);
        client.quit();
        process.exit();
    };
    // NOTE(review): passing the unbound method `client.hgetall` loses `this`,
    // producing "Object #<Object> has no method 'send_command'" — use
    // client.hgetall.bind(client) as in the answer below.
    async.concat(keys, client.hgetall, myCallback);
});
Produces the following output:
$ node redis_test.js
[ 'OK', 'OK', 'OK' ]
node.js:134
throw e; // process.nextTick error, or 'error' event on first tick
^
TypeError: Object #<Object> has no method 'send_command'
at /home/project/node_modules/redis/index.js:666:25
at /home/project/node_modules/async/lib/async.js:508:13
at /home/project/node_modules/async/lib/async.js:97:13
at Array.forEach (native)
at /home/project/node_modules/async/lib/async.js:26:24
at /home/project/node_modules/async/lib/async.js:96:9
at /home/project/node_modules/async/lib/async.js:507:9
at Object.concat (/home/project/node_modules/async/lib/async.js:141:23)
at /home/project/redis_test.js:21:9
at Command.callback (/home/project/node_modules/redis/index.js:827:13)
When async runs client.hgetall, it trashes the value of this inside of hgetall. You can either wrap up an anonymous function to glue this together, or use fn.bind() as shown below.
You also want to avoid using for .. in to iterate over an Array. Use either a regular for loop or arr.forEach(). Your example would have mysteriously failed as written. Here's a version that seems to do what you want:
var async = require('async');
var redis = require('redis');

var keys = ['key1', 'key2', 'key3'];
var client = redis.createClient();
var multi = client.multi();

// Queue one HMSET per key on the MULTI pipeline (indexed loop instead of
// for..in, which would iterate indices rather than values).
for (var i = 0; i < keys.length; i++) {
    multi.hmset(keys[i], {'some': 'value'});
}

multi.exec(function (err, res) {
    if (err) throw err;
    console.dir(res);
    // Final callback: print the concatenated HGETALL results and shut down.
    function myCallback(err, res) {
        console.log('in myCallback');
        console.dir(res);
        client.quit();
        process.exit();
    }
    // bind(client) keeps `this` intact when async invokes hgetall.
    async.concat(keys, client.hgetall.bind(client), myCallback);
});
This outputs:
[ 'OK', 'OK', 'OK' ]
in myCallback
[ { some: 'value' },
{ some: 'value' },
{ some: 'value' } ]
To debug the mysterious failure, you can turn on debug logging in node_redis by doing redis.debug_mode = true; before sending any Redis commands.

Resources