I'm unable to use YDN-DB ("Version change transaction was aborted in upgradeneeded event handler.") - ydn-db

I'm having a problem getting started with YDN-DB. I set up a schema and initialized the database, but instantiating it throws an error that says "ConstraintError: DOM Exception IDBDatabase 0". The "fail" event handler gives me "Version change transaction was aborted in upgradeneeded event handler.". Reading about the problem I found this link, but I can't find a way to attach a handler to the "onupgradeneeded" event, which I think would solve the problem.
Here is my code:
var shopgroups_schema = {
  name: 'shopgroups',
  keyPath: 'id_shop_group',
  autoIncrement: true,
  indexes: [
    {keyPath: 'id_shop_group'},
    {keyPath: 'name'},
    {keyPath: 'share_customer'},
    {keyPath: 'share_order'},
    {keyPath: 'share_stock'},
    {keyPath: 'active'},
    {keyPath: 'deleted'},
    {keyPath: 'date_add'},
    {keyPath: 'date_upd'},
    {keyPath: 'date_upd'}
  ]
};
var shops_schema = {
  name: 'shops',
  keyPath: 'id_shop',
  autoIncrement: true,
  indexes: [
    {keyPath: 'id_shop'},
    {keyPath: 'id_shop_group'},
    {keyPath: 'name'},
    {keyPath: 'id_category'},
    {keyPath: 'id_theme'},
    {keyPath: 'active'},
    {keyPath: 'deleted'},
    {keyPath: 'date_add'},
    {keyPath: 'date_upd'}
  ]
};
var schema = {
  stores: [shopgroups_schema, shops_schema]
};
var schemaName = 'chollingApp4';
var db = new ydn.db.Storage(schemaName, schema);
db.addEventListener('error', function (event) {
  var e = event.getError();
  // common errors are AbortError, ConstraintError and UnknownError (possibly for quota exceeded errors).
  // log error for debugging
  console.log('connection failed with ' + e.name);
});
db.addEventListener('fail', function (event) {
  var err = event.getError();
  console.log(event);
  console.log(err);
  console.log('connection failed with ' + err.name + ' by ' + err.message);
  db = null; // no operation can be placed to the database instance
});
db.addEventListener('ready', function (event) {
  var is_updated = event.getVersion() != event.getOldVersion();
  if (is_updated) {
    console.log('database connected with new schema');
  } else if (isNaN(event.getOldVersion())) {
    console.log('new database created');
  } else {
    console.log('existing database connected');
  }
  // heavy database operations should start from this.
});

A ConstraintError while opening the database suggests that the schema is the problem. I see that you have the index keyPath date_upd repeated.
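For illustration, a minimal sketch of the shopgroups store with the duplicate date_upd index removed (everything else left exactly as in the question):
var shopgroups_schema = {
  name: 'shopgroups',
  keyPath: 'id_shop_group',
  autoIncrement: true,
  indexes: [
    {keyPath: 'id_shop_group'},
    {keyPath: 'name'},
    {keyPath: 'share_customer'},
    {keyPath: 'share_order'},
    {keyPath: 'share_stock'},
    {keyPath: 'active'},
    {keyPath: 'deleted'},
    {keyPath: 'date_add'},
    {keyPath: 'date_upd'} // listed only once
  ]
};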

Related

Insert or Update getting "Transaction query already complete"

How can I correctly search for a row in the database and INSERT/UPDATE according to the search result (INSERT if not found, UPDATE if found)?
I'm currently doing this:
bookshelf.transaction(async function (t) {
  for (var x = 0; x < 10; x++) {
    let row = pmsParser.getRow(x);
    if (_.isEmpty(row)) {
      break;
    }
    let data = {
      lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
      mvs: row.version,
      color: row.color,
      location: row.location,
      status: row.status
    };
    new Vehicle({ chassi: row.chassi })
      .fetch({ require: true })
      .then(model => {
        return new Vehicle(model)
          .save(data, { transacting: t, patch: true });
      })
      .catch(Vehicle.NotFoundError, err => {
        new Vehicle(data)
          .save('chassi', row.chassi, { transacting: t })
          .then(() => {
            console.log(`Inserted... ${row.chassi}`);
          });
      })
      .catch(err => {
        console.log(err.message);
      });
  }
})
.catch(function (err) {
  console.error(err);
  return res.json({ status: false, count: 0, error: err.message });
});
And I receive this error:
Transaction query already complete, run with DEBUG=knex:tx for more info
Unhandled rejection Error: Transaction query already complete, run with DEBUG=knex:tx for more info
at completedError (/home/node/app/node_modules/knex/lib/transaction.js:297:9)
at /home/node/app/node_modules/knex/lib/transaction.js:266:22
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Function.Promise.attempt.Promise.try (/home/node/app/node_modules/bluebird/js/release/method.js:39:29)
at Client_SQLite3.trxClient.query (/home/node/app/node_modules/knex/lib/transaction.js:264:34)
at Runner.<anonymous> (/home/node/app/node_modules/knex/lib/runner.js:138:36)
at Runner.tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Runner.query (/home/node/app/node_modules/bluebird/js/release/method.js:15:34)
at /home/node/app/node_modules/knex/lib/runner.js:61:21
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at /home/node/app/node_modules/bluebird/js/release/using.js:185:26
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Promise._settlePromiseFromHandler (/home/node/app/node_modules/bluebird/js/release/promise.js:512:31)
at Promise._settlePromise (/home/node/app/node_modules/bluebird/js/release/promise.js:569:18)
at Promise._settlePromise0 (/home/node/app/node_modules/bluebird/js/release/promise.js:614:10)
at Promise._settlePromises (/home/node/app/node_modules/bluebird/js/release/promise.js:693:18)
Knex debug output
knex:tx trx1: Starting top level transaction +0ms
knex:tx trx1: releasing connection +28ms
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +15ms
Transaction query already complete, run with DEBUG=knex:tx for more info
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +8ms
Transaction query already complete, run with DEBUG=knex:tx for more info
When running under a transaction, ALL related database accesses must be within the context of that transaction.
//...
new Vehicle({ chassi: row.chassi })
  .fetch({ require: true, transacting: t })
  .then(model => {
//...
Your iterations are not being correctly promisified, which lets your queries escape the transaction context and causes the 'Transaction query already complete' error. When creating promises inside a loop, it is always advisable to collect them and hand them to a promise collection handler such as Promise.all(). This prevents the transaction context from being left before all promises are resolved.
Those changes may lead to code like the following (untested):
bookshelf.transaction(async function (t) {
  let promises = [];
  for (var x = 0; x < 10; x++) {
    let row = pmsParser.getRow(x);
    if (_.isEmpty(row)) {
      break;
    }
    let data = {
      lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
      mvs: row.version,
      color: row.color,
      location: row.location,
      status: row.status
    };
    promises.push(
      new Vehicle({ chassi: row.chassi })
        .fetch({ require: true, transacting: t })
        .then(model => {
          return model // no need to use 'new Vehicle()' here
            .save(data, { transacting: t, patch: true });
        })
        .catch(Vehicle.NotFoundError, err => {
          return new Vehicle(data) // missing 'return'
            .save('chassi', row.chassi, { transacting: t })
            .then(() => {
              console.log(`Inserted... ${row.chassi}`);
            });
        })
        .catch(err => {
          console.log(err.message);
          // throw err; // should rethrow it!
        })
    );
  }
  return Promise.all(promises)
    .catch(function (err) {
      console.error(err);
      return res.json({ status: false, count: 0, error: err.message });
    });
});

Are there conditions under which DynamoDB allows for a duplicate primary key?

While performing an updateItem operation to a dynamo table using the AWS Javascript SDK, I am instead seeing a second row written with the same primary key, which certainly seems contrary to the documentation.
I am using a hash string key "user_id".
The initial write:
var params = {
  Item: {
    user_id: {S: "foo"},
    is_authorized: {BOOL: false}
  },
  TableName: 'MyTable'
};
db.putItem(params, function(err, data) {
  if (err) {
    console.log(err);
  } else {
    console.log(data);
  }
});
The update attempt:
var updateParams = {
  Key: {
    user_id: {S: "foo"}
  },
  AttributeUpdates: {
    confirmationCode: {Action: "PUT", Value: {S: "key"}},
    phone: {Action: 'PUT', Value: {S: "1234567"}},
    is_authorized: {Action: 'PUT', Value: {BOOL: false}},
    confirmAttempts: {Action: 'PUT', Value: {N: "1"}}
  },
  TableName: 'MyTable'
};
db.updateItem(updateParams, function(err, data) {
  if (err) {
    response = err;
    console.log("The error was: " + err);
  } else {
    response = data;
    console.log(data);
  }
});
For reference to others who might come across this issue:
The issue was caused by whitespace, specifically a trailing space at the end of some primary key strings. Viewing Dynamo records, at least from within the console, does not make whitespace characters visible, so the problem was invisible.
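One way to guard against this (a sketch, not part of the original fix; normalizedKey and rawUserId are hypothetical names) is to trim the hash key before every write or update, so that "foo" and "foo " can never become two distinct items:
// Hypothetical helper: normalize the hash key before it goes into any request.
function normalizedKey(value) {
  return { S: String(value).trim() };
}

var updateParams = {
  Key: { user_id: normalizedKey(rawUserId) }, // rawUserId: wherever the id comes from
  AttributeUpdates: {
    confirmAttempts: { Action: 'PUT', Value: { N: '1' } }
  },
  TableName: 'MyTable'
};
db.updateItem(updateParams, function (err, data) {
  if (err) console.log(err);
  else console.log(data);
});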

Phonegap sqlite update query not working

I have an update query in which I am updating a boolean column to true. The success callback fires, but when I fetch the same value afterwards it is still false.
Below is my code snippet:
db.transaction(function(tx) {
  tx.executeSql("update `"+UsersT+"` set "+LoggedInC+" = 'true' where "+UserIdC+" = '18' ", [], function(tx, res) {
    alert("it goes here");
    getUser();
  }, function(err) {
    alert("Error code: "+err.code);
  });
}, function(err) {});

function getUser() {
  db.transaction(function(tx) {
    tx.executeSql("select * from `"+UsersT+"` where "+UserIdC+" = 18 ", [], function(tx, res) {
      alert(res.rows.length+" | "+res.rows.item(0).logged_in+" | "+res.rows.item(0).uid);
    }, function(err) { alert("Error: "+err.code); });
  }, function(err) {});
}
Thanks. Any help?

Kendo UI Grid Fires CRUD Operations multiple times

I've seen this problem in many places but I can't seem to find a solution. I have defined a Kendo grid with CRUD operations; the problem is that previously fired operations get fired again.
Say you delete entry X and then add entry Y: the create operation fires, and after that the delete operation (for X, which has already been deleted) fires again. The same thing happens if you first create an element and then edit another: it edits the 2nd element and then re-fires the 1st create statement, inserting a duplicate of the 1st inserted element. If you carry on with several operations it becomes a nightmare, with all the previous operations being fired and sent to the controller again.
My grid is:
function InitializeIPAddressesGrid(userID) {
  selectedUserID = userID;
  $(".ipAddresses").kendoGrid({
    dataSource: IPAdressesDataSource,
    sortable: {
      mode: "single",
      allowUnsort: false
    },
    remove: function (e) {
      this.refresh();
      var canDelete = confirm("Are you sure you want to delete this record?");
      if (!canDelete) {
        e.preventDefault();
      }
    },
    height: 420,
    resizable: true,
    pageable: {
      refresh: true,
      pageSize: 10
    },
    selectable: "row",
    toolbar: ["create"],
    editable: { mode: "inline", confirmation: false },
    columns: [{
      field: "IpAddress",
      title: "IP Address"
    },
    {
      field: "Status",
      title: "Status"
    },
    {
      field: "LockedUntil",
      title: "Locked until",
      template: "#=kendo.toString(LockedUntil, 'yyyy/MM/dd')#"
    },
    { command: ["edit", "destroy"], title: " ", width: "180px" }]
  });
}
var IPAdressesDataSource = new kendo.data.DataSource({
  type: "json",
  serverPaging: true,
  serverSorting: true,
  serverFiltering: true,
  pageSize: 10,
  //scrollable: false,
  transport: {
    read: {
      url: websiteRootUrl + '/PortalAuthorization/GetIPAddressesList'
    },
    update: {
      url: websiteRootUrl + "/PortalAuthorization/UpdateIP",
      dataType: "json",
      type: 'POST',
      complete: function (e) {
        if (e.status != 200) {
          alert(eval('(' + e.responseText + ')').Message);
        }
      }
    },
    create: {
      url: websiteRootUrl + "/PortalAuthorization/CreateIP",
      dataType: "json",
      type: 'POST',
      complete: function (e) {
        if (e.status != 200) {
          alert(eval('(' + e.responseText + ')').Message);
        }
      }
    },
    destroy: {
      url: websiteRootUrl + "/PortalAuthorization/DeleteIP",
      dataType: "json",
      type: 'DELETE',
      complete: function (e) {
        if (e.status != 200) {
          alert(eval('(' + e.responseText + ')').Message);
        }
      }
    },
    parameterMap: function (options, operation) {
      if (operation == "update" && options) {
        return {
          ipAddress: options.IpAddress,
          status: options.Status,
          lockedUntil: kendo.toString(options.LockedUntil, 'yyyy/MM/dd'),
          pkey: options.ID,
          databaseID: selectedDatabaseID
        };
      } else if (operation == "destroy" && options) {
        return {
          databaseID: selectedDatabaseID,
          pkey: options.ID,
          userIDParam: selectedUserID
        };
      } else if (operation == "create" && options) {
        return {
          ipAddress: options.IpAddress,
          status: options.Status,
          lockedUntil: kendo.toString(options.LockedUntil, 'yyyy/MM/dd'),
          pkey: options.ID,
          userIDParam: selectedUserID,
          databaseID: selectedDatabaseID
        };
      } else {
        options.databaseID = selectedDatabaseID;
        options.userID = selectedUserID;
        return options;
      }
    }
  },
  schema: {
    model: {
      id: "ID",
      fields: {
        IpAddress: { type: "string" },
        Status: { type: "string" },
        LockedUntil: { type: "date" }
      }
    },
    data: function (data) {
      return data.Items;
    },
    total: function (data) {
      return data.TotalCount;
    }
  }
});
My controllers are:
public object UpdateIP(int databaseID, long pkey, string status, string lockedUntil, string ipAddress)
{
    var database = [...];
    DynamicDataRepository repository = [...];
    string query = "...";
    repository.ExecuteNonQuery(query);
    return new HttpResponseMessage(HttpStatusCode.OK);
}
public object DeleteIP(int databaseID, long pkey, int? userIDParam)
{
    var database = [...];
    DynamicDataRepository repository = [...];
    string query = "...";
    repository.ExecuteNonQuery(query);
    return new HttpResponseMessage(HttpStatusCode.OK);
}
public object CreateIP(int databaseID, long? pkey, string status, string lockedUntil, string ipAddress, int? userIDParam)
{
    var database = [...];
    DynamicDataRepository repository = [...];
    string query = "...";
    repository.ExecuteNonQuery(query);
    return new HttpResponseMessage(HttpStatusCode.OK);
}
Do you have any idea where I've gone wrong? Thanks in advance. P.S. The queries in the controllers work fine.
I fixed the problem by following OnaBai's suggestion of returning the updated/created entity; in the case of a delete I returned the ID of the deleted entry.
public object UpdateIP(int databaseID, long pkey, string status, string lockedUntil, string ipAddress)
{
    var database = [...];
    DynamicDataRepository repository = [...];
    string query = [...];
    IPList updatedIP = new IPList { ID = pkey, IpAddress = ipAddress, Status = status, LockedUntil = DateTime.Today };
    return Json(updatedIP, JsonRequestBehavior.AllowGet);
    // return new HttpResponseMessage(HttpStatusCode.OK);
}
One more note: in the case of a CREATE, returning the created entity didn't seem to work, so what I do in the .complete event of the CREATE operation is call ipGrid.dataSource.read(); ipGrid.refresh(); so that the operation doesn't repeat itself. (I read that in this case there can be a problem with the model definition, namely setting the ID field, but I did set that one.) Many thanks to OnaBai.
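For completeness, a sketch of what that create transport can look like after the change (assuming ipGrid refers to the grid widget, e.g. obtained via $(".ipAddresses").data("kendoGrid")):
create: {
  url: websiteRootUrl + "/PortalAuthorization/CreateIP",
  dataType: "json",
  type: 'POST',
  complete: function (e) {
    if (e.status != 200) {
      alert(eval('(' + e.responseText + ')').Message);
    }
    // Re-read from the server so the new row gets its real ID and the data
    // source stops treating it as a pending "new" record (prevents the re-fire).
    var ipGrid = $(".ipAddresses").data("kendoGrid");
    ipGrid.dataSource.read();
    ipGrid.refresh();
  }
},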

Error when trying to use async.concat to retrieve data from redis

I was following an example posted by the async author here but I'm getting an error.
redis-2.2.12
node v0.4.11-pre
Here's my code:
var async = require('async');
var redis = require('redis');
var keys = ['key1', 'key2', 'key3'];
var client = redis.createClient();
var multi = client.multi();
for (var key in keys) {
  multi.hmset(key, {'some': 'value'});
}
multi.exec(function(err, res) {
  if (err) throw err;
  console.dir(res);
  var myCallback = function(err, res) {
    console.log('in myCallback');
    console.dir(res);
    client.quit();
    process.exit();
  };
  async.concat(keys, client.hgetall, myCallback);
});
Produces the following output:
$ node redis_test.js
[ 'OK', 'OK', 'OK' ]
node.js:134
throw e; // process.nextTick error, or 'error' event on first tick
^
TypeError: Object #<Object> has no method 'send_command'
at /home/project/node_modules/redis/index.js:666:25
at /home/project/node_modules/async/lib/async.js:508:13
at /home/project/node_modules/async/lib/async.js:97:13
at Array.forEach (native)
at /home/project/node_modules/async/lib/async.js:26:24
at /home/project/node_modules/async/lib/async.js:96:9
at /home/project/node_modules/async/lib/async.js:507:9
at Object.concat (/home/project/node_modules/async/lib/async.js:141:23)
at /home/project/redis_test.js:21:9
at Command.callback (/home/project/node_modules/redis/index.js:827:13)
When async runs client.hgetall, it loses the value of this inside hgetall. You can either wrap the call in an anonymous function to glue this together, or use fn.bind() as shown below.
You also want to avoid using for..in to iterate over an array; use either a regular for loop or arr.forEach(). Your example would have failed mysteriously as written. Here's a version that seems to do what you want:
var async = require('async');
var redis = require('redis');
var keys = ['key1', 'key2', 'key3'];
var client = redis.createClient();
var multi = client.multi();
keys.forEach(function (key) {
  multi.hmset(key, {'some': 'value'});
});
multi.exec(function(err, res) {
  if (err) throw err;
  console.dir(res);
  var myCallback = function(err, res) {
    console.log('in myCallback');
    console.dir(res);
    client.quit();
    process.exit();
  };
  async.concat(keys, client.hgetall.bind(client), myCallback);
});
This outputs:
[ 'OK', 'OK', 'OK' ]
in myCallback
[ { some: 'value' },
{ some: 'value' },
{ some: 'value' } ]
To debug the mysterious failure, you can turn on debug logging in node_redis by doing redis.debug_mode = true; before sending any Redis commands.
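If you prefer the anonymous-function approach mentioned above instead of .bind(), the concat call could look like this (a sketch; everything else stays the same):
// Wrap hgetall so it is always called with client as its receiver.
async.concat(keys, function (key, callback) {
  client.hgetall(key, callback);
}, myCallback);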
