I am trying to develop an Android mobile application using the PhoneGap framework, and I want to synchronize a table from my phone's local database with the server database.
This is my code, but it only lets me send one row of the table. How can I send all of the rows?
$.ajax({
    type: 'POST',
    data: col1 + '&lid=' + col2,
    url: 'http://your-domain.com/comments/save.php',
    success: function (data) {
        console.log(data);
        alert('Your data was successfully added');
    },
    error: function (jqXHR, textStatus, errorThrown) {
        console.log(textStatus, errorThrown);
        alert('There was an error adding your data');
    }
});
Try selecting the data from the local database and sending it row by row; here's an example:
db.transaction(function (tx) {
    var query = 'SELECT * FROM news WHERE category_id = ?';
    tx.executeSql(query, [lid],
        function (tx, results) {
            for (var i = 0; i < results.rows.length; i++) {
                var row = results.rows.item(i); // item(i), not item(0), so every row is sent
                $.ajax({ /* your code using row['name_of_column'] */ });
            }
        },
        function (tx, err) {
            console.log('error', err);
        });
});
Sorry for my earlier misunderstanding...
If you are using PhoneGap, I guess you are using the Storage feature: http://docs.phonegap.com/en/2.2.0/cordova_storage_storage.md.html#Storage
Following one of their examples, you can do something like:
function queryDB(tx) {
    tx.executeSql('SELECT * FROM DEMO', [], querySuccess, errorCB);
}
function querySuccess(tx, results) {
    // do your AJAX request...
    ...
    data: {
        rows: results.rows
    }
    ...
}
function errorCB(err) {
    alert("Error processing SQL: " + err.code);
}
var db = window.openDatabase("Database", "1.0", "Cordova Demo", 200000);
db.transaction(queryDB, errorCB);
Another option is to send a simple array:
function querySuccess(tx, results) {
    var myRowsIds = [];
    var len = results.rows.length;
    for (var i = 0; i < len; i++) {
        myRowsIds.push(results.rows.item(i).id);
    }
    // do your AJAX request...
    ...
    data: {
        rows: myRowsIds
    }
    ...
}
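Or, putting the two pieces together, here is a rough sketch that collects every row into a plain array and posts them all in a single request (the URL and the rows parameter name are just the placeholders from the question, and the PHP side would need to decode the JSON):
function querySuccess(tx, results) {
    // results.rows is a SQLResultSetRowList, so copy each row into a plain array first
    var allRows = [];
    for (var i = 0; i < results.rows.length; i++) {
        allRows.push(results.rows.item(i));
    }
    $.ajax({
        type: 'POST',
        url: 'http://your-domain.com/comments/save.php', // placeholder from the question
        data: { rows: JSON.stringify(allRows) },
        success: function (data) {
            console.log(data);
            alert('Your data was successfully added');
        },
        error: function (jqXHR, textStatus, errorThrown) {
            console.log(textStatus, errorThrown);
            alert('There was an error adding your data');
        }
    });
}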
hope it helps!
Related
I have a table that has more than 25 items, and I wrote a basic script to break them into sub-arrays of 25 items each, then loop through that collection of sub-arrays to run a batch write item command with the AWS DynamoDB client. The issue is that I get back a validation error. When I run the same seed file via the aws-cli it seeds the table perfectly, which makes me think it has something to do with my script. See anything I am missing? Thanks in advance!
var { DynamoDB } = require('aws-sdk');
var db = new DynamoDB.DocumentClient({
region: 'localhost',
endpoint: 'http://localhost:8000',
});
const allItems = require('./allItems.json');
const tableName = 'some-table-name';
console.log({ tableName, allItems });
var batches = [];
var currentBatch = [];
var count = 0;
for (let i = 0; i < allItems.length; i++) {
//push item to the current batch
count++;
currentBatch.push(allItems[i]);
if (count === 25) {
batches.push(currentBatch);
currentBatch = [];
count = 0; // reset so the next batch also caps at 25 items
}
}
//if there are still items left in the current batch, add it to the collection of batches
if (currentBatch.length > 0) {
batches.push(currentBatch);
}
var completedRequests = 0;
var errors = false;
//request handler for DynamoDB
function requestHandler(params) {
console.log('In the request handler...');
return function (err, data) {
completedRequests++;
errors = errors ? true : err;
//log error
if (errors) {
console.error('Request caused a DB error.');
console.error('ERROR: ' + err);
console.error(JSON.stringify(err, null, 2));
} else {
var res = {
statusCode: 200,
headers: {
'Content-Type': 'application/json',
'Access-Control-Allow-Methods': 'GET,POST,OPTIONS',
'Access-Control-Allow-Origin': '*',
'Access-Control-Allow-Credentials': true,
},
body: JSON.stringify(data),
isBase64Encoded: false,
};
console.log(`Success: returned ${data}`);
return res;
}
if (completedRequests == batches.length) {
return errors;
}
};
}
//Make request
var params;
for (let j = 0; j < batches.length; j++) {
//items go in the params.RequestItems[tableName] array
//format for the items is {PutRequest : {Item: ITEM_OBJECT}}
params = '{"RequestItems": {"' + tableName + '": []}}';
params = JSON.parse(params);
params.RequestItems[tableName] = batches[j];
console.log('before db.batchWriteItem: ', params);
try {
//send to db
db.batchWrite(params, requestHandler(params));
} catch (err) {
console.error(err);
}
}
Here is the formatted request object and the error:
before db.batchWriteItem:
{ RequestItems:
{ 'some-table-name': [ [Object], [Object], [Object], [Object] ] }
}
In the request handler...
Request caused a DB error.
ERROR: ValidationException: Invalid attribute value type
{
"message": "Invalid attribute value type",
"code": "ValidationException",
"time": "2020-08-04T10:51:13.751Z",
"requestId": "dd49628c-6ee9-4275-9349-6edca29636fd",
"statusCode": 400,
"retryable": false,
"retryDelay": 47.94198279972915
}
You are using the DocumentClient in the Node.js code. This will automatically convert the data format used by DynamoDB to a more easily consumable format.
e.g.
{
"id": {
"S": "A string value"
}
}
would become
{
"id": "A string value"
}
The CLI does not perform this data conversion.
You can use the regular DynamoDB client in Node.js to avoid this conversion, e.g. const db = new DynamoDB()
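As a rough sketch of the fix, assuming the entries in allItems.json are already wrapped as {PutRequest: {Item: ...}} with typed attribute values (which is what the CLI's batch-write-item expects), you would switch to the low-level client's batchWriteItem and leave the data untouched:
var AWS = require('aws-sdk');

// Low-level client: it expects typed attribute values such as { "id": { "S": "abc" } }.
var lowLevelDb = new AWS.DynamoDB({
  region: 'localhost',
  endpoint: 'http://localhost:8000',
});

// Inside the loop over batches, as before:
var params = { RequestItems: {} };
params.RequestItems[tableName] = batches[j]; // the typed, PutRequest-wrapped entries
lowLevelDb.batchWriteItem(params, requestHandler(params));

// Alternatively, keep the DocumentClient and strip the type wrappers so each entry
// becomes { PutRequest: { Item: { id: 'abc', count: 1 } } }, then call db.batchWrite(params, ...).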
I recently upgraded from 1.2 to Meteor's latest version, 1.6.0.1.
I was using observe in a publication and an observe on the client to get changes.
In 1.2 there were no problems at all, but in 1.6 observed changes are not received in the client's "changed" callback, even though the client does get the DDP message. I can verify that by looking in Chrome's dev tools > websocket and seeing the incoming message, but it is never fired in a client callback. This only happens when changing 2-3 documents at a time.
So when I delete a few documents from the DB, the publication fires off the callbacks and the client receives them in the websocket messages, but the "observe" callback on the client only fires once.
Here is my code.
Client -
CollectionTest = new Meteor.Collection('collectionTest');
CollectionTest.find({}).observe({
added: function (doc) {
console.log("ADDED DOC ", doc);
},
changed: function (newDoc, oldDoc) {
console.log("CHANGED DOC new ", newDoc);
},
removed: function (doc) {
console.log("REMOVED DOC ", doc);
}
});
Server Publication -
Meteor.publish("ddpPub", function () {
var self = this,
ready = false;
var userId = self.userId;
var subHandle = TestData.find({}).observeChanges({
added: function (id, fields) {
if (ready) {
self.changed("collectionTest", userId, {
type: "added",
data: {
id: id,
fields: fields
}
});
}
},
changed: function (id, fields) {
if (ready) {
self.changed("collectionTest", userId, {
type: "changed",
data: {
id: id,
fields: fields
}
});
}
},
removed: function (id) {
if (ready) {
self.changed("collectionTest", userId, {
type: "removed",
data: id
});
}
}
});
self.added("collectionTest", userId);
self.ready();
ready = true;
self.onStop(function () {
subHandle.stop();
});
});
Attached are images from me removing the documents from the DB: the websocket messages, and then my console on the client, showing it only fires once for 5 documents.
Showing the document id's I am deleting
DDP messages in 'websocket' confirmed they get to client
Single client message in client callback showing only document changed
UPDATE: 12/15/17 - 7:17pm PST
After working on this for a couple of hours and finding some related Meteor posts about observe callbacks where "Meteor.call" did not work inside them, the suggested solution (or hack) is to wrap the "Meteor.call" in a "setTimeout" with a value of 0, which fixes it.
I tried that here and it didn't work, but then I tried throttling the response, and that works! I'm not sure if it's a reliable fix, but it's the only one I have found so far.
I am not sure why this works, or what causes the problem in the first place; any explanation would be welcome.
Server Publication -
Meteor.publish("ddpPub", function () {
var self = this,
ready = false;
var userId = self.userId;
var subHandle = TestData.find({}).observeChanges({
added: function (id, fields) {
if (ready) {
console.log("ADDING PUBLICATION");
self.changed("collectionTest", userId, {
type: "added",
data: {
id: id,
fields: fields
}
});
}
},
changed: function (id, fields) {
if (ready) {
console.log("CHANGING PUBLICATION");
self.changed("collectionTest", userId, {
type: "changed",
data: {
id: id,
fields: fields
}
});
}
},
removed: function (id) {
if (ready) {
console.log("REMOVING PUBLICATION");
ratePub(id, function (data) {
console.log("OBJECT DATA IS ", data);
self.changed("collectionTest", userId, data);
});
}
}
});
self.added("collectionTest", userId);
self.ready();
ready = true;
self.onStop(function () {
subHandle.stop();
});
});
var returnPub = function (id, callback) {
console.log("RETURNING PUB ");
callback({
id: id,
type: "removed",
data: id
});
};
var ratePub = _.rateLimit(returnPub, 10);
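For reference, _.rateLimit is not a built-in of stock underscore or lodash (the closest built-in is _.throttle), so it presumably comes from an extension. A minimal sketch of a queueing rate limiter with the same call shape could look like this; it is only an illustration of the idea, not the exact implementation used above:
// Queue calls and drain them one per `delay` milliseconds.
function rateLimit(fn, delay) {
    var queue = [];
    var timer = null;
    function drain() {
        var args = queue.shift();
        if (args) {
            fn.apply(null, args);
        } else {
            clearInterval(timer);
            timer = null;
        }
    }
    return function () {
        queue.push(arguments);
        if (!timer) {
            timer = setInterval(drain, delay);
        }
    };
}
// e.g. var ratePub = rateLimit(returnPub, 10); // drop-in for the _.rateLimit call above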
I'm having trouble with the AWS DynamoDB JS SDK v2.4.9. I want to use the DocumentClient class as opposed to the lower-level DynamoDB class, but I can't get it working.
This works:
function testPutItem( callback ) {
var tableName = 'todos';
var params = {
TableName: tableName,
Item: {
user_id: { S : userId },
id: { N : msFromEpoch }, // ms from epoch
title: { S : makeRandomStringWithLength(16) },
completed: { BOOL: false }
}
};
var dynamodb = new AWS.DynamoDB();
dynamodb.putItem(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else {
console.log(data); // successful response
if (callback) callback(data);
}
});
}
This does not work and gives the error InvalidParameterType: Expected params.Item[attribute] to be a structure for each attribute, as if DocumentClient were expecting the same input as DynamoDB:
function testPutItem( callback ) {
var tableName = 'todos';
var params = {
TableName: tableName,
Item: {
user_id: userId,
id: msFromEpoch,
title: makeRandomStringWithLength(16),
completed: false
}
};
console.log(params);
var docClient = new AWS.DynamoDB.DocumentClient({region: 'us-east-1'});
docClient.put(params, function(err, data) {
if (err) console.log(err, err.stack); // an error occurred
else {
console.log(data); // successful response
if (callback) callback(data);
}
});
}
Does anyone have any idea what I am doing wrong?
I used to have the same issue.
Please try with a simple object first, because it's due to some special characters in your attributes. See my example.
This generates the error:
InvalidParameterType: Expected params.Item[attribute] to be a structure
var Item = {
    domain: "knewtone.com",
    categorie: "<some HTML Object stuff>",
    title: "<some HTML stuff>",
    html: "<some HTML stuff>"
};
But when I replace the HTML stuff with formatted HTML (simple characters), it works:
var Item = {
    domain: "knewtone.com",
    categorie: $(categorie).html(),
    title: $(title).html(),
    html: $(html).html()
};
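As a minimal sketch of the same idea, assuming the offending values are non-primitives (DOM/jQuery objects, undefined, empty strings) rather than plain text, you could normalize the item before handing it to the DocumentClient; toPlainItem here is a hypothetical helper, not part of the SDK:
// Hypothetical helper: coerce every attribute to a primitive and drop empty values
// before calling docClient.put.
function toPlainItem(item) {
    var plain = {};
    Object.keys(item).forEach(function (key) {
        var value = item[key];
        if (value === undefined || value === null || value === '') return; // skip empties
        plain[key] = (typeof value === 'object') ? String(value) : value;
    });
    return plain;
}
// e.g. docClient.put({ TableName: 'todos', Item: toPlainItem(Item) }, callback);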
I am new to Ionic. I want to add data coming from a remote server into SQLite. I have successfully populated the data into a list, so how can I store this data into SQLite? Here is my code. How do I pass this data to the query? I am unable to do this.
service.js
angular.module('starter.service',[]).
factory('userServices',['$http',function($http){
var users = [];
return {
get: function(){
return $http.get("http://xxxxxxxxx-info").then(function(response){
users = response.data;
return users;
});
},
remove:function(content){
users.splice(users.indexOf(content),1);
},
getUser:function(chatId)
{
for(var i=0; i<users.length;i++){
if(users[i].content_id === parseInt(chatId)){
return users[i];
}
}
return null;
}
}
}]);
controller.js
angular.module('shoppingPad.controller', [])
.controller('ChatCtrl', function ($scope, userServices, $ionicModal, $cordovaSQLite) {
console.log('inside controller');
userServices.get().then(function (users) {
//users is an array of user objects
$scope.contents = users;
console.log($scope.contents);
var query = "INSERT INTO content (content_id, display_name) VALUES (?,?)";
$cordovaSQLite.execute(db, query, [users.content_id, users.display_name]).then(function (res) {
alert(res);
alert('Inserted');
}, function (e) {
alert('Error:' + e.message);
});
});
});
Where did you define db? It's necessary to wait until the device is ready.
$ionicPlatform.ready(function () {
var db = $cordovaSQLite.openDB({ name: "my.db" });
// just first time you need to define content table
$cordovaSQLite.execute(db,"CREATE TABLE content (content_id integer, display_name text)");
userServices.get().then(function (users) {
//users is an array of user objects
$scope.contents = users;
console.log($scope.contents);
var query = "INSERT INTO content (content_id, display_name) VALUES (?,?)";
$cordovaSQLite.execute(db, query, [users.content_id, users.display_name]).then(function (res) {
alert(res);
alert('Inserted');
}, function (e) {
alert('Error:' + e.message);
});
});
});
Are you sure that your users object looks like
{
"content_id":12,
"display_name":"hello world"
}
and not like
[
{
"content_id":12,
"display_name":"hello world"
},
{
"content_id":13,
"display_name":"stackoverflow"
},
...
]
I just ask because users sounds like more than one entry.
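If it really is an array, a minimal sketch of the insert (using the same table and column names as above) would loop over the entries and bind each row separately:
var query = "INSERT INTO content (content_id, display_name) VALUES (?,?)";
angular.forEach(users, function (user) {
    $cordovaSQLite.execute(db, query, [user.content_id, user.display_name])
        .then(function (res) {
            console.log('Inserted row, insertId: ' + res.insertId);
        }, function (e) {
            console.log('Error: ' + e.message);
        });
});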
I want to use Meteor to subscribe to a few remote publications via DDP, then show the documents in one template. Here is what I did:
Posts = {};
var lists = [
{server: "localhost:4000"},
{server: "localhost:5000"}
];
var startup = function () {
_.each(lists, function (list) {
var connection = DDP.connect(`http://${list.server}`);
Posts[`${list.server}`] = new Mongo.Collection('posts', {connection: connection});
connection.subscribe("allPosts");
});
}
startup();
This file is in the client folder. On every startup, in this example, the browser has two client collections, Posts["localhost:4000"] and Posts["localhost:5000"], both with the same schema. I know this format (Collection[server]) is ugly; please tell me if there is a better way.
Is there a way to show these client collections in the same template, reactively? Like this:
Template.registerHelper("posts", function () {
return Posts.find({}, {sort: {createdAt: -1}});
});
I think the Connected Client is a big part of Meteor. There should be a best practice for solving this problem, right?
Solved.
Connect to multiple servers via DDP, then observe their collections reactively via cursor.observeChanges.
Posts = {};
PostsHandle = {};
// LocalPosts is a local collection that lives in the browser.
LocalPosts = new Mongo.Collection(null); // null means local
// userId is generated by another Meteor app.
var lists = [
{server: "localhost:4000", userId: [
"hocm8Cd3SjztwtiBr",
"492WZqeqCxrDqfG5u"
]},
{server: "localhost:5000", userId: [
"X3oicwXho45xzmyc6",
"iZY4CdELFN9eQv5sa"
]}
];
var connect = function () {
_.each(lists, function (list) {
console.log("connect:", list.server, list.userId);
var connection = DDP.connect(`http://${list.server}`);
Posts[`${list.server}`] = new Mongo.Collection('posts', {connection: connection}); // 'posts' should be same with remote collection name.
PostsHandle[`${list.server}`] = connection.subscribe("posts", list.userId);
});
};
var observe = function () {
_.each(PostsHandle, function (handle, server) {
Tracker.autorun(function () {
if (handle.ready()) {
console.log(server, handle.ready());
// learn from http://docs.meteor.com/#/full/observe_changes
// thank you cursor.observeChanges
var cursor = Posts[server].find();
var cursorHandle = cursor.observeChanges({
added: function (id, post) {
console.log("added:", id, post);
post._id = id; // sync the post's _id
LocalPosts.insert(post);
},
removed: function (id) {
console.log("removed:", id);
LocalPosts.remove(id);
}
});
}
})
});
}
Template.posts.onCreated(function () {
connect(); // template level subscriptions
});
Template.posts.helpers({
posts: function () {
observe();
return LocalPosts.find({}, {sort: {createdAt: -1}}); // sort reactive
}
});