Insert multiple records in SQLite using React Native

I am trying to insert multiple rows in React Native using SQLite.
This is the code:
rows = responseJson.rows;
for (i = 0; i < rows.length; i++) {
  row = rows[i];
  query = `insert into ComuniUserAccountSync values (
    ${row.IDComuniUserAccountSync},
    ${row.IdAzienda},
    ${row.IdComune},
    ${row.IdUserAccount},
    '${row.DescrizioneComune}',
    '${row.DateLastUpdateMaster}'
  )`;
  db.transaction(tx => {
    tx.executeSql(
      query,
      [],
      (a, b) => console.log("!OK!!", JSON.stringify(b)),
      (a, b) => console.log("!ERROR!!", a, b)
    );
  });
}
But the result is that only the last row gets inserted, many times! This is the output when I run:
db.transaction(tx => {
  tx.executeSql(
    "select IDComuniUserAccountSync from ComuniUserAccountSync",
    [],
    (a, b) => console.log("!OK!", JSON.stringify(b)),
    (a, b) => console.log("!ERROR!!", JSON.stringify(b))
  );
});
!OK! {"rowsAffected":0,"rows":{"_array":[{"IDComuniUserAccountSync":72},{"IDComuniUserAccountSync":72},{"IDComuniUserAccountSync":72},{"IDComuniUserAccountSync":72},{"IDComuniUserAccountSync":72}, .......
Any help?
Max

insertCategories(arrCateData) {
  let keys = Object.keys(arrCateData[0]);
  let arrValues = [];
  var len = arrCateData.length;
  for (let i = 0; i < len; i++) {
    // Quote the string fields so they are valid SQL literals
    arrCateData[i].image = `"${arrCateData[i].image}"`;
    arrCateData[i].thumbnail = `"${arrCateData[i].thumbnail}"`;
    arrCateData[i].name = `"${arrCateData[i].name}"`;
    arrCateData[i].path = `"${arrCateData[i].path}"`;
    arrValues.push("(" + Object.values(arrCateData[i]) + ")");
  }
  // console.log(arrValues)
  return new Promise((resolve) => {
    this.initDB().then((db) => {
      db.transaction((tx) => {
        tx.executeSql("INSERT INTO category (" + keys + ") VALUES " + String(arrValues)).then(([tx, results]) => {
          resolve(results);
        });
      }).then((result) => {
        this.closeDatabase(db);
      }).catch((err) => {
        console.log(err);
      });
    }).catch((err) => {
      console.log(err);
    });
  });
}
This code may help you. I just pass my array into the function and it inserts the data; change the data according to your requirements.
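For reference, here is a minimal sketch of the same multi-row idea using ? placeholders instead of quoting values by hand, assuming the executeSql(sql, args, success, error) API from the question. It also sidesteps the original bug: query was an undeclared (effectively global) variable, and because transactions run asynchronously, every callback saw the last query the loop had built. Declaring per-iteration variables with let, or batching everything into one statement as below, fixes that.

// Sketch, assuming the executeSql(sql, args, success, error) API from the question.
// One statement inserts every row; values go through placeholders, not string interpolation.
function insertRows(db, rows) {
  const placeholders = rows.map(() => "(?, ?, ?, ?, ?, ?)").join(", ");
  const args = [];
  for (const row of rows) {
    args.push(
      row.IDComuniUserAccountSync,
      row.IdAzienda,
      row.IdComune,
      row.IdUserAccount,
      row.DescrizioneComune,
      row.DateLastUpdateMaster
    );
  }
  db.transaction(tx => {
    tx.executeSql(
      "INSERT INTO ComuniUserAccountSync VALUES " + placeholders,
      args,
      (tx, result) => console.log("inserted", result.rowsAffected),
      (tx, err) => console.log("insert failed", err)
    );
  });
}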

Related

crawler with ramda.js (functional programming)

I'm trying to crawl movie data from the TMDB website. I finished my code in plain JavaScript, but I want to change it to a functional programming style using ramda.js.
I attached my code below. I want to get rid of the for-loop (if possible) and use the R.pipe function.
(async () => {
  for (let i = 0; i < 1000; i++) {
    (() => {
      setTimeout(async () => {
        let year = startYr + Math.floor(i / 5);
        await request.get(path(year, i % 5 + 1), async (err, res, data) => {
          const $ = cheerio.load(data);
          let list = $('.results_poster_card .poster.card .info .flex a');
          _.forEach(list, (element, index) => {
            listJSON.push({
              MovieID: $(element).attr('id').replace('movie_', ''),
              Rank: (i % 5) * 20 + index + 1,
              Year: year
            });
          });
          if (i === 1000 - 1) {
            await pWriteFile(`${outputPath}/movieList.json`, JSON.stringify(listJSON, null, 2));
          }
        });
      }, 1000 * i);
    })(i);
  }
})().catch(error => console.log(error));
Steps:
1- Break your code into small functions
2- Stop using async/await and use promise.then(otherFunction)
3- When using promises, you can create a sleep function like this: const sleep = (time) => new Promise(resolve => setTimeout(resolve, time));
Ex.:
const process = index => sleep(1000)
  .then(() => makeRequest(index))
  .then(processData);

// Sequential
R.range(0, 1000)
  .reduce(
    (prev, actual) => prev.then(() => process(actual)),
    Promise.resolve()
  )
  .then(printResult);

// Parallel
Promise.all(R.range(0, 1000).map(process))
  .then(printResult);
You can use the Ramda range() function to replace your loop.
https://ramdajs.com/docs/#range
R.range(0, 1000);
That will provide you with a collection of integers (your i) that you can work with (map() or whatever you need).
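If you want the R.pipe shape the question asked for, a rough sketch could look like the following, where processPage is a hypothetical stand-in for the request-and-parse step:

const R = require('ramda');

// processPage is a hypothetical stand-in for the request-and-parse step.
const crawl = R.pipe(
  R.range(0),            // n -> [0, 1, ..., n - 1]
  R.map(processPage),    // -> array of promises
  ps => Promise.all(ps)  // -> single promise with all results
);

crawl(1000).then(results => console.log(results.length));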

Multi-document creation in a sub-collection in Firestore

I am trying to write a function that will:
Create documents in a sub-collection
Allow for a then/catch callback after all sub-documents have been created
export const doCreateSubs = (Id, count) => {
  if (count > 0 && count <= 50) {
    const times = n => f => {
      let iter = i => {
        if (i === n) return;
        f(i);
        iter(i + 1);
      };
      return iter(0);
    };
    times(count)(i => {
      db
        .collection("parent")
        .doc(`${Id}`)
        .collection("sub")
        .add({
          subName: `name ${i + 1}`,
          dateCreated: new Date()
        });
    });
  }
}
I've played around with batch but it doesn't work with .collection. I know my function is really poor - is there a generally better way of doing this?
So I've just realised you can call .doc() with no value and it will create a UID for the key. I can also return batch.commit() and receive a callback when it's complete!
export const doCreateSubs = (Id, count) => {
  if (count > 0 && count <= 50) {
    const times = n => f => {
      let iter = i => {
        if (i === n) return;
        f(i);
        iter(i + 1);
      };
      return iter(0);
    };
    const batch = db.batch();
    times(count)(i => {
      const ref = db
        .collection("parent")
        .doc(`${Id}`)
        .collection("subs")
        .doc();
      batch.set(ref, {
        boxName: `Sub ${i + 1}`,
        dateCreated: new Date()
      });
    });
    return batch.commit();
  }
}
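For reference, a minimal usage sketch, assuming the same db export as above (note that doCreateSubs only returns a promise when count is in range, so the .then would fail for out-of-range counts):

doCreateSubs("someParentId", 10)
  .then(() => console.log("all sub-documents created"))
  .catch(err => console.error("batch failed", err));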

How to set results retrieved from a SQLite DB table into an array in React Native?

Here is my code
var employeeList = [];
let db = SQLite.openDatabase({ name: 'test.db', createFromLocation: "~example.db", location: 'Library' }, false, false);
db.transaction((tx) => {
  tx.executeSql('SELECT * FROM Employees', [], (tx, results) => {
    console.log("Query completed");
    var len = results.rows.length;
    for (let i = 0; i < len; i++) {
      let row = results.rows.item(i);
      employeeList.push(row.name);
    }
    this.setState({ employees: employeeList });
    db.closeDatabase();
  });
});
alert(this.state.employees);
I am able to set the results into employeeList inside the transaction, but when I check employeeList outside the transaction, it is blank.
What do I have to do to set results.rows on the employees state object?
It's asynchronous. You're asking for a value before the DB transaction has been performed. You need to use promises or callbacks to know when the query has executed.
As Gabriel mentioned, you need to wrap the call in a Promise. I provided an example of how you might want to do it.
const getEmployees = new Promise(function(resolve, reject) {
  var employeeList = [];
  let db = SQLite.openDatabase({ name: 'test.db', createFromLocation: "~example.db", location: 'Library' }, false, false);
  db.transaction((tx) => {
    tx.executeSql('SELECT * FROM Employees', [], (tx, results) => {
      console.log("Query completed");
      var len = results.rows.length;
      for (let i = 0; i < len; i++) {
        let row = results.rows.item(i);
        employeeList.push(row.name);
      }
      db.closeDatabase();
      // resolve the promise with the collected rows
      resolve(employeeList);
    });
  });
});

getEmployees.then(data => {
  this.setState({
    employees: data
  });
});
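If you also want SQL failures to surface, a variant (a sketch, assuming the same openDatabase call and the four-argument executeSql form used elsewhere in this thread) can wire the error callback to reject:

// Sketch: same query as above, but rejecting on error so callers can .catch().
const getEmployeesSafe = new Promise((resolve, reject) => {
  let db = SQLite.openDatabase({ name: 'test.db', createFromLocation: "~example.db", location: 'Library' }, false, false);
  db.transaction((tx) => {
    tx.executeSql(
      'SELECT * FROM Employees',
      [],
      (tx, results) => {
        const names = [];
        for (let i = 0; i < results.rows.length; i++) {
          names.push(results.rows.item(i).name);
        }
        resolve(names);
      },
      (tx, err) => reject(err) // propagate SQL errors to the promise
    );
  });
});

getEmployeesSafe
  .then(names => console.log(names))
  .catch(err => console.error('query failed', err));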

Issue with Observable forkJoin

Hi, I have 3 tables, each one a child of another. I wrote a method to fetch them from the SQLite DB as follows:
public downloadFromOfflineDB(db, testSO) {
  var observableBatch = [];
  observableBatch.push(
    db.executeSql("select * from TMP_AUD WHERE CRE_BY=? AND AUD_NUMBER=? ",
      [localStorage.getItem("user_name"), testSO.auditNumber]).then(
      response => {
        this._util.logData('In downloadPendingInstancesForSyncFromOfflineDB- folder' + response.rows.length + 'ID= ' + response.rows.item(0).FLD_NUMBER);
        var folderArray = [];
        if (response && response.rows && response.rows.length > 0) {
          var FLD_NUMBER = response.rows.item(0).FLD_NUMBER;
          observableBatch.push(
            db.executeSql("select * from TMP_FOLDER WHERE CRE_BY=? AND FLD_NUMBER=? ",
              [localStorage.getItem("user_name"), FLD_NUMBER]).then(
              a => {
                this._util.logData('In downloadPendingInstancesForSyncFromOfflineDB-TMP_FOLDER' + a.rows.length);
                if (a && a.rows && a.rows.length > 0) {
                  for (let i = 0; i < a.rows.length; i++) {
                    var folderObj = {
                      folderName: a.rows.item(i).FLD_NAME,
                      files: []
                    };
                    var FLD_NAME = a.rows.item(i).FLD_NAME;
                    this._util.logData('In downloadPendingInstancesForSyncFromOfflineDB-TMP_FOLDER ' + FLD_NAME);
                    observableBatch.push(
                      db.executeSql("select * from TMP_FILES WHERE CRE_BY=? AND FLD_NAME=? ",
                        [localStorage.getItem("user_name"), FLD_NAME]).then(
                        b => {
                          this._util.logData('In downloadPendingInstancesForSyncFromOfflineDB-TMP_FILES' + b.rows.length);
                          var fileArray = [];
                          if (b && b.rows && b.rows.length > 0) {
                            for (let j = 0; j < b.rows.length; j++) {
                              var fileSO = {
                                compliance: b.rows.item(j).COMPLIANCE,
                                remarks: b.rows.item(j).REMARKS,
                                fileName: b.rows.item(j).FILE_NAME,
                                title: b.rows.item(j).TITLE
                              };
                              fileArray.push(fileSO);
                            }
                          }
                          folderObj.files = fileArray;
                        }).catch(
                        e => {
                          this._util.logData('For sync error' + JSON.stringify(e));
                          return Observable.throw("An error occurred during sync");
                        })
                    );
                    folderArray.push(folderObj);
                  }
                }
              }).catch(
              e => {
                this._util.logData('For sync error' + JSON.stringify(e));
                return Observable.throw("An error occurred during sync");
              })
          );
        }
        testSO.folderArray = folderArray;
        this._util.logData('Candidate for selected for sync' + JSON.stringify(testSO));
      })
  );
  return Observable.forkJoin(observableBatch);
}
The issue here is that the method below does not wait for all the calls to finish:
public getFiles(testSO) {
  return Observable.create(observer => {
    this.platform.ready().then(() => {
      this.sqlite.create({
        name: 'offline.db',
        location: 'default'
      }).then((db: SQLiteObject) => {
        this.downloadFromOfflineDB(db, testSO).subscribe(c => {
          observer.next(c[0]); // This is undefined
          observer.complete();
        },
        error => {
          observer.error("An error occurred syncing files.");
        });
      });
    });
  });
}
The first method executes, but the second method returns before the first execution is complete, and my testSO object is not populated. Can someone please guide me and tell me what I am doing wrong here? I used Observable.forkJoin.
Looks like you are calling Observable.forkJoin(observableBatch) with only one item, the result of the first db.executeSql. The items you push into the array later do not affect forkJoin.
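A rough sketch of the restructuring this implies (hedged, not a drop-in fix): have each .then return the promise of its nested queries via Promise.all, so the single promise handed to forkJoin only resolves once every level has finished. Table and column names are taken from the question; the logging is omitted.

public downloadFromOfflineDB(db, testSO) {
  const user = localStorage.getItem("user_name");
  const root = db.executeSql(
    "select * from TMP_AUD WHERE CRE_BY=? AND AUD_NUMBER=?",
    [user, testSO.auditNumber]
  ).then(response => {
    if (!response.rows.length) { return testSO; }
    const FLD_NUMBER = response.rows.item(0).FLD_NUMBER;
    return db.executeSql(
      "select * from TMP_FOLDER WHERE CRE_BY=? AND FLD_NUMBER=?",
      [user, FLD_NUMBER]
    ).then(a => {
      const folderPromises = [];
      for (let i = 0; i < a.rows.length; i++) {
        const folderObj = { folderName: a.rows.item(i).FLD_NAME, files: [] };
        folderPromises.push(
          db.executeSql(
            "select * from TMP_FILES WHERE CRE_BY=? AND FLD_NAME=?",
            [user, folderObj.folderName]
          ).then(b => {
            for (let j = 0; j < b.rows.length; j++) {
              folderObj.files.push({
                compliance: b.rows.item(j).COMPLIANCE,
                remarks: b.rows.item(j).REMARKS,
                fileName: b.rows.item(j).FILE_NAME,
                title: b.rows.item(j).TITLE
              });
            }
            return folderObj;
          })
        );
      }
      // Resolve only after every file query has completed
      return Promise.all(folderPromises).then(folders => {
        testSO.folderArray = folders;
        return testSO;
      });
    });
  });
  // forkJoin now receives a single promise that already covers the whole tree
  return Observable.forkJoin([root]);
}

With this shape, observer.next(c[0]) in getFiles receives the populated testSO instead of undefined.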

How to use batchWriteItem to write more than 25 items into DynamoDB Table using PHP

I am using AWS SDK for PHP 3.x
A single call to BatchWriteItem can write up to 16 MB of data, which can comprise as many as 25 put or delete requests. Individual items to be written can be as large as 400 KB.
$result = $dynamodbClient->batchWriteItem([
    'RequestItems' => [
        $tableName => [
            [
                'PutRequest' => [
                    'Item' => [
                        'Id' => ['N' => '1'],
                        'AlbumTitle' => [
                            'S' => 'Somewhat Famous',
                        ],
                        'Artist' => [
                            'S' => 'No One You Know',
                        ],
                        'SongTitle' => [
                            'S' => 'Call Me Today',
                        ],
                    ],
                ],
            ],
        ],
    ],
]);
For a single item it's working fine. How can I write more than 25 items?
To write more than 25 items, you have to repeatedly call BatchWriteItem, adding items from your collection, 25 at a time.
Something along these lines (pseudo-code):
requests = []; // use an array to stage your put item requests
foreach(item in SourceCollection) {
  addItem(item, requests); // add this item to the array
  if(count(requests) == 25) { // when you have 25 ready..
    // result = dynamodbClient->batchWriteItem(...)
    requests = []; // clean up the array of put item requests
    // handle the failed items from the result object
  }
}
Make sure to handle the failed items from each batchWriteItem result (returned under the UnprocessedItems key) by re-adding them to the requests array.
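As a concrete sketch of that retry loop (hedged, and shown with the Node.js DocumentClient rather than PHP, matching the Lambda answer below): batchWrite reports anything it could not write under UnprocessedItems, and those entries should be fed into the next call.

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

// Sketch: write items 25 at a time, retrying whatever comes back unprocessed.
async function batchWriteAll(table, items) {
  for (let i = 0; i < items.length; i += 25) {
    let requests = items.slice(i, i + 25).map(Item => ({ PutRequest: { Item } }));
    while (requests.length > 0) {
      const res = await docClient.batchWrite({ RequestItems: { [table]: requests } }).promise();
      requests = (res.UnprocessedItems && res.UnprocessedItems[table]) || [];
      // A production version would back off before retrying unprocessed items.
    }
  }
}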
Here is my way for a lambda function:
exports.handler = (event, context, callback) => {
  console.log(`EVENT: ${JSON.stringify(event)}`);
  var AWS = require('aws-sdk');
  AWS.config.update({ region: process.env.REGION });
  var docClient = new AWS.DynamoDB.DocumentClient();
  const { data, table, cb } = JSON.parse(event.body);
  console.log('{data, table, cb}:', { data, table, cb });
  // Build the batches
  var batches = [];
  var current_batch = [];
  var item_count = 0;
  for (var i = 0; i < data.length; i++) {
    // Add the item to the current batch
    item_count++;
    current_batch.push({
      PutRequest: {
        Item: data[i],
      },
    });
    // If we've added 25 items, add the current batch to the batches array
    // and reset it
    if (item_count % 25 === 0) {
      batches.push(current_batch);
      current_batch = [];
    }
  }
  // Add the last batch if it has records and is not equal to 25
  if (current_batch.length > 0 && current_batch.length !== 25) {
    batches.push(current_batch);
  }
  // Handler for the database operations
  var completed_requests = 0;
  var errors = false;
  function requestHandler(request) {
    console.log('in the handler: ', request);
    return function (err, data) {
      // Increment the completed requests
      completed_requests++;
      // Set the errors flag
      errors = (errors) ? true : err;
      // Log the error if we got one
      if (err) {
        console.error(JSON.stringify(err, null, 2));
        console.error("Request that caused database error:");
        console.error(JSON.stringify(request, null, 2));
        callback(err);
      } else {
        var response = {
          statusCode: 200,
          headers: {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Methods': 'GET,POST,OPTIONS',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Credentials': true
          },
          body: JSON.stringify(data),
          isBase64Encoded: false
        };
        console.log(`success: returned ${data}`);
        callback(null, response);
      }
      // Make the callback if we've completed all the requests
      if (completed_requests === batches.length) {
        cb(errors);
      }
    };
  }
  // Make the requests
  var params;
  for (var j = 0; j < batches.length; j++) {
    // Items go in params.RequestItems.id array
    // Format for the items is {PutRequest: {Item: ITEM_OBJECT}}
    params = '{"RequestItems": {"' + table + '": []}}';
    params = JSON.parse(params);
    params.RequestItems[table] = batches[j];
    console.log('before db.batchWrite: ', params);
    // Perform the batchWrite operation
    docClient.batchWrite(params, requestHandler(params));
  }
};
I am using the following code to add data using batchWriteItem. Suggest if there is a better way.
use Aws\DynamoDb\Marshaler;

// Build the batches
$marshaler = new Marshaler();
$albums = []; // collection of album JSON objects
$batches = [];
$current_batch = [];
$item_count = 0;
foreach ($albums as $album) {
    // Add the item to the current batch
    $item_count++;
    $json = json_encode($album);
    $data['PutRequest'] = array('Item' => $marshaler->marshalJson($json));
    array_push($current_batch, $data);
    // If we've added 25 items, add the current batch to the batches array
    // and reset it
    if ($item_count % 25 == 0) {
        array_push($batches, $current_batch);
        $current_batch = [];
    }
}
// Add the last batch if it has records and is not equal to 25
if (count($current_batch) > 0 && count($current_batch) != 25) {
    array_push($batches, array_values($current_batch));
}
// Handler for the database operations
$completed_requests = 0;
$errors = false;
$batch_count = 0;
foreach ($batches as $batch) {
    try {
        $batch_count++;
        $params = array(
            'RequestItems' => array($tableName => $batch),
            'ReturnConsumedCapacity' => 'TOTAL',
            'ReturnItemCollectionMetrics' => 'SIZE'
        );
        $response = $dynamodb->batchWriteItem($params);
        echo "Album $batch_count Added." . "<br>";
        echo "<pre>";
        // print_r($params);
        print_r($response);
        echo "</pre>";
    }
    catch (DynamoDbException $e) {
        echo "Unable to add movie:\n";
        echo $e->getMessage() . "\n";
        // break;
    }
}
