react-native-sqlite-storage: How to open a database file from a specific directory? - sqlite

My db is in: android\app\src\main\assets\app.db
The way I tried:
open() {
  SQLiteStorage.DEBUG(true);
  SQLiteStorage.openDatabase({
    name: 'file:///android_asset/app.db',
  })
    .then(() => {
      console.info('');
    })
    .catch(err => {
      console.warn(err);
    });
}
But I get an error:
How can I do this?

This runs OK:
open() {
  SQLiteStorage.DEBUG(true);
  SQLiteStorage.openDatabase({
    name: 'app.db', // android/app/src/main/assets/app.db
  })
    .then(() => {
      console.info('');
    })
    .catch(err => {
      console.warn(err);
    });
}
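To check that the bundled data is actually visible once the database opens, here is a minimal follow-up sketch. It assumes the promise API is enabled via SQLiteStorage.enablePromise(true), so openDatabase resolves with the db handle; nothing here is from the original answer.
SQLiteStorage.enablePromise(true);

SQLiteStorage.openDatabase({ name: 'app.db' })
  .then(db => {
    // List the tables to confirm the pre-populated file was opened,
    // not a freshly created empty database.
    db.transaction(tx => {
      tx.executeSql(
        "SELECT name FROM sqlite_master WHERE type = 'table'",
        [],
        (tx, results) => {
          for (let i = 0; i < results.rows.length; i++) {
            console.log('table:', results.rows.item(i).name);
          }
        }
      );
    });
  })
  .catch(err => console.warn(err));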

In react-native-cli:
1- First of all, it is convenient to make sure that the database exists in the documents directory. With rn-fetch-blob you can list the files in a directory like this:
import RNFetchBlob from 'rn-fetch-blob';

let dirs = RNFetchBlob.fs.dirs;
const documentPath = dirs.DocumentDir;
const externalZipPath = dirs.DCIMDir;

RNFetchBlob.fs.ls(documentPath).then((files) => {
  console.log(files);
})
If you do not carry out this step, you may find that a basic (empty) database is created and opened because none with that name was found.
You can also open the database from Android Studio:
When the launch has succeeded:
In Device File Explorer > data > data > com.nameofyourapp > databases
You can also click on the 'Database Inspector' tab at the bottom of Android Studio to see database changes in real time.
2- Once you are sure that a database already exists in that directory:
To open a database that lives in a directory on the phone but outside your project:
"If your folder is not in app bundle but in app sandbox i.e. downloaded from some remote location"
let openDbExample = () => {
  let errorCB = (err) => {
    console.log("SQL Error: " + err);
  }

  let successCB = () => {
    db.transaction((tx) => {
      tx.executeSql(
        `SELECT * FROM name_column_table LIMIT 10`, [], (tx, results) => {
          var len = results.rows.length;
          for (let i = 0; i < len; i++) {
            let row = results.rows.item(i);
            console.log(row);
          }
        })
    })
  }

  if (Platform.OS === 'ios') {
    db = SQLite.openDatabase({ name: "example_data_base.db", location: 'Documents' }, successCB, errorCB);
  } else {
    db = SQLite.openDatabase({ name: "example_data_base.db", readOnly: true, location: "default", createFromLocation: 2 }, successCB, errorCB)
  }
}

Related

How to unzip to a folder using yauzl?

I am trying to use yauzl to unzip files. However the example in the repo does not show how to unzip to a folder. It simply says:
readStream.pipe(somewhere);
Is there an easy way to extract the contents to a folder?
Hi :) Replacing 'readStream.pipe(somewhere);' with the code under the '//--------------------' marker below works for me with the example in the repo:
import fs = require('fs');
import path = require('path');
const unzipper = require('unzipper');
const { pipeline, finished } = require('stream');
//--------------------
const destDir = 'C:\\MyPath';
const writer = fs.createWriteStream(path.join(destDir, entry.fileName));
readStream.pipe(writer);
await finished(readStream, (err) => {
  if (err) {
    console.error(' ### Streaming to writer failed: ', err);
  } else {
    console.log(' ### Streaming to writer succeeded, file unzipped.');
  }
});
DISCLAIMER: I am at the beginning of learning Node/ts! This works for me, but may be wrong for some reason(s).
Here's a promise-returning function that does what you're asking, with the following caveats:
I have used the external mkdirp library. This can be removed if you're more careful with how you create your directories.
I have not tested unzipping on top of an existing directory.
The zipFile.close() statements before the rejects may be unnecessary.
import fs = require('fs');
import path = require('path');
import yauzl = require('yauzl');
import mkdirp = require('mkdirp');
/**
 * Example:
 *
 *   await unzip("./tim.zip", "./");
 *
 * Will create directories:
 *
 *   ./tim.zip
 *   ./tim
 *
 * @param zipPath Path to zip file.
 * @param unzipToDir Path to the folder where the zip folder will be put.
 */
const unzip = (zipPath: string, unzipToDir: string) => {
  return new Promise<void>((resolve, reject) => {
    try {
      // Create folder if not exists
      mkdirp.sync(unzipToDir);
      // Same as example we open the zip.
      yauzl.open(zipPath, { lazyEntries: true }, (err, zipFile) => {
        if (err) {
          zipFile.close();
          reject(err);
          return;
        }
        // This is the key. We start by reading the first entry.
        zipFile.readEntry();
        // Now for every entry, we will write a file or dir
        // to disk. Then call zipFile.readEntry() again to
        // trigger the next cycle.
        zipFile.on('entry', (entry) => {
          try {
            // Directories
            if (/\/$/.test(entry.fileName)) {
              // Create the directory then read the next entry.
              mkdirp.sync(path.join(unzipToDir, entry.fileName));
              zipFile.readEntry();
            }
            // Files
            else {
              // Write the file to disk.
              zipFile.openReadStream(entry, (readErr, readStream) => {
                if (readErr) {
                  zipFile.close();
                  reject(readErr);
                  return;
                }
                const file = fs.createWriteStream(path.join(unzipToDir, entry.fileName));
                readStream.pipe(file);
                file.on('finish', () => {
                  // Wait until the file is finished writing, then read the next entry.
                  // @ts-ignore: Typing for close() is wrong.
                  file.close(() => {
                    zipFile.readEntry();
                  });
                });
                file.on('error', (err) => {
                  zipFile.close();
                  reject(err);
                });
              });
            }
          }
          catch (e) {
            zipFile.close();
            reject(e);
          }
        });
        zipFile.on('end', () => {
          resolve();
        });
        zipFile.on('error', (err) => {
          zipFile.close();
          reject(err);
        });
      });
    }
    catch (e) {
      reject(e);
    }
  });
}
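A quick usage sketch of the function above (the paths here are hypothetical placeholders, not from the original answer):
// Hypothetical paths; adjust to your own zip file and target folder.
unzip('./archive.zip', './extracted')
  .then(() => console.log('Unzipped successfully'))
  .catch((err) => console.error('Unzip failed:', err));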

Cloud Functions bucket.upload() is not running at all

Here is what I am trying to do using Firebase:
create a backup file from realtime database
upload to firebase storage
do this every morning
But I am having a problem with number 2; after the log of the backup file creation success, no other log appears, not even a failure message.
no log after file creation
Even worse, it sometimes works, which makes me doubt the consistency of the functionality.
My code:
var promiseFileCreation = function(fileName, jsonBackup){
  console.log("promiseFileCreation starting");
  return new Promise(function (resolve, reject){
    fs.writeFile('/tmp/'+fileName, jsonBackup, function(fs_err){
      if(!fs_err){
        resolve("File "+fileName+" creation success");
      } else {
        reject("File "+fileName+" creation failure: "+fs_err);
      }
    })
  }).catch(function(error){
    reject("FileCreation Error");
  })
}

var promiseBucketUpload = function(fileName, fileDest){
  console.log("promiseBucketUpload starting")
  return new Promise(function (resolve, reject){
    console.log("promiseBucketUpload promise starting")
    bucket.upload('/tmp/'+fileName, { destination: fileDest }, function(upload_err){
      if(!upload_err){
        resolve("File "+fileName+" upload to "+fileDest+" success");
      } else {
        reject("File "+fileName+" upload to "+fileDest+" failure: "+upload_err);
      }
    })
  }).catch(function(error){
    reject("BucketUpload Error: "+error);
  })
}
Promise.all([promiseText, promiseDate, promiseTitle, promiseLikedCount, promiseViewCount, promiseComments]).then(function (values){
  var jsonPostObj = {
    post: [],
    counter: []
  }
  jsonPostObj.post.push({
    date: values[1],
    text: values[0],
    title: values[2]
  })
  jsonPostObj.counter.push({
    likedCount: values[3],
    viewCount: values[4]
  })
  var jsonCommentsObj = JSON.parse(values[5]);
  const jsonArchiveObj = {...jsonPostObj, ...jsonCommentsObj}
  var jsonArchive = JSON.stringify(jsonArchiveObj);
  const yesterday = getYesterdayDateFull();
  var fileName = "archive_"+yesterday;
  var fileDest = "history/"+yesterday.substring(0,4)+"/"+yesterday.substring(4,6)+"/"+fileName;
  console.log("Archive file name: "+fileName);
  console.log("Archive destination: "+fileDest);
  promiseFileCreation(fileName, jsonArchive).then(function(resultSuccessFs){
    console.log(resultSuccessFs);
    // BUCKETUPLOAD here
    promiseBucketUpload(fileName, fileDest).then(function(resultSuccessBucket){
      console.log(resultSuccessBucket);
      return promiseBackupResult(true);
    }, function(resultFailureBucket){
      console.log(resultFailureBucket);
      return promiseBucketResult(false);
    })
  }, function(resultFailureFs){
    console.log(resultFailureFs);
    return promiseBackupResult(false);
  });
}).catch(function(errPromiseAll){
  console.log("Promise.all error: "+errPromiseAll);
  return promiseBackupResult(false);
})
}
I removed unnecessary code, like the other promises. The file creation seems to work fine.
Does anyone see why bucket.upload() is not called at all? Thanks in advance.
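For what it's worth, one common cause of this symptom in Cloud Functions is that the outer chain never returns the nested promise, so the function instance can be terminated before the upload runs. Below is a minimal, untested sketch of how the chain could be kept alive; it reuses the names from the code above and is an assumption, not a confirmed fix for this exact case:
// Return the whole chain so Cloud Functions waits for the upload to finish.
return promiseFileCreation(fileName, jsonArchive)
  .then((resultSuccessFs) => {
    console.log(resultSuccessFs);
    // Returning here keeps the upload inside the awaited chain.
    return promiseBucketUpload(fileName, fileDest);
  })
  .then((resultSuccessBucket) => {
    console.log(resultSuccessBucket);
    return promiseBackupResult(true);
  })
  .catch((err) => {
    console.log(err);
    return promiseBackupResult(false);
  });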

alexa sdk: can't get persistentAttributes

I'm trying to add persistent attributes to my lambda function.
I created a DynamoDB table and added it to the triggers of my lambda function.
I copied a sample code from GitHub, but when I try to launch the skill I get an error. The console log shows:
{
"errorMessage": "Could not read item (amzn1.ask.account.AGIIYNRXWDLBD6XEPW72QS2BHGXNP7NWYBEWSH2XLSXZP64X3NCYEMVK233VFDWH77ZB6DAK6YJ53SZLNUFVQ56CYOVCILS7QFZI4CIRDWC3PAHS4QG27YUY5PTT6QEIK46YFNTJT54YAKNGOWV2UO66XZACFDQ5SEXKJYOBNFNIZNUXKNTIAAYZG4R5ZU4FMLPDZZN64KLINNA) from table (Spiele): The provided key element does not match the schema",
"errorType": "AskSdk.DynamoDbPersistenceAdapter Error",
"stackTrace": [
"Object.createAskSdkError (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/utils/AskSdkUtils.js:22:17)",
"DynamoDbPersistenceAdapter.<anonymous> (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:123:49)",
"step (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:44:23)",
"Object.throw (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:25:53)",
"rejected (/var/task/node_modules/ask-sdk-dynamodb-persistence-adapter/lib/attributes/persistence/DynamoDbPersistenceAdapter.js:17:65)",
"<anonymous>",
"process._tickDomainCallback (internal/process/next_tick.js:228:7)"
]
}
The table contains a primary key "name" and a sort key "UserId". Is that wrong?
Here is my index.js:
const Alexa = require('ask-sdk');

// Define the skill features
let skill;

/**
 * If this is the first start of the skill, grab the user's data from Dynamo and
 * set the session attributes to the persistent data.
 */
const GetUserDataInterceptor = {
  process(handlerInput) {
    let attributes = handlerInput.attributesManager.getSessionAttributes();
    if (handlerInput.requestEnvelope.request.type === 'LaunchRequest' && !attributes['isInitialized']) {
      return new Promise((resolve, reject) => {
        handlerInput.attributesManager.getPersistentAttributes()
          .then((attributes) => {
            attributes['isInitialized'] = true;
            saveUser(handlerInput, attributes, 'session');
            resolve();
          })
          .catch((error) => {
            reject(error);
          })
      });
    }
  }
};

function saveUser(handlerInput, attributes, mode) {
  if(mode === 'session'){
    handlerInput.attributesManager.setSessionAttributes(attributes);
  } else if(mode === 'persistent') {
    console.info("Saving to Dynamo: ", attributes);
    return new Promise((resolve, reject) => {
      handlerInput.attributesManager.getPersistentAttributes()
        .then((persistent) => {
          delete attributes['isInitialized'];
          handlerInput.attributesManager.setPersistentAttributes(attributes);
          resolve(handlerInput.attributesManager.savePersistentAttributes());
        })
        .catch((error) => {
          reject(error);
        });
    });
  }
}

const LaunchHandler = {
  canHandle(handlerInput) {
    return handlerInput.requestEnvelope.request.type === 'LaunchRequest';
  },
  handle(handlerInput) {
    console.info("LaunchRequest");
    let attributes = handlerInput.attributesManager.getSessionAttributes();
    console.info("Test the load: " + attributes['isInitialized']);
    attributes['FOO'] = "BAR";
    saveUser(handlerInput, attributes, 'persistent');
    return handlerInput.responseBuilder
      .speak('Hello')
      .reprompt('Hello')
      .getResponse();
  }
}

exports.handler = Alexa.SkillBuilders.standard()
  .addRequestHandlers(
    LaunchHandler
  )
  .addRequestInterceptors(GetUserDataInterceptor)
  .withTableName('Spiele')
  .withAutoCreateTable(true)
  .withDynamoDbClient()
  .lambda();
Can anyone tell me what I'm doing wrong?
Please confirm the partition key is 'userId', not 'UserId' (notice the uppercase U).
Also, I would suggest using the 'this' object.
Let me know if that helps.
Cheers
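If the key schema really is the issue, here is a minimal sketch of configuring the persistence adapter explicitly with ask-sdk's DynamoDbPersistenceAdapter instead of the standard builder; the partitionKeyName value is an assumption and must match your table's actual partition key:
const Alexa = require('ask-sdk-core');
const { DynamoDbPersistenceAdapter } = require('ask-sdk-dynamodb-persistence-adapter');

// Assumption: the table has a single string partition key named 'id' and no sort key.
// A table with a composite key (partition key + sort key) will not match what the
// adapter writes, which is one likely cause of the "does not match the schema" error.
const persistenceAdapter = new DynamoDbPersistenceAdapter({
  tableName: 'Spiele',
  createTable: true,
  partitionKeyName: 'id'
});

exports.handler = Alexa.SkillBuilders.custom()
  .addRequestHandlers(LaunchHandler)
  .addRequestInterceptors(GetUserDataInterceptor)
  .withPersistenceAdapter(persistenceAdapter)
  .lambda();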
The code below is for a Python Lambda function:
from ask_sdk_core.skill_builder import CustomSkillBuilder
from ask_sdk_dynamodb.adapter import DynamoDbAdapter

# Persistence adapter pointing at the DynamoDB table (table name taken from above)
dynamodb_adapter = DynamoDbAdapter(table_name="Spiele", create_table=True)
sb = CustomSkillBuilder(persistence_adapter=dynamodb_adapter)

I'm unable to get the size of a file to upload with nativescript

I'm trying to evaluate the mime-type and the size of an image uploaded using nativescript-imagepicker. However, it seems like the module itself can't do it. How can I apply constraints to the file I'm uploading? (Like a max size, or only png or jpg.)
Here's my code:
const context = imagepicker.create({ mode: "single" });
context
  .authorize()
  .then(() => {
    console.log('imagePicker.authorize...');
    return context.present();
  })
  .then((selection) => {
    if (!selection || !selection.forEach) {
      console.log('Error on selection empty or not array:', selection);
      return;
    }
    selection.forEach((selected) => {
      this.processPhoto(selected);
    });
  }).catch((err) => {
.
.
.
Processing the image...
processPhoto (selectedPhoto: any) {
  console.log('uploading photo to firebase', selectedPhoto);
  this.firebaseService.getImagePickerLocalFilePath(selectedPhoto)
    .then((localFilePath: string) => {
      console.log('about to upload file:', localFilePath);
      return this.firebaseService.uploadFile(localFilePath);
    })
    .catch((err) => {
      this.isLoading = false;
      this.messageService.handleErrorRes(err);
    });
}
You will have to read the size and extension from the File and prevent the upload when they do not match your constraints.
// Assuming: const fileSystemModule = require("tns-core-modules/file-system");
const file = fileSystemModule.File.fromPath(localFilePath);

// Note: depending on the NativeScript version, file.extension may include the leading dot (e.g. ".png").
if (file.size <= YOUR_SIZE_LIMIT_IN_BYTES && file.extension.toLowerCase() === "png") {
  // Upload
} else {
  alert("File can't be uploaded");
}
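The question also asks about the mime-type; the imagepicker result does not expose one directly here, so one hedged option is to derive it from the extension. A minimal sketch, assuming only png/jpg are allowed (the helper name is hypothetical):
// Hypothetical helper: map an extension to a mime type for the allowed formats.
function mimeTypeFromExtension(extension) {
  const ext = extension.toLowerCase().replace(".", "");
  if (ext === "png") return "image/png";
  if (ext === "jpg" || ext === "jpeg") return "image/jpeg";
  return null; // not an allowed type
}

const mimeType = mimeTypeFromExtension(file.extension);
if (mimeType && file.size <= YOUR_SIZE_LIMIT_IN_BYTES) {
  // Upload, optionally passing mimeType along as metadata
} else {
  alert("File can't be uploaded");
}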

Insert or Update getting "Transaction query already complete"

How can I correctly search for a row in the database and INSERT/UPDATE according to the search result (INSERT if not found, UPDATE if found)?
I'm currently doing this:
bookshelf.transaction(async function (t) {
  for (var x = 0; x < 10; x++) {
    let row = pmsParser.getRow(x);
    if (_.isEmpty(row)) {
      break;
    }
    let data = {
      lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
      mvs: row.version,
      color: row.color,
      location: row.location,
      status: row.status
    };
    new Vehicle({ chassi: row.chassi })
      .fetch({ require: true })
      .then(model => {
        return new Vehicle(model)
          .save(data, { transacting: t, patch: true });
      })
      .catch(Vehicle.NotFoundError, err => {
        new Vehicle(data)
          .save('chassi', row.chassi, { transacting: t })
          .then(() => {
            console.log(`Inserted... ${row.chassi}`);
          });
      })
      .catch(err => {
        console.log(err.message);
      });
  }
})
  .catch(function (err) {
    console.error(err);
    return res.json({ status: false, count: 0, error: err.message });
  });
And I receive this error:
Transaction query already complete, run with DEBUG=knex:tx for more info
Unhandled rejection Error: Transaction query already complete, run with DEBUG=knex:tx for more info
at completedError (/home/node/app/node_modules/knex/lib/transaction.js:297:9)
at /home/node/app/node_modules/knex/lib/transaction.js:266:22
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Function.Promise.attempt.Promise.try (/home/node/app/node_modules/bluebird/js/release/method.js:39:29)
at Client_SQLite3.trxClient.query (/home/node/app/node_modules/knex/lib/transaction.js:264:34)
at Runner.<anonymous> (/home/node/app/node_modules/knex/lib/runner.js:138:36)
at Runner.tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Runner.query (/home/node/app/node_modules/bluebird/js/release/method.js:15:34)
at /home/node/app/node_modules/knex/lib/runner.js:61:21
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at /home/node/app/node_modules/bluebird/js/release/using.js:185:26
at tryCatcher (/home/node/app/node_modules/bluebird/js/release/util.js:16:23)
at Promise._settlePromiseFromHandler (/home/node/app/node_modules/bluebird/js/release/promise.js:512:31)
at Promise._settlePromise (/home/node/app/node_modules/bluebird/js/release/promise.js:569:18)
at Promise._settlePromise0 (/home/node/app/node_modules/bluebird/js/release/promise.js:614:10)
at Promise._settlePromises (/home/node/app/node_modules/bluebird/js/release/promise.js:693:18)
Knex debug output
knex:tx trx1: Starting top level transaction +0ms
knex:tx trx1: releasing connection +28ms
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +15ms
Transaction query already complete, run with DEBUG=knex:tx for more info
knex:tx undefined: Transaction completed: update "vehicles" set "color" = ?, "lastUpdate" = ?, "location" = ?, "mvs" = ?, "status" = ? where "id" = ? +8ms
Transaction query already complete, run with DEBUG=knex:tx for more info
When running under a transaction, ALL related database accesses must be made within the context of that transaction.
//...
new Vehicle({ chassi: row.chassi })
  .fetch({ require: true, transacting: t })
  .then(model => {
    //...
Your iterations are not being correctly promisified. That allows your changes to escape the transaction context, causing the 'Transaction query already complete' error. When creating promises within a loop, it is always advisable to collect them and hand them to a collective promise handler, such as Promise.all(). This avoids escaping the transaction context before all promises are resolved.
Those changes may lead to code like the below (untested):
bookshelf.transaction(async function (t) {
  let promises = [];
  for (var x = 0; x < 10; x++) {
    let row = pmsParser.getRow(x);
    if (_.isEmpty(row)) {
      break;
    }
    let data = {
      lastUpdate: moment(row.lastUpdate, 'DD/MM/YYYY - HH:mm').toDate(),
      mvs: row.version,
      color: row.color,
      location: row.location,
      status: row.status
    };
    promises.push(
      new Vehicle({ chassi: row.chassi })
        .fetch({ require: true, transacting: t })
        .then(model => {
          return model // no need to use 'new Vehicle()' here
            .save(data, { transacting: t, patch: true });
        })
        .catch(Vehicle.NotFoundError, err => {
          return new Vehicle(data) // missing 'return'
            .save('chassi', row.chassi, { transacting: t })
            .then(() => {
              console.log(`Inserted... ${row.chassi}`);
            });
        })
        .catch(err => {
          console.log(err.message);
          // throw err; // should rethrow it!
        })
    );
  }
  return Promise.all(promises)
    .catch(function (err) {
      console.error(err);
      return res.json({ status: false, count: 0, error: err.message });
    });
});
