I have an array of objects taken from MongoDB. Every element in the array is a post, with the author stored as user_id. Now I wish to find the user info related to that user_id.
Since Node uses async methods to fetch data from the db, the forEach loop finishes before the callbacks do.
docs.forEach(function(doc, index){
  // get the user for this doc
  User.find({_id: mongo.BSONPure.ObjectID(doc.user_id)}, {name: 1, username: 1, email: 1}).skip(0).limit(1).toArray(function(err, user){
    user = user[0]
    if(err) {
      throw new Error(err)
    } else {
      docs[index].user = user
      if(doc.for_id) {
        User.find({_id: mongo.BSONPure.ObjectID(doc.for_id)}, {name: 1, username: 1, email: 1}).skip(0).limit(1).toArray(function(err, for_user){
          for_user = for_user[0]
          if(err) {
            throw new Error(err)
          } else {
            docs[index].for_user = for_user
          }
        })
      }
    }
  })
})
So at the end of this loop, if I send cb(docs), docs do not have the user and for_user attributes. How do I overcome this?
Use Step for Node.js. It will run your functions in serial order:
var Step = require('step');
Step( docs.forEach(...), function() { cb(docs); } );
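A rough sketch of how that could look here, using Step's group() helper to create one callback per document (this reuses the question's own find().limit(1).toArray() style and assumes the grouped results come back in the order the callbacks were created):
var Step = require('step');

Step(
  function lookupUsers() {
    var group = this.group(); // one callback per document
    docs.forEach(function(doc) {
      User.find({_id: mongo.BSONPure.ObjectID(doc.user_id)}, {name: 1, username: 1, email: 1})
        .limit(1).toArray(group());
    });
  },
  function attachUsers(err, results) {
    if (err) throw new Error(err);
    results.forEach(function(rows, index) {
      docs[index].user = rows[0]; // each result is the one-element array from toArray()
    });
    cb(docs);
  }
);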
Or if you know the total number of records, you can call the callback when you're done processing the last one. Something like this
var count = docs.length; // docs is a plain array here
var processed = 0;
docs.forEach(... if (++processed == count) cb(docs); );
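Applied to the code in the question, the counting version could look roughly like this (a sketch; the counter must be incremented exactly once per post, after every lookup for that post has finished):
var count = docs.length;
var processed = 0;

function done() {
  if (++processed === count) cb(docs); // all posts populated, hand the result back
}

docs.forEach(function(doc, index) {
  User.find({_id: mongo.BSONPure.ObjectID(doc.user_id)}, {name: 1, username: 1, email: 1})
    .limit(1).toArray(function(err, user) {
      if (err) throw new Error(err);
      docs[index].user = user[0];
      if (!doc.for_id) return done(); // nothing more to fetch for this post
      User.find({_id: mongo.BSONPure.ObjectID(doc.for_id)}, {name: 1, username: 1, email: 1})
        .limit(1).toArray(function(err, for_user) {
          if (err) throw new Error(err);
          docs[index].for_user = for_user[0];
          done(); // this post is fully populated
        });
    });
});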
I am new to Firestore transactions and would like to update a document field based on the current data of the document.
My planned transaction is given below:
const cityRef = db.collection('cities').doc('SF');
try {
  await db.runTransaction(async (t) => {
    const doc = await t.get(cityRef);
    let status = doc.data().myStatus;
    if (status == "one") {
      throw "err";
    } else {
      // run some function - next status is based on the return
      let output = await someFunction();
      if (output) {
        await t.update(cityRef, { myStatus: "two" });
        return output;
      } else {
        await t.update(cityRef, { myStatus: "three" });
        return output;
      }
    }
  });
  console.log("transaction successful");
} catch (err) {
  console.log("Already updated");
  output = "one";
  return output;
}
My questions are given below:
As per the documentation I have returned the data after the update; however, it does not seem to be working as expected.
Can we have two updates within a single transaction (both updating the same field in Firestore)?
Thank you
You make the following clarification in the comments above:
someFunction() does some processing on other firestore
collection/documents (not the one I am updating) and returns either
true or false.
As you read in the documentation on Transactions, "Read operations must come before write operations". If you want to read some documents in the transaction, you need to use the get() method of the Transaction object, as you did with the first document. You cannot call a function that uses other Firestore read methods, such as the get() method of a DocumentReference.
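A minimal sketch of how the transaction could be restructured so that every read goes through the Transaction object; the 'otherCollection' document and someFlag field are placeholders for whatever someFunction() actually reads:
const cityRef = db.collection('cities').doc('SF');

try {
  const output = await db.runTransaction(async (t) => {
    // 1. All reads first, through the transaction object.
    const cityDoc = await t.get(cityRef);
    if (cityDoc.data().myStatus == "one") {
      throw new Error("already updated");
    }

    // Hypothetical stand-in for someFunction(): read the other document(s)
    // through t.get() so the read is part of the transaction.
    const otherDoc = await t.get(db.collection('otherCollection').doc('someId'));
    const result = otherDoc.data().someFlag === true;

    // 2. Writes only after all reads are done.
    t.update(cityRef, { myStatus: result ? "two" : "three" });
    return result;
  });
  console.log("transaction successful", output);
} catch (err) {
  console.log("Already updated");
}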
I have two vertex collections and an edge collection, named user, device, and ownership respectively.
My business logic is: when I receive device information, I upsert it with dateCreated and dateUpdated fields added. If I inserted that device, I insert a new user with default values and create an edge connecting the two. If I updated it, I simply return the already-connected user as the result.
How can I achieve this without losing atomicity?
I tried a single AQL query, but it does not seem possible without a conditional, and traversal is not supported together with insert/update operations.
I can do separate queries, but that loses atomicity.
var finalQuery = aql`
  UPSERT ${deviceQuery}
  INSERT MERGE(${deviceQuery}, {dateCreated: DATE_NOW()})
  UPDATE MERGE(${deviceQuery}, {dateUpdated: DATE_NOW()})
  IN ${this.DeviceModel}
  RETURN { doc: NEW, type: OLD ? 'update' : 'insert' }`;
var cursor = await db.query(finalQuery);
var result = await cursor.next();
if (result.type == 'insert') {
  console.log('Inserted documents')
  finalQuery = aql`
    LET user = (INSERT {
      "_key": UUID(),
      "name": "User"
    } INTO user
    RETURN NEW)
    INSERT {
      _from: ${result.doc._id},
      _to: user[0]._id,
      "type": "belongs"
    } INTO ownership
    RETURN user[0]`;
  cursor = await db.query(finalQuery);
  result = await cursor.next();
  console.log('New user:', result);
}
You can try something like this
UPSERT ...
FILTER !OLD
LET model = NEW
LET user = FIRST(INSERT {
  "_key": UUID(),
  "name": "User"
} INTO user
RETURN NEW)
INSERT {
  _from: model._id,
  _to: user._id,
  "type": "belongs"
} INTO ownership
RETURN user
I ended up separating the modification and selection queries.
var finalQuery = aql`
  LET device = (
    UPSERT ${deviceQuery}
    INSERT MERGE(${deviceQuery}, {dateCreated: DATE_NOW()})
    UPDATE MERGE(${deviceQuery}, {dateUpdated: DATE_NOW()})
    IN ${this.DeviceModel}
    RETURN { doc: NEW, type: OLD ? 'update' : 'insert' })
  FILTER device[0].type == 'insert'
  LET user = (INSERT {
    "_key": UUID(),
    "name": "User"
  } INTO user
  RETURN NEW)
  INSERT {
    _from: device[0].doc._id,
    _to: user[0]._id,
    "type": "belongs"
  } INTO ownership
  RETURN user[0]`;
var cursor = await db.query(finalQuery);
var result = await cursor.next();
if (result == null) {
  const deviceId = this.DeviceModel.name + "/" + queryParams._key;
  finalQuery = aql`
    FOR v, e, p IN 1..1
    OUTBOUND ${deviceId} ownership
    FILTER e.type == "belongs"
    RETURN v`;
  cursor = await db.query(finalQuery);
  result = await cursor.next();
  isUpdate = true;
}
This way I ensure atomicity. It could be improved further by checking whether cursor.extra.stats.writesExecuted is non-zero, etc.
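For example, the write statistics on the cursor could be inspected along these lines (a rough sketch; the exact shape of cursor.extra depends on the arangojs version):
cursor = await db.query(finalQuery);
result = await cursor.next();

// Sanity check: the combined UPSERT query is expected to have written something.
const stats = cursor.extra && cursor.extra.stats;
if (!stats || stats.writesExecuted === 0) {
  throw new Error('Expected the upsert to write at least one document');
}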
I am trying to return an array of everyone the session user (me) is not following, using JavaScript's filter function with the help of a promise, and to send it as a JSON response.
But it doesn't work.
Thanks in advance!
app.get('/explore', (req, res) => {
  P.coroutine(function *(){
    let
      { id: session } = req.session,
      followings = yield db.query('SELECT id, username, email FROM users WHERE id <> ? ORDER BY RAND() LIMIT 10', [session]),
      d = followings.filter(e => {
        db.is_following(session, e.id).then(s => s) // returns boolean
      })
    res.json(d)
  })()
})
Array.prototype.filter is synchronous - you can't filter an array with an asynchronous predicate.
What you can do is create an array of Promises and, once all of them have resolved, return the response:
var promises = [];
var d = [];

followings.forEach(function(e) {
  promises.push(
    db.is_following(session, e.id).then(function(s) {
      // following, push e onto `d`
      d.push(e);
    }).catch(function() {
      // not following, I assume, do nothing
    })
  );
});

Promise.all(promises).then(function() {
  // send the response after all the is_following requests have finished
  res.json(d);
});
Adam's solution worked very well, but I found another solution that uses async/await.
The code is much shorter and more readable!
app.post('/explore', async function(req, res) {
  let
    { id: session } = req.session,
    exp = [],
    followings = await db.query(
      'SELECT id, username, email FROM users WHERE id <> ? ORDER BY RAND() LIMIT 10',
      [session]
    )
  for (let f of followings) {
    let is = await db.is_following(session, f.id)
    if (!is) exp.push(f)
  }
  res.json(exp)
})
I've got what should be a relatively simple issue. I set a Session value, then subscribe to a collection using the string stored in that Session value. But when the Session value changes, I need to clear the subscription data and start again.
My code is as follows:
let subscriptionReady;
let filteredResults = [];
let rawResults = [];
let county = Session.get('county');
let type = Session.get('type');
This is mostly just prep work to create some empty variables to populate later. This all gets set on a click event. After we set these placeholders, we go and subscribe using those Session values:
if (county && !type) {
  return function() {
    if (subscriptionReady) {
      subscriptionReady.stop();
    }
    filteredResults = [];
    rawResults = [];
    subscriptionReady = Meteor.subscribe('resourcesearch', county, {
      onReady: () => {
        rawResults = resourceCollection.find({}, { sort: { score: -1 } }).fetch();
        rawResults.forEach((result) => {
          if (result.score) {
            filteredResults.push(result);
          }
        });
      }
    });
  };
}
On the third line I check whether subscriptionReady exists; if it does, it has the stop method available, so I call it. But it doesn't actually stop anything.
What am I missing?
After trial and error, I've got it solved. The issue was the placement of the stop call. I no longer have to check whether subscriptionReady exists; instead, I stop the subscription inside the onReady callback:
return function() {
  filteredResults = [];
  rawResults = [];
  subscriptionReady = Meteor.subscribe('resourcesearch', county, {
    onReady: () => {
      rawResults = resourceCollection.find({}, { sort: { score: -1 } }).fetch();
      rawResults.forEach((result) => {
        if (result.score) {
          filteredResults.push(result);
        }
      });
      subscriptionReady.stop();
    }
  });
};
It's .stop(), not .stop (see the docs).
Also, you can probably avoid your filtering loop by including score in your query. Are you looking for documents where the score key exists, {score: {$exists: true}}, or just where it is non-zero, {score: {$ne: 0}}?
Also you shouldn't need to clear the subscription and start again. If you make your subscription parameter resourcesearch a reactive data source then the subscription will automatically update to give you the documents you need. Starting/stopping a subscription in response to a search would be an anti-pattern.
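A rough sketch of that reactive pattern, assuming the search values live in Session (as they appear to here) and that the score condition can be expressed in the query:
// Client: re-runs whenever the 'county' Session value changes; subscriptions
// started inside an autorun are stopped automatically when it re-runs.
Tracker.autorun(() => {
  const county = Session.get('county');
  if (county) {
    Meteor.subscribe('resourcesearch', county);
  }
});

// Wherever the results are read, filter by score in the query instead of a loop.
const filteredResults = resourceCollection
  .find({ score: { $exists: true } }, { sort: { score: -1 } })
  .fetch();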
I want to implement a parameter-based publication in Meteor, but I am running into some problems.
Here is what I have.
As the user types, the keyup event subscribes to the publication and passes in the value of the input.
'keyup #customerSearch': function(event, template){
  var keyword = template.find('#customerSearch').value;
  if(keyword){
    if(keyword.length >= 3){
      Meteor.subscribe('sessioncustomers', keyword);
    }
  }
}
The publication uses this keyword to return the records.
Meteor.publish("sessioncustomers", function(keyword){
  if(keyword){
    if(keyword.length >= 3){
      var query = {};
      query.name = new RegExp(regExpQuoted(keyword), 'i');
      return Customers.find(query);
    } else {
      return null;
    }
  } else {
    return null;
  }
});
The problem:
It works and documents are received, except that as the keyword changes, the publication publishes additional documents matching the new keyword, but the client collection never removes the old documents.
How do I get the old documents that no longer match out of the client collection?
I thought that because the parameters of the subscription had changed, the non-matching documents would be removed and only the new matching documents would remain.
In your keyup callback you need to "unsubscribe" from the previous publication,
otherwise you'll keep the old documents.
var sessionCustomersHandler = false;

'keyup #customerSearch': function(event, template) {
  var keyword = template.find('#customerSearch').value;
  if (keyword && keyword.length >= 3) {
    var newSessionCustomersHandler = Meteor.subscribe('sessioncustomers', keyword);
    if (sessionCustomersHandler) {
      sessionCustomersHandler.stop();
    }
    sessionCustomersHandler = newSessionCustomersHandler;
  }
}
Moreover, don't forget to check(keyword, String) in your publish function, for security.
Meteor.publish("sessioncustomers", function(keyword){
  check(keyword, String)
  if (keyword.length >= 3)
    return Customers.find({
      name: new RegExp(regExpQuoted(keyword), 'i')
    });
});
Make a local unnamed client collection:
this.SessionCustomers = new Meteor.Collection(null);
Call a server method to get the results you want. In the callback, clear the local collection (remove all) and then insert the returned documents into it:
return Meteor.call('sessioncustomers', query, function(err, data) {
  if (err) {
    return console.log(err.message);
  } else {
    SessionCustomers.remove({});
    var item, _i, _len;
    for (_i = 0, _len = data.length; _i < _len; _i++) {
      item = data[_i];
      SessionCustomers.insert(item);
    }
  }
});
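The matching server-side method is not shown in the answer; a minimal sketch of what it could look like (the method name and query shape are taken from the call above, the result limit is an assumption):
// Server: run the search and return plain documents to the caller.
Meteor.methods({
  sessioncustomers: function (query) {
    check(query, Object);
    return Customers.find(query, { limit: 50 }).fetch(); // arbitrary safety cap
  }
});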