Firebase / Flutter: Optimizing for stability: many transactions vs. batch writes vs. set calls

A few months ago, a glorious soul here taught me about transactions. I may have gone a little overboard thinking they were the best thing since sliced bread. The problem they solved was obvious: guaranteed-safe concurrent writes to a single doc. However, I've noticed that with as few as three closely timed function triggers I produced the dreaded: "10 ABORTED: Too much contention on these documents."
Optimizing for stability, my question is: would it be best practice to use a mixed bag of these write calls for different situations? For example: if a cloud function is writing to a location where I do not expect contention, should it just be a set call? And instead of 4 transactions to various locations, should I use a single batch?
Reading the Firebase limitations, I assumed I was in the clear with the stated max of 60 writes/doc/sec. However, I've now learned that transactions can time out AND only attempt their writes 5 times.
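For concreteness, here is the mixed bag I'm imagining, sketched with hypothetical refs (refA/refB/refC are placeholders, not my actual collections):

const admin = require('firebase-admin');
admin.initializeApp();
const db = admin.firestore();

async function writeStyleSketch(refA, refB, refC) {
  // 1. No read needed, no contention expected: a plain set() is the cheapest write.
  await refA.set({ createdAt: admin.firestore.FieldValue.serverTimestamp() });

  // 2. Several docs that must succeed or fail together, still no reads needed:
  //    one batch means one commit, with no retry machinery to contend with.
  const batch = db.batch();
  batch.set(refA, { approved: true }, { merge: true });
  batch.update(refB, { numPosts: admin.firestore.FieldValue.increment(1) });
  await batch.commit();

  // 3. The new value depends on the current value: only now reach for a transaction.
  await db.runTransaction(async (t) => {
    const snap = await t.get(refC);
    const total = snap.exists ? snap.data().total + 1 : 1;
    t.set(refC, { total });
  });
}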
Some background on the app and the contention error:
- It's a basic social media app.
- The contention error came from making three posts in close succession from a single user.
- Each post triggers a cloud function that runs several transactions to link the post to the appropriate places (followers, feed, groups), sends notifications, and sets activity-feed docs for each follower.
Side question: am I wrong in understanding that Firebase can handle an app with this level of activity?
EDIT: I was aware of these Firebase limitations early on and did my best to keep documents and collections spread apart appropriately.
CODE EDIT: index.js: adminPostReview is the specific function that throws the error (I've simplified it as best I could).
The specific transaction throwing the error is, I believe, the call to transactionDayIndexAdd().
function transactionDelete(docRef) {
  return db.runTransaction(async t => {
    const doc = await t.get(docRef);
    if (doc.exists)
      t.delete(docRef);
  });
}
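(Side note on the helper above: as far as I know, Firestore treats delete() on a non-existent document as a successful no-op, so unless something else depends on the read, this transaction could arguably be a plain delete. A sketch:)

// Possible simplification, assuming nothing depends on the read:
// deleting a missing doc succeeds silently, so no transaction is needed.
function plainDelete(docRef) {
  return docRef.delete();
}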
// THIS FUNCTION. Is it bad to read and set two documents?
function transactionDayIndexAdd(docRef, dayPosted, postId, userId) {
  return db.runTransaction(async (t) => {
    let postMap = {};
    const doc = await t.get(docRef.doc(dayPosted));
    if (doc.exists) {
      postMap = doc.data().pids;
    } else {
      // First post of the day: also add the day to the 'index' doc.
      const indexDoc = await t.get(docRef.doc('index'));
      const newIndex = indexDoc.exists ? indexDoc.data().index : {};
      newIndex[dayPosted] = true;
      t.set(docRef.doc('index'), { 'index': newIndex });
    }
    postMap[postId] = userId;
    t.set(doc.ref, { 'pids': postMap });
  });
}
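(A read-free variant I've been considering for comparison: since each call only adds one key to the pids map, a merge-set can write just that key without reading the document first. A sketch, untested:)

// Sketch: { merge: true } creates the doc if missing and merges the nested map,
// so pids.<postId> and index.<dayPosted> can each be written without a prior read.
function dayIndexAddNoRead(docRef, dayPosted, postId, userId) {
  return Promise.all([
    docRef.doc(dayPosted).set({ pids: { [postId]: userId } }, { merge: true }),
    docRef.doc('index').set({ index: { [dayPosted]: true } }, { merge: true }),
  ]);
}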
exports.adminPostReview = functions.firestore
  .document('/adminPostReview/{postId}')
  .onUpdate(async (change, context) => {
    const postId = context.params.postId;
    const userId = change.before.data().ownerId;
    const approvedMaks = change.after.data().approvedMaks;
    const approvedRita = change.after.data().approvedRita;
    const promises = [];
    if (approvedMaks == false || approvedRita == false) {
      promises.push(transactionDelete(db.collection('posts').doc(userId).collection('userPosts').doc(postId)));
    } else if (approvedMaks == true || approvedRita == true) {
      const newPost = change.after.data();
      promises.push(postLive(newPost));
    }
    if (approvedMaks != null || approvedRita != null) {
      promises.push(transactionDelete(db.collection('activityFeed').doc(MAKS_ID).collection('feedItems').doc(`${postId}_review`)));
      promises.push(transactionDelete(db.collection('activityFeed').doc(RITA_ID).collection('feedItems').doc(`${postId}_review`)));
    }
    // Return the promises so the function doesn't terminate before the writes finish.
    return Promise.all(promises);
  });
async function postLive(newPost) {
  const userId = newPost.ownerId;
  const postId = newPost.postId;
  const dayPosted = newPost.dayPosted;
  const postToFeed = newPost.postToFeed;
  const postToGroups = newPost.postToGroups;
  newPost.approved = true;
  delete newPost.postToFeed;
  delete newPost.postToGroups;
  const batch = db.batch();
  const promises = [];
  if (postToFeed == true) {
    batch.set(
      db.collection('posts').doc(userId).collection('userPosts').doc(postId),
      newPost
    );
    batch.update(
      db.collection('userActivity').doc(userId),
      'numPosts',
      admin.firestore.FieldValue.increment(1),
    );
    promises.push(batch.commit());
    promises.push(transactionDayIndexAdd(db.collection("feedRandom"), dayPosted, postId, userId));
    const querySnap = await db.collection('followers')
      .doc(userId)
      .collection('userFollowers')
      .get();
    // for...of rather than forEach(async ...): an async forEach callback is never
    // awaited, so promises pushed after its awaits can be missed by Promise.all below.
    for (const follower of querySnap.docs) {
      promises.push(transactionDayIndexAdd(
        db.collection('feedFollowing').doc(follower.id).collection('feedItems'),
        dayPosted, postId, userId));
      promises.push(transactionSet( // transactionSet: helper defined elsewhere (not shown)
        db.collection('activityFeed').doc(follower.id).collection('feedItems').doc(postId),
        {
          media1Url: newPost.media1Url,
          media2Url: newPost.media2Url,
          postId: newPost.postId,
          timestamp: newPost.timestamp,
          type: 'newFollowingPost',
          userId: userId,
          userProfileImg: newPost.ownerProfileImg,
          username: newPost.username,
          displayName: newPost.displayName,
        }
      ));
      if (follower.data().notificationToken != null) {
        const payload = {
          notification: {
            title: 'Someone you follow made a new post!',
            body: `${newPost.username} has a new post.`
          },
          data: {
            click_action: "FLUTTER_NOTIFICATION_CLICK",
            vestiq_type: 'newFollowingPost',
            vestiq_uid: follower.id,
            vestiq_fid: userId,
            vestiq_pid: postId,
            vestiq_displayName: newPost.displayName,
            vestiq_photoUrl: newPost.ownerProfileImg,
            vestiq_username: newPost.username,
          }
        };
        const user = await db.collection('users').doc(follower.id).get();
        if (user.data().notificationOp3 == true)
          promises.push(pushNotification(follower.data().notificationToken, payload));
      }
    }
  }
  if (postToGroups != null && postToGroups.length > 0) {
    promises.push(pushGroupPosts(postToGroups, userId, postId, newPost));
  }
  return Promise.all(promises);
}
async function pushGroupPosts(postToGroups, userId, postId, newPost) {
  const groupBatch = db.batch();
  postToGroups.forEach((gid) => {
    groupBatch.set(
      db.collection('groups').doc(gid).collection('posts').doc(postId),
      newPost,
    );
    groupBatch.set(
      db.collection('usersGroupPosts').doc(userId).collection(gid).doc(postId),
      { 'gid': gid, 'postId': postId },
    );
  });
  return groupBatch.commit(); // was push(groupBatch.commit()), but no push() helper exists here
}

I was able to fix the contention problem by splitting transactionDayIndexAdd() into two separate transactions; I flip a bool to decide whether the second one should run.
This leads me to believe that the nested t.get/t.set inside a single transaction significantly increases the chance of contention. Since the split, I have not been able to reproduce the error. Here is the new transactionDayIndexAdd() for those who are curious.
HOWEVER, my original question still stands regarding optimizing for stability.
async function transactionDayIndexAdd(docRef, dayPosted, postId, userId) {
  let dayAdd = 0;
  const promises = [];
  await db.runTransaction(async (t) => {
    let postMap = {};
    const doc = await t.get(docRef.doc(dayPosted));
    if (doc.exists) {
      postMap = doc.data().pids;
    } else {
      dayAdd = 1;
    }
    postMap[postId] = userId;
    t.set(doc.ref, { 'pids': postMap });
  });
  if (dayAdd == 1) {
    promises.push(db.runTransaction(async (t) => {
      const indexDoc = await t.get(docRef.doc('index'));
      const newIndex = indexDoc.exists ? indexDoc.data().index : {};
      newIndex[dayPosted] = true;
      t.set(indexDoc.ref, { 'index': newIndex });
    }));
  }
  return await Promise.all(promises);
}
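(One more note on the "only tries 5 times" point: the Node Admin SDK's runTransaction accepts a maxAttempts option, so the retry budget can be raised without restructuring anything. A minimal sketch:)

// Sketch: raise the retry budget from the default 5 attempts.
function runWithMoreRetries(fn) {
  return db.runTransaction(fn, { maxAttempts: 10 });
}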

Related

Batched Write/Transaction in Cloud Function keeps failing

I'm trying to make changes to several documents in a cloud function once I receive a callback. My code was working when I only had to update one document, but now I need to update several documents atomically in the same function.
I need to read a certain document and then update other documents based on the information held in an array in the original document. I tried to do this using forEach but I get this error in the console whether I'm using a transaction or a batched write:
Error: Cannot modify a WriteBatch that has been committed.
    at WriteBatch.verifyNotCommitted (/workspace/node_modules/@google-cloud/firestore/build/src/write-batch.js:126:19)
    at WriteBatch.update (/workspace/node_modules/@google-cloud/firestore/build/src/write-batch.js:315:14)
    at loyaltyIds.forEach (/workspace/index.js:323:31)
    at process._tickCallback (internal/process/next_tick.js:68:7)
Error: Process exited with code 16
    at process.on.code (/layers/google.nodejs.functions-framework/functions-framework/node_modules/@google-cloud/functions-framework/build/src/invoker.js:92:22)
    at process.emit (events.js:198:13)
    at process.EventEmitter.emit (domain.js:448:20)
    at process.exit (internal/process/per_thread.js:168:15)
    at sendCrashResponse (/layers/google.nodejs.functions-framework/functions-framework/node_modules/@google-cloud/functions-framework/build/src/logger.js:44:9)
    at process.on.err (/layers/google.nodejs.functions-framework/functions-framework/node_modules/@google-cloud/functions-framework/build/src/invoker.js:88:44)
    at process.emit (events.js:198:13)
    at process.EventEmitter.emit (domain.js:448:20)
    at emitPromiseRejectionWarnings (internal/process/promises.js:140:18)
    at process._tickCallback (internal/process/next_tick.js:69:34)
What I end up with is that the document outside the for loop is updated, but the documents inside the for loop are not, which defeats the purpose of an atomic operation.
It also takes a long time for the write operation to Firestore to complete. Where am I going wrong?
Below is what I've tried:
Using batched write:
const txDoc = await txRef.get();
if (txDoc.exists) {
  console.log('Transaction Document Found');
  const userId = txDoc.data().userId;
  const loyaltyIds = txDoc.data().loyaltyIds;
  const pointsAwardedMap = txDoc.data().pointsAwarded;
  let batch = db.batch();
  loyaltyIds.forEach(async lpId => {
    // There are 2 elements in the loyaltyIds list
    console.log('Inside for loop');
    console.log(lpId);
    let cardId = 'u_' + userId + '-l_' + lpId; // 'u_$userId-l_$lpId'
    let cardRef = db.collection('users').doc(userId).collection('userLoyaltyCards').doc(cardId);
    let lpMap = pointsAwardedMap[lpId];
    // Get the user LC doc
    let cardDoc = await cardRef.get();
    if (cardDoc.exists) {
      batch.update(cardRef, {
        'pointsBalance': cardDoc.data().pointsBalance + lpMap['points'],
        'totalSpend': cardDoc.data().totalSpend + txDoc.data().transactionAmount,
        'numberOfPurchases': cardDoc.data().numberOfPurchases + 1,
        'pointsEarned': cardDoc.data().pointsEarned + lpMap['points'],
        'lastPurchaseDate': admin.database.ServerValue.TIMESTAMP,
      });
    }
  });
  // Then we update the tx doc
  batch.update(txRef, {
    transactionCode: `${receiptNo}`,
    transactionType: "purchase",
    transactionSuccess: true,
  }); // only this gets updated
  console.log('Firebase Transaction success');
  return batch.commit();
} else { return null; }
Using transaction operation:
await db.runTransaction(async t => {
  const txDoc = await t.get(txRef);
  if (txDoc.exists) {
    // userId
    // For each lp we update the user loyalty card that goes with it
    const userId = txDoc.data().userId;
    const loyaltyIds = txDoc.data().loyaltyIds;
    const pointsAwardedMap = txDoc.data().pointsAwarded;
    // What the pointsAwarded map looks like from the transaction:
    // var pointsAwarded = {
    //   lp1: {
    //     lpName: 'Jeff',
    //     lpId: 'lp.lpId',
    //     points: 'points1',
    //     cashbackPct: 'lp.cashbackPct',
    //     vendorId: 'lp.vendorId',
    //     vendorName: 'lp.vendorName',
    //   },
    //   lp2: {
    //     lpName: 'Susan',
    //     lpId: 'lp.lpId',
    //     points: 'points2',
    //     cashbackPct: 'lp.cashbackPct',
    //     vendorId: 'lp.vendorId',
    //     vendorName: 'lp.vendorName',
    //   },
    // };
    loyaltyIds.forEach(async (lpId) => {
      // We update the user loyalty cards
      console.log('Inside for loop');
      console.log(lpId);
      let cardId = 'u_' + userId + '-l_' + lpId; // 'u_$userId-l_$lpId'
      let cardRef = db.collection('users').doc(userId).collection('userLoyaltyCards').doc(cardId);
      let lpMap = pointsAwardedMap[lpId];
      // Get the user LC doc
      let cardDoc = await t.get(cardRef);
      // We create the initial loyalty card doc without relying on the cloud function
      if (cardDoc.exists) {
        // Users LC found, we simply update with this transaction
        // `${mpesaReceiptNo}`, this is how to add a var as a field value in firestore
        t.update(cardRef, {
          'pointsBalance': cardDoc.data().pointsBalance + lpMap['points'],
          'totalSpend': cardDoc.data().totalSpend + txDoc.data().transactionAmount,
          'numberOfPurchases': cardDoc.data().numberOfPurchases + 1,
          'pointsEarned': cardDoc.data().pointsEarned + lpMap['points'],
          'lastPurchaseDate': admin.database.ServerValue.TIMESTAMP,
        });
      }
    }); // end of loyalty card update loop
    // Then we update the transaction doc
    console.log('Transaction Document Found')
    t.update(txRef, {
      transactionCode: `${mpesaReceiptNo}`,
      transactionType: "purchase",
      transactionSuccess: true,
    });
    console.log('Firebase Transaction success');
  }
});
UPDATE
I've tried a normal for loop, but I still get the same errors. I even tried to put the batch.commit() statement inside the loop so it only executes when the loop completes. Still the same errors.
try {
  return txRef.get().then(async txDoc => {
    if (txDoc.exists) {
      const userId = txDoc.data().userId;
      const loyaltyIds = txDoc.data().loyaltyIds;
      const pointsAwardedMap = txDoc.data().pointsAwarded;
      const batch = db.batch();
      // loyaltyIds.forEach(lpId => {
      for (let i = 0; i < loyaltyIds.length; i++) {
        // We update the user loyalty cards
        const lpId = loyaltyIds[i];
        console.log('Inside for loop');
        console.log(lpId);
        const cardId = 'u_' + userId + '-l_' + lpId; // 'u_$userId-l_$lpId'
        const cardRef = db.collection('users').doc(userId).collection('userLoyaltyCards').doc(cardId);
        const lpMap = pointsAwardedMap[lpId];
        // Get the user LC doc
        cardRef.get().then(cardDoc => {
          // We created the initial loyalty card doc without relying on the cloud function
          if (cardDoc.exists) {
            console.log('Card found');
            // Users LC found, we simply update with this transaction
            // `${mpesaReceiptNo}`, this is how to add a var as a field value in firestore
            batch.update(cardRef, {
              'pointsBalance': cardDoc.data().pointsBalance + lpMap['points'],
              'totalSpend': cardDoc.data().totalSpend + txDoc.data().transactionAmount,
              'numberOfPurchases': cardDoc.data().numberOfPurchases + 1,
              'pointsEarned': cardDoc.data().pointsEarned + lpMap['points'],
              'lastPurchaseDate': admin.database.ServerValue.TIMESTAMP,
            });
          }
        });
        if (i + 1 == loyaltyIds.length) {
          console.log('Loyalty card loop complete, now going to update other things and commit the batch.');
          // Update the transaction document
          batch.update(txRef, {
            transactionCode: `${mpesaReceiptNo}`,
            transactionType: "purchase",
            transactionSuccess: true,
          });
          console.log('Committing the batch');
          return batch.commit();
        }
      } // end of for loop
    } else {
      console.log('Transaction Doc not found, terminating function.');
      return null;
    }
  }).then(function () {
    console.log("SUCCESS")
    return null;
  }).catch(function (error) {
    console.log("UNABLE TO EXECUTE TX BATCH");
    console.log(error);
    // throw new functions.https.HttpsError('unknown', 'An error occurred when trying to sort the posts.');
    return null;
  });
I think your problem is related to promises: you must await the batch.commit(), which your code doesn't do. There is no need to await batch.update(), only batch.commit().
Using map with Promise.all is important here to ensure you wait for all of the loop's async operations to complete.
I updated your code using awaits. I couldn't test it since I don't have access to your DB, but I think it should solve your problem with the batch.
try {
  const txDoc = await txRef.get();
  if (txDoc.exists) {
    const userId = txDoc.data().userId;
    const loyaltyIds = txDoc.data().loyaltyIds;
    const pointsAwardedMap = txDoc.data().pointsAwarded;
    const batch = db.batch();
    await Promise.all(loyaltyIds.map(async (lpId, i) => {
      console.log(lpId);
      const cardId = 'u_' + userId + '-l_' + lpId; // 'u_$userId-l_$lpId'
      const cardRef = db.collection('users').doc(userId).collection('userLoyaltyCards').doc(cardId);
      const lpMap = pointsAwardedMap[lpId];
      const cardDoc = await cardRef.get();
      if (cardDoc.exists) {
        batch.update(cardRef, {
          'pointsBalance': cardDoc.data().pointsBalance + lpMap['points'],
          'totalSpend': cardDoc.data().totalSpend + txDoc.data().transactionAmount,
          'numberOfPurchases': cardDoc.data().numberOfPurchases + 1,
          'pointsEarned': cardDoc.data().pointsEarned + lpMap['points'],
          'lastPurchaseDate': admin.database.ServerValue.TIMESTAMP,
        });
      }
      if (i + 1 == loyaltyIds.length) {
        batch.update(txRef, {
          transactionCode: `${mpesaReceiptNo}`,
          transactionType: "purchase",
          transactionSuccess: true,
        });
      }
    }));
    await batch.commit();
    return null;
  } else {
    console.log('Transaction Doc not found, terminating function.');
    return null;
  }
} catch (error) {
  console.log(error);
  return null;
}
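If you'd rather keep the transaction version, the same promise discipline applies, plus one extra rule: in the server SDKs a transaction must perform all of its reads before any of its writes. A sketch along those lines (again untested against your DB):

await db.runTransaction(async (t) => {
  const txDoc = await t.get(txRef);
  if (!txDoc.exists) return;
  const userId = txDoc.data().userId;
  const loyaltyIds = txDoc.data().loyaltyIds;
  const pointsAwardedMap = txDoc.data().pointsAwarded;
  const cardRefs = loyaltyIds.map((lpId) =>
    db.collection('users').doc(userId).collection('userLoyaltyCards').doc('u_' + userId + '-l_' + lpId));
  // All reads first...
  const cardDocs = await Promise.all(cardRefs.map((ref) => t.get(ref)));
  // ...then all writes.
  cardDocs.forEach((cardDoc, i) => {
    if (!cardDoc.exists) return;
    const lpMap = pointsAwardedMap[loyaltyIds[i]];
    t.update(cardRefs[i], {
      'pointsBalance': cardDoc.data().pointsBalance + lpMap['points'],
      'totalSpend': cardDoc.data().totalSpend + txDoc.data().transactionAmount,
      'numberOfPurchases': cardDoc.data().numberOfPurchases + 1,
      'pointsEarned': cardDoc.data().pointsEarned + lpMap['points'],
      // Firestore's server-time sentinel (admin.database.ServerValue.TIMESTAMP is the RTDB one):
      'lastPurchaseDate': admin.firestore.FieldValue.serverTimestamp(),
    });
  });
  t.update(txRef, {
    transactionCode: `${mpesaReceiptNo}`,
    transactionType: "purchase",
    transactionSuccess: true,
  });
});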

Firestore: How to run a batch write statement in a forEach() loop?

I am learning about Firestore's batch writes method and it looks really neat. Almost async-like! However, I am needing some help figuring out how to run a batch statement when doing a forEach() on a query.
My use case is that when a user deletes a post, I need to also "clean up" and update/delete other items associated with that post. That could be all bookmarks users have created for this post, likes, etc.
Here is an example of a deletePost function. How do you run a batch statement on the bookmarksQuery and usersAnswerQuery queries?
async deletePost(post) {
  const response = confirm('Delete this post?')
  const batch = this.$fire.firestore.batch()
  if (response === true && this.userProfile.uid === this.post.uid) {
    try {
      const postRef = this.$fire.firestore
        .collection(`users/${post.uid}/posts`)
        .doc(this.post.id)
      const answerRef = this.$fire.firestore
        .collection('answers')
        .doc(this.post.commentIdWithAnswer)
      const usersAnswerQuery = await this.$fire.firestore
        .collectionGroup('answers')
        .where('id', '==', this.post.commentIdWithAnswer)
        .get()
      const bookmarksQuery = await this.$fire.firestore
        .collectionGroup('bookmarks')
        .where('id', '==', this.post.id)
        .get()
      batch.update(postRef, {
        published: false,
        deleted: true,
        updatedAt: this.$fireModule.firestore.FieldValue.serverTimestamp()
      })
      bookmarksQuery.forEach((doc) => doc.ref.delete()) // <---- how to add this to batch?
      usersAnswerQuery.forEach((doc) => doc.ref.delete()) // <---- how to add this to batch?
      batch.delete(answerRef)
      await batch.commit()
      // To do: delete all user 'likes' associated with this post
      alert('Post successfully deleted!')
    } catch (error) {
      console.error('error deleting post.', error)
    }
  } else {
    return null
  }
}
To add a document deletion to the batch, you would use WriteBatch#delete() like you have done for answerRef using:
// prepare the batch
const batch = firebase.firestore().batch();
// add each doc's deletion to the batch
docs.forEach((doc) => batch.delete(doc.ref));
// commit the changes
await batch.commit();
While the above approach works fine, a batched write has a limit of 500 operations. As you will likely hit this limit on popular posts while tidying up bookmarks, answers and likes, we need to handle that case: track the number of operations added to the batch and start a new batch each time you reach the limit.
// prepare the first batch
let currentBatch = firebase.firestore().batch();
let currentBatchSize = 0;
const batches = [ currentBatch ];
// add each doc's deletion to the batch
docs.forEach((doc) => {
  // when the batch is too large, start a new one
  if (++currentBatchSize >= 500) {
    currentBatch = firebase.firestore().batch();
    batches.push(currentBatch);
    currentBatchSize = 1;
  }
  // add operation to batch
  currentBatch.delete(doc.ref);
});
// commit the changes
await Promise.all(batches.map(batch => batch.commit()));
Other things I've noticed in your current code:
- deletePost has an inconsistent return type of Promise<void | null>; consider returning a Promise<boolean> (to indicate success, because you are handling errors in your function).
- You ask for user confirmation before checking whether the post can actually be deleted by the current user; you should check first.
- It silently fails to delete another user's post, instead of showing an error (this should also be enforced by security rules).
- It silently fails to delete the post, without showing a message to the user.
- You have a large if block followed by a tiny else block; flip them so you can "fail fast" and avoid indenting most of the code.
Applying the solution plus these other changes gives:
async deletePost(post) {
  if (this.userProfile.uid !== this.post.uid) {
    alert("You can't delete another user's post.");
    return false; // denied
  }
  const response = confirm('Delete this post?')
  if (!response)
    return false; // cancelled
  try {
    const postRef = this.$fire.firestore
      .collection(`users/${post.uid}/posts`)
      .doc(this.post.id)
    const answerRef = this.$fire.firestore
      .collection('answers')
      .doc(this.post.commentIdWithAnswer)
    const usersAnswerQuery = await this.$fire.firestore
      .collectionGroup('answers')
      .where('id', '==', this.post.commentIdWithAnswer)
      .get()
    const bookmarksQuery = await this.$fire.firestore
      .collectionGroup('bookmarks')
      .where('id', '==', this.post.id)
      .get()
    let currentBatch = this.$fire.firestore.batch();
    const batches = [currentBatch];
    currentBatch.update(postRef, {
      published: false,
      deleted: true,
      updatedAt: this.$fireModule.firestore.FieldValue.serverTimestamp()
    });
    currentBatch.delete(answerRef);
    let currentBatchSize = 2;
    const addDocDeletionToBatch = (doc) => {
      if (++currentBatchSize >= 500) {
        currentBatch = this.$fire.firestore.batch();
        batches.push(currentBatch);
        currentBatchSize = 1;
      }
      currentBatch.delete(doc.ref);
    }
    bookmarksQuery.forEach(addDocDeletionToBatch)
    usersAnswerQuery.forEach(addDocDeletionToBatch)
    // TODO: delete all user 'likes' associated with this post
    // commit changes
    await Promise.all(batches.map(batch => batch.commit()));
    alert('Post successfully deleted!')
    return true;
  } catch (error) {
    console.error('error deleting post.', error)
    alert('Failed to delete post!');
    return false;
  }
}
Note: If you use the standard comments // TODO and // FIXME, you can make use of many tools that recognise and highlight these comments.
Do as follows. Do not forget the 500-operation limit for a batched write (which includes deletions).
async deletePost(post) {
  const response = confirm('Delete this post?')
  const batch = this.$fire.firestore.batch()
  if (response === true && this.userProfile.uid === this.post.uid) {
    try {
      // ...
      batch.update(postRef, {
        published: false,
        deleted: true,
        updatedAt: this.$fireModule.firestore.FieldValue.serverTimestamp()
      })
      bookmarksQuery.forEach((doc) => batch.delete(doc.ref))
      usersAnswerQuery.forEach((doc) => batch.delete(doc.ref))
      batch.delete(answerRef)
      await batch.commit()
      // To do: delete all user 'likes' associated with this post
      alert('Post successfully deleted!')
    } catch (error) {
      console.error('error deleting post.', error)
    }
  } else {
    return null
  }
}

firebase function on db trigger throws Function returned undefined, expected Promise or value

My Firebase function, based on a Realtime Database trigger, looks like this:
exports.on_user_created = functions.database.ref("/users/{id}")
  .onCreate((change, context) => {
    console.log("start of on_user_created ")
    const user = change.val();
    console.log("New user:::" + JSON.stringify(user))
    const uid = user._uid
    const referralCode = user._referralCode
    console.log("creating referral node for uid:" + uid + " with code:" + referralCode)
    if (referralCode === undefined) {
      console.error("No referral code created for the user while sign up. Referral node cannot be created.")
      return true
    }
    var db = admin.database();
    var ref = db.ref('referrals')
    ref.child(referralCode).set({"uid": uid}).then(
      (resp) => {
        console.log("referral node created")
        return true
      }
    ).catch(
      (err) => {
        console.error("unable to create referral node on user create:" + err)
        return true
      }
    )
  })
On running, it throws:
5:47:02.035 AM on_user_created Function returned undefined, expected Promise or value
I am failing to understand why
Adapted following Doug's comment below: "If you have no async work to be done, it's typical to return null"
This is because you don't return the Promise returned by the set() asynchronous operation.
You should do something like:
exports.on_user_created = functions.database.ref("/users/{id}")
  .onCreate((change, context) => {
    console.log("start of on_user_created ")
    const user = change.val();
    console.log("New user:::" + JSON.stringify(user))
    const uid = user._uid
    const referralCode = user._referralCode
    console.log("creating referral node for uid:" + uid + " with code:" + referralCode)
    if (referralCode === undefined) {
      console.error("No referral code created for the user while sign up. Referral node cannot be created.")
      return null // <-- See Doug's comment below.
    }
    var db = admin.database();
    var ref = db.ref('referrals')
    return ref.child(referralCode).set({"uid": uid}).then( // <-- !! Here we return
      (resp) => {
        console.log("referral node created")
        return null
      }
    ).catch(
      (err) => {
        console.error("unable to create referral node on user create:" + err)
        return null
      }
    )
  })
Note that you could streamline your code as follows if you don't need the console.log()s, e.g. for production.
exports.on_user_created = functions.database.ref("/users/{id}")
  .onCreate((change, context) => {
    const user = change.val();
    const uid = user._uid
    const referralCode = user._referralCode
    if (referralCode === undefined) {
      return null;
    } else {
      var db = admin.database();
      var ref = db.ref('referrals')
      return ref.child(referralCode).set({"uid": uid});
    }
  });
For more detail on the importance of returning the promises in a Cloud Function, I would suggest you watch the official video series and in particular the ones titled "Learn JavaScript Promises": https://firebase.google.com/docs/functions/video-series/

firestore cloud functions onCreate/onDelete sometimes immediately triggered twice

I have observed this behavior occasionally with both onCreate and onDelete triggers.
Both executions happened for the same document created in Firestore. There's only one document there, so I don't understand how it could trigger the handler twice. The handler itself is very simple:
module.exports = functions.firestore.document('notes/{noteId}').onCreate((event) => {
  const db = admin.firestore();
  const params = event.params;
  const data = event.data.data();
  // empty
});
This doesn't happen all the time. What am I missing?
See the Cloud Firestore Triggers Limitations and Guarantees: "Delivery of function invocations is not currently guaranteed. As the Cloud Firestore and Cloud Functions integration improves, we plan to guarantee 'at least once' delivery. However, this may not always be the case during beta. This may also result in multiple invocations for a single event, so for the highest quality functions ensure that the functions are written to be idempotent."
There is a Firecast video with tips for implementing idempotence, as well as two Google blog posts: the first, the second.
Based on @saranpol's answer, we use the below for now. We have yet to check whether we actually get any duplicate event ids, though.
const alreadyTriggered = eventId => {
  // Firestore doesn't support forward slash in ids and the eventId often has it
  const validEventId = eventId.replace('/', '')
  const firestore = firebase.firestore()
  return firestore.runTransaction(async transaction => {
    const ref = firestore.doc(`eventIds/${validEventId}`)
    const doc = await transaction.get(ref)
    if (doc.exists) {
      console.error(`Already triggered function for event: ${validEventId}`)
      return true
    } else {
      transaction.set(ref, {})
      return false
    }
  })
}

// Usage
if (await alreadyTriggered(context.eventId)) {
  return
}
In my case I use the eventId and a transaction to prevent onCreate from occasionally being triggered twice.
(You may need to save eventIds in a list and check for existing ones if your function is triggered often.)
const functions = require('firebase-functions')
const admin = require('firebase-admin')
const db = admin.firestore()

exports = module.exports = functions.firestore.document('...').onCreate((snap, context) => {
  const prize = 1000
  const eventId = context.eventId
  if (!eventId) {
    return false
  }
  // increment money
  const p1 = () => {
    const ref = db.doc('...')
    return db.runTransaction(t => {
      return t.get(ref).then(doc => {
        let money_total = 0
        if (doc.exists) {
          const eventIdLast = doc.data().event_id_last
          if (eventIdLast === eventId) {
            throw 'duplicated event'
          }
          const m0 = doc.data().money_total
          if (m0 !== undefined) {
            money_total = m0 + prize
          }
        } else {
          money_total = prize
        }
        return t.set(ref, {
          money_total: money_total,
          event_id_last: eventId
        }, {merge: true})
      })
    })
  }
  // will execute p2 p3 p4 if p1 succeeds
  const p2 = () => {
    ...
  }
  const p3 = () => {
    ...
  }
  const p4 = () => {
    ...
  }
  return p1().then(() => {
    return Promise.all([p2(), p3(), p4()])
  }).catch((error) => {
    console.log(error)
  })
})
Late to the party, but I had this issue and setting a minimum instance count solved it for me.
Looking at @xaxsis's attached screenshot, my function took about 15 seconds for the first request and about a quarter of that for the second.
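(For anyone looking for it, minimum instances are configured per function with runWith; a sketch, with the caveat that warm instances are billed while idle:)

// Sketch: keep one instance warm to avoid cold starts (path reused from the question above).
exports.onNoteCreated = functions
  .runWith({ minInstances: 1 })
  .firestore.document('notes/{noteId}')
  .onCreate(async (snap, context) => {
    // handler body
  });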

DynamoDb : Scan query does not return all the data

I have a DynamoDB table with thousands of records. I am scanning the table using the Scan function with a "between" FilterExpression applied.
However, the response only returns 3 records, whereas it should return about 100.
I created the Lambda function using Node.js.
The other common issue is whether the scan is executed until LastEvaluatedKey is empty.
If you are already doing this and still not getting all the items, please show your code so we can look at it in detail.
"If the total number of scanned items exceeds the maximum data set size limit of 1 MB, the scan stops and results are returned to the user as a LastEvaluatedKey value to continue the scan in a subsequent operation. The results also include the number of items exceeding the limit. A scan can result in no table data meeting the filter criteria. If LastEvaluatedKey is empty, then the 'last page' of results has been processed and there is no more data to be retrieved. If LastEvaluatedKey is not empty, it does not necessarily mean that there is more data in the result set. The only way to know when you have reached the end of the result set is when LastEvaluatedKey is empty."
Here's example code to get all results:
Map<String, AttributeValue> lastKeyEvaluated = null;
do {
    ScanRequest sr = new ScanRequest()
        .withTableName("tableName")
        .withProjectionExpression("id")
        .withExclusiveStartKey(lastKeyEvaluated);
    ScanResult result = client.scan(sr);
    for (Map<String, AttributeValue> item : result.getItems()) {
        System.out.println(item.get("id").getS());
    }
    lastKeyEvaluated = result.getLastEvaluatedKey();
} while (lastKeyEvaluated != null);
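Since the question mentions Node.js, here is the same LastEvaluatedKey loop with the DocumentClient (a sketch; the table name and filter are placeholders):

// Sketch: paginate a Scan until LastEvaluatedKey comes back empty.
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

async function scanAll(params) {
  const items = [];
  let lastKey; // ExclusiveStartKey for the next page
  do {
    const page = await docClient.scan({ ...params, ExclusiveStartKey: lastKey }).promise();
    items.push(...page.Items);
    lastKey = page.LastEvaluatedKey; // undefined once the last page is read
  } while (lastKey);
  return items;
}

// Usage with a hypothetical table and BETWEEN filter:
// scanAll({
//   TableName: 'myTable',
//   FilterExpression: '#d BETWEEN :from AND :to',
//   ExpressionAttributeNames: { '#d': 'createdDate' },
//   ExpressionAttributeValues: { ':from': '2020-01-01', ':to': '2020-12-31' },
// }).then(items => console.log(items.length));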
Using Node.js, I'm actually using Query to retrieve the items from the database. A single Query operation can retrieve a maximum of 1 MB of data, which is why I created a recursive function that retrieves and concatenates data until the response no longer includes a LastEvaluatedKey.
When LastEvaluatedKey comes back empty, there is no more data.
My function uses an index to get data from the database; Query works faster and more efficiently than Scan.
getItemByGSI takes a lot of parameters for filtering and customizing the query, which can be useful; you can of course remove the parameters that aren't necessary for your case.
So getAllItemsByGSI can be used to retrieve all data from DynamoDB, and getItemByGSI to run a single Query.
'use strict';
const omitBy = require('lodash/omitBy');
const isNil = require('lodash/isNil');
const AWS = require('aws-sdk');

const call = (action, params) => {
  return new Promise((resolve, reject) => {
    try {
      const dynamoDb = new AWS.DynamoDB.DocumentClient();
      resolve(dynamoDb[action](params).promise());
    } catch (error) {
      reject(error);
    }
  });
};

const getItemByGSI = ({
  TableName,
  IndexName,
  attribute,
  value,
  sortKey,
  sortValue,
  filter,
  filterValue,
  operator,
  filter1,
  filterValue1,
  LastEvaluatedKey,
  ScanIndexForward,
  Limit,
}) => {
  return new Promise(async (resolve, reject) => {
    try {
      // let, not const: params is reassigned by omitBy below
      let params = {
        TableName,
        IndexName,
        KeyConditionExpression: '#attrKey = :attrValue',
        ExpressionAttributeValues: { ':attrValue': value },
        ExpressionAttributeNames: { '#attrKey': attribute },
        ExclusiveStartKey: LastEvaluatedKey,
        Limit,
        FilterExpression: null,
      };
      // Plain if-blocks instead of the original ternary/&& chains, which garbled
      // the expressions (e.g. '+=' applied to the result of the whole && chain).
      if (sortKey && sortValue) {
        params.KeyConditionExpression += ' and #sortKey = :sortValue';
        params.ExpressionAttributeNames['#sortKey'] = sortKey;
        params.ExpressionAttributeValues[':sortValue'] = sortValue;
      }
      if (filter && filterValue) {
        params.FilterExpression = `#${filter} = :${filter}`;
        params.ExpressionAttributeNames[`#${filter}`] = filter;
        params.ExpressionAttributeValues[`:${filter}`] = filterValue;
      }
      if (filter && filterValue && operator && filter1 && filterValue1) {
        params.FilterExpression += ` ${operator} #${filter1} = :${filter1}`;
        params.ExpressionAttributeNames[`#${filter1}`] = filter1;
        params.ExpressionAttributeValues[`:${filter1}`] = filterValue1;
      }
      params = omitBy(params, isNil); // drop null/undefined keys such as an unused FilterExpression
      if (ScanIndexForward === false)
        params.ScanIndexForward = ScanIndexForward;
      const result = await call('query', params);
      resolve(result);
    } catch (error) {
      reject(error);
    }
  });
};

const getAllItemsByGSI = (data) => {
  return new Promise(async (resolve, reject) => {
    try {
      // let, not const: finalData is reassigned by concat below
      let finalData = [];
      const gettingData = await getItemByGSI(data);
      finalData = finalData.concat(gettingData.Items);
      if (gettingData.LastEvaluatedKey) {
        const final2 = await getAllItemsByGSI({
          ...data,
          LastEvaluatedKey: gettingData.LastEvaluatedKey,
        });
        finalData = finalData.concat(final2);
      }
      resolve(finalData);
    } catch (err) {
      reject(err);
    }
  });
};

module.exports = {
  getItemByGSI,
  getAllItemsByGSI,
};
