I am trying to put a hard-coded data item into DynamoDB, using an AWS SDK object to perform the update. All of the debug console.log statements in the code below get printed, but eventually it prints "Task timed out after 3.00 seconds", with no update to the DynamoDB table.
function updatedb(intent, session, callback) {
let country;
const repromptText = null;
const sessionAttributes = {};
let shouldEndSession = false;
console.log("In the function");
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'eu-west-1' });
var params = {
TableName: "Location",
Item: {
"LocationID": { "S": "11" },
"Country": { "S": "10" },
"Description": { "S": "10" },
"Name": { "S": "10" }
}
};
console.log("Param loaded & executing the DocClient Put");
docClient.put(params, function (err, data) {
if (err) {
speechOutput = 'Update failed';
console.error("Unable to create table. Error JSON:", JSON.stringify(err, null, 2));
callback(sessionAttributes,
buildSpeechletResponse(intent.name, speechOutput, repromptText, shouldEndSession));
} else {
console.log("Created table. Table description JSON:", JSON.stringify(data, null, 2));
speechOutput = 'Update successful';
callback(sessionAttributes,
buildSpeechletResponse(intent.name, speechOutput, repromptText, shouldEndSession));
}
});
}
The following items have already been checked:
1) There is a table named "Location" in DynamoDB
2) Both DynamoDB and this Lambda function are in eu-west-1 (Ireland)
3) The role assigned to this Lambda function can perform all operations on this table. See the policy details below
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Stmt1510603004000",
"Effect": "Allow",
"Action": [
"dynamodb:*"
],
"Resource": [
"arn:aws:dynamodb:eu-west-1:752546663632:table/Location"
]
}
]
}
How does my Lambda function locate the table "Location" with just the region? The code does not appear to specify an endpoint or anything similar; it was developed from a tutorial. Is that what I am missing?
Please can you help?
I had a similar issue; try putting the require statements at the beginning of your function:
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'eu-west-1' });
I believe that AWS locates the table based on your identity, in combination with the region and the table name.
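In other words, the SDK derives the regional endpoint automatically from the region you pass, so no explicit endpoint is required. If you wanted to set one anyway, it would look like this (a sketch for the AWS SDK for JavaScript v2; the URL shown is just the standard regional endpoint):
const AWS = require("aws-sdk");

// equivalent to passing only { region: 'eu-west-1' };
// the SDK derives this endpoint from the region on its own
const docClient = new AWS.DynamoDB.DocumentClient({
    region: "eu-west-1",
    endpoint: "https://dynamodb.eu-west-1.amazonaws.com"
});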
I was able to successfully post to a table using this code:
const AWS = require('aws-sdk');
const dynamoDB = new AWS.DynamoDB({region: 'us-west-2'});
var params = {
TableName: "my-table",
Item: {
"LocationID": { S: "11" },
"Country": { S: "10" },
"Description": { S: "10" },
"Name": { S: "10" }
}
};
dynamoDB.putItem(params, (err, data) => {
if (err){
console.error(err.stack);
} else {
console.log(data);
}
});
If you can in fact post to the table from the CLI, then there is still at least one remaining issue: it appears that you are using the DocumentClient class incorrectly. It looks like you're mixing up the syntax for DynamoDB.putItem with the syntax for DynamoDB.DocumentClient.put.
Notice that my code uses the DynamoDB class directly; based on what you're doing, I see no reason why you couldn't do the same. Otherwise, you should change your Item object:
var params = {
TableName: "my-table",
Item: {
"LocationID": "11",
"Country": "10",
"Description": "10",
"Name": "10"
}
};
My guess is that your code is currently erroring out because you are trying to insert Maps where you want to insert Strings. If you have CloudWatch configured, you can check the logs.
Finally, make sure the callback is actually invoked with a response in every code path. If your intention is to respond to a client calling the Lambda, you need to do that explicitly; depending on your Node.js version, the Lambda can otherwise simply time out without returning a useful response.
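For illustration, here is a minimal sketch of the corrected put using the DocumentClient with plain values, making sure a response is always passed back (table and attribute names are taken from the question; buildSpeechletResponse and the surrounding variables are the question's own):
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient({ region: "eu-west-1" });

const params = {
    TableName: "Location",
    Item: {
        // DocumentClient marshals native JS types itself,
        // so no { "S": ... } type wrappers are needed here
        LocationID: "11",
        Country: "10",
        Description: "10",
        Name: "10"
    }
};

docClient.put(params, (err, data) => {
    const speechOutput = err ? "Update failed" : "Update successful";
    if (err) console.error(JSON.stringify(err, null, 2));
    // always respond, so the Lambda does not just time out
    callback(sessionAttributes,
        buildSpeechletResponse(intent.name, speechOutput, repromptText, shouldEndSession));
});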
Related
After working a bit with DynamoDB, I've run into an issue that, from what I've read so far, DynamoDB is not really ideal for. So before I make the switch to RDS, I'd like to see if there's any way I can achieve what I need with DynamoDB. I've also thought about breaking this out into multiple DynamoDB tables.
Below is my data schema. There is a list nested inside the item, and I need to be able to append strings to that list.
{
    "server-id": "123345678",
    "server-name": "my-server",
    "topics": [
        {
            "name": "my-topic",
            "subscribers": [] // This is what I need to append to
        }
    ]
}
Yes, this is possible.
var AWS = require('aws-sdk');
AWS.config.update({region: 'us-east-1'});
var ddb = new AWS.DynamoDB({apiVersion: '2012-08-10'});
var params = {
ExpressionAttributeNames: {
"#T": "topics",
"#S": "subscribers"
},
ExpressionAttributeValues: {
":vals": {
L: [
{ N: "123" },
{ N: "456" }
]
}
},
Key: {
'server-id': { S: '123345678' }
},
ReturnValues: "ALL_NEW",
TableName: 'dummy-table',
UpdateExpression: "SET #T[0].#S = list_append(#T[0].#S, :vals)"
};
ddb.updateItem(params, function(err, data) {
if (err) {
console.log("Error", err);
} else {
console.log("Success", data);
}
});
I have a single DynamoDB table that holds both Games and Players. I currently have the following Lambda resolver that works for my AppSync getGame query. The question is: is it possible to write a DynamoDB resolver using the Velocity mapping templates that does the same, so I can avoid the Lambda invocation?
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = async (event) => {
let result = null;
let params;
switch(event.field) {
case "getGame":
const id = event.arguments.id;
if (!id) {
throw new Error('Missing game id');
}
params = {
TableName: 'games',
KeyConditionExpression: 'pk = :pk AND sk = :sk',
ExpressionAttributeValues: {
':pk': 'game',
':sk': `meta_${id}`
}
};
const game = (await docClient.query(params).promise()).Items[0];
// get players
const gameKey = `game_${game.sk.split('_')[1]}_${game.sk.split('_')[2]}`;
params = {
TableName: 'games',
KeyConditionExpression: 'pk = :pk AND begins_with(sk, :sk)',
ExpressionAttributeValues: {
':pk': gameKey,
':sk': 'player_'
}
};
game.players = (await docClient.query(params).promise()).Items;
result = game;
break;
}
return result;
};
And the result looks like:
{
"gsipk": "NEW_OPEN",
"sk": "meta_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"gsisk": "level_1_players_4",
"pk": "game",
"players": [
{
"gsipk": "player_3a7bb19c-0ccd-42df-a606-acd8b1f5e288",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 4,
"place": null,
"sk": "player_3a7bb19c-0ccd-42df-a606-acd8b1f5e288",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "gold",
"pows": 0
},
{
"gsipk": "player_96b772b1-4127-43da-b550-029d5c632675",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 2,
"place": null,
"sk": "player_96b772b1-4127-43da-b550-029d5c632675",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "blue",
"pows": 0
},
{
"gsipk": "player_9d30c675-930f-401b-ac5f-8db32bb2acb8",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 3,
"place": null,
"sk": "player_9d30c675-930f-401b-ac5f-8db32bb2acb8",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "green",
"pows": 0
},
{
"gsipk": "player_ab179ad1-a160-44f8-b438-0e93385b6c47",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 1,
"place": null,
"sk": "player_ab179ad1-a160-44f8-b438-0e93385b6c47",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "red",
"pows": 0
}
]
}
Okay, thanks to #cyberwombat's comment:
Unless you remap your data to be able to fetch all items in one request
I was able to figure this out. First, I had to refactor my table a bit. I changed the primary key (hash) to be game_<uuid> and then referenced the game details with a sort key (range) like meta_<timestamp> and players with a sort key like player_<uuid>.
Once I did this, I was able to use a single Query resolver to return the game details and all players, with this request mapping template:
{
"version": "2017-02-28",
"operation": "Query",
"query" : {
"expression" : "pk = :pk",
"expressionValues" : {
":pk": { "S": "$ctx.arguments.pk" }
}
}
}
The above query returns 5 items (the 4 players and the game metadata). I then used a response mapping template like this:
#set($game = {})
#set($players = [])
#foreach($item in $ctx.result.items)
#if($util.matches('meta_\d*', $item.sk))
## main game object
#set($game = $item)
#else
## player
$util.qr($players.add($item))
#end
#end
$util.qr($game.put("players", $players))
$util.toJson($game)
Now I have a SINGLE query to DynamoDB and no lambda resolvers... beautiful.
Unless you remap your data to be able to fetch all items in one request, you will need pipeline resolvers. In summary, a pipeline is a sequence of resolvers in line, wrapped in a before/after template.
In your case these before/after templates are not really used, so the basic setup would be:
Before template (nothing is needed, so an empty JSON object is fine):
{}
After template (to pass on the result from the previous calls):
$util.toJson($ctx.result)
Then you will have two DynamoDB resolvers. These are the same as other DynamoDB resolvers you may have written before, except that in the second one you access the first resolver's result via $ctx.prev.result. So let's say you passed the game ID on in the response template of the first call as:
{
"game_id": "$ctx.result.get('theGameId')",
...
}
Then this can be accessed in the second request template as $ctx.prev.result.game_id. You can also use the stash instead: $ctx.stash.put() and $ctx.stash.get(). The stash is useful if you need to do something in the BEFORE template (the very first one, which we have left blank for now) and pass that along throughout the resolvers.
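For concreteness, the second resolver's request mapping template might then look something like this (a sketch; game_id comes from the response snippet above, and the key shape follows the earlier examples):
{
    "version": "2017-02-28",
    "operation": "Query",
    "query": {
        "expression": "pk = :pk",
        "expressionValues": {
            ":pk": { "S": "$ctx.prev.result.game_id" }
        }
    }
}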
Given a collection of Cosmos DB documents similar to the following, I'd like to generate a grouped (distinct?) list of "categories" using Cosmos SQL. Any help in this regard would be greatly appreciated.
[
{
"id": "f0136e76-8e66-6a5a-3790-b577001d6420",
"itemId": "analyze-and-visualize-your-data-with-azure-cosmos-db-notebooks",
"title": "Built-in Jupyter notebooks in Azure Cosmos DB are now available",
"categories": [
"Developer",
"Database",
"Data Science"
]
},
{
"id": "f0136e76-8e66-6a5a-3790-b577001d6420",
"itemId": "analyze-and-visualize-your-data-with-azure-cosmos-db-notebooks",
"title": "Built-in Jupyter notebooks in Azure Cosmos DB are now available",
"categories": [
"Developer",
"Database",
"Data Science"
]
},
{
"id": "d98c1dd4-008f-04b2-e980-0998ecf8427e",
"itemId": "improving-azure-virtual-machines-resiliency-with-project-tardigrade",
"title": "Improving Azure Virtual Machines resiliency with Project Tardigrade",
"categories": [
"Virtual Machines",
"Supportability",
"Monitoring"
]
}
]
GROUP BY is not supported by Azure Cosmos DB so far. You can alternatively use a stored procedure to implement your requirement.
Based on the sample documents you have given above, here is a sample stored procedure:
function groupBy() {
    var collection = getContext().getCollection();
    var collectionLink = collection.getSelfLink();
    var isValid = collection.queryDocuments(
        collectionLink,
        'SELECT * FROM stackoverflow s',
        { EnableCrossPartitionQuery: true },
        function (err, feed, options) {
            if (err) throw err;
            var response = getContext().getResponse();
            if (!feed || !feed.length) {
                response.setBody('no docs found');
            } else {
                // collect each category once, using an object as a set
                var items = {};
                for (var i = 0; i < feed.length; i++) {
                    var categories = feed[i].categories;
                    for (var j = 0; j < categories.length; j++) {
                        items[categories[j]] = categories[j];
                    }
                }
                var distinctArray = [];
                for (var distinctObj in items) {
                    distinctArray.push(items[distinctObj]);
                }
                response.setBody(distinctArray);
            }
        });
    if (!isValid) throw new Error('The query was not accepted by the server.');
}
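To execute the stored procedure, a minimal sketch with the @azure/cosmos Node.js SDK (v3) might look like this; the endpoint, key, database, container, and partition key value are placeholders you would substitute:
const { CosmosClient } = require("@azure/cosmos");

const client = new CosmosClient({ endpoint: "<account-endpoint>", key: "<account-key>" });

async function run() {
    const container = client.database("<database-id>").container("<container-id>");
    // stored procedures run inside a single partition,
    // so a partition key value has to be supplied
    const { resource: distinctCategories } = await container.scripts
        .storedProcedure("groupBy")
        .execute("<partition-key-value>");
    console.log(distinctCategories);
}

run().catch(console.error);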
I can't seem to find out how to correctly call PutItem for a String Set in DynamoDB through API Gateway. If I call it the way I would for a List of Maps, then I get objects returned. Example data is below.
{
"eventId": "Lorem",
"eventName": "Lorem",
"companies": [
{
"companyId": "Lorem",
"companyName": "Lorem"
}
],
"eventTags": [
"Lorem",
"Lorem"
]
}
And my example template call for companies:
"companies" : {
"L": [
#foreach($elem in $inputRoot.companies) {
"M": {
"companyId": {
"S": "$elem.companyId"
},
"companyName": {
"S": "$elem.companyName"
}
}
} #if($foreach.hasNext),#end
#end
]
}
I've tried to call it with the String Set type listed, but it still errors out, telling me "Start of structure or map found where not expected" or that serialization failed.
"eventTags" : {
"SS": [
#foreach($elem in $inputRoot.eventTags) {
"S":"$elem"
} #if($foreach.hasNext),#end
#end
]
}
What is the proper way to call PutItem for converting an array of strings to a String Set?
If you are using the JavaScript AWS SDK, you can use the Document Client API (docClient.createSet) to store the SET data type.
docClient.createSet converts an array into a SET data type:
var docClient = new AWS.DynamoDB.DocumentClient();

var params = {
    TableName: table,
    Item: {
        "yearkey": year,
        "title": title,
        "product": docClient.createSet(['milk', 'veg'])
    }
};
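The item can then be written as usual with docClient.put; a sketch, reusing the params above:
docClient.put(params, function (err, data) {
    if (err) {
        console.error("Put failed:", JSON.stringify(err, null, 2));
    } else {
        // "product" is serialized on the wire as { "SS": ["milk", "veg"] }:
        // bare strings inside "SS", with no { "S": ... } wrappers, which is
        // also the shape an API Gateway mapping template would need to emit.
        console.log("Put succeeded");
    }
});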
I wish to create an item in DynamoDB that contains a list. This is my code:
var list_update_params = {
TableName: "table01",
Key: {
"MachineID": {
"S": MachineID
},
"Hour": {
"S": Hour
}
},
UpdateExpression: "set var01_list = list_append(var01_list, :ot)",
ExpressionAttributeValues: {
":ot": {"L": [{"N": var01}]}
},
ReturnValues: "NONE"
};
dynamodb.updateItem(list_update_params, function(err, data) {
if (err) console.log(err, err.stack);
else console.log("Updated List to DynamoDB");
});
The problem is that list_append expects the attribute var01_list to already be present, but it won't exist at the first insert. Is there a technique that will let me create and insert a list attribute if one doesn't exist, and append to it in later calls?
Got the answer from a similar post here.
UpdateExpression: "set var01_list= list_append(if_not_exists(var01_list, :empty_list), :h)",
ExpressionAttributeValues: {
":h": {"L": [{"N":var01}]},
":empty_list": {"L": []}
},
The key was using if_not_exists together with list_append. I didn't know they could be combined in this manner.
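Putting it all together with the original parameters, the full call might look like this (a sketch reusing the question's table and variable names):
var list_update_params = {
    TableName: "table01",
    Key: {
        "MachineID": { "S": MachineID },
        "Hour": { "S": Hour }
    },
    // if_not_exists supplies :empty_list on the first write,
    // so list_append succeeds whether or not var01_list exists yet
    UpdateExpression: "set var01_list = list_append(if_not_exists(var01_list, :empty_list), :ot)",
    ExpressionAttributeValues: {
        ":ot": { "L": [{ "N": var01 }] },
        ":empty_list": { "L": [] }
    },
    ReturnValues: "NONE"
};

dynamodb.updateItem(list_update_params, function (err, data) {
    if (err) console.log(err, err.stack);
    else console.log("List created or appended");
});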