I have a single DynamoDB table that holds Games and Players. I currently have the following Lambda resolver, which works for my AppSync getGame query. The question is: is it possible to write a DynamoDB resolver using Velocity templates that does the same thing, so I can avoid the Lambda invocation?
const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();
exports.handler = async (event) => {
  let result = null;
  let params;
  switch (event.field) {
    case "getGame":
      const id = event.arguments.id;
      if (!id) {
        throw new Error('Missing game id');
      }
      // fetch the game metadata item
      params = {
        TableName: 'games',
        KeyConditionExpression: 'pk = :pk AND sk = :sk',
        ExpressionAttributeValues: {
          ':pk': 'game',
          ':sk': `meta_${id}`
        }
      };
      const game = (await docClient.query(params).promise()).Items[0];
      // fetch the players for that game
      const gameKey = `game_${game.sk.split('_')[1]}_${game.sk.split('_')[2]}`;
      params = {
        TableName: 'games',
        KeyConditionExpression: 'pk = :pk AND begins_with(sk, :sk)',
        ExpressionAttributeValues: {
          ':pk': gameKey,
          ':sk': 'player_'
        }
      };
      game.players = (await docClient.query(params).promise()).Items;
      result = game;
      break;
  }
  return result;
};
And the result looks like:
{
"gsipk": "NEW_OPEN",
"sk": "meta_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"gsisk": "level_1_players_4",
"pk": "game",
"players": [
{
"gsipk": "player_3a7bb19c-0ccd-42df-a606-acd8b1f5e288",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 4,
"place": null,
"sk": "player_3a7bb19c-0ccd-42df-a606-acd8b1f5e288",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "gold",
"pows": 0
},
{
"gsipk": "player_96b772b1-4127-43da-b550-029d5c632675",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 2,
"place": null,
"sk": "player_96b772b1-4127-43da-b550-029d5c632675",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "blue",
"pows": 0
},
{
"gsipk": "player_9d30c675-930f-401b-ac5f-8db32bb2acb8",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 3,
"place": null,
"sk": "player_9d30c675-930f-401b-ac5f-8db32bb2acb8",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "green",
"pows": 0
},
{
"gsipk": "player_ab179ad1-a160-44f8-b438-0e93385b6c47",
"gsisk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"points": 0,
"num": 1,
"place": null,
"sk": "player_ab179ad1-a160-44f8-b438-0e93385b6c47",
"pieces": [],
"wilds": 0,
"pk": "game_1578241126110_35660fcc-3cde-4d30-9ebd-09abba1aedf7",
"color": "red",
"pows": 0
}
]
}
Okay, thanks to @cyberwombat's comment:
Unless you remap your data to be able to fetch all items in one request
I was able to figure this out. First, I had to refactor my table a bit. I changed the primary key (hash) to be game_<uuid> and then referenced the game details with a sort key (range) like meta_<timestamp> and players with a sort key like player_<uuid>.
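To illustrate, items in the refactored table look roughly like this (a sketch using the IDs from the sample data above):
{ "pk": "game_35660fcc-3cde-4d30-9ebd-09abba1aedf7", "sk": "meta_1578241126110", ...game details... }
{ "pk": "game_35660fcc-3cde-4d30-9ebd-09abba1aedf7", "sk": "player_3a7bb19c-0ccd-42df-a606-acd8b1f5e288", ...player details... }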
Once I did this, I was able to use this resolver to Query and return the game details and all players with this request mapping template:
{
"version": "2017-02-28",
"operation": "Query",
"query" : {
"expression" : "pk = :pk",
"expressionValues" : {
":pk": { "S": "$ctx.arguments.pk" }
}
}
}
The above query returns 5 items (the 4 players and the game metadata). I then used a response mapping template like this:
#set($game = {})
#set($players = [])
#foreach($item in $ctx.result.items)
#if($util.matches('meta_\d*', $item.sk))
## main game object
#set($game = $item)
#else
## player
$util.qr($players.add($item))
#end
#end
$util.qr($game.put("players", $players))
$util.toJson($game)
Now I have a SINGLE query to DynamoDB and no lambda resolvers... beautiful.
Unless you remap your data to be able to fetch all items in one request, you will need pipeline resolvers. In summary, a pipeline is a sequence of resolvers executed in order, wrapped in a before/after template.
In your case these before/after templates are not really used, so the basic setup would be:
Before template (nothing is needed so an empty json is fine)
{}
After template (to pass the result from previous calls on)
$util.toJson($ctx.result)
Then you will have 2 DynamoDB resolvers. These are the same as other DynamoDB resolvers you may have previously written, except that in the second one, in order to access the first resolver's result, you will use $ctx.prev.result. So let's say you passed the game ID in the response of the first call as:
{
"game_id": "$ctx.result.get('theGameId')",
...
}
Then this can be accessed in the second request template as $ctx.prev.result.game_id. You can also use the stash instead: $ctx.stash.put() and $ctx.stash.get(). The stash is useful if you need to do something in the BEFORE template (the very first one, which we left blank for now) and pass that value through the rest of the resolvers.
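For example, a minimal sketch of the stash approach (the gameId key name is hypothetical):
Before template:
## Stash a value so every resolver in the pipeline can read it later.
$util.qr($ctx.stash.put("gameId", $ctx.args.id))
{}
Any later request template can then read it back:
#set($gameId = $ctx.stash.get("gameId"))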
I have a problem with my entity adapter: after I receive the response from the API and set the payload into the state, the JSON format changes. I don't know if the setOne method of Redux Toolkit changes it by itself.
Original Format:
{
"id": "af3fbedf-4751-413b-abd5-074737b6edd2",
"role_id": null,
"first_name": "Geirge",
"last_name": "Shaw",
"email": "sdf#sdf.com",
"email_verified_at": null,
"username": "coder",
"status": 1,
"created_at": "2021-11-18T06:50:46.000000Z",
"created_by": null,
"updated_at": "2021-11-18T06:50:46.000000Z",
"updated_by": null,
"fullname": "Geirge Shaw",
"token": "94|4O0z51gddqHxeCs5UhLysE9QoSsIOSlP2EYb9iFQ"
}
New Format after I set the response to the state:
af3fbedf-4751-413b-abd5-074737b6edd2: {id: 'af3fbedf-4751-413b-abd5-074737b6edd2', role_id: null, first_name: .....}
My extraReducers looks like:
extraReducers: {
[LoginAuthentication.pending](state, action){
state.isLoading = true
state.isLoggedIn = false
},
[LoginAuthentication.fulfilled](state, {payload}){
console.log(payload);
state.isLoading = false
LoginAdapter.setOne(state, payload)
state.isLoggedIn = true
},
[LoginAuthentication.rejected](state, action) {
state.isLoading = false
state.isLoggedIn = false
}
}
Because that is exactly what createEntityAdapter does. It takes the original item objects, and stores them in a lookup table where the keys are the IDs and the values are the original items.
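For illustration, here is a minimal sketch of that behavior using the adapter's methods standalone (your slice details are omitted):

import { createEntityAdapter } from '@reduxjs/toolkit';

// Minimal sketch: the adapter keeps items in a normalized
// { ids, entities } structure keyed by id.
const loginAdapter = createEntityAdapter();
const initial = loginAdapter.getInitialState({ isLoading: false, isLoggedIn: false });
// initial: { ids: [], entities: {}, isLoading: false, isLoggedIn: false }

const user = { id: 'af3fbedf-4751-413b-abd5-074737b6edd2', first_name: 'Geirge' };
const next = loginAdapter.setOne(initial, user);
// next.entities['af3fbedf-4751-413b-abd5-074737b6edd2'] is the original user
// object; this is exactly the "new format" you observed.

// To read the items back out as an array, use the adapter's selectors:
const { selectAll } = loginAdapter.getSelectors();
console.log(selectAll(next)); // [ { id: 'af3fbedf-...', first_name: 'Geirge' } ]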
See these resources for more explanations:
https://redux.js.org/usage/structuring-reducers/normalizing-state-shape
https://redux.js.org/tutorials/essentials/part-6-performance-normalization
https://redux.js.org/tutorials/fundamentals/part-7-standard-patterns#normalized-state
https://redux.js.org/tutorials/fundamentals/part-8-modern-redux#using-createentityadapter
https://redux-toolkit.js.org/usage/usage-guide#managing-normalized-data
https://redux-toolkit.js.org/api/createEntityAdapter
I have the below data schema for my DynamoDB table. I am trying to append to a topic's subscribers list on the condition that input = name (e.g., input = my-topic2). There can be many maps in the "topics" list, and I need to search for the map where name = input and, from there, add the subscriber to that topic.
{
  "server-id": "123345678",
  "server-name": "my-server",
  "topics": [
    {
      "name": "my-topic",
      "subscribers": []
    },
    {
      "name": "my-topic2",
      "subscribers": [] // This is what I need to append to, on the condition that input = "my-topic2"
    }
  ]
}
I currently have the following params, which append "my-topic" subscribers:
params = {
ExpressionAttributeNames: {
"#T": "topics",
"#S": "subscribers"
},
ExpressionAttributeValues: {
":vals": [
message.author.id
]
},
Key: {
'server-id': serverID
},
ReturnValues: "ALL_NEW",
TableName: tableName,
UpdateExpression: "SET #T[0].#S = list_append(#T[0].#S, :vals)"
};
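Not part of the original question, but for context: an UpdateExpression can't search inside a list for a matching map, so one common workaround is to read the item first, find the index of the topic whose name matches the input, and then target that index in the update. A sketch, assuming the aws-sdk v2 DocumentClient and the schema above:

const AWS = require('aws-sdk');
const docClient = new AWS.DynamoDB.DocumentClient();

// Sketch (not from the original post): locate the topic's index, then update it.
async function addSubscriber(tableName, serverID, topicName, subscriberId) {
  // 1. Read the item to find the topic's position in the list.
  const { Item } = await docClient.get({
    TableName: tableName,
    Key: { 'server-id': serverID }
  }).promise();
  const index = Item.topics.findIndex(t => t.name === topicName);
  if (index === -1) throw new Error(`Topic ${topicName} not found`);

  // 2. Append to the subscribers list at that index.
  return docClient.update({
    TableName: tableName,
    Key: { 'server-id': serverID },
    UpdateExpression: `SET #T[${index}].#S = list_append(#T[${index}].#S, :vals)`,
    ExpressionAttributeNames: { '#T': 'topics', '#S': 'subscribers' },
    ExpressionAttributeValues: { ':vals': [subscriberId] },
    ReturnValues: 'ALL_NEW'
  }).promise();
}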
I am trying to put a hard-coded data item into DynamoDB, using the AWS SDK to perform the update. All the debug console.log statements in the code below get printed, but eventually it prints Task timed out after 3.00 seconds, with no update to DynamoDB.
function updatedb(intent, session, callback) {
let country;
const repromptText = null;
const sessionAttributes = {};
let shouldEndSession = false;
console.log("In the function");
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'eu-west-1' });
var params = {
TableName: "Location",
Item: {
"LocationID": { "S": "11" },
"Country": { "S": "10" },
"Description": { "S": "10" },
"Name": { "S": "10" }
}
};
console.log("Param loaded & executing the DocClient Put");
docClient.put(params, function (err, data) {
if (err) {
speechOutput = 'Update failed';
console.error("Unable to create table. Error JSON:", JSON.stringify(err, null, 2));
callback(sessionAttributes,
buildSpeechletResponse(intent.name, speechOutput, repromptText, shouldEndSession));
} else {
console.log("Created table. Table description JSON:", JSON.stringify(data, null, 2));
speechOutput = 'Update successful';
callback(sessionAttributes,
buildSpeechletResponse(intent.name, speechOutput, repromptText, shouldEndSession));
}
});
}
The following items are already checked:
1) There is a table named "Location" in DynamoDB
2) Both DynamoDB and this Lambda function are in eu-west-1 (Ireland)
3) The role assigned for this Lambda function can do all operation on this table. See the policy details below
{
"Version": "2012-10-17",
"Statement": [
{
"Sid": "Stmt1510603004000",
"Effect": "Allow",
"Action": [
"dynamodb:*"
],
"Resource": [
"arn:aws:dynamodb:eu-west-1:752546663632:table/Location"
]
}
]
}
How does my Lambda function locate the table "Location" with just the region? The code does not appear to have an endpoint, etc. I just developed it based on a tutorial.
Is that what I am missing?
Please can you help?
I had a similar issue; try putting the require statements at the beginning of your function:
const AWS = require("aws-sdk");
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'eu-west-1' });
I believe that AWS locates the table based on your identity, in combination with the region and the table name.
I was able to successfully post to a table using this code:
const AWS = require('aws-sdk');
const dynamoDB = new AWS.DynamoDB({region: 'us-west-2'});
var params = {
TableName: "my-table",
Item: {
"LocationID": { S: "11" },
"Country": { S: "10" },
"Description": { S: "10" },
"Name": { S: "10" }
}
};
dynamoDB.putItem(params, (err, data) => {
if (err){
console.error(err.stack);
} else {
console.log(data);
}
});
If you can in fact post to the table from the CLI, then there is still at least one remaining issue: it appears that you are using the DocumentClient class incorrectly. It looks like you're mixing up the syntax for DynamoDB.putItem with the syntax for DynamoDB.DocumentClient.put.
If you notice, my code uses the DynamoDB class directly; based on what you're doing, I see no reason why you couldn't do the same. Otherwise, you should change your Item object:
var params = {
TableName: "my-table",
Item: {
"LocationID": "11",
"Country": "10",
"Description": "10",
"Name": "10"
}
};
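With that params object, the DocumentClient call itself would look something like this (a sketch):

// The DocumentClient accepts plain JavaScript values and handles the
// { S: "..." } attribute-type wrapping internally.
const docClient = new AWS.DynamoDB.DocumentClient({ region: 'us-west-2' });
docClient.put(params, (err, data) => {
  if (err) console.error(err.stack);
  else console.log(data);
});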
My guess is your code is currently erroring out because you are trying to insert Maps where you want to insert Strings. If you have CloudWatch configured, you could check the logs.
Finally, I don't see you using the callback in your code. If your intention is to respond to a client calling the Lambda, you should do that. Depending on your Node.js version, the Lambda can simply time out without returning a useful response.
I have a custom class in ES 2.5 with the following fields:
Title
DataSources
Content
Running a search is fine, except with the middle field - it's built/indexed using a delimiter of '|'.
ex: "|4|7|8|9|10|12|14|19|20|21|22|23|29|30"
I need to build a query that matches the search words across all fields AND matches at least one number in the DataSources field.
So to summarize what I currently have:
QueryBase query = new SimpleQueryStringQuery
{
//DefaultOperator = !operatorOR ? Operator.And : Operator.Or,
Fields = LearnAboutFields.FULLTEXT,
Analyzer = "standard",
Query = searchWords.ToLower()
};
_boolQuery.Must = new QueryContainer[] {query};
That's the search words query.
foreach (var datasource in dataSources)
{
// Add DataSources with an OR
queryContainer |= new WildcardQuery { Field = LearnAboutFields.DATASOURCE, Value = string.Format("*{0}*", datasource) };
}
// Add this Boolean Clause to our outer clause with an AND
_boolQuery.Filter = new QueryContainer[] {queryContainer};
}
That's for the datasources query. There can be multiple datasources.
It doesn't work, and returns no results when the filter query is added on. I think I need some work on the tokenizer/analyzer, but I don't know enough about ES to figure that out.
EDIT: Per Val's comments below I have attempted to recode the indexer like this:
_elasticClientWrapper.CreateIndex(_DataSource, i => i
.Mappings(ms => ms
.Map<LearnAboutContent>(m => m
.Properties(p => p
.String(s => s.Name(lac => lac.DataSources)
.Analyzer("classic_tokenizer")
.SearchAnalyzer("standard")))))
.Settings(s => s
.Analysis(an => an.Analyzers(a => a.Custom("classic_tokenizer", ca => ca.Tokenizer("classic"))))));
var indexResponse = _elasticClientWrapper.IndexMany(contentList);
It builds successfully, with data. However, the query still isn't working right.
New query for DataSources:
foreach (var datasource in dataSources)
{
// Add DataSources with an OR
queryContainer |= new TermQuery {Field = LearnAboutFields.DATASOURCE, Value = datasource};
}
// Add this Boolean Clause to our outer clause with an AND
_boolQuery.Must = new QueryContainer[] {queryContainer};
And the JSON:
{"learnabout_index":{"aliases":{},"mappings":{"learnaboutcontent":{"properties":{"articleID":{"type":"string"},"content":{"type":"string"},"dataSources":{"type":"string","analyzer":"classic_tokenizer","search_analyzer":"standard"},"description":{"type":"string"},"fileName":{"type":"string"},"keywords":{"type":"string"},"linkURL":{"type":"string"},"title":{"type":"string"}}}},"settings":{"index":{"creation_date":"1483992041623","analysis":{"analyzer":{"classic_tokenizer":{"type":"custom","tokenizer":"classic"}}},"number_of_shards":"5","number_of_replicas":"1","uuid":"iZakEjBlRiGfNvaFn-yG-w","version":{"created":"2040099"}}},"warmers":{}}}
The Query JSON request:
{
"size": 10000,
"query": {
"bool": {
"must": [
{
"simple_query_string": {
"fields": [
"_all"
],
"query": "\"housing\"",
"analyzer": "standard"
}
}
],
"filter": [
{
"terms": {
"DataSources": [
"1"
]
}
}
]
}
}
}
One way to achieve this is to create a custom analyzer with a classic tokenizer which will break your DataSources field into the numbers composing it, i.e. it will tokenize the field on each | character.
So when you create your index, you need to add this custom analyzer and then use it in your DataSources field:
PUT my_index
{
"settings": {
"analysis": {
"analyzer": {
"number_analyzer": {
"type": "custom",
"tokenizer": "number_tokenizer"
}
},
"tokenizer": {
"number_tokenizer": {
"type": "classic"
}
}
}
},
"mappings": {
"my_type": {
"properties": {
"DataSources": {
"type": "string",
"analyzer": "number_analyzer",
"search_analyzer": "standard"
}
}
}
}
}
As a result, if you index the string "|4|7|8|9|10|12|14|19|20|21|22|23|29|30", your DataSources field will effectively contain the following array of tokens: [4, 7, 8, 9, 10, 12, 14, 19, 20, 21, 22, 23, 29, 30]
Then you can get rid of your WildcardQuery and simply use a TermsQuery instead:
terms = new TermsQuery {Field = LearnAboutFields.DATASOURCE, Terms = dataSources }
// Add this Boolean Clause to our outer clause with an AND
_boolQuery.Filter = new QueryContainer[] { terms };
At an initial glance at your code, I think one problem you might have is that any queries placed within a filter clause will not be analysed. So basically, the value will not be broken down into tokens and will be compared in its entirety.
It's easy to forget this, so any values that require analysis need to be placed in the must or should clauses.
I wish to create an item in DynamoDB that contains a list. This is my code:
var list_update_params = {
TableName: "table01",
Key: {
"MachineID": {
"S": MachineID
},
"Hour": {
"S": Hour
}
},
UpdateExpression: "set var01_list = list_append(var01_list, :ot)",
ExpressionAttributeValues: {
":ot": {"L": [{"N": var01}]}
},
ReturnValues: "NONE"
};
dynamodb.updateItem(list_update_params, function(err, data) {
if (err) console.log(err, err.stack);
else console.log("Updated List to DynamoDB");
});
The problem is that list_append expects the attribute var01_list to already be present, but I wouldn't know that at the first insert. Is there a technique that will let me create and insert a List attribute if one doesn't exist, and append to it in later calls?
Got the answer from a similar post here.
UpdateExpression: "set var01_list= list_append(if_not_exists(var01_list, :empty_list), :h)",
ExpressionAttributeValues: {
":h": {"L": [{"N":var01}]},
":empty_list": {"L": []}
},
The key was using if_not_exists with list_append: if_not_exists(var01_list, :empty_list) evaluates to the existing list when the attribute is present, and to the empty list otherwise, so list_append always has a valid operand. I didn't know it could be done in this manner.