Frameworks like ASP.NET or Nancy provide a syntax that can be used for specifying routes, such as:
MapRoute("/customers/{id}/invoices/{invoiceId}", ...)
In ASP.NET, routes work in two directions: they can match a request URI such as /customers/32/invoices/19 to a route, and they can resolve parameters such as { id: 37, invoiceId: 19 } into a URI.
RFC 6570 (URI Templates) defines a similar, though much richer, syntax; URI templates are most often used to resolve parameters into a URI. For example:
UriTemplate("/customers/{id}/invoices{/invoiceId}{?sort}", { id: 37, invoiceId: 19, sort: 'asc' } )
// returns: /customers/37/invoices/19?sort=asc
My question is: can the syntax specified in RFC 6570 be used to match request URIs to routes? Is there any part of the syntax that would make it ambiguous to match a given URI to a given URI template? Are there any libraries that support matching a URI against a URI template?
I suspect it would be very difficult. Certainly things like the prefix syntax would make it impossible to regenerate the original parameters.
For things like path segment expansion, e.g.
{/list*}  expanding to  /red/green/blue
how would you know which parts of the path were literals and which parts came from the parameter? There is a lot of fairly freaky behavior in the URI Template spec; I suspect that even if matching is possible, it would be fairly expensive.
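To make the ambiguity concrete, here is a rough sketch (using the uri-templates npm package purely for illustration; the templates and values are made up) of two different template/parameter combinations that expand to the same URI, so a matcher could not recover which segments were literals:
var uriTemplates = require('uri-templates');

// a template with a literal /red prefix and an exploded list
uriTemplates('/red{/list*}').fill({ list: ['green', 'blue'] });    // "/red/green/blue"

// a template with no literal prefix at all
uriTemplates('{/list*}').fill({ list: ['red', 'green', 'blue'] }); // "/red/green/blue"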
Are you interested in doing this for the purposes of routing?
Matching is the simple part; for resolving, you would need to replace the ASP.NET implementation with one based on RFC 6570.
Unfortunately I am doing this in Node with Express, so this might not be helpful, but I am sure something like https://github.com/geraintluff/uri-templates (for resolving) is also available for ASP.NET.
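For what it's worth, that library also advertises reverse-matching (guessing the variables back from a URI), which is essentially the routing direction the question asks about. A rough sketch with made-up values, assuming its fill/fromUri API:
var uriTemplates = require('uri-templates');
var template = uriTemplates('/customers/{id}/invoices{/invoiceId}{?sort}');

// resolve: parameters -> URI
template.fill({ id: '37', invoiceId: '19', sort: 'asc' });
// "/customers/37/invoices/19?sort=asc"

// match: URI -> parameters (reverse templating)
template.fromUri('/customers/37/invoices/19?sort=asc');
// { id: '37', invoiceId: '19', sort: 'asc' }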
Here is some JavaScript code to illustrate rewriting a hyperschema that uses RFC 6570 templates into Express routes (an advantage of defining the templates inside a schema is that you can also attach regex patterns to your URI template variables):
var deref = require('json-schema-deref');
var tv4 = require('tv4');
var url = require('url');
var rql = require('rql/parser');
var hyperschema = {
"$schema": "http://json-schema.org/draft-04/hyper-schema",
"links": [
{
"href": "{/id}{/ooo*}{#q}",
"method": "GET",
"rel": "self",
"schema": {
"type": "object",
"properties": {
"params": {
"type": "object",
"properties": {
"id": {"$ref": "#/definitions/id"}
},
"additionalProperties": false
}
},
"additionalProperties": true
}
}
],
"definitions": {
"id": {
"type": "string",
"pattern": "[a-z]{0,3}"
}
}
}
// property-path helpers (Dojo lang / underscore style)
function getDottedProperty(object, parts, create) {
var key;
var i = 0;
while (object && (key = parts[i++])) {
if (typeof object !== 'object') {
return undefined;
}
object = key in object ? object[key] : (create ? object[key] = {} : undefined);
}
return object;
}
function getProperty(object, propertyName, create) {
return getDottedProperty(object, propertyName.split('.'), create);
}
function _rEscape(str) {
return str.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function getPattern(k, ldo, customCat) {
// ...* = explode = array
// ...: = maxLength
var key = ((k.slice(-1) === '*') ? k.slice(0,-1) : k).split(':')[0];
var cat = (customCat) ? customCat : 'params'; // becomes default of customCat in TS
var pattern = '';
if (typeof ldo === 'object' && ldo.hasOwnProperty('schema')) {
var res = getProperty(ldo.schema, ['properties',cat,'properties',key,'pattern'].join('.'));
if (res) {
console.log(['properties',cat,'properties',key,'pattern'].join('.'),res);
return ['(',res,')'].join('');
}
}
return pattern;
}
function ldoToRouter(ldo) {
var expression = ldo.href.replace(/(\{\+)/g, '{') // encoding
.replace(/(\{\?.*\})/g, '') // query
.replace(/\{[#]([^}]*)\}/g, function(_, arg) {
// crosshatch
//console.log(arg);
return ['(?:[/]*)?#:',arg,getPattern(arg,ldo,'anchor')].join('');
})
.replace(/\{([./])?([^}]*)\}/g, function(_, op, arg) {
// path separator
//console.log(op, '::', arg, '::', ldo.schema);
return [op,':',arg,getPattern(arg,ldo)].join('');
});
return {method: ldo.method.toLowerCase(), args:[expression]};
}
deref(hyperschema, function(err, fullSchema) {
console.log('deref hyperschema:',JSON.stringify(fullSchema));
var router = fullSchema.links.map(ldoToRouter);
console.log('router:',JSON.stringify(router));
});
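To show what you could then do with the output, here is a rough sketch (the Express app and handler are placeholders, not part of the schema) of feeding the generated router entries into Express:
var express = require('express');
var app = express();

deref(hyperschema, function(err, fullSchema) {
  fullSchema.links.map(ldoToRouter).forEach(function(route) {
    // route.method is e.g. 'get'; route.args[0] is the Express-style path derived from the href
    app[route.method](route.args[0], function(req, res) {
      res.json(req.params); // placeholder handler: just echo the matched parameters
    });
  });
  app.listen(3000);
});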
My understanding is that using serializeIds: 'always' will include the related ids (here, customerId) in the serialized data, but it does not.
Here's what I'm expecting:
{
  "id": "1",
  "title": "some title",
  "customerId": "2"
}
Instead the output I'm receiving is:
{
  "id": "1",
  "title": "some title"
}
My code looks something like this:
import {
Server,
Serializer,
Model,
belongsTo,
hasMany,
Factory
} from "miragejs";
import faker from "faker";
const ApplicationSerializer = Serializer.extend({
// don't want a root prop
root: false,
// true required to have root:false
embed: true,
// will always serialize the ids of all relationships for the model or collection in the response
serializeIds: "always"
});
export function makeServer() {
let server = new Server({
models: {
invoice: Model.extend({
customer: belongsTo()
}),
customer: Model.extend({
invoices: hasMany()
})
},
factories: {
invoice: Factory.extend({
title(i) {
return `Invoice ${i}`;
},
afterCreate(invoice, server) {
if (!invoice.customer) {
invoice.update({
customer: server.create("customer")
});
}
}
}),
customer: Factory.extend({
name() {
return `${faker.name.firstName()} ${faker.name.lastName()}`;
}
})
},
seeds(server) {
server.createList("invoice", 10);
},
serializers: {
application: ApplicationSerializer,
invoice: ApplicationSerializer.extend({
include: ["customer"]
})
},
routes() {
this.namespace = "api";
this.get("/auth");
}
});
return server;
}
Changing the config to root: true, embed: false, provides the correct output in the invoice models, but adds the root and sideloads the customer, which I don't want.
You've run into some strange behavior with how serializeIds interacts with embed.
First, it's confusing that you need to set embed: true when you're just trying to disable the root. The reason is that embed defaults to false, so if you remove the root and try to include related resources, Mirage doesn't know where to put them. This is a confusing mix of options, and Mirage should really have different "modes" that take this into account.
Second, it seems that when embed is true, Mirage basically ignores the serializeIds option, since it assumes your resources will always be embedded. (The idea is that a foreign key is used to fetch related resources separately, but when they're embedded they always come over together.) This is also confusing and doesn't need to be the case. I've opened a tracking issue in Mirage to help address these points.
As for what you can do today, the best way to solve this is to leave root as true and embed as false (both the defaults) so that serializeIds works properly, and then write your own serialize() function to strip the root key for you:
const ApplicationSerializer = Serializer.extend({
// will always serialize the ids of all relationships for the model or collection in the response
serializeIds: "always",
serialize(resource, request) {
// start from Mirage's default payload, then return just the contents under the root key
let json = Serializer.prototype.serialize.apply(this, arguments);
let root = resource.models ? this.keyForCollection(resource.modelName) : this.keyForModel(resource.modelName);
return json[root];
}
});
You should be able to test this out on both /invoices and /invoices/1.
Check out this REPL example and try making a request to each URL.
Here's the config from the example:
import {
Server,
Serializer,
Model,
belongsTo,
hasMany,
Factory,
} from "miragejs";
import faker from "faker";
const ApplicationSerializer = Serializer.extend({
// will always serialize the ids of all relationships for the model or collection in the response
serializeIds: "always",
serialize(resource, request) {
let json = Serializer.prototype.serialize.apply(this, arguments);
let root = resource.models ? this.keyForCollection(resource.modelName) : this.keyForModel(resource.modelName)
return json[root];
}
});
export default new Server({
models: {
invoice: Model.extend({
customer: belongsTo(),
}),
customer: Model.extend({
invoices: hasMany(),
}),
},
factories: {
invoice: Factory.extend({
title(i) {
return "Invoice " + i;
},
afterCreate(invoice, server) {
if (!invoice.customer) {
invoice.update({
customer: server.create("customer"),
});
}
},
}),
customer: Factory.extend({
name() {
return faker.name.firstName() + " " + faker.name.lastName();
},
}),
},
seeds(server) {
server.createList("invoice", 10);
},
serializers: {
application: ApplicationSerializer,
},
routes() {
this.resource("invoice");
},
});
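With that config, a GET to /invoices/1 should come back roughly in the shape you were originally expecting (values here are illustrative):
{
  "id": "1",
  "title": "Invoice 0",
  "customerId": "1"
}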
Hopefully that clears things up + sorry for the confusing APIs!
Can I scan DynamoDB by 'order.shortCode' in the example below? The console indicates I can't with dot notation, and I can't find any documentation on it.
{
"key2": "cj11b1ygp0000jcgubpe5mso3",
"order": {
"amount": 74.22,
"dateCreated": "2017-04-02T19:15:33-04:00",
"orderNumber": "cj11b1ygp0000jcgubpe5mso3",
"shortCode": "SJLLDE"
},
"skey2": "SJLLDE"
}
To scan by a nested attribute, you should use the ExpressionAttributeNames parameter to pass each path component (i.e. order and shortCode) separately into the FilterExpression, as shown below:
var AWS = require('aws-sdk');
var dynamodbDoc = new AWS.DynamoDB.DocumentClient();

var params = {
  TableName: 'YOUR_TABLE_NAME',
  FilterExpression: '#order.#shortCode = :shortCodeValue',
  ExpressionAttributeNames: {
    '#order': 'order',
    '#shortCode': 'shortCode'
  },
  ExpressionAttributeValues: {
    ':shortCodeValue': 'SJLLDE'
  }
};

dynamodbDoc.scan(params, function(err, data) {
  if (err) console.error(err);          // request failed
  else console.log(data.Items);         // items whose order.shortCode equals 'SJLLDE'
});
Here is a link to documentation explaining this:
http://docs.aws.amazon.com/amazondynamodb/latest/developerguide/Expressions.ExpressionAttributeNames.html#Expressions.ExpressionAttributeNames.NestedAttributes
How do you "upsert" a property to a DynamoDB row. E.g. SET address.state = "MA" for some item, when address does not yet exist?
I feel like I'm having a chicken-and-egg problem because DynamoDB doesn't let you define a sloppy schema in advance.
If address DID already exist on that item, of type M (for Map), the internet tells me I could issue an UpdateExpression like:
SET #address.#state = :value
with #address, #state, and :value appropriately mapped to address, state, and MA, respectively.
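For concreteness, the full update call I'm attempting looks roughly like this (the table name and key are placeholders):
var params = {
  TableName: 'MyTable',              // placeholder
  Key: { Id: 'some-item-id' },       // placeholder
  UpdateExpression: 'SET #address.#state = :value',
  ExpressionAttributeNames: {
    '#address': 'address',
    '#state': 'state'
  },
  ExpressionAttributeValues: {
    ':value': 'MA'
  }
};
docClient.update(params, function(err, data) { /* ... */ });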
But if the address property does not already exist, this gives an error:
ValidationException: The document path provided in the update expression is invalid for update
So.. it appears I either need to:
Figure out a way to "upsert" address.state (e.g., SET address = {}; SET address.state = 'MA' in a single command)
or
Issue three (!!!) roundtrips in which I try it, SET address = {}; on failure, and then try it again.
If the latter.... how do I set a blank map?!?
Ugh.. I like Dynamo, but unless I'm missing something obvious this is a bit crazy..
You can do it with two round trips: the first conditionally sets an empty map for address if it doesn't already exist, and the second sets the state:
db.update({
UpdateExpression: 'SET #a = :value',
ConditionExpression: 'attribute_not_exists(#a)',
ExpressionAttributeValues: {
":value": {},
},
ExpressionAttributeNames: {
'#a': 'address'
}
}, ...);
Then:
db.update({
UpdateExpression: 'SET #a.#b = :v',
ExpressionAttributeNames: {
'#a': 'address',
'#b': 'state'
},
ExpressionAttributeValues: {
':v': 'whatever'
}
}, ...);
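Note that when address already exists, the first call simply fails its condition check, so (in the Node SDK at least) you can swallow that specific error and carry on to the second call; a rough sketch, with the table name and key as placeholders:
db.update({
  TableName: 'YOUR_TABLE_NAME',   // placeholder
  Key: { Id: 'your-item-id' },    // placeholder
  UpdateExpression: 'SET #a = :value',
  ConditionExpression: 'attribute_not_exists(#a)',
  ExpressionAttributeNames: { '#a': 'address' },
  ExpressionAttributeValues: { ':value': {} }
}, function (err) {
  // a ConditionalCheckFailedException just means the map was already there
  if (err && err.code !== 'ConditionalCheckFailedException') { throw err; }
  // ...now issue the second update that sets #a.#b
});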
You cannot set nested attributes if the parent document does not exist. Since address does not exist, you cannot set the attribute province inside it. You can achieve your goal if you set address to an empty map when you create the item. Then you can use the following parameters to condition an update on the attribute address.province not existing yet.
var params = {
TableName: 'Image',
Key: {
Id: 'dynamodb.png'
},
UpdateExpression: 'SET address.province = :ma',
ConditionExpression: 'attribute_not_exists(address.province)',
ExpressionAttributeValues: {
':ma': 'MA'
},
ReturnValues: 'ALL_NEW'
};
docClient.update(params, function(err, data) {
if (err) ppJson(err); // an error occurred
else ppJson(data); // successful response
});
By the way, I had to replace state with province as state is a reserved word.
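If you do want to keep the attribute name state, you should be able to alias both path components with ExpressionAttributeNames instead of renaming the attribute; roughly:
var params = {
  TableName: 'Image',
  Key: {
    Id: 'dynamodb.png'
  },
  UpdateExpression: 'SET #addr.#st = :ma',
  ConditionExpression: 'attribute_not_exists(#addr.#st)',
  ExpressionAttributeNames: {
    '#addr': 'address',
    '#st': 'state'
  },
  ExpressionAttributeValues: {
    ':ma': 'MA'
  },
  ReturnValues: 'ALL_NEW'
};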
Another, totally different method is to simply create the address node when creating the parent document in the first place. For example, assuming you have a hash key of id, you might do:
db.put({
Item: {
id: 42,
address: {}
}
}, ...);
This will allow you to simply set the address.state value as the address map already exists:
db.update({
UpdateExpression: 'SET #a.#b = :v',
ExpressionAttributeNames: {
'#a': 'address',
'#b': 'state'
},
ExpressionAttributeValues: {
':v': 'whatever'
}
}, ...);
Here is some Kotlin code to do this recursively, no matter how deep the path goes. It sets the existence of the parent paths as a condition and, if the condition check fails, recursively creates those paths first. It has to live in the library's package so it can access package-private fields/classes.
package com.amazonaws.services.dynamodbv2.xspec
import com.amazonaws.services.dynamodbv2.document.Table
import com.amazonaws.services.dynamodbv2.model.ConditionalCheckFailedException
import com.amazonaws.services.dynamodbv2.xspec.ExpressionSpecBuilder.attribute_exists
fun Table.updateItemByPaths(hashKeyName: String, hashKeyValue: Any, updateActions: List<UpdateAction>) {
val parentPaths = updateActions.map { it.pathOperand.path.parent() }
.filter { it.isNotEmpty() }
.toSet() // to remove duplicates
try {
val builder = ExpressionSpecBuilder()
updateActions.forEach { builder.addUpdate(it) }
if (parentPaths.isNotEmpty()) {
var condition: Condition = ComparatorCondition("=", LiteralOperand(true), LiteralOperand(true))
parentPaths.forEach { condition = condition.and(attribute_exists<Any>(it)) }
builder.withCondition(condition)
}
this.updateItem(hashKeyName, hashKeyValue, builder.buildForUpdate())
} catch (e: ConditionalCheckFailedException) {
this.updateItemByPaths(hashKeyName, hashKeyValue, parentPaths.map { M(it).set(mapOf<String, Any>()) })
this.updateItemByPaths(hashKeyName, hashKeyValue, updateActions)
}
}
private fun String.parent() = this.substringBeforeLast('.', "")
Here is a helper function I wrote in TypeScript that handles a single level of nesting using a recursive approach.
I refer to the top-level attribute as a column.
//usage
await setKeyInColumn('customerA', 'address', 'state', "MA")
// Updates a map value to hold a new key value pair. It will create a top-level address if it doesn't exist.
static async setKeyInColumn(primaryKeyValue: string, colName: string, key: string, value: any, _doNotCreateColumn?:boolean) {
const obj = {};
obj[key] = value; // creates a nested value like {address:value}
// Some conditions depending on whether the column already exists or not
const ConditionExpression = _doNotCreateColumn ? undefined:`attribute_not_exists(${colName})`
const AttributeValue = _doNotCreateColumn? value : obj;
const UpdateExpression = _doNotCreateColumn? `SET ${colName}.${key} = :keyval `: `SET ${colName} = :keyval ` ;
try{
const updateParams = {
TableName: TABLE_NAME,
Key: {key:primaryKeyValue},
UpdateExpression,
ExpressionAttributeValues: {
":keyval": AttributeValue
},
ConditionExpression,
ReturnValues: "ALL_NEW",
}
const resp = await docClient.update(updateParams).promise()
if (resp && resp.Attributes && resp.Attributes[colName]) {
return resp.Attributes[colName];
}
}catch(ex){
//if the column already exists, then rerun and do not create it
if(ex.code === 'ConditionalCheckFailedException'){
return this.setKeyInColumn(primaryKeyValue,colName,key, value, true)
}
console.log("Failed to Update Column in DynamoDB")
console.log(ex);
return undefined
}
}
I've got quite a similar situation, and I can think of only one way to do this atomically in a single query:
Extract the map values into top-level attributes.
Example
Given I have this post item in DynamoDB:
{
"PK": "123",
"SK": "post",
"title": "Hello World!"
}
And I want to later add an analytics entry to the same partition:
{
"PK": "123",
"SK": "analytics#december",
"views": {
// <day of month>: <views>
"1": "12",
"2": "457463",
// etc
}
}
As in your case, it's not possible to increment/decrement the per-day view counters in a single query if the analytics item or the views map might not exist (it could be a later feature, or you may not want to put empty items).
Proposed solution:
{
"PK": "123",
"SK": "analytics#december",
// <day of month>: <views>
"1": "12", // or "day1" if "1" seems too generic
"2": "457463",
// etc
}
Then you could do something like this (increment +1 example):
{
  UpdateExpression: "SET #day = if_not_exists(#day, :zero) + :incr",
  ExpressionAttributeNames: {
    '#day': "1"
  },
  ExpressionAttributeValues: {
    ':zero': 0,
    ':incr': 1
  }
}
if the day attribute doesn't exist, if_not_exists sets the default value to 0
if the item doesn't exist in the database, the update API adds a new one
Let's say we are fetching some data from the server.
data = {
"value": "123456",
"included": true,
"invalid": true,
"warning": false,
"error": false,
}
Depending on the booleans state, the value needs to be displayed with a specific style.
What I am currently doing is formatting the data into a JS constructor
$scope.model = new SomePrototype(data);
To deduce the CSS class, the constructor computes the rules (roughly):
var SomePrototype = function (data) {
  this.computeCSS = function () {
    var css;
    if (data.error) { css = 'danger'; }
    if (data.included && data.invalid) { css = 'danger'; }
    /* other rules */
    return css;
  };
};
Then computeCSS() is called from the HTML view:
<p class="{{model.computeCSS()}}">{{model.value}}</p>
which renders as:
<p class="danger">123456</p>
ISSUE: first, I haven't seen anything like this elsewhere, so I might be doing something wrong. Usually you keep an object on $scope that holds the class value. Secondly, it requires calling SomePrototype in each controller. I wonder if using a service/factory would be more appropriate; the end result looks basically the same to me.
You don't need a function to set a class based on scope values; use ng-class, which accepts JavaScript-like conditionals:
<p ng-class="{danger: data.error || data.included && data.invalid} ">{{data.value}}</p>
function Ctrl($scope) {
$scope.data = {
"value": "123456",
"included": true,
"invalid": true,
"warning": false,
"error": false,
}
}
DEMO
You are on the right track; however, I would use ng-class as charlietfl suggested.
By keeping the logic inside a function, as you mentioned, you are able to unit test your rule for what counts as an invalid state of the model. (Your conditional is simple, but having logic in your view is usually not ideal.)
Model
var SomePrototype = function (data) {
this.isDataInValid = function () {
return data.error || data.included && data.invalid;
}
}
Test
it('should be invalid with error or included && invalid', function () {
var data = {
"value": "123456",
"included": true,
"invalid": true,
"warning": false,
"error": false,
}
var someModel = new SomePrototype(data);
var isDataInValid = someModel.isDataInValid();
expect(isDataInValid).toBe(true);
});
In your <html/>
<p ng-class="{danger : model.isDataInValid() } ">{{model.value}}</p>
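For completeness, the controller wiring might look like this minimal sketch (it assumes SomePrototype also copies value onto the instance so {{model.value}} still renders):
function Ctrl($scope) {
  var data = {
    "value": "123456",
    "included": true,
    "invalid": true,
    "warning": false,
    "error": false
  };
  // SomePrototype is assumed to set this.value = data.value in addition to isDataInValid()
  $scope.model = new SomePrototype(data);
}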
I've been poring over this for hours and have yet to make much headway, so I was hoping one of the wonderful denizens of SO could help me out. Here's the problem...
I'm implementing a tree via the jstree plugin for jQuery. I'm pulling the data with which I populate the tree programmatically from our webapp as JSON dumped into an asp:HiddenField, basically like this:
JavaScriptSerializer serializer = new JavaScriptSerializer();
string json = serializer.Serialize(Items);
json = json.ToLower();
data.Value = json;
Then the tree pulls the JSON from the hidden field to build itself. This works perfectly fine up until I try to persist which nodes are selected/opened. To simplify the problem, I hardcoded some JSON data into the tree and attempted to use the cookie plugin to persist the tree state; this does not work, for whatever reason. I've seen other issues where people needed to load the plugins in a specific order, etc.; that did not solve my issue. I tried the same setup with html_data and it works perfectly. With that persistence working, I converted the cookie plugin to persist the data in a different asp:HiddenField (we can't use cookies for this type of thing in our application).
Essentially the cookie operations are identical; it just saves the array of nodes as the value of a hidden field. This works with html_data, but still not with json_data, and I have yet to put my finger on where it's failing.
This is the jQuery.cookie.js replacement:
jQuery.persist = function(name, value) {
if (typeof value != 'undefined') { // name and value given, set persist
if (value === null) {
value = '';
}
jQuery('#' + name).attr('value', value);
} else { // only name given, get value
var persistValue = null;
persistValue = jQuery('#' + name).attr('value');
return persistValue;
}
};
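A quick sanity check of the replacement on its own (the element id here is made up; in the real page it would be the hidden field's ClientID):
// set: writes the serialized node list into the hidden field
jQuery.persist('openNodesHidden', '["node_1","node_4"]');

// get: reads the current value back out of the hidden field
var saved = jQuery.persist('openNodesHidden');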
The jstree.cookie.js code is identical save for a few variable name changes.
And this is my tree:
$(function() {
$("#demo1").jstree({
"json_data": {
"data" : [
{
"data" : "A node",
"children" : [ "Child 1", "Child 2" ]
},
{
"attr": { "id": "li.node.id" },
"data" : {
"title": "li.node.id",
"attr": { "href": "#" }
},
"children": ["Child 1", "Child 2"]
}
]
},
"persistence": {
"save_opened": "<%= open.ClientID %>",
"save_selected": "<%= select.ClientID %>",
"auto_save": true
},
"plugins": ["themes", "ui", "persistence", "json_data"]
});
});
The data is being stored appropriately in the hidden fields; the problem occurs on postback: the tree does not reopen the nodes. Any help would be greatly appreciated.
After looking through this some more, it appears to me that the issue is that the tree has not yet been built from the json_data when the persistence operations are attempted. Is there any way to postpone these actions until after the tree is fully loaded?
If anyone is still attempting to perform the same type of operation on jsTree version 3.0+, there is an easier way to accomplish the same functionality without editing any of jsTree's core JavaScript and without relying on the "state" plugin (the version 1.0 "persistence" plugin):
var jsTreeControl = $("#jsTreeControl");
//Can be a "asp:HiddenField"
var stateJSONControl = $("#stateJSONControl");
var url = "exampleURL";
jsTreeControl.jstree({
'core': {
"data": function (node, cb) {
var thisVar = this;
//On the initial load, if the "state" already exists in the hidden value
//then simply use that rather than make a AJAX call
if (stateJSONControl.val() !== "" && node.id === "#") {
cb.call(thisVar, { d: JSON.parse(stateJSONControl.val()) });
}
else {
$.ajax({
type: "POST",
url: url,
async: true,
success: function (json) {
cb.call(thisVar, json);
},
contentType: "application/json; charset=utf-8",
dataType: "json"
}).responseText;
}
}
}
});
//If the user changes the jsTree, save the full JSON of the jsTree into the hidden value,
//this will then be restored on postback by the "data" function in the jsTree declaration
jsTreeControl.on("changed.jstree", function (e, data) {
if (typeof (data.node) != 'undefined') {
stateJSONControl.val(JSON.stringify(jsTreeControl.jstree(true).get_json()));
}
});
This code will create a jsTree and save its "state" into a hidden value; upon postback, when the jsTree is recreated, it will use its old "state" restored from the "HiddenField" rather than make a new AJAX call, so the expansions/selections the user has made are not lost.
Got it working properly with JSON data. I had to edit the "reopen" and "reselect" functions inside jstree itself.
Here's the new functioning reopen function for anyone who needs it.
reopen: function(is_callback) {
var _this = this,
done = true,
current = [],
remaining = [];
if (!is_callback) { this.data.core.reopen = false; this.data.core.refreshing = true; }
if (this.data.core.to_open.length) {
$.each(this.data.core.to_open, function(i, val) {
val = val.replace(/^#/, "")
if (val == "#") { return true; }
if ($(("li[id=" + val + "]")).length && $(("li[id=" + val + "]")).is(".jstree-closed")) { current.push($(("li[id=" + val + "]"))); }
else { remaining.push(val); }
});
if (current.length) {
this.data.core.to_open = remaining;
$.each(current, function(i, val) {
_this.open_node(val, function() { _this.reopen(true); }, true);
});
done = false;
}
}
if (done) {
// TODO: find a more elegant approach to syncronizing returning requests
if (this.data.core.reopen) { clearTimeout(this.data.core.reopen); }
this.data.core.reopen = setTimeout(function() { _this.__callback({}, _this); }, 50);
this.data.core.refreshing = false;
}
},
The problem was that it was trying to find the elements by a custom attribute: it was just pushing these strings into the array to search, when it was expecting node objects. Using this line
if ($(("li[id=" + val + "]")).length && $(("li[id=" + val + "]")).is(".jstree-closed")) { current.push($(("li[id=" + val + "]"))); }
instead of
if ($(val).length && $(val).is(".jstree-closed")) { current.push(val); }
was all it took. Using a similar process, I was able to persist the selected nodes as well.
Hope this is of help to someone.