Bookshelf.js: one-to-many relationship setup

I'm using Bookshelf as an ORM with MySQL. I've got endpoints and organizations. Each endpoint belongs to an organization, and an organization has many endpoints. I'm trying to fetch a list of endpoints with their associated organizations. This returns the following error:
"A valid target model must be defined for the endpoint belongsTo relation"
Here's my fetchAll request:
Endpoint
  .where('organization_id', req.user.attributes.organization_id)
  .fetchAll({ require: true, withRelated: ['settings', 'organization'] })
  .then((endpoints) => {
    ReS(res, {
      endpoints
    }, 200);
  });
Here's my endpoint model:
'use strict';

const bookshelf = require('../config/bookshelf_instance');
const Organization = require('./organization');
const Settings = require('./endpoint_settings');
const Group = require('./endpoint_group');

module.exports = bookshelf.Model.extend({
  tableName: 'endpoint',
  organization () {
    return this.belongsTo(Organization, 'id');
  },
  settings () {
    return this.hasOne(Settings, 'id');
  },
  group () {
    return this.belongsToMany(Group, 'map_endpoint_endpoint_group');
  }
});
Here's my organization model:
'use strict';

const bookshelf = require('../config/bookshelf_instance');
const Endpoint = require('./endpoint');
const User = require('./user');
const Group = require('./endpoint_group');

module.exports = bookshelf.Model.extend({
  tableName: 'organization',
  endpoints () {
    return this.hasMany(Endpoint, 'organization_id');
  },
  users () {
    return this.hasMany(User, 'organization_id');
  },
  groups () {
    return this.hasMany(Group, 'organization_id');
  }
});

You should remove the 'id' from the Endpoint model's relation specifications, since that seems to be the primary key, not the foreign key. The foreign key defaults to organization_id anyway, which is what you want; it should match what you have in the opposite relation (the hasMany in this case).
From the documentation, the second argument to belongsTo is the:
ForeignKey in this model. By default, the foreignKey is assumed to be the singular form of the Target model's tableName, followed by _id.
The target model's table name is organization, which means the foreign key is organization_id by default.
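Applied to the endpoint model, the fix might look like this (a sketch; the key arguments are simply dropped so Bookshelf falls back to its naming conventions):

module.exports = bookshelf.Model.extend({
  tableName: 'endpoint',
  organization () {
    // defaults to the foreign key organization_id on the endpoint table
    return this.belongsTo(Organization);
  },
  settings () {
    // hasOne defaults to endpoint_id on the target table
    return this.hasOne(Settings);
  },
  group () {
    return this.belongsToMany(Group, 'map_endpoint_endpoint_group');
  }
});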

Related

How to backfill new AppSync fields using AWS Amplify

I'm adding a sort field to one of my AppSync tables using GraphQL. The new schema looks like:
type MyTable
  @model
  @auth(rules: [{allow: owner}])
  @key(name: "BySortOrder", fields: ["sortOrder"], queryField: "tableBySortOrder")
{
  id: ID!
  name: String!
  sortOrder: Int
}
However, when retrieving a list using tableBySortOrder I get an empty list because the new field sortOrder is null.
My question is, how do I backfill this data in the DynamoDB table so that my existing users will not be disrupted by this new change? With a traditional database, I would run a SQL update: UPDATE MyTable SET sortOrder = #.
However, I'm new to NoSQL/AWS and couldn't find a way to do this other than running a backfill script whenever a user logs into my app. That feels very hacky. What is the best practice for handling this type of scenario?
Have you already created the new field in DDB?
If yes, I think you should backfill it before making the client side change.
Write a script to iterate through and update the table. Options for this:
Java - Call updateItem to update the table if you have any integ tests running.
Bash - Use AWS CLI: aws dynamodb scan --table-name item_attributes --projection-expression "whatever" > /tmp/item_attributes_table.txt and then aws dynamodb update-item --table-name item_attributes --key. This is a dirty way.
Python - Same logic as above.
Ended up using something similar to what Sunny suggested, with a Node.js script:
const AWS = require('aws-sdk')

AWS.config.update({
  region: 'us-east-1'
})

// To confirm credentials are set
AWS.config.getCredentials(function (err) {
  if (err) console.log(err.stack) // credentials not loaded
  else {
    console.log('Access key:', AWS.config.credentials.accessKeyId)
    console.log('Secret access key:', AWS.config.credentials.secretAccessKey)
  }
})
const docClient = new AWS.DynamoDB.DocumentClient()
const table = 'your-table-dev'
const params = {
  TableName: table
}
const itemMap = new Map()

// Using scan to retrieve all rows
docClient.scan(params, function (err, data) {
  if (err) {
    console.error('Unable to query. Error:', JSON.stringify(err, null, 2))
  } else {
    console.log('Query succeeded.')
    // Group items by owner, preserving scan order
    data.Items.forEach(item => {
      if (itemMap.has(item.owner)) {
        itemMap.set(item.owner, [...itemMap.get(item.owner), item])
      } else {
        itemMap.set(item.owner, [item])
      }
    })
    // Give each owner's items a sequential sortOrder and persist it
    itemMap.forEach(ownerConnections => {
      ownerConnections.forEach((connection, index) => {
        connection.sortOrder = index
        update(connection)
      })
    })
  }
})
function update (connection) {
  const params = {
    TableName: table,
    Key: {
      'id': connection.id
    },
    UpdateExpression: 'set sortOrder = :s',
    ExpressionAttributeValues: {
      ':s': connection.sortOrder,
    },
    ReturnValues: 'UPDATED_NEW'
  };

  console.log('Updating the item...');
  docClient.update(params, function (err, data) {
    if (err) {
      console.error('Unable to update item. Error JSON:', JSON.stringify(err, null, 2));
    } else {
      console.log('UpdateItem succeeded:', JSON.stringify(data, null, 2));
    }
  });
}
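One caveat with the script above: a single scan call returns at most 1 MB of data, so a larger table would need pagination. A minimal sketch of following LastEvaluatedKey across pages, reusing docClient and params from above (scanAll is a hypothetical helper, not part of the original script):

// Scan every page, invoking onItems once per batch
function scanAll (params, onItems, done) {
  docClient.scan(params, function (err, data) {
    if (err) return done(err)
    onItems(data.Items)
    if (data.LastEvaluatedKey) {
      // Resume the scan from where the previous page stopped
      scanAll(Object.assign({}, params, { ExclusiveStartKey: data.LastEvaluatedKey }), onItems, done)
    } else {
      done(null)
    }
  })
}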

Do CosmosDB Mongo API compound unique indexes require each field to be unique?

I'm trying to set up a collection of versioned documents in which I insert a new document with the same id and a timestamp whenever there's an edit operation. I use a unique compound index on the id and timestamp fields for this. CosmosDB is giving me MongoError: E11000 duplicate key error whenever I try to insert a document with a different id but a timestamp identical to another document's. The MongoDB documentation says that I should be able to do this:
https://docs.mongodb.com/v3.4/core/index-unique/#unique-compound-index
You can also enforce a unique constraint on compound indexes. If you use the unique constraint on a compound index, then MongoDB will enforce uniqueness on the combination of the index key values.
I tried using a non-unique index but the Resource Manager template failed, saying that non-unique compound indexes are not supported. I'm using the node.js native driver v3.2.4. I also tried to use Azure Portal to insert documents but received the same error. This makes me believe it's not a problem between CosmosDB and the node.js driver.
Here's a small example to demonstrate the problem. I'm running it with Node v10.15.3.
const { MongoClient } = require('mongodb');

const mongoUrl = process.env.COSMOSDB_CONNECTION_STRING;
const collectionName = 'indextest';
const client = new MongoClient(mongoUrl, { useNewUrlParser: true });

let connection;

const testIndex = async () => {
  const now = Date.now();
  connection = await client.connect();
  const db = connection.db('master');
  await db.collection(collectionName).drop();
  const collection = await db.createCollection(collectionName);
  await collection.createIndex({ id: 1, ts: -1 }, { unique: true });
  await collection.insertOne({ id: 1, ts: now, title: 'My first document' });
  await collection.insertOne({ id: 2, ts: now, title: 'My other document' });
};

(async () => {
  try {
    await testIndex();
    console.log('It works');
  } catch (err) {
    console.error(err);
  } finally {
    await connection.close();
  }
})();
I would expect the two insert operations to work and for the program to exit with It works. What I get instead is an Error:
{ MongoError: E11000 duplicate key error collection: master.indextest Failed _id or unique key constraint
at Function.create (/home/node/node_modules/mongodb-core/lib/error.js:43:12)
at toError (/home/node/node_modules/mongodb/lib/utils.js:149:22)
at coll.s.topology.insert (/home/node/node_modules/mongodb/lib/operations/collection_ops.js:859:39)
at handler (/home/node/node_modules/mongodb-core/lib/topologies/replset.js:1155:22)
at /home/node/node_modules/mongodb-core/lib/connection/pool.js:397:18
at process._tickCallback (internal/process/next_tick.js:61:11)
driver: true,
name: 'MongoError',
index: 0,
code: 11000,
errmsg:
'E11000 duplicate key error collection: master.indextest Failed _id or unique key constraint',
[Symbol(mongoErrorContextSymbol)]: {} }
Is this expected behavior or a bug in CosmosDB's MongoDB API?

Fetching data by composite primary key in dynamodb

I am using the following function:
const params = {
  TableName: process.env.dummyTable,
  Key: {
    outlet_id: event.pathParameters.id,
    id: {"S": "default"}
  }
}

dynamoDb.get(params).promise()
  .then(result => {
    console.log('-->', result);
    const response = {
      statusCode: 200,
      body: JSON.stringify(result),
    };
    callback(null, response);
  })
  .catch(error => {
    console.error(error);
    callback(new Error('Couldn\'t fetch table Data'));
  });
I want to fetch records based on outlet_id and id. Here outlet_id is the primary partition key while id is the primary sort key (with uuid).
How do I specify id (the primary sort key) with a default value so that I can fetch the data?
It's unclear whether you are trying to fetch a single item by specifying a composite partition+sort key, or to query for all items with the same partition key (i.e. without specifying a sort key).
If you are trying to get a single item with a particular partition key and sort key, your code looks good with one exception: the partition key should be specified with its type as well, like so:
const params = {
  TableName: process.env.dummyTable,
  Key: {
    outlet_id: {"S": event.pathParameters.id},
    id: {"S": "default"}
  }
}
However, if you are trying to query for all items with the same partition key (i.e. without specifying a sort key), then you need a query rather than a get. Note that query doesn't take a Key parameter; the key condition goes in a KeyConditionExpression:
const params = {
  TableName: process.env.dummyTable,
  KeyConditionExpression: 'outlet_id = :oid',
  ExpressionAttributeValues: {
    ':oid': {"S": event.pathParameters.id}
  }
}

dynamoDb.query(params).promise()
  .then(
    //...
  )
  .catch(
    //...
  )
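One more thing worth flagging, hedged because the snippet doesn't show how dynamoDb is constructed: get (as opposed to the low-level getItem) is a method of AWS.DynamoDB.DocumentClient, and the DocumentClient expects plain JavaScript values rather than typed wrappers like {"S": ...}. If that is what's in use here, the single-item fetch would instead look like:

const params = {
  TableName: process.env.dummyTable,
  Key: {
    outlet_id: event.pathParameters.id, // plain string, no {"S": ...} wrapper
    id: 'default'
  }
}

dynamoDb.get(params).promise()
  .then(result => console.log(result.Item))
  .catch(console.error)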

Redux - cross-entity state management

I'm using Redux and ImmutableJS to manage the state of my app. I've created the following two Records:
export const OrderRecord = Record({
  id: null,
  productId: null,
  amount: 1,
});

export const ProductRecord = Record({
  id: null,
  name: '',
  price: 0,
});
My global state is normalized based on the normalizr approach like this:
const state = {
  entities: {
    orders: new OrderedMap(new Map({
      1: new OrderRecord(createOrderItem(1, 1)),
    })),
    products: new OrderedMap(new Map({
      1: new ProductRecord(createProductItem(1)),
    })),
  },
};
I'm using this specification for testing purposes.
Now I'm trying to write some selectors with computed fields using Reselect.
export const getVisibleOrders = createSelector(
  [getProducts, getOrders],
  (products, orders) => {
    orders.map(order => {
      const product = products.get(order.productId.toString());
      if (!product) {
        return order;
      }
      const totalPrice = order.amount * product.price;
      order.set('productName', product.name);
      order.set('totalPrice', totalPrice);
      return order;
    });
  }
);
However, I get the following error message:
Error: Cannot set unknown key "productName" on Record
I know the reason - a Record cannot contain any undefined keys - but my question is: is there a suggested approach for solving this problem gracefully?
I don't want to extend my Records to support these kinds of computed parameters (productName and totalPrice).
I don't want to keep the static and computed parameters in one place because, for example, the productName parameter belongs to the "Product" entity and not to the "Order" entity.
Thank you.
The whole point of using Immutable.Record is to not let you add new keys to a record, hence the error message you get. And the whole point of selectors is to expose such "computed" properties if you want to consume them outside. In your case, you can simply return a new Map() object, or a new Record type if you need to use the dotted syntax:
return Map({
  productName: 'foobar'
}).merge(order)
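Applied to the selector above, that could look like the following sketch (it assumes Map is imported from immutable and reuses the existing getProducts/getOrders inputs; note that map returns a new collection, so the selector must return its result):

export const getVisibleOrders = createSelector(
  [getProducts, getOrders],
  (products, orders) =>
    orders.map(order => {
      const product = products.get(order.productId.toString());
      if (!product) {
        return Map(order.toObject());
      }
      // Merge the plain order fields with the computed ones into a Map
      return Map(order.toObject()).merge({
        productName: product.name,
        totalPrice: order.amount * product.price,
      });
    })
);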

Transaction for collection

How can I do this using transacting t? I want to make sure the rows are successfully removed before saving the records:
var Roles = bookshelf.Collection.extend({
  model: Role
});

Role.where('name', '=', 'Staff').destroy();

var roles = Roles.forge([{name: 'Staff'}, {name: 'Guest'}]);

Promise.all(roles.invoke('save')).then(function (role) {
  resolve(role);
}).catch(function (err) {
  reject({"status": "error", "data": err});
});
You may just use Bookshelf's transaction() method. But first, your save() MUST run in the context of the destroy() promise to ensure the proper sequence; otherwise you risk having your saved data deleted by the destroy as well.
So it may look like:
var Roles = bookshelf.Collection.extend({
  model: Role
});

bookshelf.transaction(function (t) {
  return Role
    .where('name', '=', 'Staff')
    .destroy({transacting: t})
    .then(function () {
      var roles = Roles.forge([{name: 'Staff'}, {name: 'Guest'}]);
      return roles.invokeThen('save', null, {transacting: t});
    });
});
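For completeness, bookshelf.transaction() returns a promise that resolves with whatever the handler returns and rolls everything back if any step rejects; consuming it might look like this sketch:

bookshelf.transaction(function (t) {
  // ...destroy and save as above
}).then(function (roles) {
  // Both the destroy and the saves committed together
  console.log('Roles replaced:', roles.length);
}).catch(function (err) {
  // Any rejection inside the handler triggers a rollback first
  console.error('Transaction rolled back:', err);
});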
