I'm building a document management application with the following Data Models:
Doc_Metadata
- Approval_Requests
- WorkflowStage
- Approver
- Comment
I am trying to use the Document Approval Workflow template as a starting point, and associating the Doc_Metadata parent to the "Requests" model, such that each approval request is associated to (owned by) a parent Metadata record.
I have gotten it to work from start to finish without throwing any errors, HOWEVER, no matter what I do I cannot get the Metadata - Request relation to save.
I've posted my client scripts for the Add Request page below, and also attached the zip of my application in case someone wants to look in more detail.
Any and all suggestions are incredibly appreciated, I love the idea of appmaker but have been struggling to understand relations versus how they are traditionally handled in SQL.
/**
 * @fileoverview Client script functions for the AddRequest page.
 */
/**
 * Navigates the user to the AddRequest page, passing the metadata key
 * through as a page URL parameter.
 * @param {string=} metadataKey - Optional key of the Doc_Metadata record that
 *     will be used as the default parent for the new approval request.
 */
function gotoAddRequestPage(metadataKey) {
  var params = {
    metadataKey: metadataKey
  };
  // Leftover DEBUG console.log calls removed.
  gotoPage(app.pages.AddRequest, params);
}
/**
 * Validates the AddRequest page, saves the pending request, and redirects
 * the user to the details page for the newly created request.
 * @param {Widget} submitButton - Button that triggers the action; its root
 *     must be the AddRequest page and its datasource must hold the draft.
 */
function createRequest(submitButton) {
  var addRequestPage = submitButton.root;
  if (addRequestPage.validate()) {
    // Disable the button while the save is in flight to prevent double submits.
    submitButton.enabled = false;
    submitButton.datasource.saveChanges({
      success: function() {
        submitButton.enabled = true;
        goToRequestDetailsPage(submitButton.datasource.item._key);
      },
      failure: function(error) {
        submitButton.enabled = true;
        // Surface the failure instead of silently swallowing it.
        console.error('Failed to save request: ' + error);
      }
    });
  }
}
/**
 * Discards any unsaved changes on the button's datasource and returns the
 * user to the Main page.
 * @param {Widget} cancelButton - Button that triggers the action.
 */
function cancelCreateRequest(cancelButton) {
  var draftSource = cancelButton.datasource;
  draftSource.clearChanges();
  app.showPage(app.pages.Main);
}
/**
 * Reads the metadataKey URL parameter and creates a new approval request
 * linked to the currently selected Doc_Metadata record.
 *
 * Fix: the previous implementation called createItem() first and then
 * assigned the relation on the resulting record (and pushed into the
 * related-records array client-side only), so the Metadata - Request
 * relation was never persisted. The relation must be set on the
 * create-mode DRAFT before the record is sent to the server.
 */
function onRequestCreate() {
  google.script.url.getLocation(function(location) {
    var metadataKey = location.parameter.metadataKey;
    console.log("metadataKey: " + metadataKey);
    var allMetadataDs = app.datasources.AllMetadata;
    // The Metadata record to link must already be selected at this point.
    var metadata = allMetadataDs.item;
    var createDs = app.datasources.RequestsByKey.modes.create;
    // Establish the relation on the draft record, then persist it.
    createDs.item.Metadata = metadata;
    createDs.createItem(function(newRecord) {
      console.log("requestId: " + newRecord._key);
    });
  });
}
struggling to understand relations versus how they are traditionally handled in SQL
You can configure your app to use Cloud SQL database: https://developers.google.com/appmaker/models/cloudsql
I cannot get the Metadata - Request relation to save
Here is a snippet that should work (assuming that you are using a datasource in auto-save mode).
// Assumes the datasource is in auto-save mode.
var metadataSource = app.datasources.AllMetadata;
// The Metadata record you need must already be selected here.
var selectedMetadata = metadataSource.item;
var createMode = app.datasources.RequestsByKey.modes.create;
var draft = createMode.item;
// Linking the draft request to the existing Metadata record creates the
// relation that will be saved with the record.
draft.Metadata = selectedMetadata;
// Send the draft to the server to be persisted.
createMode.createItem(function(savedRequest) {
  // Log the persisted request record asynchronously.
  console.log(savedRequest);
});
By the way, your life will become way easier, if you add a drop down with metadata items to the request creation form.
Related
Scenario:
I have a Node.JS 12.x Lambda that is backing an Alexa Skill. The user asks the skill a question, and the search parameter is sent to my Lambda in a slot. I query the SQL DB using the mssql package, then return the result to the user.
Issue:
If I fetch results from the DB, the Lambda executes the query successfully and returns the result, but then the Lambda times out and Alexa seems to have not received the response. If I remove the DB query and just return a string, it all works just fine.
Suspicions:
I think there may be some issue with the async/await stuff in here. I just can't figure out what the issue is though. I have checked my use of async/await several times
If I left anything out just let me know. Thanks in advance!
Code:
/**
 * Intent handler for the FindSomething intent.
 */
const MyHandler = {
  /**
   * Determine whether this handler is able to process this input.
   * @param {Object} handlerInput The input object
   */
  canHandle(handlerInput) {
    return util.checkIntentMatch(handlerInput, INTENT_NAME);
  },
  /**
   * Handle the input: read the search_term slot, look up the answer,
   * and build the spoken response.
   * @param {Object} handlerInput The input object
   */
  async handle(handlerInput) {
    // Extract the slot value directly (no need for array destructuring).
    const searchTerm = Alexa.getSlotValue(handlerInput.requestEnvelope, 'search_term');
    // Fulfill the request.
    const responseText = await getResponseText(searchTerm);
    // Build and return the spoken response.
    const builder = handlerInput.responseBuilder;
    return builder.speak(responseText).getResponse();
  },
};
And then getResponseText looks like this:
/**
 * Build the response text for a query: look up the sector for the term and
 * format either the found or the not-found message.
 * @param {string} searchTerm The search term from the user
 */
async function getResponseText(searchTerm) {
  const sectorName = await getSectorForTerm(searchTerm);
  console.log(`Inside getResponseText. sectorName: ${sectorName}`);
  if (sectorName) {
    return format(FOUND_LANGUAGE, { searchTerm, sectorName });
  }
  return format(NOT_FOUND_LANGUAGE, { searchTerm });
}
/**
 * Find the sector for a search term by querying the SQL database.
 *
 * Fix for the reported Lambda timeout: the open mssql connection pool keeps
 * the Node event loop alive, so even after the handler returns its result
 * the Lambda waits for the event loop to drain and times out. Closing the
 * pool in a finally block lets the invocation complete promptly.
 * (Alternatively, set context.callbackWaitsForEmptyEventLoop = false.)
 * @param {string} searchTerm The search term from the user
 * @returns {Promise<string|undefined>} The sector name, or undefined when
 *     no row matches.
 */
async function getSectorForTerm(searchTerm) {
  // Gather prerequisites in parallel: the query text and the connection pool.
  const [query, pool] = await Promise.all([
    fs.readFile(path.join(__dirname, 'queries', 'FindQuery.sql'), 'utf8'),
    sql.connect(process.env['connectionString'])
  ]);
  try {
    console.log('Pre query');
    // Run the parameterized query.
    const queryResult = await pool.request()
      .input('EntityName', sql.TYPES.VarChar, searchTerm)
      .query(query);
    console.log('Post query');
    // Extract the result, if any.
    let result;
    if (queryResult.recordset.length > 0) {
      result = queryResult.recordset[0]['SectorName'];
    }
    console.log(`result of getSectorForTerm: ${result}`);
    return result;
  } finally {
    // Release the connection so the event loop can drain and the Lambda
    // can return without hitting its timeout.
    await pool.close();
  }
}
Edit:
Here is what the log looks like. You can see that the file has loaded, the query has executed, and the return statement is hit within ~500ms. Then several seconds pass before the function times out.
Edit 2:
I have structured my index.js like this example from the AWS docs, so I don't have direct access to context or similar. That can be changed if needed.
You are using 2 time consuming operations in your skill - file read and sql connection. Probably your skill exceeds 8s timeout. Please check CloudWatch logs related to your skill if there is message like
Task timed out after 8.01 seconds
you should work on some enhancements here.
Also make sure that both methods in Promise.all argument return a Promise.
I want to write and send a pull request for a Firestore batch write method to the FirestoreGoogleAppsScript library. The only problem is, I have no idea how to do that. So I need help getting started. The above link is the file where I think I should add the following.
/**
 * Create a document with the given fields and an auto-generated ID.
 *
 * NOTE(review): despite the name, this delegates to createDocument_ and
 * writes a single document per call — it does not accumulate or commit
 * multiple operations atomically like Firestore's WriteBatch. A real batch
 * API would need to collect set/update/delete operations and commit them
 * in one request — TODO confirm intended semantics before sending the PR.
 *
 * @param {string} path the path where the document will be written
 * @param {object} fields the document's fields
 * @return {object} the Document object written to Firestore
 */
this.batch = function (path, fields) {
const request = new FirestoreRequest_(baseUrl, authToken)
return createDocument_(path, fields, request)
}
When finished, I would expect to call the function as follows.
// Get a new write batch
let batch = db.batch();
// Set the value of 'NYC'
let nycRef = db.collection('cities').doc('NYC');
batch.set(nycRef, {name: 'New York City'});
// Update the population of 'SF'
let sfRef = db.collection('cities').doc('SF');
batch.update(sfRef, {population: 1000000});
// Delete the city 'LA'
let laRef = db.collection('cities').doc('LA');
batch.delete(laRef);
// Commit the batch
return batch.commit().then(function () {
// ...
});
Is this the correct way to approach it or am I missing something?
Is there a way to verify if the Rewarded Video Ad was watched by client (OnAdRewarded) on a server? Is there any integration I can use with Google Cloud Functions?
I thought it's possible to verify a token sent by client to the server with admob admin SDK but it seems that it's not possible and we can only verify the ad on the client.
It is now possible with Server-Side Verification (SSV) Callbacks.
Server-side verification callbacks are URL requests, with query parameters expanded by Google, that are sent by Google to an external system to notify it that a user should be rewarded for interacting with a rewarded video ad. Rewarded video SSV (server-side verification) callbacks provide an extra layer of protection against spoofing of client-side callbacks to reward users.
Not sure this is relevant to Firebase but here are some details in case anyone is using Node / JS. You can use Node's inbuilt crypto library. First fetch the available Google AdMob verifier keys from https://gstatic.com/admob/reward/verifier-keys.json.
You'll then need to loop through the returned JSON keys array and grab the pem public key file string corresponding to the req.query.key_id parameter of your incoming req.url string.
Then the "message" we wish to verify signatures with is the incoming req.url substring inbetween the parameter ? symbol and &signature... strings.
Now we can verify easily:
// Verify the SSV signature over the signed query-string content using the
// AdMob public key (PEM) matched by key_id.
const verifier = crypto.createVerify("sha256");
verifier.update(message);
const signatureIsValid = verifier.verify(pem, req.query.signature, "base64");
if (signatureIsValid) {
  console.log("Ad was successfully verified.");
} else {
  console.log("Ad could not be verified - quick call the cops !");
}
One caveat to beware of is you may need to unescape(...) your req.url string before you use it since some characters may have been escaped. I was stuck on that for a good hour or two. You can do this using e.g. Node's built in querystring library.
Not at the moment. The feature has been in closed beta for a while from what I've gathered recently. The last mention I could find was in the linked discussion where someone, from Google presumably, says the feature will be rolled out to the public soon. The post is from Jan 22.
https://groups.google.com/forum/#!topic/google-admob-ads-sdk/weXTAGZfYQ8
I know its a bit late but here is a piece of code that helped me. It is in javascript for Node users.
https://github.com/hypeben/admob-rewarded-ads-ssv
const queryString = require('query-string');
const crypto = require('crypto');
const axios = require('axios');
const GOOGLE_AD_KEY_URL = 'https://gstatic.com/admob/reward/verifier-keys.json';
/**
 * Fetches Google's public AdMob SSV verifier keys and indexes them by keyId.
 * These keys rotate from time to time, so they are fetched per use.
 * @returns {Promise<Object>} map of keyId -> crypto public KeyObject
 * @throws {Error} when the response contains no keys array
 */
const getGoogleKeysMap = async () => {
  const googleKeyRes = await axios.get(GOOGLE_AD_KEY_URL);
  const {keys} = googleKeyRes.data;
  if (!keys) {
    throw new Error('No keys found from google keys');
  }
  // Index each PEM (as a parsed KeyObject) by its keyId so verify() can
  // look it up from the key_id query parameter.
  // (Removed the debug console.log that dumped every public key.)
  const keyMap = {};
  keys.forEach((k) => {
    keyMap[`${k.keyId}`] = crypto.createPublicKey(k.pem);
  });
  return keyMap;
};
/**
 * Verifies an AdMob rewarded-ad SSV callback URL.
 * Resolves with true when the signature is valid; rejects otherwise.
 *
 * Fix: the original wrapped the whole body in a try with an EMPTY catch,
 * so every failure (bad input, unknown key, invalid signature) was
 * silently swallowed and the promise resolved with undefined — directly
 * contradicting the documented contract ("else fails"). Errors now
 * propagate to the caller.
 *
 * @param {String} queryUrl the callback URL (or its query string)
 * @param {Boolean} debug pass true to log intermediate values
 * @returns {Promise<Boolean>} true when the signature verifies
 * @throws {TypeError|Error} on malformed input, unknown key id, or an
 *     invalid signature
 */
async function verify(queryUrl, debug) {
  if (typeof queryUrl !== "string") throw new TypeError("URL needs to be string!");
  /**
   * Callbacks from AdMob must contain 'signature' and 'key_id'.
   * For more info https://developers.google.com/admob/android/rewarded-video-ssv
   */
  const {signature, key_id} = queryString.parse(queryUrl);
  if (!signature) {
    throw new Error('No signature value exist in the URL param');
  }
  if (debug) {
    console.debug('Signature and KeyId ---');
    console.debug(signature, key_id);
  }
  // Keep only the query string if a full URL was supplied.
  let queryParamsString = queryUrl;
  if (queryParamsString.indexOf('?') > -1) {
    queryParamsString = queryUrl.split('?')[1];
  }
  if (debug) {
    console.debug('Query param string ---');
    console.debug(queryParamsString);
  }
  /**
   * Per AdMob, the last two query parameters of SSV callbacks are always
   * signature and key_id, in that order; everything before them is the
   * content that was signed.
   */
  const contentToVerify = queryParamsString.substring(0, queryParamsString.indexOf('signature') - 1);
  if (debug) {
    console.debug('Content to verify ---');
    console.debug(contentToVerify);
  }
  const keyMap = await getGoogleKeysMap();
  const publicKey = keyMap[`${key_id}`];
  if (!publicKey) {
    throw new Error('Key id provided doesn\'t exist in the google public keys');
  }
  const verifier = crypto.createVerify('RSA-SHA256');
  verifier.update(contentToVerify);
  const result = verifier.verify(publicKey, signature, 'base64');
  if (!result) {
    throw new Error('Invalid Signature Supplied');
  }
  return true;
}
module.exports.verify = verify;
module.exports.verify = verify;
If you guys looking for a simple way on Golang for the Admob SSV.
Just use this hiyali/go-lib-ssv, hope to save your life :)
I wrote an event listener for kernel.request to make me able to logout user automatically when he is idle for more than an amount of time.
I use this to calculate idle time:
$idle = time() - $this->session->getMetadataBag()->getLastUsed()
But I have a periodic Ajax request in my pages (for notification counts in pages) and they constantly change the LastUsed field of MetadataBag so Idle limit never reaches.
Is it possible to prevent a specific Controller (that ajax controller) to update session LastUsed ?
If yes, How?
If no, what else can I do to handle this?
Thanks
I don't know how to prevent the update of MetadataBag's lastUsed, but you can manually set the time for the user's last request, in the session and use it.
You can create a listener like below and make it listen to the kernel.request event, and in your other listener, get the data you store using this listener in the session instead of $this->session->getMetadataBag()->getLastUsed().
/**
 * kernel.request listener: records the time of the user's last "real"
 * request in the session, skipping the periodic AJAX check route so that
 * polling does not reset the idle timer.
 */
public function listen(GetResponseEvent $event){
    $route = $event->getRequest()->attributes->get('_route');
    if($route === 'your_ajax_check_route'){
        return;
    }
    // update the session and etc.
    // retrieve what you store here in your other listener.
}
The feature you are talking about (preventing the update of the session's lastUsed) can't be done without some Symfony hacking, which is unnecessary because you can simply create your own logic for this. For example, you can create a kernel.request listener which updates a last-used session variable for every request except the one used to check how much time is left before logout:
/**
 * kernel.request listener implementing an idle-session timeout without
 * relying on the session MetadataBag.
 *
 * Every master request except the 'session_check' polling route refreshes a
 * manually tracked last-used timestamp; when the idle time exceeds the
 * configured maximum, the session is invalidated and the user is logged out
 * (403 for AJAX requests, redirect to login otherwise).
 *
 * Fix: the method's closing brace was missing in the original snippet.
 */
public function onKernelRequest(GetResponseEvent $event): void
{
    if (HttpKernelInterface::MASTER_REQUEST !== $event->getRequestType()) {
        return;
    }
    // $this->sessionMaxIdleTime is a config parameter holding the maximum
    // idle time in seconds; 0 disables the feature.
    if ($this->sessionMaxIdleTime > 0) {
        $this->session->start();
        $time = time();
        $route = $event->getRequest()->attributes->get('_route');
        // All routes but the polling route refresh the idle timer.
        if ('session_check' !== $route) {
            // Manually set the last-used time.
            $this->session->set('manualLastUsedTime', $time);
        }
        $idleTime = $time - $this->session->get('manualLastUsedTime');
        if ($idleTime > $this->sessionMaxIdleTime) {
            $this->session->invalidate();
            $this->session->getFlashBag()->set('info', 'Logged out due to inactivity.');
            if ($event->getRequest()->isXmlHttpRequest()) {
                $event->setResponse(new Response('Logged out due to inactivity.', Response::HTTP_FORBIDDEN));
            } else {
                $event->setResponse(new RedirectResponse($this->router->generate('login')));
            }
        }
    }
}
Then you can simply create a method in some controller, which an AJAX function can call to check how much time is left before logout, for example:
/**
 * Returns the number of seconds remaining before the idle session expires,
 * intended to be polled by client-side JavaScript.
 *
 * Fix: the docblock tags used '#' instead of '@'; Symfony reads the
 * @Route annotation from the docblock, so '#Route' would never register
 * the route.
 *
 * @Route("/session/check", name="session_check", methods={"GET"})
 * @param Request $request
 * @param SessionInterface $session
 * @param int $sessionMaxIdleTime
 * @return JsonResponse
 */
public function checkSecondsToExpire(Request $request, SessionInterface $session, $sessionMaxIdleTime = 0): JsonResponse
{
    $idleTime = time() - $session->get('manualLastUsedTime');
    $secToExp = $sessionMaxIdleTime - $idleTime;
    return new JsonResponse(['secToExp' => $secToExp]);
}
Last piece is to make some check mechanism. It can be done as simple as starting some JS function in your base template. The parameter is in twig syntax and is from config (it is the same as sessionMaxIdleTime in kernelRequest listener):
<script>
$(document).ready(function () {
SessionIdler.start({{ session_max_idle_time }});
});
</script>
SessionIdler.start is just a function that runs another function at a specific interval (in this example it will run 1 minute before the configured sessionMaxIdleTime):
/**
 * Schedules the periodic session check. The check fires one minute before
 * the configured maximum idle time elapses.
 * @param {number} time maximum idle time in seconds
 */
function start(time) {
  var intervalMs = (time - 60) * 1000;
  checkSessionCounter = setInterval(checkSession, intervalMs);
  isCheckSessionCounterRunning = true;
}
checkSession function make ajax request to our session_check route and depends on result it shows modal with proper information about too long inactivity. Modal can have a button or action when hiding, that will make another request to session_extend route (which can do nothing - it just need to be captured by kernelRequest listener to overwrite manualLastUsedTime)
This three pieces together creates a mechanism for notify user about too long inactivity without any influence on session metadataBag.
I always use methods to insert, update and remove. This is the way my code look just now:
Client side
Template.createClient.events({
  'submit form': function(event, tmpl) {
    // Fix: the handler's parameter is named 'event', but the original
    // called e.preventDefault() — 'e' is undefined, so this threw and the
    // browser performed a full page submit.
    event.preventDefault();
    var client = {
      name: event.target.name.value,
      // .... more fields
    };
    // Clean and validate on the client first for fast user feedback.
    var validatedData = Clients.validate(client);
    if (validatedData.errors) {
      // Display validation errors
      return;
    }
    // Only call the server once client-side validation has passed.
    Meteor.call('createClient', validatedData.client, function(error) {
      if (error) {
        // Display error
      }
    });
  }
});
Client and server side:
// Collection shared by client and server.
Clients = new Mongo.Collection("clients");

/**
 * Cleans and validates a client document.
 * @param {Object} client raw client document (mutated in place by cleaning)
 * @returns {{errors: (Array|undefined), client: Object}} the cleaned
 *     document plus an error list (undefined when the document is valid)
 */
Clients.validate = function(client) {
  // ---- Clean data ----
  client.name = _.str.trim(client.name);
  // .... more fields clean

  // ---- Validate data ---
  var errors = [];
  if (!client.name) {
    errors.push("The name is required.");
  }
  // .... more fields validation

  // Return an object with any errors found and the cleaned data.
  var result = {
    errors: _.isEmpty(errors) ? undefined : errors,
    client: client
  };
  return result;
};
// Meteor method: re-validates (on the server) and inserts a new client.
// The client stub validated before Meteor.call, so a server-side failure
// here means the request bypassed the UI; the error is intentionally vague.
Meteor.methods({
'createClient': function (client) {
// --- Validate user permissions ---
// If running on the server, validate the data again.
if (Meteor.isServer) {
var validatedData = Clients.validate(client);
if (validatedData.errors)
// There is no need to send a detailed error, because data was validated on client before
throw new Meteor.Error(500, "Invalid client.");
client = validatedData.client;
}
// Structural audit-argument check on the (possibly cleaned) document.
check(client, {
name: String,
// .... more fields
});
return Clients.insert(client);
}
});
Meteor.call is executed on both the client and the server side, but Meteor doesn't have a way to stop the server-side run if the client-side validation fails (or at least, I don't know how). With this pattern, I avoid sending data to the server with Meteor.call if validation fails.
I want to start using Collection2, but I can't figure how to get the same pattern. All the examples I found involve the usage of direct Insert and Update on client side and Allow/Deny to manage security, but I want to stick with Meteor.call.
I found on documentation that I can validate before insert or update, but I don't know how to get this to work:
Books.simpleSchema().namedContext().validate({title: "Ulysses", author: "James Joyce"}, {modifier: false});
I know the autoform package, but I want to avoid that package for now.
How can I validate with Collection2 on the client side before sending data to the server side with Meteor.call? Is my pattern wrong or incompatible with Collection2 and I need to do it in another way?
In under 30 lines you can write your very own, full-featured validation package for Collection2. Let's walk through an example:
"use strict"; //keep it clean
var simplyValid = window.simplyValid = {}; //OK, not that clean (global object)
simplyValid.RD = new ReactiveDict(); //store error messages here
/**
*
* #param data is an object with the collection name, index (if storing an array), and field name, as stored in the schema (e.g. 'foo.$.bar')
* #param value is the user-inputted value
* #returns {boolean} true if it's valid
*/
simplyValid.validateField = function (data, value) {
var schema = R.C[data.collection]._c2._simpleSchema; //access the schema from the local collection, 'R.C' is where I store all my collections
var field = data.field;
var fieldVal = field.replace('$', data.idx); //make a seperate key for each array val
var objToValidate = {};
var dbValue = schema._schema[field].dbValue; //custom conversion (standard to metric, dollars to cents, etc.) IGNORE
if (dbValue && value) value = dbValue.call({value: value}); //IGNORE
objToValidate[field] = value; //create a doc to clean
schema.clean(objToValidate, {removeEmptyStrings: false}); //clean the data (trim, etc.)
var isValid = schema.namedContext().validateOne(objToValidate, field, {extendedCustomContext: true}); //FINALLY, we validate
if (isValid) {
simplyValid.RD.set(fieldVal, undefined); //The RD stores error messages, if it's valid, it won't have one
return true;
}
var errorType = schema.namedContext()._getInvalidKeyObject(field).type; //get the error type
var errorMessage = schema.messageForError(errorType, field); //get the message for the given error type
simplyValid.RD.set(fieldVal, errorMessage); //set the error message. it's important to validate on error message because changing an input could get rid of an error message & produce another one
return false;
};
simplyValid.isFieldValid = function (field) {
return simplyValid.RD.equals(field, undefined); //a very cheap function to get the valid state
};
Feel free to hack out the pieces you need and shoot me any questions you might have.
You can send the schema to the client and validate before sending to the server. If you want to use Collection" you need to attach the schema to the collection and use the insert which is something that you don't want. So the best option, for your scenario, is sending the schema to the client and use it to validate.
Also, reconsider using mini-mongo instead of using Methods for everything — it will save you lots of time — and don't assume your app is secure just because you're using Methods.