Kibana Server-Side Plugin in 7.16.3: request.body is always empty and fails the validation step

The plugin I'm working on is for Kibana 7.16.3.
The server side code currently looks like the following:
import { schema } from '@kbn/config-schema';
import { logger } from 'elastic-apm-node';
import { IRouter } from '../../../../src/core/server';
import { ComplexityAndChurnFactory } from '../resources/cxchquery';
import { validateBody, linearmap } from '../resources/utility';

let elasticSearchHost = '';

export function defineHosts(host: string) {
  elasticSearchHost = host;
}

export function defineRoutes(router: IRouter) {
  router.get(
    {
      path: '/api/complexity_and_churn/agg',
      validate: {
        params: schema.object({}),
        body: schema.object({
          Size: schema.number({}),
          Index: schema.string({}),
          StartDate: schema.string({}),
          EndDate: schema.string({}),
          FileTypeFilters: schema.arrayOf(schema.string({}), {}),
        }, {}),
      },
    },
    async (context, request, response) => {
      console.log(`Recv Req: ${JSON.stringify(request.body)}`);
      let reqBody = request.body;
      validateBody(reqBody);
      let query = ComplexityAndChurnFactory(reqBody.Index, reqBody.StartDate, reqBody.EndDate, reqBody.FileTypeFilters, 10000);
      let resultSize = reqBody.Size;
      let minScore = 0;
      let maxScore = 50;
      // If the user needs to scan over 10 million files after date range and filtering, there is likely a bigger problem.
      const MAX_QUERIES = 1000;
      let topXScores: Array<any> = [];
      /** Strategy for getting the top scores in one pass of the dataset:
       * The composite aggregation returns a subset of the data => update the global min/max complexity/churn based on this subset.
       * Based on the global min/max complexity/churn, calculate the score of the composite aggregation subset.
       * Based on the global min/max complexity/churn, update the score of the previously saved top scores.
       * Join the current aggregation subset and the previously saved top scores into one dataset.
       * Remove all but the top x scores.
       * Repeat with the previous composite aggregation's after key until the data is exhausted.
       */
      let minComplexity = Number.POSITIVE_INFINITY;
      let maxComplexity = Number.NEGATIVE_INFINITY;
      let minChurn = Number.POSITIVE_INFINITY;
      let maxChurn = Number.NEGATIVE_INFINITY;
      let i = 0;
      for (i = 0; i < MAX_QUERIES; i++) {
        let resp = await context.core.elasticsearch.client.asCurrentUser.search(query);
        logger.info(`query responded with: ${JSON.stringify(resp)}`);
        // Check for completion: an empty page means the composite aggregation is exhausted.
        let buckets = resp.body.aggregations.buckets.buckets;
        if (buckets.length == 0) {
          break;
        }
        // Set up the next page of the composite aggregation.
        query.after_key = resp.body.aggregations.buckets.after_key;
        minComplexity = buckets.reduce((min: number, b: any) => Math.min(min, b.complexity.value), minComplexity);
        maxComplexity = buckets.reduce((max: number, b: any) => Math.max(max, b.complexity.value), maxComplexity);
        minChurn = buckets.reduce((min: number, b: any) => Math.min(min, b.churn.value), minChurn);
        maxChurn = buckets.reduce((max: number, b: any) => Math.max(max, b.churn.value), maxChurn);
        // Recalculate scores for topXScores based on the updated min and max complexity and churn.
        topXScores.forEach((element) => {
          let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
          let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
          element.score = complexityScore + churnScore;
        });
        // For new data, calculate the score and add it to the topXScores array.
        buckets.forEach((element: any) => {
          let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
          let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
          element.score = complexityScore + churnScore;
          topXScores.push(element);
        });
        // Sort topXScores by score, ascending.
        topXScores = topXScores.sort((a, b) => a.score - b.score);
        // Remove all but the top x scores from the array.
        let numberBucketsToRemove = Math.max(topXScores.length - resultSize, 0);
        topXScores.splice(0, numberBucketsToRemove);
      }
      if (i == MAX_QUERIES) {
        throw new Error(`[ERROR] Exceeded the maximum allowed queries (${MAX_QUERIES}) for composite aggregations. Please reach out to an administrator to get this limit changed, or narrow your query's date range and filters.`);
      }
      return response.ok({
        body: {
          buckets: topXScores,
        },
      });
    }
  );
}
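(Aside: linearmap comes from a local utility module that isn't shown; from the call sites it is presumably a standard linear rescale of a value from one range onto another. A minimal sketch of that assumption:)

export function linearmap(value: number, inMin: number, inMax: number, outMin: number, outMax: number): number {
  // Assumed implementation; the real utility isn't shown in the question.
  // Guard against a degenerate input range to avoid dividing by zero.
  if (inMax === inMin) return outMin;
  return outMin + ((value - inMin) / (inMax - inMin)) * (outMax - outMin);
}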
When I make a request to the endpoint like the following:
curl --request GET 'http://localhost:5601/api/complexity_and_churn/agg' --header 'kbn-xsrf: anything' --header 'content-type: application/json; charset=utf-8' --header 'Authorization: Basic <Auth>' -d '{
"Size": 100,
"Index": "mainindexfour",
"StartDate": "2010/10/10",
"EndDate": "2022/10/10",
"FileTypeFilters": ["xml"]
}'
I get the response:
{
"statusCode": 400,
"error": "Bad Request",
"message": "[request body.Size]: expected value of type [number] but got [undefined]"
}
If I remove the validation on the body and print out JSON.stringify(request.body), I see that it is an empty object, regardless of what data I send. If I try to use params or query, they also end up being undefined.
Is my server side code or the request I'm sending incorrect?
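Worth checking: HTTP GET requests aren't expected to carry a payload, and Kibana's HTTP layer appears to drop the body of a GET before validation runs, which would explain both the empty object and the [undefined] validation error. One way to test that theory is to register the same route as a POST (a minimal sketch, everything else unchanged) and resend the curl with --request POST:

router.post(
  {
    path: '/api/complexity_and_churn/agg',
    validate: {
      body: schema.object({
        Size: schema.number(),
        Index: schema.string(),
        StartDate: schema.string(),
        EndDate: schema.string(),
        FileTypeFilters: schema.arrayOf(schema.string()),
      }),
    },
  },
  async (context, request, response) => {
    // With a POST route, the body should arrive populated and validated.
    return response.ok({ body: request.body });
  }
);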

Related

DynamoDB table seed works in CLI but not AWS SDK

I have a table with more than 25 items and wrote a basic script to break them into sub-arrays of 25 items each, then loop through that collection of sub-arrays to run a batch-write-item command with the AWS DynamoDB client. The issue I am getting is a returned validation error. When I run the same seed file via the aws-cli it seeds the table perfectly, which makes me think it has something to do with my script. See anything I am missing? Thanks in advance!
var { DynamoDB } = require('aws-sdk');
var db = new DynamoDB.DocumentClient({
  region: 'localhost',
  endpoint: 'http://localhost:8000',
});
const allItems = require('./allItems.json');
const tableName = 'some-table-name';
console.log({ tableName, allItems });
var batches = [];
var currentBatch = [];
var count = 0;
for (let i = 0; i < allItems.length; i++) {
  // push item to the current batch
  count++;
  currentBatch.push(allItems[i]);
  if (count === 25) {
    batches.push(currentBatch);
    currentBatch = [];
    count = 0; // reset so every 25th item starts a new batch, not just the first
  }
}
// if there are still items left in the current batch, add it to the collection of batches
if (currentBatch.length > 0) {
  batches.push(currentBatch);
}
var completedRequests = 0;
var errors = false;
// request-handler factory for DynamoDB
function requestHandler(params) {
  console.log('In the request handler...');
  return function (err, data) {
    completedRequests++;
    errors = errors ? true : err;
    // log error
    if (errors) {
      console.error('Request caused a DB error.');
      console.error('ERROR: ' + err);
      console.error(JSON.stringify(err, null, 2));
    } else {
      var res = {
        statusCode: 200,
        headers: {
          'Content-Type': 'application/json',
          'Access-Control-Allow-Methods': 'GET,POST,OPTIONS',
          'Access-Control-Allow-Origin': '*',
          'Access-Control-Allow-Credentials': true,
        },
        body: JSON.stringify(data),
        isBase64Encoded: false,
      };
      console.log(`Success: returned ${data}`);
      return res;
    }
    if (completedRequests == batches.length) {
      return errors;
    }
  };
}
// Make the requests
var params;
for (let j = 0; j < batches.length; j++) {
  // items go in the params.RequestItems[tableName] array
  // format for the items is { PutRequest: { Item: ITEM_OBJECT } }
  params = { RequestItems: {} };
  params.RequestItems[tableName] = batches[j];
  console.log('before db.batchWriteItem: ', params);
  try {
    // send to db
    db.batchWrite(params, requestHandler(params));
  } catch (err) {
    console.error(err);
  }
}
Here is the formatted request object and the error:
before db.batchWriteItem:
{ RequestItems:
{ 'some-table-name': [ [Object], [Object], [Object], [Object] ] }
}
In the request handler...
Request caused a DB error.
ERROR: ValidationException: Invalid attribute value type
{
"message": "Invalid attribute value type",
"code": "ValidationException",
"time": "2020-08-04T10:51:13.751Z",
"requestId": "dd49628c-6ee9-4275-9349-6edca29636fd",
"statusCode": 400,
"retryable": false,
"retryDelay": 47.94198279972915
}
You are using the DocumentClient in the Node.js code. This will automatically convert the data format used by DynamoDB to a more easily consumable format.
e.g.
{
"id": {
"S": "A string value"
}
}
would become
{
"id": "A string value"
}
The CLI does not perform this data conversion.
To skip the conversion in Node.js, use the regular DynamoDB client instead, e.g. const db = new DynamoDB().
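For illustration, a minimal side-by-side sketch of the two clients against the same local endpoint (table name and item are placeholders):

var AWS = require('aws-sdk');

// Low-level client: items must use DynamoDB's typed attribute format,
// which is exactly what a CLI seed file contains.
var low = new AWS.DynamoDB({ region: 'localhost', endpoint: 'http://localhost:8000' });
low.batchWriteItem({
  RequestItems: {
    'some-table-name': [
      { PutRequest: { Item: { id: { S: 'A string value' } } } }
    ]
  }
}, function (err, data) { console.log(err || data); });

// DocumentClient: the same item as a plain object; the client marshals the
// types for you, so pre-marshalled seed data fails with "Invalid attribute value type".
var doc = new AWS.DynamoDB.DocumentClient({ region: 'localhost', endpoint: 'http://localhost:8000' });
doc.batchWrite({
  RequestItems: {
    'some-table-name': [
      { PutRequest: { Item: { id: 'A string value' } } }
    ]
  }
}, function (err, data) { console.log(err || data); });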

Trying to grab league statistics in order to compute win rates for in-house games

I have been trying to use the Riot Games API to compute all the previous custom games and then find the win/loss streaks for individual players. I have built the following code to grab matches for a particular user.
See https://github.com/FriendlyUser/deno-riot-games-custom-games
But I feel like the Riot Games API is only returning data with its v4 API up to season 11. If anyone could clarify how the API works, or explain how I could possibly get more data, that would be fantastic.
import { writeJson } from "https://deno.land/std/fs/mod.ts"
import "https://deno.land/x/dotenv/load.ts"

const player_id = Deno.env.get('ACCOUNT_ID')
const region_url = 'https://na1.api.riotgames.com'
let riot_URL = new URL(`${region_url}/lol/match/v4/matchlists/by-account/${player_id}`)

enum HTTP {
  GET = 'GET',
  POST = 'POST',
  PUT = 'PUT',
  DELETE = 'DELETE'
}

interface MatchlistDto {
  startIndex: number
  totalGames: number
  endIndex: number
  matches: Array<any>
}

function makeFetchOptions(
  riotKey = Deno.env.get('RIOT_API_KEY'),
  method: HTTP = HTTP.GET
): object {
  return {
    method: method,
    headers: {
      "Accept-Charset": "application/x-www-form-urlencoded; charset=UTF-8",
      "Accept-Language": "en-US,en;q=0.9",
      'X-Riot-Token': riotKey
    }
  }
}

function appendMatchHistory(riot_endpoint: string): Promise<MatchlistDto> {
  const riotKey = Deno.env.get('RIOT_API_KEY')
  console.log(riotKey)
  const options = makeFetchOptions(riotKey)
  return fetch(riot_endpoint, options)
    .then((resp: any) => {
      console.log(resp)
      return resp.json()
    })
    .then((matchData: MatchlistDto) => {
      return matchData
    })
}

const max_iterations = 1000
let bIndex = 0
let eIndex = 100
let current_url = riot_URL
let riot_endpoint = null
let allMatches = []
let customGames = []

const sleep = (milliseconds: number) => {
  return new Promise(resolve => setTimeout(resolve, milliseconds))
}

for (let i = 0; i < max_iterations; i++) {
  console.log(`beginIndex: ${bIndex} endIndex: ${eIndex}`)
  riot_endpoint = current_url.toString()
  const newMatches = await appendMatchHistory(riot_endpoint)
  await sleep(1500)
  current_url.searchParams.delete('beginIndex')
  current_url.searchParams.delete('endIndex')
  const { matches } = newMatches
  if (matches.length == 0) {
    console.log(`ENDING SCRIPT AT ${eIndex} with ${matches.length}`)
    break
  }
  // startIndex becomes endIndex
  bIndex = eIndex
  eIndex = eIndex + 100
  allMatches.push(newMatches.matches)
  // get new url
  current_url.searchParams.append('beginIndex', String(bIndex))
  current_url.searchParams.append('endIndex', String(eIndex))
}

await writeJson(
  "./allData.json",
  allMatches
);
Sorry if this answer is late, but yes: the Riot API only serves "current" data, which is why sites like U.GG and OP.GG actually run scripts to store data continuously. So to get statistics, you would have to write scripts that store it into your own DB over time.
Sadly, there is no way to get previous season data.
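For completeness, a minimal sketch of that "store it yourself" approach in the same Deno style as the question (the store file name, the gameId dedup key, and the fetchLatest wiring are illustrative assumptions, not from the Riot docs):

import { readJson, writeJson } from "https://deno.land/std/fs/mod.ts"

// Hypothetical collector: merge the newest matchlist page into a local JSON
// store, deduplicating on gameId, so history accumulates beyond what the API
// still serves. Run it on a schedule (cron, systemd timer, etc.).
async function collectOnce(fetchLatest: () => Promise<any[]>) {
  let stored: any[] = []
  try {
    stored = await readJson("./matchStore.json") as any[]
  } catch (_) {
    // first run: no store file yet
  }
  const known = new Set(stored.map(m => m.gameId))
  for (const m of await fetchLatest()) {
    if (!known.has(m.gameId)) stored.push(m)
  }
  await writeJson("./matchStore.json", stored)
}

// e.g. reusing appendMatchHistory from the question:
// await collectOnce(() => appendMatchHistory(riot_URL.toString()).then(d => d.matches))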

Pick random array item from response in Paw

Given the following API response, I'd like to use the "Response Parsed Body" dynamic variable and select a random id from the array:
[
  {
    "id": 1
  },
  {
    "id": "2"
  }
]
Using [0].id gives me 1, but there's no way to select a random item. This is likely a problem with JSONPath, but it would be nice to have Paw implement a way to do this.
The best way to do this is to create a custom dynamic value.
function getRandomInt(min, max) {
  return Math.floor(Math.random() * (max - min)) + min;
}

function evaluate(context) {
  var request = context.getRequestByName('OtherRequestName')
  var lastExchange = request.getLastExchange()
  var body = JSON.parse(lastExchange.responseBody)
  var list = body // path to list within body
  var i = getRandomInt(0, list.length)
  return list[i].id
}
In Postman, the equivalent can be done in a test script (note that Lodash's _.random(n) is inclusive of n, so subtract one to stay in range):
let list = JSON.parse(responseBody);
let random_num = _.random(list.length - 1);
let randomId = list[random_num].id;
postman.setEnvironmentVariable("randomId", randomId);

Scripted dashboard in Grafana with OpenTSDB as the source

I want to create a scripted dashboard that takes one OpenTSDB metric as the datasource. On the Grafana website, I couldn't find any example. I hope I can add some line like:
metric = 'my.metric.name'
into the JavaScript code, and then I can access the dashboard on the fly.
var rows = 1;
var seriesName = 'argName';

if (!_.isUndefined(ARGS.rows)) {
  rows = parseInt(ARGS.rows, 10);
}
if (!_.isUndefined(ARGS.name)) {
  seriesName = ARGS.name;
}

for (var i = 0; i < rows; i++) {
  dashboard.rows.push({
    title: 'Scripted Graph ' + i,
    height: '300px',
    panels: [
      {
        title: 'Events',
        type: 'graph',
        span: 12,
        fill: 1,
        linewidth: 2,
        targets: [
          {
            'target': "randomWalk('" + seriesName + "')"
          },
          {
            'target': "randomWalk('random walk2')"
          }
        ],
      }
    ]
  });
}

return dashboard;
Sorry to answer my own question, but I just figured it out and hopefully posting here will benefit somebody.
The script is below. Access the dashboard on the fly with:
http://grafana_ip:3000/dashboard/script/donkey.js?name=tsdbmetricname
/* global _ */
/*
 * Complex scripted dashboard
 * This script generates a dashboard object that Grafana can load. It also takes a number of user
 * supplied URL parameters (in the ARGS variable)
 *
 * Return a dashboard object, or a function
 *
 * For async scripts, return a function, this function must take a single callback function as argument,
 * call this callback function with the dashboard object (look at scripted_async.js for an example)
 */

// accessible variables in this scope
var window, document, ARGS, $, jQuery, moment, kbn;

// Setup some variables
var dashboard;

// All url parameters are available via the ARGS object
var ARGS;

// Initialize a skeleton with nothing but a rows array and service object
dashboard = {
  rows: [],
};

// Set a title
dashboard.title = 'From Shrek';

// Set default time
// time can be overridden in the url using from/to parameters, but this is
// handled automatically in grafana core during dashboard initialization
dashboard.time = {
  from: "now-6h",
  to: "now"
};

var rows = 1;
var metricName = 'argName';

//if(!_.isUndefined(ARGS.rows)) {
//  rows = parseInt(ARGS.rows, 10);
//}

if (!_.isUndefined(ARGS.name)) {
  metricName = ARGS.name;
}

for (var i = 0; i < rows; i++) {
  dashboard.rows.push({
    title: metricName,
    height: '300px',
    panels: [
      {
        title: metricName,
        type: 'graph',
        span: 12,
        fill: 1,
        linewidth: 2,
        targets: [
          {
            "aggregator": "avg",
            "downsampleAggregator": "avg",
            "errors": {},
            "metric": ARGS.name,
            //"metric": "search-engine.relevance.latency.mean",
            "tags": {
              "host": "*"
            }
          }
        ],
        tooltip: {
          shared: true
        }
      }
    ]
  });
}

return dashboard;
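If you also want the row count driven from the URL, un-commenting the ARGS.rows block above lets you pass both parameters at once, e.g.:

http://grafana_ip:3000/dashboard/script/donkey.js?name=tsdbmetricname&rows=2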

Elasticsearch fuzziness setting in Meteor

I use Elasticsearch for my Meteor app; elasticsearch-river-mongodb hooks ES and MongoDB together.
The regular full-text search works as expected via the code below:
var result = Meteor.http.get(url, {
  params: {
    q: q,
    fields: '_id,feed_category,summary,title,description,author',
    from: from, // offset into results (defaults to 0)
    size: size  // number of results to return (defaults to 10)
  }
});
I'd like to support fuzzy search, so I tried:
var result = Meteor.http.get(url, {
  params: {
    fuzzy: q,
    fields: '_id,feed_category,summary,title,description,author',
    from: from, // offset into results (defaults to 0)
    size: size  // number of results to return (defaults to 10)
  }
});
It doesn't return the correct result, and it always has 36 hits.
I then tried:
var result = Meteor.http.get(url, {
  params: {
    "fuzzy_like_this": {
      "fields": ["_id", "summary", "title", "description", "author"],
      "like_text": q,
      "max_query_terms": 12
    },
    from: from,
    size: size
  }
});
Same as above: it doesn't return the correct result, and always has 36 hits.
How do I configure the parameters for ES fuzziness in Meteor?
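One observation (an educated guess, not a verified fix): q uses Lucene query-string syntax, and the URI search endpoint only understands a fixed set of parameters, so a top-level fuzzy or fuzzy_like_this key is most likely being ignored, which would explain the identical 36 hits each time. Fuzziness can instead be expressed inside the query string itself with the ~ operator:

var result = Meteor.http.get(url, {
  params: {
    // '~' marks a term as fuzzy in Lucene query-string syntax; an optional
    // number caps the edit distance (e.g. word~1). For multi-word input,
    // append '~' to each term rather than to the whole string.
    q: q + '~',
    fields: '_id,feed_category,summary,title,description,author',
    from: from,
    size: size
  }
});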
