Trying to grab league statistics in order to compute win rates for inhouse games

I have been trying to use the Riot Games API to pull all of our previous custom games and then compute the win/loss streaks for individual players. I have written the following code to grab matches for a particular user.
See https://github.com/FriendlyUser/deno-riot-games-custom-games
However, the v4 API only seems to return data up to season 11. If anyone could clarify how the API works, or explain how I could get more data, that would be fantastic.
import { writeJson } from "https://deno.land/std/fs/mod.ts"
import "https://deno.land/x/dotenv/load.ts"

const player_id = Deno.env.get('ACCOUNT_ID')
const region_url = 'https://na1.api.riotgames.com'
let riot_URL = new URL(`${region_url}/lol/match/v4/matchlists/by-account/${player_id}`)

enum HTTP {
  GET = 'GET',
  POST = 'POST',
  PUT = 'PUT',
  DELETE = 'DELETE'
}

interface MatchlistDto {
  startIndex: number
  totalGames: number
  endIndex: number
  matches: Array<any>
}

function makeFetchOptions(
  riotKey = Deno.env.get('RIOT_API_KEY'),
  method: HTTP = HTTP.GET
): object {
  return {
    method: method,
    headers: {
      "Accept-Charset": "application/x-www-form-urlencoded; charset=UTF-8",
      "Accept-Language": "en-US,en;q=0.9",
      'X-Riot-Token': riotKey
    }
  }
}

function appendMatchHistory(riot_endpoint: string): Promise<MatchlistDto> {
  const options = makeFetchOptions(Deno.env.get('RIOT_API_KEY'))
  return fetch(riot_endpoint, options)
    .then((resp) => resp.json())
    .then((matchData: MatchlistDto) => matchData)
}

const max_iterations = 1000
let bIndex = 0
let eIndex = 100
let current_url = riot_URL
let riot_endpoint = null
let allMatches = []
let customGames = []

const sleep = (milliseconds: number) => {
  return new Promise(resolve => setTimeout(resolve, milliseconds))
}

for (let i = 0; i < max_iterations; i++) {
  console.log(`beginIndex: ${bIndex} endIndex: ${eIndex}`)
  riot_endpoint = current_url.toString()
  const newMatches = await appendMatchHistory(riot_endpoint)
  // Pause between requests to stay under the API rate limit
  await sleep(1500)
  current_url.searchParams.delete('beginIndex')
  current_url.searchParams.delete('endIndex')
  const { matches } = newMatches
  if (matches.length == 0) {
    console.log(`ENDING SCRIPT AT ${eIndex} with ${matches.length}`)
    break
  }
  // beginIndex becomes the previous endIndex; page forward 100 matches at a time
  bIndex = eIndex
  eIndex = eIndex + 100
  allMatches.push(newMatches.matches)
  // Set the pagination parameters for the next request
  current_url.searchParams.append('beginIndex', String(bIndex))
  current_url.searchParams.append('endIndex', String(eIndex))
}

await writeJson(
  "./allData.json",
  allMatches
);

Sorry if this answer is late, but yes: the Riot API only serves "current" data, which is why sites like U.GG, OP.GG, etc. run scripts that store data continuously. To get historical statistics you would have to write scripts that save matches into your own DB over time.
Sadly, there is no way to get previous season data.
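For what it's worth, here is a minimal sketch of such a collector (assumptions on my part: the same v4 endpoint and env vars as the code above, and a local ./matchStore.json file standing in for the "DB"; matches are de-duplicated by the gameId field of the v4 matchlist entries):
// collector.js — run on a schedule (e.g. cron) to accumulate matches over time.
// Sketch only: ./matchStore.json is an assumed local store keyed by gameId.
const key = Deno.env.get("RIOT_API_KEY");
const account = Deno.env.get("ACCOUNT_ID");
const url = `https://na1.api.riotgames.com/lol/match/v4/matchlists/by-account/${account}?beginIndex=0&endIndex=100`;

let store = {};
try {
  store = JSON.parse(await Deno.readTextFile("./matchStore.json"));
} catch (_) {
  // first run: no store yet
}

const resp = await fetch(url, { headers: { "X-Riot-Token": key } });
const { matches = [] } = await resp.json();
for (const m of matches) {
  store[m.gameId] = m; // de-duplicate across runs by gameId
}
await Deno.writeTextFile("./matchStore.json", JSON.stringify(store, null, 2));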

Related

Webkit2 with gjs - get response headers

I'm experimenting with GJS and WebKit2. How can I get the HTTP headers of a request made with load_uri?
I have the following code:
const Gtk = imports.gi.Gtk,
      WebKit = imports.gi.WebKit2,
      contentManager = new WebKit.UserContentManager,
      view = WebKit.WebView.new_with_user_content_manager(contentManager);
Gtk.init(null);
let win = new Gtk.Window(), Response = new WebKit.URIResponse();
contentManager.add_script(new WebKit.UserScript("alert ('test');", 0, 1, null, null));
view.load_uri('https://www.gnome.org');
win.add(view);
win.set_title("test");
win.set_icon_from_file("/games/aptdaemon-resolve.png");
win.connect('destroy', () => { Gtk.main_quit(); });
win.set_size_request(640, 480);
win.show_all();
view.connect("load-changed", function (instance, state) {
    // state == 3 corresponds to WebKit2.LoadEvent.FINISHED
    if (state == 3) {
        log("URL" + Response.get_uri());
        view.run_javascript("alert (document.body.innerHTML)", null, null);
    }
});
Gtk.main();
For example, Response.get_uri() returns an empty string. How do I access the response headers, and how can I exchange messages between scripts injected with view.run_javascript and GJS? I want the body HTML to be sent to GJS.
Got it:
const Gtk = imports.gi.Gtk;
const WebKit = imports.gi.WebKit2;
Gtk.init(null);
const win = new Gtk.Window(),
      contentManager = new WebKit.UserContentManager,
      view = WebKit.WebView.new_with_user_content_manager(contentManager);
contentManager.connect("script-message-received::pipe", function (instance, message) {
    message = message.get_js_value().to_string();
    log(message);
});
contentManager.register_script_message_handler("pipe");
view.load_uri('https://www.gnome.org');
win.add(view);
win.set_title("test");
win.connect('destroy', () => { Gtk.main_quit(); });
win.set_size_request(640, 480);
win.show_all();
view.connect("load-changed", function (instance, status) {
    // status == 3 corresponds to WebKit2.LoadEvent.FINISHED
    if (status == 3) {
        /* WebKitView.get_main_resource  -> returns WebResource
           WebResource.get_response      -> returns URIResponse
           URIResponse.get_http_headers  -> returns Soup.MessageHeaders */
        let headers = view.get_main_resource().get_response().get_http_headers();
        let response_STR = "";
        headers.foreach((name, value) => { response_STR += name + ": " + value + "\n" });
        view.run_javascript('window.webkit.messageHandlers.pipe.postMessage(document.body.innerHTML);', null, null);
        log(response_STR);
    }
});
Gtk.main();
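If you only need a single header rather than all of them, Soup.MessageHeaders also exposes get_one() (a small sketch under the same setup as above; it returns null when the header is absent):
// Sketch: read one header instead of iterating over all of them.
const headers = view.get_main_resource().get_response().get_http_headers();
const contentType = headers.get_one("Content-Type"); // null if absent
log("Content-Type: " + contentType);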

Kibana Server-Side Plugin in 7.16.3, Response.Body is always empty and fails validation step

The plugin I'm working on is for Kibana 7.16.3.
The server-side code currently looks like the following:
import { schema } from '@kbn/config-schema';
import { logger } from 'elastic-apm-node';
import { IRouter } from '../../../../src/core/server';
import { ComplexityAndChurnFactory } from "../resources/cxchquery";
import { validateBody, linearmap } from "../resources/utility";

let elasticSearchHost = ""

export function defineHosts(host: string) {
  elasticSearchHost = host
}

export function defineRoutes(router: IRouter) {
  router.get(
    {
      path: '/api/complexity_and_churn/agg',
      validate: {
        params: schema.object({}),
        body: schema.object({
          Size: schema.number({}),
          Index: schema.string({}),
          StartDate: schema.string({}),
          EndDate: schema.string({}),
          FileTypeFilters: schema.arrayOf(schema.string({}), {})
        }, {})
      },
    },
    async (context, request, response) => {
      console.log(`Recv Req: ${JSON.stringify(request.body)}`);
      let reqBody = request.body;
      validateBody(reqBody);
      let query = ComplexityAndChurnFactory(reqBody.Index, reqBody.StartDate, reqBody.EndDate, reqBody.FileTypeFilters, 10000);
      let resultSize = reqBody.Size;
      let minScore = 0;
      let maxScore = 50;
      // If the user needs to scan over 10 million files after date range and filtering, there is likely a bigger problem.
      const MAX_QUERIES = 1000;
      let topXScores: Array<Object> = []
      /** Strategy for getting the top scores in one pass of the dataset:
       * The composite aggregation returns a subset of the data => update the global min/max complexity/churn based on this subset.
       * Based on the global min/max complexity/churn, calculate the score of the composite aggregation subset.
       * Based on the global min/max complexity/churn, update the score of the previously saved top scores.
       * Join the current aggregation subset and the previously saved top scores into one dataset.
       * Remove all but the top x scores.
       * Repeat with the previous composite aggregation's after key until the data is exhausted.
       */
      let minComplexity = Number.POSITIVE_INFINITY;
      let maxComplexity = Number.NEGATIVE_INFINITY;
      let minChurn = Number.POSITIVE_INFINITY;
      let maxChurn = Number.NEGATIVE_INFINITY;
      let i = 0;
      for (i = 0; i < MAX_QUERIES; i++) {
        let resp = await context.core.elasticsearch.client.asCurrentUser.search(query);
        logger.info(`query responded with: ${resp}`);
        // Check for completion
        let buckets = resp.body.aggregations.buckets.buckets;
        if (buckets.length == 0 || !query?.after_key) {
          break;
        }
        // Set up the next query if buckets were returned.
        query.after_key = resp.body.aggregations.buckets.after_key;
        minComplexity = buckets.reduce((p: Object, v: Object) => p.complexity.value < v.complexity.value ? p.complexity.value : v.complexity.value, minComplexity);
        maxComplexity = buckets.reduce((p: Object, v: Object) => p.complexity.value > v.complexity.value ? p.complexity.value : v.complexity.value, maxComplexity);
        minChurn = buckets.reduce((p: Object, v: Object) => p.churn.value < v.churn.value ? p.churn.value : v.churn.value, minChurn);
        maxChurn = buckets.reduce((p: Object, v: Object) => p.churn.value > v.churn.value ? p.churn.value : v.churn.value, maxChurn);
        // Recalculate scores for topXScores based on the updated min and max complexity and churn.
        topXScores.forEach(element => {
          let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
          let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
          element.score = complexityScore + churnScore;
        });
        // For new data, calculate the score and add it to the topXScores array.
        buckets.forEach(element => {
          let complexityScore = linearmap(element.complexity.value, minComplexity, maxComplexity, minScore, maxScore);
          let churnScore = linearmap(element.churn.value, minChurn, maxChurn, minScore, maxScore);
          element.score = complexityScore + churnScore;
          topXScores.push(element);
        });
        // Sort the topXScores by score.
        topXScores = topXScores.sort((a, b) => a.score - b.score);
        // Remove all but the top x scores from the array.
        let numberBucketsToRemove = Math.max(topXScores.length - resultSize, 0);
        topXScores.splice(0, numberBucketsToRemove);
      }
      if (i == MAX_QUERIES) {
        throw new Error(`[ERROR] Exceeded the maximum allowed queries (${MAX_QUERIES}) for composite aggregations; please reach out to an administrator to get this amount changed, or limit your query's date range and filters.`)
      }
      return response.ok({
        body: {
          buckets: topXScores
        }
      });
    }
  );
}
When I make a request to the endpoint like the following:
curl --request GET 'http://localhost:5601/api/complexity_and_churn/agg' \
  --header 'kbn-xsrf: anything' \
  --header 'content-type: application/json; charset=utf-8' \
  --header 'Authorization: Basic <Auth>' \
  -d '{
    "Size": 100,
    "Index": "mainindexfour",
    "StartDate": "2010/10/10",
    "EndDate": "2022/10/10",
    "FileTypeFilters": ["xml"]
  }'
I get the response:
{
  "statusCode": 400,
  "error": "Bad Request",
  "message": "[request body.Size]: expected value of type [number] but got [undefined]"
}
If I remove the validation on the body and print out JSON.stringify(request.body), I see that it is an empty object, regardless of what data I send. If I try to use params or query, they also end up being undefined.
Is my server side code or the request I'm sending incorrect?
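In case it helps illustrate the difference, here is a sketch of the same validation attached to a POST route. This is an assumption on my part rather than something from the original post: bodies on GET requests are frequently dropped by clients, proxies, or frameworks, so body validation is normally exercised on POST routes.
// Sketch (assumption): same schema as above, but on a POST route where a
// request body is expected; request.body should then arrive validated.
router.post(
  {
    path: '/api/complexity_and_churn/agg',
    validate: {
      body: schema.object({
        Size: schema.number({}),
        Index: schema.string({}),
        StartDate: schema.string({}),
        EndDate: schema.string({}),
        FileTypeFilters: schema.arrayOf(schema.string({}), {})
      }, {})
    },
  },
  async (context, request, response) => {
    return response.ok({ body: request.body }); // echo back for illustration
  }
);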

How to access deep value of realtime db with cloud function?

In childSnapshot.val().k I have the following in my Cloud Function:
{
  '-LdmZIlKZh3O9cR8MOBU': {
    id: 'ceccomcpmoepincin3ipwc',
    k: 'test',
    p: 'somepath',
    t: 1556700282278,
    u: 'username'
  },
  '-Llkocp3ojmrpemcpo3mc': {
    id: '[epc[3pc[3m,',
    k: 'test2',
    p: 'somepath2',
    t: 1556700292290,
    u: 'username2'
  }
}
I need each p (path) value so I can delete the corresponding file from Storage. How do I access this value?
My Cloud Function for refreshing states, removing entries, and deleting files from Storage:
var db = admin.database();
var ref = db.ref('someref');
ref.once("value").then((snapshot) => {
    var updates = {};
    var patObject = {
        fid: null,
        ft: null,
        ftr: null,
        fu: null,
        id: null,
        lid: null,
        lt: null,
        ltr: null,
        lu: null,
        t: null,
        tr: null,
        v: null,
        g: null,
        l: null,
        k: null
    };
    snapshot.forEach((childSnapshot) => {
        if (childSnapshot.numChildren() >= 14) {
            var t = childSnapshot.val().t;
            if ((t === 1 || t === 5) && childSnapshot.val().tr > 0) {
                if (childSnapshot.val().tr - 12 > 0) {
                    updates[childSnapshot.key + '/tr'] = childSnapshot.val().tr - 12;
                    if (childSnapshot.val().k !== "") {
                        console.log('path: ', childSnapshot.val().k);
                        childSnapshot.val().k.snapshot.forEach(kpath => {
                            console.log('path: ', "path");
                        });
                    }
                } else {
                    updates[childSnapshot.key] = patObject;
                }
            }
            if (childSnapshot.val().tr <= 0) {
                updates[childSnapshot.key] = patObject;
            }
        } else {
            updates[childSnapshot.key] = patObject;
        }
    });
    ref.update(updates);
    res.send("");
    return "";
}).catch(reason => {
    res.send(reason);
})
return "";
If you want to delete all the files corresponding to the values of the p fields, you need to use Promise.all() to execute the asynchronous deletion tasks in parallel (since the delete() method returns a Promise), and you need to iterate over the object that contains the p paths.
It is not easy to understand your code, so below you'll find the part corresponding to the above explanations. It's up to you to integrate it into your code!
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const defaultStorage = admin.storage(); // Note this line
//.....
exports.date = functions.https.onRequest((req, res) => { // I understand that you use an HTTP Cloud Function
    //....
    .then(...
        // Somehow you get the object you mention in your question, through childSnapshot.val().k
        const kObject = childSnapshot.val().k;
        const bucket = defaultStorage.bucket(yourFileBucket);
        const promises = [];
        Object.keys(kObject).forEach(key => {
            // The values of the path p are obtained via kObject[key].p.
            // Based on that we push the Promise returned by delete() to the promises array.
            promises.push(bucket.file(kObject[key].p).delete());
        });
        return Promise.all(promises)
            .then(results => {
                // Here all the Promises in the promises array are resolved, which means that all the files are deleted.
                res.send({ result: results.length + ' file(s) deleted' });
            })
            .catch(error => {
                res.status(500).send(error);
            });
});
You may be interested in watching the following official Firebase video by Doug Stevenson: https://youtu.be/7IkUgCLr5oA

Quickly finding users by phone number with Firebase backend

I’m working on an app with a Firebase backend. During sign up I would like to let new users see which of their contacts are already on the app to add them as friends. So basically, use phone numbers to match users with contacts.
I am having a big performance headache when querying the database to find users.
Since Firestore does not support OR queries, I run two queries per phone number (one to check the national format, the other the international format), and if either returns a document, I set that document as the found user:
findUserByPhoneNumber = (number, callback) => {
    // Utility function to, well, sanitize phone numbers
    sanitizeNumber = (str) => {
        if (str) {
            var num = str.match(/\d/g);
            num = num.join("");
            return num;
        } else {
            return null
        }
    }
    var foundUser = null
    Promise.all([
        usersRef.where('phoneNumbers.nationalFormat', '==', sanitizeNumber(number)).get()
            .then(snapshot => {
                if (snapshot.docs.length > 0 && snapshot.docs[0].data()) {
                    // console.log('nationalFormat result: ', snapshot.docs[0]);
                    foundUser = snapshot.docs[0].data()
                }
                return foundUser
            }),
        usersRef.where('phoneNumbers.internationalFormat', '==', sanitizeNumber(number)).get()
            .then(snapshot => {
                if (snapshot.docs.length > 0 && snapshot.docs[0].data()) {
                    // console.log('internationalFormat result: ', snapshot.docs[0]);
                    foundUser = snapshot.docs[0].data()
                }
                return foundUser
            })
    ])
    .then(results => {
        res = results.filter(el => { return el != null })
        if (results.length > 0) {
            callback(res[0])
        }
    })
}
findUserByPhoneNumber runs for each contact in a loop. When testing on my phone with 205 contacts, the whole process takes about 30 seconds, which is about 29 seconds longer than I would like, especially given that the test database has only 8 records...
getContacts = () => {
    getCs = () => {
        // Declare arrays
        const contactsWithAccount = []
        const contactsWithNoAccount = []
        // Get contacts from the user's phone
        Contacts.getAll((err, contacts) => {
            if (err) throw err
            // Iterate over each contact
            for (var i = 0; i < contacts.length; i++) {
                const item = contacts[i]
                if (item.phoneNumbers && item.phoneNumbers.length > 0) {
                    const phone = item.phoneNumbers[0].number
                    // If the sanitized phone number differs from the current user's phone number (saved in the DB), run the following logic
                    if (this.state.user.phoneNumbers.nationalFormat != sanitizeNumber(phone)
                        && this.state.user.phoneNumbers.internationalFormat != sanitizeNumber(phone)
                    ) {
                        findUserByPhoneNumber(phone, (fu) => {
                            contactObject = {
                                key: item.recordID,
                                name: item.givenName,
                                normalizedName: item.givenName.toLowerCase(),
                                phoneNumber: phone,
                                user: this.state.user,
                                hasAccount: null,
                                friendId: null,
                                isFriend: null
                            }
                            const foundUser = fu
                            // If a user was found, push into contactsWithAccount, otherwise push into contactsWithNoAccount
                            if (foundUser && foundUser._id != this.state.user._id) {
                                contactObject.hasAccount = true
                                contactObject.friendId = foundUser._id
                                if (this.state.user.friends && this.state.user.friends.includes(foundUser._id)) {
                                    contactObject.isFriend = true
                                }
                                contactsWithAccount.push(contactObject)
                            } else {
                                contactsWithNoAccount.push(contactObject)
                            }
                            // If the two arrays are filled up, run the callback.
                            // NOTE_1: we use the two lengths +1 to account for the current
                            // user's document that we skip and don't add to either of the arrays.
                            // NOTE_2: this bizarre method was the only way to handle the results
                            // coming in asynchronously.
                            if (contactsWithAccount.length + contactsWithNoAccount.length + 1 == contacts.length) {
                                console.log('finished');
                                sortCs(contactsWithAccount, contactsWithNoAccount)
                            }
                        })
                    }
                }
            }
        })
    }
    // Sorts the two arrays alphabetically
    sortCs = (withAccount, withNoAccount) => {
        compare = (a, b) => {
            if (a.name < b.name)
                return -1;
            if (a.name > b.name)
                return 1;
            return 0;
        }
        withAccount.sort(compare)
        withNoAccount.sort(compare)
        this.setState({ withAccount, withNoAccount })
    }
    // Unleash the monster
    getCs(sortCs)
}
I am sure the process could be optimized in various ways. Maybe:
a different database structure
bundling all queries into one (see the sketch just below)
better use of async
starting the process at an earlier step in the signup flow
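For what it's worth, a sketch of the "bundling" idea using Firestore's in operator, which accepts up to 10 values per query (assumptions: your SDK version supports in, field names follow the schema above, and usersRef and sanitizeNumber are the same as in the code above):
// Sketch: two queries per batch of 10 numbers instead of two per contact.
findUsersByPhoneNumbers = async (numbers) => {
    const sanitized = numbers.map(sanitizeNumber).filter(n => n != null)
    const found = []
    for (let i = 0; i < sanitized.length; i += 10) {
        const batch = sanitized.slice(i, i + 10)
        const [national, international] = await Promise.all([
            usersRef.where('phoneNumbers.nationalFormat', 'in', batch).get(),
            usersRef.where('phoneNumbers.internationalFormat', 'in', batch).get()
        ])
        national.docs.concat(international.docs).forEach(doc => found.push(doc.data()))
    }
    return found
}
With 205 contacts this is at most 2 * ceil(205 / 10) = 42 queries instead of 410.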
WhatsApp, Houseparty, and a bunch of other apps have this feature in place and it loads instantly. I'm not trying to reach that level of perfection yet, but there must be a better way...
Any help/suggestions would be greatly appreciated.

Puppeteer / Node - Target.createTarget - Target Closed

I'm using Node/Puppeteer in the code below, passing in a large list of URLs for traversal and scraping. Doing it asynchronously has been difficult, though I find that I am getting closer and closer to the answer. I am currently stuck on an issue related to the following error:
UnhandledPromiseRejectionWarning: Unhandled promise rejection (rejection id: 17): Error: Protocol error (Target.createTarget): Target closed.
This error occurs once upon every iteration of the while loop, though I'm not sure what I may be doing incorrectly.
Could someone help me do the following:
1) diagnose the source of the error, and
2) potentially find a more effective way to traverse a large list of URLs asynchronously?
async function subProc(list, batchSize) {
    let subList = null;
    let i = 0;
    while (list.length > 0) {
        let browser = await puppeteer.launch();
        subList = list.splice(0, batchSize);
        console.log("Master List Size :: " + list.length);
        console.log("SubList Size :: " + subList.length);
        for (let j = 0; j < subList.length; j++) {
            promiseArray.push(new Promise((resolve, reject) => {
                resolve(pageScrape(subList[j], browser));
            }));
        }
        Promise.all(promiseArray)
            .then(response => {
                procArray.concat(response);
            });
        promiseArray = new Array();
        try {
            await browser.close();
        } catch (ex) {
            console.log(ex);
        }
    };
}

async function pageScrape(url, browser) {
    let page = await browser.newPage();
    await page.goto(url, {
        timeout: 0
    });
    await page.waitFor(1000);
    return await page.evaluate(() => {
        let appTitle = document.querySelector('').innerText;
        let companyName = document.querySelector('').innerText;
        let dateListed = document.evaluate("", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.innerText;
        let category = document.evaluate("']//a//strong", document, null, XPathResult.FIRST_ORDERED_NODE_TYPE, null).singleNodeValue.innerText;
        /* */
        return {
            appTitle,
            companyName,
            dateListed,
            category
        }
    }).then(response => {
        let urlData = {
            id: subList[j],
            appName: response.appTitle,
            companyName: response.companyName,
            dateListed: response.dateListed,
            category: response.category
        }
        return urlData;
    });
};
I figured out the solution to the problem I was having.
Every computer is limited in its processing ability, so instead of iterating through 1000 URLs simultaneously you have to break the list into smaller pieces.
By using Promise.all, and iterating and scraping 10 URLs at a time and storing those results in an array, I was able to throttle the processing required to get through all 1000 URLs.
processBatch(subData, 10, procArray).then((processed) => {
    for (let i = 0; i < procArray.length; i++) {
        for (let j = 0; j < procArray[i].length; j++) {
            results.push(procArray[i][j]);
        }
    }
});

function processBatch(masterList, batchSize, procArray) {
    return Promise.all(masterList.splice(0, batchSize).map(async url => {
        return singleScrape(url)
    })).then((results) => {
        if (masterList.length < batchSize) {
            console.log('done');
            procArray.push(results);
            return procArray;
        } else {
            console.log('MasterList Size :: ' + masterList.length);
            procArray.push(results);
            return processBatch(masterList, batchSize, procArray);
        }
    })
}
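As a side note on the original error: "Target closed" typically means browser.close() ran while pages were still open, which is what happens in the question's code when Promise.all is not awaited before closing. A sketch of the batching loop with that fixed (assuming the same pageScrape(url, browser) helper from the question):
// Sketch: await the whole batch before closing the browser, so
// browser.close() never races the pages that are still scraping.
async function subProcFixed(list, batchSize) {
    const results = [];
    while (list.length > 0) {
        const browser = await puppeteer.launch();
        const subList = list.splice(0, batchSize);
        const batch = await Promise.all(subList.map(url => pageScrape(url, browser)));
        results.push(...batch);
        await browser.close(); // safe: every page in this batch has finished
    }
    return results;
}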
