Logic in if/else statement to divide a path when the shape contains two or more '0's - paper.js

Hi all, I'm trying to write some logic in paper.js (also using opentype.js for font data) so that when a number contains two or more consecutive zeros, the zero path is divided so that it is solid.
Things I know: a zero path, using my particular font, is made up of an outer path with 19 segments and an inner path with 18 segments.
So I thought I would try to iterate over all paths, check if a path has 19 segments and the next path has 18 segments, and call path.unite(), which kind of works. But I only want it to do this with consecutive '0's, e.g. '100' or '1000', but not '10'.
So I was trying to do an if/else statement where, if the current path has 18 segments and the next path has fewer than 18 segments, then do nothing, or else call path.divide()?
I'm sure there is a much better way of doing this. Can you help please?
link to codepen
paper.install(window);
window.onload = () => {
  paper.setup("canvas");
  opentype.load(
    "https://assets.codepen.io/1070/pphatton-ultralight-webfont.woff",
    (err, font) => {
      if (err) {
        console.log(err);
      } else {
        const fontPath = font.getPath("10k", 0, 100, 100).toSVG();
        const count = new paper.CompoundPath(fontPath);
        count.unite();
        count.children.forEach((child, i) => {
          if (
            child.segments.length === 19 &&
            count.children[i + 1].segments.length === 18
          ) {
            const eye = child.unite();
            eye.selected = true;
          } else if (
            count.children[i + 1].segments.length === 18 &&
            child.segments.length < 18
          ) {
            console.log('hello');
            // const target = child.divide();
            count.children[i].fillColor = 'black';
          } else {
          }
        });
        // const flatCount = count.children[1].unite()
        // console.log(count.children[2].segments.length)
        // const flatCountTwo = count.children[5].unite()
        // flatCount.translate(5,0)
        count.fillColor = "red";
        project.activeLayer.fitBounds(view.bounds.scale(0.6));
      }
    }
  );
};

I think that rather than using Font.getPath to retrieve a single SVG path for the whole text, you should use Font.getPaths to retrieve an SVG path for each character.
This way you can quite simply do your analysis on the input string directly and handle the consecutive 0s differently from the other characters.
Edit
In order to detect the consecutive zeros, yes, you could use a regex or loop over the characters, like I did in the following example.
Here's a fiddle showcasing a possible solution.
const handleZero = (path) => {
  path.children = path.children.slice(0, 1);
};

const getConsecutiveZerosIndices = (content) => {
  const zero = '0';
  return [...content]
    .map((char, i) => ({ char, i }))
    .filter(({ char, i }) => {
      const previousCharacter = content?.[i - 1];
      const nextCharacter = content?.[i + 1];
      return char === zero && (previousCharacter === zero || nextCharacter === zero);
    })
    .map(({ i }) => i);
};

const drawText = (content, font) => {
  const fontPaths = font.getPaths(content, 0, 100, 100);
  const consecutiveZerosIndices = getConsecutiveZerosIndices(content);
  const paths = fontPaths.map((fontPath, i) => {
    const path = new paper.CompoundPath(fontPath.toSVG());
    if (consecutiveZerosIndices.includes(i)) {
      handleZero(path);
    }
    return path;
  });
  const group = new paper.Group(paths);
  group.fillColor = 'red';
  return group;
};

const draw = (font) => {
  const path1 = drawText('10k', font);
  const path2 = drawText('100k', font);
  const path3 = drawText('1000k', font);
  path2.position = path1.position.add(0, path1.bounds.height * 1.2);
  path3.position = path2.position.add(0, path2.bounds.height * 1.2);
  paper.project.activeLayer.fitBounds(paper.view.bounds.scale(0.6));
};

paper.setup('canvas');
opentype.load(
  'https://assets.codepen.io/1070/pphatton-ultralight-webfont.woff',
  (err, font) => draw(font)
);
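If you prefer the regex route mentioned above, getConsecutiveZerosIndices could be sketched roughly like this (same assumption: the input is a plain string such as '1000k'):

// Rough regex-based alternative: each match of /0{2,}/g is a run of two or
// more consecutive zeros; collect the index of every character in each run.
const getConsecutiveZerosIndicesRegex = (content) =>
  [...content.matchAll(/0{2,}/g)].flatMap((match) =>
    Array.from({ length: match[0].length }, (_, offset) => match.index + offset)
  );

// getConsecutiveZerosIndicesRegex('10k')   -> []
// getConsecutiveZerosIndicesRegex('100k')  -> [1, 2]
// getConsecutiveZerosIndicesRegex('1000k') -> [1, 2, 3]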

Related

The custom marker of the LightningChart doesn't work properly

I am using the marker from the current example, but with a vertically oriented chart and a few of my own upgrades. My problem is that with the vertical chart, the labels and values of the variables aren't shown, but the same logic works properly with a horizontally oriented chart.
private createCustomMarker(): void {
  if (!this.seriesInstances.length) return;
  const resultTable: UIElementColumn<UIBackground> = this.chartInstance
    .addUIElement(UILayoutBuilders.Column, {
      x: this.chartInstance.getDefaultAxisX(),
      y: this.chartInstance.getDefaultAxisY()
    })
    .setMouseInteractions(false)
    .setOrigin(UIOrigins.LeftCenter)
    .setMargin(5);
  const datetimeRow: UITextBox<UIBackground> = resultTable
    .addElement(UILayoutBuilders.Row)
    .addElement(UIElementBuilders.TextBox);
  const rowsY: UITextBox<UIBackground>[] = this.seriesInstances
    .map((el: ISeriesInstance, i: number) => {
      return resultTable
        .addElement(UILayoutBuilders.Row)
        .addElement(UIElementBuilders.TextBox)
        .setTextFillStyle(this.seriesInstances[i].instance.getStrokeStyle().getFillStyle());
    });
  const tick: CustomTick = (this.isAppearanceHorizontal ? this.chartInstance.getDefaultAxisX() : this.chartInstance.getDefaultAxisY())
    .addCustomTick()
    .setAllocatesAxisSpace(false)
    .disposeMarker();
  // Hide custom cursor components initially.
  resultTable.dispose();
  tick.dispose();
  this.chartInstance.onSeriesBackgroundMouseMove((_: ChartXY<PointMarker, UIBackground>, event: MouseEvent): void => {
    const mouseLocationClient: { x: number; y: number } = { x: event.clientX, y: event.clientY };
    const mouseLocationEngine: Point = this.chartInstance.engine.clientLocation2Engine(
      mouseLocationClient.x,
      mouseLocationClient.y
    );
    // Translate mouse location to LCJS coordinate system for solving data points from series, and translating to Axes.
    // Translate mouse location to Axis.
    const mouseLocationAxis: Point = translatePoint(
      mouseLocationEngine,
      this.chartInstance.engine.scale,
      this.seriesInstances[0].instance.scale
    );
    // Solve the nearest data point to the mouse on each series.
    const nearestDataPoints: CursorPoint<Series2D>[] = this.seriesInstances.map((el: ISeriesInstance) => {
      return el.instance.solveNearestFromScreen(mouseLocationEngine); // on this line most of the elements are undefined, but data for them exists and the points are near each other
    });
    // console.log(nearestDataPoints);
    // Find the nearest solved data point to the mouse.
    const nearestPoint: CursorPoint<Series2D> = nearestDataPoints.reduce((prev: CursorPoint<Series2D>, curr: CursorPoint<Series2D>) => {
      if (!prev) return curr;
      if (!curr) return prev;
      if (this.isAppearanceHorizontal) {
        return Math.abs(mouseLocationAxis.y - curr.location.y) < Math.abs(mouseLocationAxis.y - prev.location.y) ? curr : prev;
      } else {
        return Math.abs(mouseLocationAxis.x - curr.location.x) < Math.abs(mouseLocationAxis.x - prev.location.x) ? curr : prev;
      }
    });
    if (nearestPoint) {
      // Set custom cursor location.
      resultTable.setPosition({
        x: mouseLocationAxis.x,
        y: mouseLocationAxis.y,
      });
      // Change origin of result table based on cursor location.
      let resultTableOrigin;
      const yScale: number = this.chartInstance.engine.scale.y.getInnerInterval();
      const isResultTableOriginXRight: boolean = mouseLocationEngine.x > this.chartInstance.engine.scale.x.getInnerInterval() / 2;
      if (mouseLocationEngine.y > yScale - (yScale / 100 * 30)) { // mouseLocationEngine.y > yScale - 30%
        resultTableOrigin = isResultTableOriginXRight ? UIOrigins.RightTop : UIOrigins.LeftTop;
      } else if (mouseLocationEngine.y < yScale / 100 * 30) { // mouseLocationEngine.y < 30% of yScale
        resultTableOrigin = isResultTableOriginXRight ? UIOrigins.RightBottom : UIOrigins.LeftBottom;
      } else {
        resultTableOrigin = isResultTableOriginXRight ? UIOrigins.RightCenter : UIOrigins.LeftCenter;
      }
      resultTable.setOrigin(resultTableOrigin);
      // Format result table text.
      const datetimeValue = this.isAppearanceHorizontal
        ? this.chartInstance.getDefaultAxisX().formatValue(nearestPoint.location.x)
        : this.chartInstance.getDefaultAxisY().formatValue(nearestPoint.location.y);
      datetimeRow.setText(`${datetimeValue}`);
      rowsY.map((rowY: UITextBox<UIBackground>, i: number) => {
        // this.seriesInstances[i].instance.isDisposed() ? rowY.dispose() : rowY.restore(); after this line the table labels have low font contrast
        if (nearestDataPoints[i]?.location) {
          const foundSeries = chain(this.track.series)
            .flatMap()
            .value()[i];
          const value: string = this.isAppearanceHorizontal
            ? this.chartInstance.getDefaultAxisY().formatValue(nearestDataPoints[i].location.y)
            : this.chartInstance.getDefaultAxisX().formatValue(nearestDataPoints[i].location.x);
          rowY.setText(`${this.seriesInstances[i].instance.getName()}: ${value} ${foundSeries.unit}`); // problem on this line
        }
      });
      tick.setValue(
        this.isAppearanceHorizontal
          ? nearestPoint.location.x
          : nearestPoint.location.y
      );
      resultTable.restore();
      tick.restore();
    } else {
      resultTable.dispose();
      tick.dispose();
    }
  });
  this.chartInstance.onSeriesBackgroundMouseLeave(() => {
    resultTable.dispose();
    tick.dispose();
  });
  this.chartInstance.onSeriesBackgroundMouseDragStart(() => {
    resultTable.dispose();
    tick.dispose();
  });
}
I want to understand why almost the same code doesn't work on a vertically oriented chart but works fine on a horizontal one.

crawler with ramda.js (functional programming)

I'm trying to crawl movie data from the TMDB website. I finished my code with pure JavaScript, but I want to change the code into a functional programming style using ramda.js.
I attached my code below. I want to get rid of the for-loop (if possible) and use the R.pipe function.
(async () => {
  for (let i = 0; i < 1000; i++) {
    (() => {
      setTimeout(async () => {
        let year = startYr + Math.floor(i / 5);
        await request.get(path(year, i % 5 + 1), async (err, res, data) => {
          const $ = cheerio.load(data);
          let list = $('.results_poster_card .poster.card .info .flex a');
          _.forEach(list, (element, index) => {
            listJSON.push({
              MovieID: $(element).attr('id').replace('movie_', ''),
              Rank: (i % 5) * 20 + index + 1,
              Year: year
            });
          });
          if (i === 1000 - 1) {
            await pWriteFile(`${outputPath}/movieList.json`, JSON.stringify(listJSON, null, 2));
          }
        });
      }, 1000 * i);
    })(i);
  }
})().catch(error => console.log(error));
Steps:
1- Break your code into small functions
2- Stop using async/await and use promise.then(otherFunction)
3- When using promises, you can create a sleep function like this: const sleep = (time) => new Promise(resolve => setTimeout(resolve, time));
Ex.:
const process = index => sleep(1000)
  .then(() => makeRequest(index))
  .then(processData);

// Sequential: chain the requests one after another
R.range(0, 1000)
  .reduce(
    (prev, actual) => prev.then(() => process(actual)),
    Promise.resolve()
  )
  .then(printResult);

// Parallel: start everything at once and wait for all of it
Promise.all(R.range(0, 1000).map(process))
  .then(printResult);
You can use the Ramda range() function to replace your loop.
https://ramdajs.com/docs/#range
R.range(0, 1000);
That will provide you with a collection of integers (your i) that you can work with (map() or whatever you need).
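For instance, a small sketch (reusing the question's startYr variable and its five-pages-per-year layout) of expressing the loop's index arithmetic as a pure mapping over R.range:

const R = require('ramda');

// Assumption: startYr is whatever starting year the question's code uses.
const toRequestParams = (i) => ({
  year: startYr + Math.floor(i / 5),
  page: (i % 5) + 1,
});

// 1000 { year, page } pairs instead of 1000 loop iterations.
const requestParams = R.map(toRequestParams, R.range(0, 1000));
// e.g. requestParams[0] -> { year: startYr, page: 1 }
//      requestParams[7] -> { year: startYr + 1, page: 3 }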

How to access deep value of realtime db with cloud function?

In childSnapshot.val().k I have this in my cloud function:
{
  '-LdmZIlKZh3O9cR8MOBU': {
    id: 'ceccomcpmoepincin3ipwc',
    k: 'test',
    p: 'somepath',
    t: 1556700282278,
    u: 'username'
  },
  '-Llkocp3ojmrpemcpo3mc': {
    id: '[epc[3pc[3m,',
    k: 'test2',
    p: 'somepath2',
    t: 1556700292290,
    u: 'username2'
  }
}
I need each path value so that I can delete the corresponding file from storage. How do I access this value?
My cloud function for refreshing states, removing and deleting files from storage:
var db = admin.database();
var ref = db.ref('someref');
ref.once("value").then((snapshot) => {
  var updates = {};
  var patObject = {
    fid: null,
    ft: null,
    ftr: null,
    fu: null,
    id: null,
    lid: null,
    lt: null,
    ltr: null,
    lu: null,
    t: null,
    tr: null,
    v: null,
    g: null,
    l: null,
    k: null
  };
  snapshot.forEach((childSnapshot) => {
    if (childSnapshot.numChildren() >= 14) {
      var t = childSnapshot.val().t;
      if ((t === 1 || t === 5) && childSnapshot.val().tr > 0) {
        if (childSnapshot.val().tr - 12 > 0) {
          updates[childSnapshot.key + '/tr'] = childSnapshot.val().tr - 12;
          if (childSnapshot.val().k !== "") {
            console.log('path: ', childSnapshot.val().k);
            childSnapshot.val().k.snapshot.forEach(kpath => {
              console.log('path: ', "path");
            });
          }
        } else {
          updates[childSnapshot.key] = patObject;
        }
      }
      if (childSnapshot.val().tr <= 0) {
        updates[childSnapshot.key] = patObject;
      }
    } else {
      updates[childSnapshot.key] = patObject;
    }
  });
  ref.update(updates);
  res.send("");
  return "";
}).catch(reason => {
  res.send(reason);
});
return "";
If you want to delete all the files corresponding to the values of p, you need to use Promise.all() to execute the asynchronous deletion tasks in parallel (since the delete() method returns a Promise). You need to iterate over the object that contains the p paths.
It is not easy to understand your code, so below you'll find the part corresponding to the explanations above. It's up to you to integrate it into your code!
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const defaultStorage = admin.storage(); // Note this line
//.....
exports.date = functions.https.onRequest((req, res) => { // I understand that you use an HTTP Cloud Function
  //....
  .then(...
  // Somehow you get the object you mention in your question, through childSnapshot.val().k
  const kObject = childSnapshot.val().k;
  const bucket = defaultStorage.bucket(yourFileBucket);
  const promises = [];
  Object.keys(kObject).forEach(key => {
    // The values of the path p are obtained via kObject[key].p
    // Based on that we push the Promise returned by delete() to the promises array
    promises.push(bucket.file(kObject[key].p).delete());
  });
  return Promise.all(promises)
    .then(results => {
      // Here all the Promises that were in the promises array are resolved, which means that all the files are deleted
      res.send({ result: results.length + ' files(s) deleted' });
    })
    .catch(error => {
      res.status(500).send(error);
    });
});
You may be interested in watching the following official Firebase video by Doug Stevenson: https://youtu.be/7IkUgCLr5oA
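For reference, a minimal sketch of pulling just the p values out of the object shown in the question (assuming it is exactly what childSnapshot.val().k returns):

// Minimal sketch: collect every nested p value from the object in the question.
const kObject = childSnapshot.val().k;
const paths = Object.values(kObject).map(entry => entry.p);
// paths -> ['somepath', 'somepath2']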

Apply CSS Filters to cropped image and save/upload

I can already input the image and crop it. I tried to apply CSS filters to it, but it seems the CSS filters only apply to the img tag, not the actual image.
I am using both #Alyle-cropping and ngx-image-cropper (tests). Both give me a base64 string for the cropped image. I am able to load the cropped image into the img tag and also upload it to the database.
onCropped(e: ImgCropperEvent) {
  this.croppedImage = e.dataURL;
  // console.log('cropped img: ', e.dataURL);
}

onloaded(e: ImgCropperEvent) {
  this.imagemOriginal = e.originalDataURL;
  this.cropper.center();
  console.log('img loaded', e.name);
}

onerror(e: ImgCropperErrorEvent) {
  console.warn(`'${e.name}' is not a valid image`, e);
}

// Apply filters /////////////////////////////////////////////////
change(crop: Crop): void {
  this.stylus = crop.nome;
  this.crops.forEach(function (value) {
    (value.nome === crop.nome) ? value.ehSelec = true : value.ehSelec = false;
  });
  // const canvas = document.getElementById('cropping'), image = document.createElement('img');
  // image.src = canvas.toDataURL('image/jpeg', 1.0);
  // document.body.appendChild(image);
}

enviarParanue(): void {
  const ref = firebase.storage().ref(`imagens/usuarios/idTeste`).child(`nomeTeste`);
  const stringa = this.removerString(this.croppedImage);
  ref.put(this.base64toBlob(stringa, 'image/png')).then((snapshot) => {
    // console.log('snapshot', snapshot.valueOf());
    ref.getDownloadURL().then(function (downloadURL) {
      console.log('File available at', downloadURL);
    });
  });
  // ref.putString(stringa, 'base64', {contentType: 'image/png'}).then((snapshot) => {
  //   // console.log('snapshot', snapshot.valueOf());
  //   ref.getDownloadURL().then(function(downloadURL) {
  //     console.log('File available at', downloadURL);
  //   });
  // });
}

removerString(stringa: string): string {
  return stringa.substring(23);
}

base64toBlob(base64Data: any, contentType: any) {
  contentType = contentType || '';
  const sliceSize = 1024;
  const byteCharacters = atob(base64Data);
  const bytesLength = byteCharacters.length;
  const slicesCount = Math.ceil(bytesLength / sliceSize);
  const byteArrays = new Array(slicesCount);
  for (let sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
    const begin = sliceIndex * sliceSize;
    const end = Math.min(begin + sliceSize, bytesLength);
    const bytes = new Array(end - begin);
    for (let offset = begin, i = 0; offset < end; ++i, ++offset) {
      bytes[i] = byteCharacters[offset].charCodeAt(0);
    }
    byteArrays[sliceIndex] = new Uint8Array(bytes);
  }
  return new Blob(byteArrays, { type: contentType });
}
EXAMPLE OF THE CSS FILTERS:
.none {filter:none;}
.blur {filter:blur(2.5px);}
.brightness {filter:brightness(200%);}
.contrast {filter:contrast(200%);}
.drop-shadow {filter:drop-shadow(8px 8px 10px gray);}
.grayscale {filter:grayscale(100%);}
.hue-rotate {filter:hue-rotate(90deg);}
.invert {filter:invert(100%);}
.opacity {filter:opacity(30%);}
.saturate {filter:saturate(8);}
.sepia {filter:sepia(100%);}
.contrast-brightness {filter:contrast(200%) brightness(150%);}
The problem is... I don't know how to apply the CSS filters to the image so that I can upload the cropped version with the effects (sepia, contrast, etc.).
I tried to get the img src and convert it to a Blob, but that didn't work.
I ended up saving a string with the name of the filter in the database, and I apply the filter when I load the image. A good side of this is that I can change the filter whenever I want.
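A possible alternative, if the filter really must be baked into the uploaded pixels, is to redraw the cropped data URL on a canvas with the filter applied and export a new data URL. This is only a sketch, and it assumes a browser that supports CanvasRenderingContext2D.filter (Chrome/Firefox; not Safari):

// Sketch: bake a CSS filter string into a data URL by redrawing it on a canvas.
// Assumes dataUrl is the base64 data URL produced by the cropper.
function applyFilterToDataUrl(dataUrl: string, cssFilter: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const img = new Image();
    img.onload = () => {
      const canvas = document.createElement('canvas');
      canvas.width = img.width;
      canvas.height = img.height;
      const ctx = canvas.getContext('2d');
      if (!ctx) { return reject(new Error('2D context unavailable')); }
      ctx.filter = cssFilter; // e.g. 'sepia(100%)' or 'contrast(200%) brightness(150%)'
      ctx.drawImage(img, 0, 0);
      resolve(canvas.toDataURL('image/png'));
    };
    img.onerror = reject;
    img.src = dataUrl;
  });
}

// Usage before uploading:
// applyFilterToDataUrl(this.croppedImage, 'sepia(100%)')
//   .then(filtered => { this.croppedImage = filtered; });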

3D force directed graph replacing nodes with images

Ref: 3d Force Directed Graph - Replacing Nodes with Images
How might I add the images to the following excellent code in the same manner as the Stack Overflow answer above?
https://github.com/jexp/neo4j-3d-force-graph/blob/master/particles.html
Assuming that each node may have a property of n.image = /images/imagexxx.jpg, how might I apply this image from a local filesystem to its respective node?
If the property isn't present, then render the node as the normal sphere.
Here is my sample code, which just renders all nodes as small_image.jpg:
const elem = document.getElementById('3d-graph');
const driver = neo4j.v1.driver("bolt://192.168.1.251", neo4j.v1.auth.basic("neo4j", "test"));
const session = driver.session();
const start = new Date()
session
  .run('MATCH (n)-[r]->(m) RETURN { id: id(n), label:head(labels(n)), community:n.name, caption:n.name, size:log(n.links_from+n.links_to)} as source, { id: id(m), label:head(labels(m)), community:m.name, caption:m.name, size:log(m.links_from+m.links_to)} as target, {weight:r.weight, type:type(r), community:case when n.community < m.community then n.community else m.community end} as rel LIMIT $limit', {limit: 5000})
  .then(function (result) {
    const nodes = {}
    const links = result.records.map(r => {
      var source = r.get('source'); source.id = source.id.toNumber();
      nodes[source.id] = source;
      var target = r.get('target'); target.id = target.id.toNumber();
      nodes[target.id] = target;
      var rel = r.get('rel'); if (rel.weight) { rel.weight = rel.weight.toNumber(); }
      return Object.assign({source: source.id, target: target.id}, rel);
    });
    session.close();
    console.log(links.length + " links loaded in " + (new Date() - start) + " ms.")
    const gData = { nodes: Object.values(nodes), links: links }
    const Graph = ForceGraph3D()(elem)
      .graphData(gData)
      .nodeAutoColorBy('community')
      .nodeVal('size')
      .linkAutoColorBy('community')
      .linkWidth(0)
      .linkDirectionalParticles('weight')
      .linkDirectionalParticleSpeed(0.001)
      .nodeLabel(node => `${node.label}: ${node.caption}`)
      .onNodeHover(node => elem.style.cursor = node ? 'pointer' : null)
      .nodeThreeObject(node => {
        var map = new THREE.TextureLoader().load("small_image.jpg");
        map.minFilter = THREE.LinearFilter;
        var material = new THREE.SpriteMaterial({ map: map });
        var sprite = new THREE.Sprite(material);
        sprite.scale.set(32, 32, 1);
        return sprite;
      });
    // Spread nodes a little wider
    Graph.d3Force('charge').strength(-150);
  })
  .catch(function (error) {
    console.log(error);
  });
const elem = document.getElementById('3d-graph');
const driver = neo4j.v1.driver("bolt://localhost", neo4j.v1.auth.basic("neo4j", "test"));
const session = driver.session();
const start = new Date()
session
  .run('MATCH (n:Entity)-[r]->(m:Entity) WHERE n.name="new york" RETURN { id: id(n), label:head(labels(n)), community:n.name, caption:n.name, image:n.image, size:log(n.links_from+n.links_to)} as source, { id: id(m), label:head(labels(m)), community:m.name, caption:m.name, image:m.image, size:log(m.links_from+m.links_to)} as target, {weight:r.weight, type:type(r), community:case when n.community < m.community then n.community else m.community end, image:case when n.image < m.image then n.image else m.image end} as rel LIMIT $limit', {limit: 5000})
  .then(function (result) {
    const nodes = {}
    const links = result.records.map(r => {
      var source = r.get('source'); source.id = source.id.toNumber();
      nodes[source.id] = source;
      var target = r.get('target'); target.id = target.id.toNumber();
      nodes[target.id] = target;
      var rel = r.get('rel'); if (rel.weight) { rel.weight = rel.weight.toNumber(); }
      return Object.assign({source: source.id, target: target.id}, rel);
    });
    session.close();
    console.log(links.length + " links loaded in " + (new Date() - start) + " ms.")
    const gData = { nodes: Object.values(nodes), links: links }
    const Graph = ForceGraph3D()(elem)
      .graphData(gData)
      .nodeAutoColorBy('community')
      .nodeVal('size')
      .linkAutoColorBy('community')
      .linkWidth(0)
      .linkDirectionalParticles('weight')
      .linkDirectionalParticleSpeed(0.001)
      .nodeLabel(node => `${node.label}: ${node.caption}`)
      .onNodeHover(node => elem.style.cursor = node ? 'pointer' : null)
      .nodeThreeObject(node => {
        var map = new THREE.TextureLoader().load((node.image != null ? node.image : ""));
        map.minFilter = THREE.LinearFilter;
        var material = new THREE.SpriteMaterial({ map: map });
        var sprite = new THREE.Sprite(material);
        sprite.scale.set(32, 32, 1);
        if (node.image) {
          return sprite;
        } else {
          return false;
        }
      });
    // Spread nodes a little wider
    Graph.d3Force('charge').strength(-150);
  })
  .catch(function (error) {
    console.log(error);
  });
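A slightly tidier version of the per-node switch, as a sketch only (assuming THREE and 3d-force-graph are loaded as in the snippets above): build the sprite only when node.image is set, so no empty-string texture is ever requested, and return a falsy value otherwise so the library falls back to its default sphere.

const nodeObject = node => {
  if (!node.image) return false; // no image property -> default sphere
  const texture = new THREE.TextureLoader().load(node.image);
  texture.minFilter = THREE.LinearFilter;
  const sprite = new THREE.Sprite(new THREE.SpriteMaterial({ map: texture }));
  sprite.scale.set(32, 32, 1);
  return sprite;
};
// Usage: .nodeThreeObject(nodeObject) in place of the inline callback above.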
