3D force directed graph - replacing nodes with images

Ref: 3d Force Directed Graph - Replacing Nodes with Images
How might I add the images to the following excellent code in the same manner as the Stack Overflow answer above?
https://github.com/jexp/neo4j-3d-force-graph/blob/master/particles.html
Assuming that each node may have a property such as n.image = /images/imagexxx.jpg, how might I apply this image from the local filesystem to its respective node?
If the property isn't present, the node should be rendered as the normal sphere.
Here is my sample code, which just renders all nodes as small_image.jpg:
const elem = document.getElementById('3d-graph');
const driver = neo4j.v1.driver("bolt://192.168.1.251", neo4j.v1.auth.basic("neo4j", "test"));
const session = driver.session();
const start = new Date()
session
  .run('MATCH (n)-[r]->(m) RETURN { id: id(n), label:head(labels(n)), community:n.name, caption:n.name, size:log(n.links_from+n.links_to)} as source, { id: id(m), label:head(labels(m)), community:m.name, caption:m.name, size:log(m.links_from+m.links_to)} as target, {weight:r.weight, type:type(r), community:case when n.community < m.community then n.community else m.community end} as rel LIMIT $limit', {limit: 5000})
  .then(function (result) {
    const nodes = {}
    const links = result.records.map(r => {
      var source = r.get('source'); source.id = source.id.toNumber();
      nodes[source.id] = source;
      var target = r.get('target'); target.id = target.id.toNumber();
      nodes[target.id] = target;
      var rel = r.get('rel'); if (rel.weight) { rel.weight = rel.weight.toNumber(); }
      return Object.assign({source: source.id, target: target.id}, rel);
    });
    session.close();
    console.log(links.length + " links loaded in " + (new Date() - start) + " ms.")
    const gData = { nodes: Object.values(nodes), links: links }
    const Graph = ForceGraph3D()(elem)
      .graphData(gData)
      .nodeAutoColorBy('community')
      .nodeVal('size')
      .linkAutoColorBy('community')
      .linkWidth(0)
      .linkDirectionalParticles('weight')
      .linkDirectionalParticleSpeed(0.001)
      .nodeLabel(node => `${node.label}: ${node.caption}`)
      .onNodeHover(node => elem.style.cursor = node ? 'pointer' : null)
      .nodeThreeObject(node => {
        var map = new THREE.TextureLoader().load("small_image.jpg");
        map.minFilter = THREE.LinearFilter;
        var material = new THREE.SpriteMaterial({ map: map });
        var sprite = new THREE.Sprite(material);
        sprite.scale.set(32, 32, 1);
        return sprite;
      });
    // Spread nodes a little wider
    Graph.d3Force('charge').strength(-150);
  })
  .catch(function (error) {
    console.log(error);
  });

const elem = document.getElementById('3d-graph');
const driver = neo4j.v1.driver("bolt://localhost", neo4j.v1.auth.basic("neo4j", "test"));
const session = driver.session();
const start = new Date()
session
  .run('MATCH (n:Entity)-[r]->(m:Entity) WHERE n.name="new york" RETURN { id: id(n), label:head(labels(n)), community:n.name, caption:n.name, image:n.image, size:log(n.links_from+n.links_to)} as source, { id: id(m), label:head(labels(m)), community:m.name, caption:m.name, image:m.image, size:log(m.links_from+m.links_to)} as target, {weight:r.weight, type:type(r), community:case when n.community < m.community then n.community else m.community end, image:case when n.image < m.image then n.image else m.image end} as rel LIMIT $limit', {limit: 5000})
  .then(function (result) {
    const nodes = {}
    const links = result.records.map(r => {
      var source = r.get('source'); source.id = source.id.toNumber();
      nodes[source.id] = source;
      var target = r.get('target'); target.id = target.id.toNumber();
      nodes[target.id] = target;
      var rel = r.get('rel'); if (rel.weight) { rel.weight = rel.weight.toNumber(); }
      return Object.assign({source: source.id, target: target.id}, rel);
    });
    session.close();
    console.log(links.length + " links loaded in " + (new Date() - start) + " ms.")
    const gData = { nodes: Object.values(nodes), links: links }
    const Graph = ForceGraph3D()(elem)
      .graphData(gData)
      .nodeAutoColorBy('community')
      .nodeVal('size')
      .linkAutoColorBy('community')
      .linkWidth(0)
      .linkDirectionalParticles('weight')
      .linkDirectionalParticleSpeed(0.001)
      .nodeLabel(node => `${node.label}: ${node.caption}`)
      .onNodeHover(node => elem.style.cursor = node ? 'pointer' : null)
      .nodeThreeObject(node => {
        var map = new THREE.TextureLoader().load((node.image != null ? node.image : ""));
        map.minFilter = THREE.LinearFilter;
        var material = new THREE.SpriteMaterial({ map: map });
        var sprite = new THREE.Sprite(material);
        sprite.scale.set(32, 32, 1);
        if (node.image) {
          return sprite;
        }
        else return false;
      });
    // Spread nodes a little wider
    Graph.d3Force('charge').strength(-150);
  })
  .catch(function (error) {
    console.log(error);
  });
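For clarity, the behaviour I'm after in nodeThreeObject is roughly this (an untested sketch; I'm assuming the image paths resolve relative to the page and that returning a falsy value makes the library fall back to its default sphere):
.nodeThreeObject(node => {
  // No image property: return a falsy value so the default sphere is used
  if (!node.image) return false;
  // Image property present: texture a sprite with that file, e.g. /images/imagexxx.jpg
  const map = new THREE.TextureLoader().load(node.image);
  map.minFilter = THREE.LinearFilter;
  const material = new THREE.SpriteMaterial({ map: map });
  const sprite = new THREE.Sprite(material);
  sprite.scale.set(32, 32, 1);
  return sprite;
})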

Related

Logic in if else statement to divide path if path shape is two or more '0'

Hi all, I'm trying to write some logic in paper.js (also using opentype.js for font data) so that when a number contains two or more consecutive zeros, the zero path is divided so that it is solid.
Things I know: a zero path, using my particular font, is made up of an outer path with 19 segments and an inner path with 18 segments.
So I thought I would try to iterate over all paths, check if a path has 19 segments and the next path has 18 segments, and call path.unite(), which kind of works. But I only want it to do this with consecutive '0's, e.g. '100' or '1000', but not '10'.
So I was trying to write an if/else statement where, if the current path has 18 segments and the next path has fewer than 18 segments, then either do nothing or call path.divide()?
I'm sure there is a way better way of doing this. Can you help please?
link to codepen
paper.install(window);
window.onload = () => {
  paper.setup("canvas");
  opentype.load(
    "https://assets.codepen.io/1070/pphatton-ultralight-webfont.woff",
    (err, font) => {
      if (err) {
        console.log(err);
      } else {
        const fontPath = font.getPath("10k", 0, 100, 100).toSVG();
        const count = new paper.CompoundPath(fontPath);
        count.unite();
        count.children.forEach((child, i) => {
          if (
            child.segments.length === 19 &&
            count.children[i + 1].segments.length === 18
          ) {
            const eye = child.unite();
            eye.selected = true;
          } else if (
            count.children[i + 1].segments.length === 18
            && child.segments.length < 18
          ) {
            console.log('hello');
            // const target = child.divide();
            count.children[i].fillColor = 'black'
          } else {
          }
        });
        // const flatCount = count.children[1].unite()
        // console.log(count.children[2].segments.length)
        // const flatCountTwo = count.children[5].unite()
        // flatCount.translate(5,0)
        count.fillColor = "red";
        project.activeLayer.fitBounds(view.bounds.scale(0.6));
      }
    }
  );
};
I think that rather than using Font.getPath to retrieve a single SVG path for the whole text, you should use Font.getPaths to retrieve an SVG path for each character.
This way you can quite simply do your analysis on the input string directly and handle the consecutive 0s differently from the other characters.
Edit
In order to detect the consecutive zeros, yes, you could use a regex or loop over the characters, like I did in the following example.
Here's a fiddle showcasing a possible solution.
const handleZero = (path) => {
  path.children = path.children.slice(0, 1);
};

const getConsecutiveZerosIndices = (content) => {
  const zero = '0';
  return [...content]
    .map((char, i) => ({ char, i }))
    .filter(({ char, i }) => {
      const previousCharacter = content?.[i - 1];
      const nextCharacter = content?.[i + 1];
      return char === zero && (previousCharacter === zero || nextCharacter === zero);
    })
    .map(({ i }) => i);
};

const drawText = (content, font) => {
  const fontPaths = font.getPaths(content, 0, 100, 100);
  const consecutiveZerosIndices = getConsecutiveZerosIndices(content);
  const paths = fontPaths.map((fontPath, i) => {
    const path = new paper.CompoundPath(fontPath.toSVG());
    if (consecutiveZerosIndices.includes(i)) {
      handleZero(path);
    }
    return path;
  });
  const group = new paper.Group(paths);
  group.fillColor = 'red';
  return group;
};

const draw = (font) => {
  const path1 = drawText('10k', font);
  const path2 = drawText('100k', font);
  const path3 = drawText('1000k', font);
  path2.position = path1.position.add(0, path1.bounds.height * 1.2);
  path3.position = path2.position.add(0, path2.bounds.height * 1.2);
  paper.project.activeLayer.fitBounds(paper.view.bounds.scale(0.6));
};

paper.setup('canvas');
opentype.load(
  'https://assets.codepen.io/1070/pphatton-ultralight-webfont.woff',
  (err, font) => draw(font)
);
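As an aside, the regex route mentioned above could look roughly like this (an untested variant of getConsecutiveZerosIndices that returns the same kind of index list):
const getConsecutiveZerosIndicesWithRegex = (content) => {
  const indices = [];
  // Each match is a run of two or more zeros; record the index of every character in the run
  for (const match of content.matchAll(/0{2,}/g)) {
    for (let i = 0; i < match[0].length; i++) {
      indices.push(match.index + i);
    }
  }
  return indices;
};
// getConsecutiveZerosIndicesWithRegex('1000k') -> [1, 2, 3]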

How to access deep value of realtime db with cloud function?

In childSnapshot.val().k I have this in my Cloud Function:
{
  '-LdmZIlKZh3O9cR8MOBU': {
    id: 'ceccomcpmoepincin3ipwc',
    k: 'test',
    p: 'somepath',
    t: 1556700282278,
    u: 'username'
  },
  '-Llkocp3ojmrpemcpo3mc': {
    id: '[epc[3pc[3m,',
    k: 'test2',
    p: 'somepath2',
    t: 1556700292290,
    u: 'username2'
  }
}
I need each path value so I can delete that file from storage. How to access this value?
My cloud function for refreshing states, removing and deleting files from storage:
var db = admin.database();
var ref = db.ref('someref');
ref.once("value").then((snapshot) => {
  var updates = {};
  var patObject = {
    fid: null,
    ft: null,
    ftr: null,
    fu: null,
    id: null,
    lid: null,
    lt: null,
    ltr: null,
    lu: null,
    t: null,
    tr: null,
    v: null,
    g: null,
    l: null,
    k: null
  };
  snapshot.forEach((childSnapshot) => {
    if (childSnapshot.numChildren() >= 14) {
      var t = childSnapshot.val().t;
      if ((t === 1 || t === 5) && childSnapshot.val().tr > 0) {
        if (childSnapshot.val().tr - 12 > 0) {
          updates[childSnapshot.key + '/tr'] = childSnapshot.val().tr - 12;
          if (childSnapshot.val().k !== "") {
            console.log('path: ', childSnapshot.val().k);
            childSnapshot.val().k.snapshot.forEach(kpath => {
              console.log('path: ', "path");
            });
          }
        } else {
          updates[childSnapshot.key] = patObject;
        }
      }
      if (childSnapshot.val().tr <= 0) {
        updates[childSnapshot.key] = patObject;
      }
    } else {
      updates[childSnapshot.key] = patObject;
    }
  });
  ref.update(updates);
  res.send("");
  return "";
}).catch(reason => {
  res.send(reason);
})
return "";
If you want to delete all the files corresponding to the values of the p paths, you need to use Promise.all() to execute the asynchronous deletion tasks in parallel (since the delete() method returns a Promise). You need to iterate over the object that contains the p paths.
It is not easy to understand your code, so you'll find below the part corresponding to the above explanation. It's up to you to integrate it into your code!
const functions = require('firebase-functions');
const admin = require('firebase-admin');
admin.initializeApp();
const defaultStorage = admin.storage(); // Note this line
// .....
exports.date = functions.https.onRequest((req, res) => { // I understand that you use an HTTP Cloud Function
  // ....
  .then(...
    // Somehow you get the object you mention in your question, through childSnapshot.val().k
    const kObject = childSnapshot.val().k;
    const bucket = defaultStorage.bucket(yourFileBucket);
    const promises = [];
    Object.keys(kObject).forEach(key => {
      // The values of the path p are obtained via kObject[key].p
      // Based on that we push the Promise returned by delete() to the promises array
      promises.push(bucket.file(kObject[key].p).delete());
    });
    return Promise.all(promises)
      .then(results => {
        // Here all the Promises that were in the promises array are resolved, which means that all the files are deleted
        res.send({ result: results.length + ' file(s) deleted' });
      })
      .catch(error => {
        res.status(500).send(error);
      });
});
You may be interested in watching the following official Firebase video by Doug Stevenson: https://youtu.be/7IkUgCLr5oA

Apply CSS Filters to cropped image and save/upload

I can already input the image and crop it. I tried to apply CSS filters to it, but it seems the CSS filters only apply to the img tag, not the actual image data.
I am using both Alyle cropping and ngx-image-cropper (tests). Both give me a base64 string for the cropped image. I am able to load the cropped image into the img tag and also upload it to the database.
onCropped(e: ImgCropperEvent) {
  this.croppedImage = e.dataURL;
  // console.log('cropped img: ', e.dataURL);
}

onloaded(e: ImgCropperEvent) {
  this.imagemOriginal = e.originalDataURL;
  this.cropper.center();
  console.log('img loaded', e.name);
}

onerror(e: ImgCropperErrorEvent) {
  console.warn(`'${e.name}' is not a valid image`, e);
}

// Apply filters /////////////////////////////////////////////////
change(crop: Crop): void {
  this.stylus = crop.nome;
  this.crops.forEach(function (value) {
    (value.nome === crop.nome) ? value.ehSelec = true : value.ehSelec = false;
  });
  // const canvas = document.getElementById('cropping'), image = document.createElement('img');
  // image.src = canvas.toDataURL('image/jpeg', 1.0);
  // document.body.appendChild(image);
}

enviarParanue(): void {
  const ref = firebase.storage().ref(`imagens/usuarios/idTeste`).child(`nomeTeste`);
  const stringa = this.removerString(this.croppedImage);
  ref.put(this.base64toBlob(stringa, 'image/png')).then((snapshot) => {
    // console.log('snapshot', snapshot.valueOf());
    ref.getDownloadURL().then(function (downloadURL) {
      console.log('File available at', downloadURL);
    });
  });
  // ref.putString(stringa, 'base64', {contentType: 'image/png'}).then((snapshot) => {
  //   // console.log('snapshot', snapshot.valueOf());
  //   ref.getDownloadURL().then(function(downloadURL) {
  //     console.log('File available at', downloadURL);
  //   });
  // });
}

removerString(stringa: string): string {
  return stringa.substring(23);
}

base64toBlob(base64Data: any, contentType: any) {
  contentType = contentType || '';
  const sliceSize = 1024;
  const byteCharacters = atob(base64Data);
  const bytesLength = byteCharacters.length;
  const slicesCount = Math.ceil(bytesLength / sliceSize);
  const byteArrays = new Array(slicesCount);
  for (let sliceIndex = 0; sliceIndex < slicesCount; ++sliceIndex) {
    const begin = sliceIndex * sliceSize;
    const end = Math.min(begin + sliceSize, bytesLength);
    const bytes = new Array(end - begin);
    for (let offset = begin, i = 0; offset < end; ++i, ++offset) {
      bytes[i] = byteCharacters[offset].charCodeAt(0);
    }
    byteArrays[sliceIndex] = new Uint8Array(bytes);
  }
  return new Blob(byteArrays, { type: contentType });
}
EXAMPLE OF THE CSS FILTERS:
.none {filter:none;}
.blur {filter:blur(2.5px);}
.brightness {filter:brightness(200%);}
.contrast {filter:contrast(200%);}
.drop-shadow {filter:drop-shadow(8px 8px 10px gray);}
.grayscale {filter:grayscale(100%);}
.hue-rotate {filter:hue-rotate(90deg);}
.invert {filter:invert(100%);}
.opacity {filter:opacity(30%);}
.saturate {filter:saturate(8);}
.sepia {filter:sepia(100%);}
.contrast-brightness {filter:contrast(200%) brightness(150%);}
The problem is... I don't know how to apply the CSS filters to the image so that the cropped version is uploaded with the effects (sepia, contrast, etc.) applied.
I tried to get the img src and convert it to a Blob, but that didn't work.
I ended up saving a string with the name of the filter in the database, so I apply the filter when I load the image. A good side of this is that I can change the filter whenever I want.
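For illustration, a rough sketch of that approach (photoRef, photo and filterName are made-up names, not from my actual code):
// Persist the chosen filter name together with the photo record (hypothetical names)
photoRef.set({ url: downloadURL, filterName: this.stylus }); // e.g. 'sepia'

// Later, when displaying the photo, reuse the stored name as a CSS class
const img = document.createElement('img');
img.src = photo.url;
img.className = photo.filterName; // picks up e.g. the .sepia rule shown above
document.body.appendChild(img);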

Scrolling only side menu div and others should be fixed when menu is open

I have a side menu, and when it's open the body can be partially seen. My side menu might be long, so you can scroll on it. But when the menu is scrolled to the bottom you then start scrolling the body, and I don't want this behaviour.
This is similar to "Scrolling only content div, others should be fixed", but I'm using React. The other content should be scrollable when my side menu is closed. Think of the content as the side menu in the example in the link. So far I'm using the same technique provided by that answer, but it's ugly (kinda jQuery):
preventOverflow = (menuOpen) => { // this is called when the side menu is toggled
  const body = document.getElementsByTagName('body')[0]; // this should be fixed when the side menu is open
  if (menuOpen) {
    body.className += ' overflow-hidden';
  } else {
    body.className = body.className.replace(' overflow-hidden', '');
  }
}

// css
.overflow-hidden {
  overflow-y: hidden;
}
What should I do with Reactjs?
You should make a meta component in React to change things on the body, as well as things like the document title. I made one a while ago that does this for me; I'll add it here.
Usage
render() {
  return (
    <div>
      <DocumentMeta bodyClasses={[isMenuOpen ? 'no-scroll' : '']} />
      ... rest of your normal code
    </div>
  )
}
DocumentMeta.jsx
import React from 'react';
import _ from 'lodash';
import withSideEffect from 'react-side-effect';

var HEADER_ATTRIBUTE = "data-react-header";

var TAG_NAMES = {
  META: "meta",
  LINK: "link",
};

var TAG_PROPERTIES = {
  NAME: "name",
  CHARSET: "charset",
  HTTPEQUIV: "http-equiv",
  REL: "rel",
  HREF: "href",
  PROPERTY: "property",
  CONTENT: "content"
};

var getInnermostProperty = (propsList, property) => {
  return _.result(_.find(propsList.reverse(), property), property);
};

var getTitleFromPropsList = (propsList) => {
  var innermostTitle = getInnermostProperty(propsList, "title");
  var innermostTemplate = getInnermostProperty(propsList, "titleTemplate");
  if (innermostTemplate && innermostTitle) {
    return innermostTemplate.replace(/\%s/g, innermostTitle);
  }
  return innermostTitle || "";
};

var getBodyIdFromPropsList = (propsList) => {
  var bodyId = getInnermostProperty(propsList, "bodyId");
  return bodyId;
};

var getBodyClassesFromPropsList = (propsList) => {
  return propsList
    .filter(props => props.bodyClasses && Array.isArray(props.bodyClasses))
    .map(props => props.bodyClasses)
    .reduce((classes, list) => classes.concat(list), []);
};

var getTagsFromPropsList = (tagName, uniqueTagIds, propsList) => {
  // Calculate list of tags, giving priority to the innermost component (end of the propsList)
  var approvedSeenTags = {};
  var validTags = _.keys(TAG_PROPERTIES).map(key => TAG_PROPERTIES[key]);
  var tagList = propsList
    .filter(props => props[tagName] !== undefined)
    .map(props => props[tagName])
    .reverse()
    .reduce((approvedTags, instanceTags) => {
      var instanceSeenTags = {};
      instanceTags.filter(tag => {
        for (var attributeKey in tag) {
          var value = tag[attributeKey].toLowerCase();
          var attributeKey = attributeKey.toLowerCase();
          if (validTags.indexOf(attributeKey) == -1) {
            return false;
          }
          if (!approvedSeenTags[attributeKey]) {
            approvedSeenTags[attributeKey] = [];
          }
          if (!instanceSeenTags[attributeKey]) {
            instanceSeenTags[attributeKey] = [];
          }
          if (!_.has(approvedSeenTags[attributeKey], value)) {
            instanceSeenTags[attributeKey].push(value);
            return true;
          }
          return false;
        }
      })
        .reverse()
        .forEach(tag => approvedTags.push(tag));
      // Update seen tags with tags from this instance
      _.keys(instanceSeenTags).forEach((attr) => {
        approvedSeenTags[attr] = _.union(approvedSeenTags[attr], instanceSeenTags[attr])
      });
      instanceSeenTags = {};
      return approvedTags;
    }, []);
  return tagList;
};

var updateTitle = title => {
  document.title = title || document.title;
};

var updateBodyId = (id) => {
  document.body.setAttribute("id", id);
};

var updateBodyClasses = classes => {
  document.body.className = "";
  classes.forEach(cl => {
    if (!cl || cl == "") return;
    document.body.classList.add(cl);
  });
};

var updateTags = (type, tags) => {
  var headElement = document.head || document.querySelector("head");
  var existingTags = headElement.querySelectorAll(`${type}[${HEADER_ATTRIBUTE}]`);
  existingTags = Array.prototype.slice.call(existingTags);
  // Remove any duplicate tags
  existingTags.forEach(tag => tag.parentNode.removeChild(tag));
  if (tags && tags.length) {
    tags.forEach(tag => {
      var newElement = document.createElement(type);
      for (var attribute in tag) {
        if (tag.hasOwnProperty(attribute)) {
          newElement.setAttribute(attribute, tag[attribute]);
        }
      }
      newElement.setAttribute(HEADER_ATTRIBUTE, "true");
      headElement.insertBefore(newElement, headElement.firstChild);
    });
  }
};

// NOTE: HTMLEntities below is assumed to come from an HTML-entity encoding library;
// it is not imported in this snippet and is only needed for server-side rendering.
var generateTagsAsString = (type, tags) => {
  var html = tags.map(tag => {
    var attributeHtml = Object.keys(tag)
      .map((attribute) => {
        const encodedValue = HTMLEntities.encode(tag[attribute], {
          useNamedReferences: true
        });
        return `${attribute}="${encodedValue}"`;
      })
      .join(" ");
    return `<${type} ${attributeHtml} ${HEADER_ATTRIBUTE}="true" />`;
  });
  return html.join("\n");
};

var reducePropsToState = (propsList) => ({
  title: getTitleFromPropsList(propsList),
  metaTags: getTagsFromPropsList(TAG_NAMES.META, [TAG_PROPERTIES.NAME, TAG_PROPERTIES.CHARSET, TAG_PROPERTIES.HTTPEQUIV, TAG_PROPERTIES.CONTENT], propsList),
  linkTags: getTagsFromPropsList(TAG_NAMES.LINK, [TAG_PROPERTIES.REL, TAG_PROPERTIES.HREF], propsList),
  bodyId: getBodyIdFromPropsList(propsList),
  bodyClasses: getBodyClassesFromPropsList(propsList),
});

var handleClientStateChange = ({title, metaTags, linkTags, bodyId, bodyClasses}) => {
  updateTitle(title);
  updateTags(TAG_NAMES.LINK, linkTags);
  updateTags(TAG_NAMES.META, metaTags);
  updateBodyId(bodyId);
  updateBodyClasses(bodyClasses)
};

var mapStateOnServer = ({title, metaTags, linkTags}) => ({
  title: HTMLEntities.encode(title),
  meta: generateTagsAsString(TAG_NAMES.META, metaTags),
  link: generateTagsAsString(TAG_NAMES.LINK, linkTags)
});

var DocumentMeta = React.createClass({
  propTypes: {
    title: React.PropTypes.string,
    titleTemplate: React.PropTypes.string,
    meta: React.PropTypes.arrayOf(React.PropTypes.object),
    link: React.PropTypes.arrayOf(React.PropTypes.object),
    children: React.PropTypes.oneOfType([
      React.PropTypes.object,
      React.PropTypes.array
    ]),
    bodyClasses: React.PropTypes.array,
  },
  render() {
    if (Object.is(React.Children.count(this.props.children), 1)) {
      return React.Children.only(this.props.children);
    } else if (React.Children.count(this.props.children) > 1) {
      return (
        <span>
          {this.props.children}
        </span>
      );
    }
    return null;
  },
});

DocumentMeta = withSideEffect(reducePropsToState, handleClientStateChange, mapStateOnServer)(DocumentMeta);

module.exports = DocumentMeta;
This component could probably be changed a little for your case (withSideEffect is used for both client and server side rendering... if you aren't using server side rendering then it's probably not completely necessary), but the component will work with client side rendering if you would like to use it there as well.
ReactJS doesn't have direct access to the <body> element, and that's the element that needs to have its overflow-y style changed. So while what you're doing isn't perhaps the prettiest code, it's not entirely wrong either.
The only real suggestion I'd give is (shudder) using inline styles on the body instead of a classname so as to avoid having to introduce the CSS declaration. As long as your menu is the only thing responsible for updating the overflow-y attribute, there's no reason you can't use an inline style on it. Mashing that down with the ?: operator results in fairly simple code:
body.style.overflowY = menuOpen ? "hidden" : "";
And then you can just delete the .overflow-hidden class in its entirety.
If for some reason multiple things are managing the overflow state of the body, you might want to stick with classnames and assign a unique one for each thing managing it, something like this:
if (menuOpen) {
  body.className += ' menu-open';
}
else {
  // Use some tricks from jQuery to remove the "menu-open" class more elegantly.
  var className = " " + body.className + " ";
  className = className.replace(" menu-open ", " ").replace(/\s+/g, " ");
  body.className = className.substr(1, className.length - 2);
}
CSS:
body.menu-open {
overflow-y: hidden;
}
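As a small aside (not part of the answer above), on browsers with classList support the add/remove logic can be collapsed into one call:
// The second argument of toggle forces the class on or off
document.body.classList.toggle('menu-open', menuOpen);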

How to avoid blocking while uploading a file using a Meteor method

I've created a Meteor method to upload a file. It's working well, but until the file is fully uploaded I cannot move around; all subscriptions seem to wait for the upload to finish... is there a way to avoid that?
Here is the code on the server :
Meteor.publish('product-photo', function (productId) {
  return Meteor.photos.find({productId: productId}, {limit: 1});
});

Meteor.methods({
  /**
   * Creates a photo
   * @param obj
   * @return {*}
   */
  createPhoto: function (obj) {
    check(obj, Object);
    // Filter attributes
    obj = filter(obj, [
      'name',
      'productId',
      'size',
      'type',
      'url'
    ]);
    // Check user
    if (!this.userId) {
      throw new Meteor.Error('not-connected');
    }
    // Check file name
    if (typeof obj.name !== 'string' || obj.name.length > 255) {
      throw new Meteor.Error('invalid-file-name');
    }
    // Check file type
    if (typeof obj.type !== 'string' || [
      'image/gif',
      'image/jpg',
      'image/jpeg',
      'image/png'
    ].indexOf(obj.type) === -1) {
      throw new Meteor.Error('invalid-file-type');
    }
    // Check file url
    if (typeof obj.url !== 'string' || obj.url.length < 1) {
      throw new Meteor.Error('invalid-file-url');
    }
    // Check file size
    if (typeof obj.size !== 'number' || obj.size <= 0) {
      throw new Meteor.Error('invalid-file-size');
    }
    // Check file max size
    if (obj.size > 1024 * 1024) {
      throw new Meteor.Error('file-too-large');
    }
    // Check if product exists
    if (!obj.productId || Meteor.products.find({_id: obj.productId}).count() !== 1) {
      throw new Meteor.Error('product-not-found');
    }
    // Limit the number of photos per user
    if (Meteor.photos.find({productId: obj.productId}).count() >= 3) {
      throw new Meteor.Error('max-photos-reached');
    }
    // Resize the photo if the data is in base64
    if (typeof obj.url === 'string' && obj.url.indexOf('data:') === 0) {
      obj.url = resizeImage(obj.url, 400, 400);
      obj.size = obj.url.length;
      obj.type = 'image/png';
    }
    // Add info
    obj.createdAt = new Date();
    obj.userId = this.userId;
    return Meteor.photos.insert(obj);
  }
});
And the code on the client:
Template.product.events({
  'change [name=photo]': function (ev) {
    var self = this;
    readFilesAsDataURL(ev, function (event, file) {
      var photo = {
        name: file.name,
        productId: self._id,
        size: file.size,
        type: file.type,
        url: event.target.result
      };
      Session.set('uploadingPhoto', true);
      // Save the file
      Meteor.call('createPhoto', photo, function (err, photoId) {
        Session.set('uploadingPhoto', false);
        if (err) {
          displayError(err);
        } else {
          notify(i18n("Transfert terminé pour {{name}}", photo));
        }
      });
    });
  }
});
I finally found the solution myself.
Explanation: the code I used was blocking the subscriptions because it was using only one method call to transfer the whole file from the first byte to the last one, which blocks the thread (I think, the one reserved for each user on the server) until the transfer is complete.
Solution: I split the file into chunks of about 8KB and send it chunk by chunk; this way the thread (or whatever was blocking the subscriptions) is freed after each chunk transfer.
The final working solution is in that post: How to write a file from an ArrayBuffer in JS
Client Code
// data comes from file.readAsArrayBuffer();
var total = data.byteLength;
var offset = 0;

var upload = function () {
  var length = 4096; // chunk size
  // adjust the last chunk size
  if (offset + length > total) {
    length = total - offset;
  }
  // I am using Uint8Array to create the chunk
  // because it can be passed to the Meteor.method natively
  var chunk = new Uint8Array(data, offset, length);
  if (offset < total) {
    // Send the chunk to the server and tell it what file to append to
    Meteor.call('uploadFileData', fileId, chunk, function (err, length) {
      if (!err) {
        offset += length;
        upload();
      }
    });
  }
};
upload();
Server code
var fs = Npm.require('fs');
var Future = Npm.require('fibers/future');

Meteor.methods({
  uploadFileData: function (fileId, chunk) {
    var fut = new Future();
    var path = '/uploads/' + fileId;
    // I tried that with no success
    chunk = String.fromCharCode.apply(null, chunk);
    // how to write the chunk that is an Uint8Array to the disk?
    fs.appendFile(path, new Buffer(chunk), function (err) {
      if (err) {
        fut.throw(err);
      } else {
        fut.return(chunk.length);
      }
    });
    return fut.wait();
  }
});
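As a side note, one common way to append the Uint8Array chunk without the String.fromCharCode round-trip (which goes through UTF-8 encoding and can corrupt binary data) is to build the Buffer directly from the typed array. A sketch, not necessarily what the linked post settles on:
// Sketch: write the chunk's raw bytes straight to disk
fs.appendFile(path, Buffer.from(chunk), function (err) {
  if (err) {
    fut.throw(err);
  } else {
    fut.return(chunk.length);
  }
});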
Improving @Karl's code:
Client
This function breaks the file into chunks and sends them to the server one by one.
function uploadFile(file) {
  const reader = new FileReader();
  let _offset = 0;
  let _total = file.size;
  return new Promise((resolve, reject) => {
    function readChunk() {
      var length = 10 * 1024; // chunk size
      // adjust the last chunk size
      if (_offset + length > _total) {
        length = _total - _offset;
      }
      if (_offset < _total) {
        const slice = file.slice(_offset, _offset + length);
        reader.readAsArrayBuffer(slice);
      } else {
        // EOF
        setProgress(100);
        resolve(true);
      }
    }
    reader.onload = function readerOnload() {
      let buffer = new Uint8Array(reader.result); // convert to binary
      Meteor.call('fileUpload', file.name, buffer, _offset,
        (error, length) => {
          if (error) {
            console.log('Oops, unable to import!');
            return false;
          } else {
            _offset += length;
            readChunk();
          }
        }
      );
    };
    reader.onloadend = function readerOnloadend() {
      setProgress(100 * _offset / _total);
    };
    readChunk();
  });
}
Server
The server then writes to a new file when the offset is zero, or appends to its end otherwise. It returns a promise, as I used an asynchronous function to write/append in order to avoid blocking the client.
if (Meteor.isServer) {
  var fs = require('fs');
  var Future = require('fibers/future');
}

Meteor.methods({
  // Upload file from client to server
  fileUpload(
    fileName: string,
    fileData: Uint8Array,
    offset: number) {
    check(fileName, String);
    check(fileData, Uint8Array);
    check(offset, Number);
    console.log(`[x] Received file ${fileName} data length: ${fileData.length}`);
    if (Meteor.isServer) {
      const fut = new Future();
      const filePath = '/tmp/' + fileName;
      const buffer = new Buffer(fileData);
      const jot = offset === 0 ? fs.writeFile : fs.appendFile;
      jot(filePath, buffer, 'binary', (err) => {
        if (err) {
          fut.throw(err);
        } else {
          fut.return(buffer.length);
        }
      });
      return fut.wait();
    }
  }
});
Usage
uploadFile(file)
  .then(() => {
    /* do your stuff */
  });
