Use polaroid effect in node.js imageMagick - node-imagemagick

In the terminal I can use the following snippet to create an image with a "polaroid design" (see http://www.imagemagick.org/Usage/thumbnails/#polaroid).
convert -caption 'mycaption' myimage.jpeg -thumbnail 250x250 \
-bordercolor Lavender -background gray40 -gravity South \
-font "Helvetica.ttf" -pointsize 12 -density 144 +polaroid \
polaroid.jpeg
How would I accomplish this in the node version of imageMagick/gm (https://github.com/aheckmann/gm)?
var gm = require('gm');
var fs = require('fs');
var imageMagick = gm.subClass({ imageMagick: true });
imageMagick("myimage.jpeg")
.resize(250, 250)
//CREATE POLAROID HERE SOMEHOW???
.write("polaroid.jpeg", function (err) {});
Thanks for any hints!

Use gm().command(), gm().in(), and gm().out() (see https://github.com/aheckmann/gm#custom-arguments):
var gm = require('gm');
var fs = require('fs');
var imageMagick = gm.subClass({ imageMagick: true });
imageMagick()
  .command("convert")
  .in("-caption", "mycaption")
  .in("myimage.jpeg")
  .in("-thumbnail", "250x250")
  .in("+polaroid")
  // insert other options...
  .write("polaroid.jpeg", function (err) {
    if (err) return console.log(err);
  });
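For example, the full command from the question maps option-for-option onto that chain (an untested sketch; the option values are copied verbatim from the shell snippet above):
var gm = require('gm');
var imageMagick = gm.subClass({ imageMagick: true });

imageMagick()
  .command("convert")
  .in("-caption", "mycaption")
  .in("myimage.jpeg")
  .in("-thumbnail", "250x250")
  .in("-bordercolor", "Lavender")
  .in("-background", "gray40")
  .in("-gravity", "South")
  .in("-font", "Helvetica.ttf")
  .in("-pointsize", "12")
  .in("-density", "144")
  .in("+polaroid")
  .write("polaroid.jpeg", function (err) {
    if (err) return console.log(err);
  });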

Related

How to store data from multi page to json?

Thank you for your attention. I wrote a mini project that scrapes news sites and stores the main text from them. I have tried many solutions to write JSON from my project instead of using console.log, but after scraping it always shows only one main text. I'm showing you my code so you can help me get a JSON file with all three news articles.
const { Cluster } = require('puppeteer-cluster');
const fs = require('fs');

const launchOptions = {
  headless: false,
  args: [
    '--disable-gpu',
    '--disable-dev-shm-usage',
    '--disable-web-security',
    '--disable-xss-auditor',
    '--disable-accelerated-2d-canvas',
    '--ignore-certificate-errors',
    '--ignore-certificate-errors-spki-list',
    '--no-zygote',
    '--no-sandbox',
    '--disable-setuid-sandbox',
    '--disable-webgl',
  ],
  ignoreHTTPSErrors: true,
  waitUntil: 'networkidle2',
};

(async () => {
  // Create a cluster with 2 workers
  const cluster = await Cluster.launch({
    monitor: true,
    concurrency: Cluster.CONCURRENCY_PAGE,
    maxConcurrency: 2,
    puppeteerOptions: launchOptions,
  });

  // Define a task: scrape the main text of a page
  await cluster.task(async ({ page, data: url }) => {
    await page.setRequestInterception(true);
    page.on('request', (request) => {
      if (['stylesheet', 'font', 'image', 'styles', 'other', 'media'].indexOf(request.resourceType()) !== -1) {
        request.abort();
      } else {
        request.continue();
      }
    });
    await page.goto(url);
    const scrapedData = await page.$eval('div[class="entry-content clearfix"]', el => el.innerText);
    // Problem: this overwrites test.json on every task, so only one page's text survives
    fs.writeFileSync('test.json', JSON.stringify(scrapedData, null, 2));
  });

  // Add some pages to queue
  cluster.queue('https://www.ettelaat.com/?p=526642');
  cluster.queue('https://www.ettelaat.com/?p=526640');
  cluster.queue('https://www.ettelaat.com/?p=526641');

  // Shutdown after everything is done
  await cluster.idle();
  await cluster.close();
})();
To gather all the outputs, I had to put my fs call below cluster.close():
kanopyDB = [];
// ...
kanopyDB = kanopyDB.concat(name);
// ...
await cluster.idle();
await cluster.close();
// stringify so output.json holds valid JSON
fs.writeFileSync('output.json', JSON.stringify(kanopyDB, null, 2), 'utf8');
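A runnable sketch of that approach (it reuses the question's selector and collects every page's text in memory before a single write at the end):
const { Cluster } = require('puppeteer-cluster');
const fs = require('fs');

(async () => {
  const cluster = await Cluster.launch({
    concurrency: Cluster.CONCURRENCY_PAGE,
    maxConcurrency: 2,
  });

  const results = []; // one entry per scraped page

  await cluster.task(async ({ page, data: url }) => {
    await page.goto(url);
    const text = await page.$eval('div[class="entry-content clearfix"]', el => el.innerText);
    results.push({ url, text });
  });

  cluster.queue('https://www.ettelaat.com/?p=526642');
  cluster.queue('https://www.ettelaat.com/?p=526640');
  cluster.queue('https://www.ettelaat.com/?p=526641');

  await cluster.idle();
  await cluster.close();

  // Write once, after all tasks have finished
  fs.writeFileSync('output.json', JSON.stringify(results, null, 2), 'utf8');
})();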

Filereader progress in Vue.js component

I have a component that should handle file uploads. It contains a Bootstrap Vue progress component.
I would like to track the file loading progress of the FileReader.
This is part of the Vue.js component:
<b-form-file accept=".jpg, .png, .gif, .jpeg" v-model="file" size="sm" @change="fileUpload"></b-form-file>
<b-progress :value="progress" :max="maxvalue" show-progress animated></b-progress>
This is my data:
data () {
  return {
    ...
    file: null,
    progress: 0,
    maxvalue: 100
  }
},
This is my code:
fileUpload(ev){
  var files = ev.target.files || ev.dataTransfer.files;
  const file = files[0];
  var reader = new FileReader();
  let _vue = this;
  reader.onprogress = function(e){
    let progress = Math.round((e.loaded / e.total) * 100);
    if(progress < 100){ _vue.progress = progress; }
  };
  reader.onload = function(event) {
    var dataURL = event.target.result;
    let image = new Image();
    if(file.size > 3000000) {
      _vue.form.file = null;
      alert('Dimensioni file eccessive'); // "File too large"
      return;
    }
    image.onload = function(){
      _vue.$refs.card.style.maxWidth = '250px';
      _vue.$refs.card.style.width = `${this.width}px`;
    }
    image.src = dataURL;
    _vue.form.file = dataURL;
  };
  reader.readAsDataURL(file);
}
If I set an alert I get the progress values; otherwise I don't.
I noticed that if I set two alerts sequentially, I see the first alert for every value until the end, and then the other one for the values in reverse.
Sorry for my English.
I resolved it.
The link that resolved my issue:
link
Thanks
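For reference, a minimal sketch of wiring FileReader progress into the component's reactive progress property (the lengthComputable guard and the onloadend fallback are assumptions on my part, since the fix behind the link isn't shown):
fileUpload(ev) {
  const files = ev.target.files || ev.dataTransfer.files;
  const reader = new FileReader();
  // arrow callbacks keep `this` bound to the component
  reader.onprogress = (e) => {
    // Note: for small local files onprogress may fire only once,
    // so the bar can jump straight to 100%.
    if (e.lengthComputable) {
      this.progress = Math.round((e.loaded / e.total) * 100);
    }
  };
  reader.onloadend = () => { this.progress = 100; }; // assumption: snap to done
  reader.readAsDataURL(files[0]);
}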

Save JPEG images from HTTP multipart/x-mixed-replace keep-alive stream to ubuntu server

I have a camera which sends JPEG images to a webserver through a continuous multipart HTTP stream. When I visit the stream's IP address, the browser reads the stream as a series of images, which mimics a video. I want to download the files from this stream to a remote server.
I do not know how to parse the stream and save the files, either directly on my Ubuntu server or through my Ruby on Rails application's filesystem.
Here is how the browser sees the stream:
Response Headers:
HTTP/1.1 200 OK
Content-Type: multipart/x-mixed-replace; boundary=frame
Request Headers:
Accept: text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8
Accept-Encoding: gzip, deflate
Accept-Language: en-US,en;q=0.9
Cache-Control: max-age=0
Connection: keep-alive
DNT: 1
Host: my-ip-address
Please help me find the correct approach to this problem.
You can use ffmpeg to record a continuous video stream. Since you are using Ubuntu, you can do it by simply running a command in your terminal and saving the stream to your remote server. The following is a sample ffmpeg command that saves a live stream to local disk:
ffmpeg -y -i http://stream2.cnmns.net/hope-mp3 hopestream-latest.mp3
In the above command, -i indicates the URL to be recorded, and "hopestream-latest.mp3" is the output mp3 file. You can replace this with your remote server file path.
I didn't have a sample server that produces such a stream, so I made one myself and used it to test the solution.
const request = require('request');
const fs = require('fs');

// Note: accumulating binary chunks in a JS string can corrupt image data;
// a Buffer-based accumulator would be more robust.
var boundary = "";
var last_image = "";
let next_type = 3; // start by expecting a boundary
let content_length = -1;
let content_type = '';

request.get({
  url: "http://localhost:9192/online.png",
  forever: true,
  headers: {
    'referer': 'http://localhost:9192/'
  },
  // encoding: 'utf-8'
})
.on('error', (err) =>
  console.log(err)
).on('response', (resp) => {
  boundary = resp.headers['content-type'].split('boundary=')[1];
  // Parser states:
  // 0 - data
  // 1 - content-type
  // 2 - content-length
  // 3 - boundary
  // 4 - blank line
  resp.on('data', (data) => {
    switch (next_type) {
      case 0: // image data
        last_image += data;
        if (last_image.length == content_length) {
          next_type = 3;
        }
        break;
      case 1: // content-type header
        if (data.toString() == "\r\n") {
          next_type = 3;
        } else {
          content_type = data.toString().toLowerCase().split("content-type:")[1].trim();
          next_type = 2;
        }
        break;
      case 2: // content-length header
        content_length = parseInt(data.toString().toLowerCase().split("content-length:")[1].trim());
        next_type = 4;
        break;
      case 3: // we have got a boundary; flush the previous image
        next_type = 1;
        if (last_image) {
          fs.writeFileSync("image.png", last_image);
        }
        console.log(last_image);
        last_image = "";
        break;
      case 4: // blank line before the data
        next_type = 0;
        break;
    }
  });
});
This is Node, since you were open to non-RoR solutions as well. Below is the test server I used:
streamServer.js
/* Real-Time PNG-Streaming HTTP User Counter
   Copyright Drew Gottlieb, 2012
   Free for any use, but don't claim
   that this is your work.
   Doesn't work on Windows because
   node-canvas only works on Linux and OSX. */

var moment = require('moment');
var http = require('http');
var _ = require('underscore');
var Backbone = require('backbone');
var Canvas = require('canvas');

var config = {
  port: 9192,
  host: "0.0.0.0",
  updateInterval: 3000, // 3 seconds
  multipartBoundary: "whyhellothere"
};

var Client = Backbone.Model.extend({
  initialize: function() {
    var req = this.get('req');
    var res = this.get('res');
    console.log("Page opened:", req.headers.referer);
    res.on('close', _.bind(this.handleClose, this));
    req.on('close', _.bind(this.handleClose, this));
    this.sendInitialHeaders();
    this.set('updateinterval', setInterval(_.bind(this.sendUpdate, this), config.updateInterval));
  },

  // Re-send the image in case it needs to be re-rendered
  sendUpdate: function() {
    if (this.get('sending')) return;
    if (!this.get('imagecache')) return;
    this.sendFrame(this.get('imagecache'));
  },

  // Sends the actual HTTP headers
  sendInitialHeaders: function() {
    this.set('sending', true);
    var res = this.get('res');
    res.writeHead(200, {
      'Connection': 'Close',
      'Expires': '-1',
      'Last-Modified': moment().utc().format("ddd, DD MMM YYYY HH:mm:ss") + ' GMT',
      'Cache-Control': 'no-store, no-cache, must-revalidate, max-age=0, post-check=0, pre-check=0, false',
      'Pragma': 'no-cache',
      'Content-Type': 'multipart/x-mixed-replace; boundary=--' + config.multipartBoundary
    });
    res.write("--" + config.multipartBoundary + "\r\n");
    this.set('sending', false);
  },

  // Sends an image frame, followed by an empty part to flush the image through
  sendFrame: function(image) {
    this.set('sending', true);
    this.set('imagecache', image);
    var res = this.get('res');
    res.write("Content-Type: image/png\r\n");
    res.write("Content-Length: " + image.length + "\r\n");
    res.write("\r\n");
    res.write(image);
    res.write("--" + config.multipartBoundary + "\r\n");
    res.write("\r\n");
    res.write("--" + config.multipartBoundary + "\r\n");
    this.set('sending', false);
  },

  // Handle a disconnect
  handleClose: function() {
    if (this.get('closed')) return;
    this.set('closed', true);
    console.log("Page closed:", this.get('req').headers.referer);
    this.collection.remove(this);
    clearInterval(this.get('updateinterval'));
  }
});

var Clients = Backbone.Collection.extend({
  model: Client,

  initialize: function() {
    this.on("add", this.countUpdated, this);
    this.on("remove", this.countUpdated, this);
  },

  // Handle the client count changing
  countUpdated: function() {
    var image = this.generateUserCountImage(this.size());
    this.each(function(client) {
      client.sendFrame(image);
    });
    console.log("Connections:", this.size());
  },

  // Generate a new image
  generateUserCountImage: function(count) {
    var canvas = new Canvas(200, 30);
    var ctx = canvas.getContext('2d');
    // Background
    ctx.fillStyle = "rgba(100, 149, 237, 0)";
    ctx.fillRect(0, 0, 200, 30);
    // Text
    ctx.fillStyle = "rgb(0, 100, 0)";
    ctx.font = "20px Impact";
    ctx.fillText("Users online: " + count, 10, 20);
    return canvas.toBuffer();
  }
});

function handleRequest(req, res) {
  switch (req.url) {
    case '/':
    case '/index.html':
      showDemoPage(req, res);
      break;
    case '/online.png':
      showImage(req, res);
      break;
    default:
      show404(req, res);
      break;
  }
}

function showDemoPage(req, res) {
  res.writeHead(200, {'Content-Type': 'text/html'});
  res.write("<h1>Users viewing this page:</h1>");
  res.write("<img src=\"/online.png\" />");
  res.write("<h5>(probably won't work on IE or Opera)</h5>");
  res.end();
}

function showImage(req, res) {
  // If this image is not embedded in an <img> tag, don't show it.
  if (!req.headers.referer) {
    res.writeHead(403, {'Content-Type': 'text/html'});
    res.end("You can't view this image directly.");
    return;
  }
  // Create a new client to handle this connection
  clients.add({
    req: req,
    res: res
  });
}

function show404(req, res) {
  res.writeHead(404, {'Content-Type': 'text/html'});
  res.end("<h1>not found</h1><br />go home");
}

// Ready, Set, Go!
var clients = new Clients();
http.createServer(handleRequest).listen(config.port, config.host);
console.log("Started.");
PS: Taken from https://gist.github.com/dag10/48e6d25415ca92318815
I also found that ffmpeg has a feature for saving the images:
-vframes option
Output a single frame from the video into an image file:
ffmpeg -f mjpeg -i http://192.168.1.203/stream -vframes 1 out.png
This example will output one frame (-vframes 1) into a PNG file.
fps video filter
Output one image every second, named out1.png, out2.png, out3.png, etc.
ffmpeg -f mjpeg -i http://192.168.1.203/stream -vf fps=1 out%d.png
Output one image every minute, named img001.jpg, img002.jpg, img003.jpg, etc. The %03d dictates that the ordinal number of each output image will be formatted using 3 digits.
ffmpeg -f mjpeg -i http://192.168.1.203/stream -vf fps=1/60 img%03d.jpg
Output one image every ten minutes:
ffmpeg -f mjpeg -i http://192.168.1.203/stream -vf fps=1/600 thumb%04d.bmp
PS: Taken from https://trac.ffmpeg.org/wiki/Create%20a%20thumbnail%20image%20every%20X%20seconds%20of%20the%20video

Compress .xls/xlsx files into .zip files jsZip

I'm a newbie to JavaScript/AngularJS, so please bear with me. I need a way to convert .xls/.xlsx files into .zip files using the jsZip library. I'm using alasql to generate the .xls file. I've looked all over for possible ways to create a zip of the xls files, but haven't come across any demo. (.txt and .doc files generate just fine, but a .xls file does not open if jsZip is used.) Any help would be appreciated!
What I need is an xls file generated dynamically, and that same file compressed into a zip.
EDIT:
Here's some of the code I tried (with no success):
var newExcelData = {'Name':'abc'};
//var res = alasql("SELECT * INTO XLSX('Summary.xlsx',{headers:true}) FROM ? ", [newExcelData]);
var zip = new JSZip();
zip.file(alasql("SELECT * INTO XLSX('Summary.xlsx',{headers:true}) FROM ? ", [newExcelData]));
zip.generateAsync({ type: "blob" })
  .then(function (content) {
    saveAs(content, "example.zip");
  });
PS: I'm able to make it work for generating the .xls file. Please refer to the code below:
var newExcelData = {'Name':'abc', 'Age':'12'};
var zip = new JSZip();
zip.file("test.xls", [newExcelData]);
zip.generateAsync({ type: "blob" })
  .then(function (content) {
    saveAs(content, "example.zip");
  });
But although the Excel sheet is generated, it is blank when opened.
Please help!!
Hi, here's an update:
I've tried to make use of the js-xlsx library - https://github.com/SheetJS/js-xlsx - to generate the xls file and then zip it. Please refer to the code below.
function Create_Zip() {
  function datenum(v, date1904) {
    if (date1904) v += 1462;
    var epoch = Date.parse(v);
    return (epoch - new Date(Date.UTC(1899, 11, 30))) / (24 * 60 * 60 * 1000);
  }

  function sheet_from_array_of_arrays(data, opts) {
    var ws = {};
    var range = { s: { c: 10000000, r: 10000000 }, e: { c: 0, r: 0 } };
    for (var R = 0; R != data.length; ++R) {
      for (var C = 0; C != data[R].length; ++C) {
        if (range.s.r > R) range.s.r = R;
        if (range.s.c > C) range.s.c = C;
        if (range.e.r < R) range.e.r = R;
        if (range.e.c < C) range.e.c = C;
        var cell = { v: data[R][C] };
        if (cell.v === null) continue;
        var cell_ref = XLSX.utils.encode_cell({ c: C, r: R });
        if (typeof cell.v === 'number') cell.t = 'n';
        else if (typeof cell.v === 'boolean') cell.t = 'b';
        else if (cell.v instanceof Date) {
          cell.t = 'n'; cell.z = XLSX.SSF._table[14];
          cell.v = datenum(cell.v);
        }
        else cell.t = 's';
        ws[cell_ref] = cell;
      }
    }
    if (range.s.c < 10000000) ws['!ref'] = XLSX.utils.encode_range(range);
    return ws;
  }

  var data = [[1, 2, 3], [true, false, null, "sheetjs"], ["foo", "bar", new Date("2014-02-19T14:30Z"), "0.3"], ["baz", null, "qux"]];
  var ws_name = "SheetJS";

  function Workbook() {
    if (!(this instanceof Workbook)) return new Workbook();
    this.SheetNames = [];
    this.Sheets = {};
  }

  var wb = new Workbook(), ws = sheet_from_array_of_arrays(data);

  /* add worksheet to workbook */
  wb.SheetNames.push(ws_name);
  wb.Sheets[ws_name] = ws;
  var wbout = XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });

  function s2ab(s) {
    var buf = new ArrayBuffer(s.length);
    var view = new Uint8Array(buf);
    for (var i = 0; i != s.length; ++i) view[i] = s.charCodeAt(i) & 0xFF;
    return buf;
  }

  var jsonse = JSON.stringify([s2ab(wbout)]);
  var testblob = new Blob([jsonse], { type: "application/json" });
  console.log(testblob);

  var zip = new JSZip();
  zip.file("trial.xls", testblob);
  var downloadFile = zip.generateAsync({ type: "blob" });
  saveAs(downloadFile, 'test.zip');
}
But the problem here is that I keep getting the error 'The data of 'trial.xls' is in an unsupported format !' in the console :(. Is there any way I can make this work?
I'm at my wits' end now :(
Not an answer (see below) but an explanation of what's going on:
To add a file, JSZip needs its binary content (as a Blob, Uint8Array, etc.). The line zip.file("test.xls", [newExcelData]); can't work, for example: [newExcelData] is not binary content but an array containing a JS object.
What you need to figure out is how to get the content of the xlsx file. SELECT * INTO XLSX('Summary.xlsx') will trigger a download and return 1; that's not what you want. I searched on my side but couldn't find a way to do it with alasql.
Once/if you find a solution, the JSZip part looks correct.
Edit, following your switch to js-xlsx:
You use JSZip v2 (required by js-xlsx), which doesn't support Blob inputs. However, wbout is a binary string, which is supported:
zip.file("trial.xls", wbout, {binary: true});
Also, zip.generateAsync was only added in JSZip v3; with v2, use zip.generate:
var downloadFile = zip.generate({type: "blob"});
saveAs(downloadFile, 'test.zip');
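Putting both fixes together, the tail end of Create_Zip would look roughly like this (a sketch; it drops the JSON.stringify/Blob detour entirely, since wbout can be passed straight to JSZip v2):
// wbout is the binary string from XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' })
var zip = new JSZip();
zip.file("trial.xls", wbout, { binary: true }); // binary string input, supported by JSZip v2
var downloadFile = zip.generate({ type: "blob" }); // v2 API: synchronous generate()
saveAs(downloadFile, 'test.zip');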
Here is the solution I found using JSZip, XLSX and File Saver libraries.
Import:
import * as XLSX from "xlsx";
import JSZip from 'jszip';
import { saveAs } from 'file-saver';
Here is an example of compressing a .xlsx inside a zip:
let zip = new JSZip();
const jsonData = [
  {
    "Product": "Red Velvet Cupcake",
    "Price": "6",
    "GluttenFree": "Yes",
  },
  {
    "Product": "Cheesecake",
    "Price": "15",
    "GluttenFree": "No",
  }
];
const workBook: XLSX.WorkBook = XLSX.utils.book_new();
const workSheet: XLSX.WorkSheet = XLSX.utils.json_to_sheet(jsonData);
XLSX.utils.book_append_sheet(workBook, workSheet, 'Bakery');
const workBookBuffer = XLSX.write(workBook, { bookType: 'xlsx', type: 'array' });
const fileData = new Blob([workBookBuffer], { type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8' });
zip.file('Products.xlsx', fileData);
zip.generateAsync({ type: "blob" }).then(function (blob) {
  saveAs(blob, "WorkBooks.zip");
});
This code generates a zip file named 'WorkBooks.zip' that contains the file 'Products.xlsx'.
Some file-saver examples: https://www.tabnine.com/code/javascript/modules/file-saver.
Here is the JSZip method used:
https://stuk.github.io/jszip/documentation/api_jszip/file_data.html

Browserify - multiple entry points

I am using Browserify within gulp. I am trying to compile my tests down to a single file as well. Unlike my main app, which I have working just fine, I am having trouble getting the tests to compile. The major difference is that the tests have multiple entry points; there isn't one single entry point like the app has. I am getting errors from Browserify that it can't find the entry point.
browserify = require 'browserify'
gulp = require 'gulp'
source = require 'vinyl-source-stream'

gulp.task 'tests', ->
  browserify
    entries: ['./app/js/**/*Spec.coffee']
    extensions: ['.coffee']
  .bundle
    debug: true
  .pipe source('specs.js')
  .pipe gulp.dest('./specs/')
Below is a task I was able to build that seems to solve the problem. Basically, I use an outside library to gather the file names as an array, and then pass that array as the entry points:
'use strict';

var config = require('../config');
var gulp = require('gulp');
var plumber = require('gulp-plumber');
var glob = require('glob');
var browserify = require('browserify');
var source = require('vinyl-source-stream');

gulp.task('tests', function(){
  var testFiles = glob.sync('./spec/**/*.js');
  return browserify({
      entries: testFiles,
      extensions: ['.jsx']
    })
    .bundle({debug: true})
    .pipe(source('app.js'))
    .pipe(plumber())
    .pipe(gulp.dest(config.dest.development));
});
Here's an alternate recipe that fits more with the gulp paradigm using gulp.src()
var gulp = require('gulp');
var browserify = require('browserify');
var transform = require('vinyl-transform');
var concat = require('gulp-concat');

gulp.task('browserify', function () {
  // use `vinyl-transform` to wrap around the regular ReadableStream returned by b.bundle()
  // so that we can use it down a vinyl pipeline as a vinyl file object.
  // `vinyl-transform` takes care of creating both streaming and buffered vinyl file objects.
  var browserified = transform(function(filename) {
    var b = browserify(filename, {
      debug: true,
      extensions: ['.coffee']
    });
    // you can now further configure/manipulate your bundle
    // you can perform transforms, e.g. 'coffeeify'
    // b.transform('coffeeify');
    // or even use browserify plugins, e.g. 'minifyify'
    // b.plugin('minifyify');
    // consult the browserify documentation at https://github.com/substack/node-browserify#methods for more available APIs
    return b.bundle();
  });

  return gulp.src(['./app/js/**/*Spec.coffee'])
    .pipe(browserified)
    .pipe(concat('spec.js'))
    .pipe(gulp.dest('./specs'));
});

gulp.task('default', ['browserify']);
For more details about how this works, this article that I wrote goes more in depth: http://medium.com/@sogko/gulp-browserify-the-gulp-y-way-bb359b3f9623
To start, you can write a suite.js that requires all the tests you want to run, and browserify that.
You can see two examples in my project https://github.com/mallim/sbangular.
One example for grunt-mocha-phantomjs
https://github.com/mallim/sbangular/blob/master/src/main/resources/js/suite.js
One example for protractor
https://github.com/mallim/sbangular/blob/master/src/main/resources/js/suite.js
This is just a start and I am sure there are more fancy ways available.
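For illustration, such a suite.js can be as simple as the following (the spec paths are hypothetical placeholders; list your own files):
// suite.js - a single browserify entry point that pulls in every test
require('./specs/homeControllerSpec.js');
require('./specs/userServiceSpec.js');
require('./specs/navbarDirectiveSpec.js');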
Here is a slightly more complicated example that builds files matched by a glob pattern into many output files, with watching and rebuilding of the separate files. It targets ES2015 rather than .coffee, but that is not a big difference:
var gulp = require("gulp");
var babelify = require("babelify");
var sourcemaps = require("gulp-sourcemaps");
var gutil = require("gulp-util");
var handleErrors = require("../utils/handleErrors.js");
var browserify = require("browserify");
var eventStream = require("event-stream");
var glob = require("glob");
var source = require("vinyl-source-stream");
var buffer = require("vinyl-buffer");
var watchify = require("watchify");

var SRC_PATH = "./src";
var BUILD_PATH = "./build";

var bundle = function (bundler, entryFilepath) {
  console.log(`Build: ${entryFilepath}`);
  return bundler.bundle()
    .on("error", handleErrors)
    .pipe(source(entryFilepath.replace(SRC_PATH, BUILD_PATH)))
    .pipe(buffer())
    .on("error", handleErrors)
    .pipe(
      process.env.TYPE === "development" ?
        sourcemaps.init({loadMaps: true}) :
        gutil.noop()
    )
    .on("error", handleErrors)
    .pipe(
      process.env.TYPE === "development" ?
        sourcemaps.write() :
        gutil.noop()
    )
    .on("error", handleErrors)
    .pipe(gulp.dest("."))
    .on("error", handleErrors);
};

var buildScripts = function (done, watch) {
  glob(`${SRC_PATH}/**/[A-Z]*.js`, function (err, files) {
    if (err) {
      done(err);
    }

    var tasks = files.map(function (entryFilepath) {
      var bundler = browserify({
        entries: [entryFilepath],
        debug: process.env.TYPE === "development",
        plugin: watch ? [watchify] : undefined
      })
      .transform(babelify, {
        presets: ["es2015"]
      });

      var build = bundle.bind(this, bundler, entryFilepath);
      if (watch) {
        bundler.on("update", build);
      }
      return build();
    });

    return eventStream
      .merge(tasks)
      .on("end", done);
  });
};

gulp.task("scripts-build", function (done) {
  buildScripts(done);
});

gulp.task("scripts-watch", function (done) {
  buildScripts(done, true);
});
Complete code here https://github.com/BigBadAlien/browserify-multy-build
