Compress .xls/.xlsx files into .zip files with JSZip

I'm a newbie to JavaScript/AngularJS, so please bear with me. I need a way to convert .xls/.xlsx files into .zip files using the JSZip library. I'm using alasql to generate the .xls file. I've looked all over for a way to zip the xls files, but haven't come across any demo (.txt and .doc files zip just fine, but the .xls file won't open if JSZip is used). Any help would be appreciated!
What I need is an xls file generated dynamically, and that same file compressed into a zip.
EDIT :-
Here's some of the code that I tried (but with no success):
var newExcelData = {'Name':'abc'};
//var res = alasql("SELECT * INTO XLSX('Summary.xlsx',{headers:true}) FROM ? ", [newExcelData]);
var zip = new JSZip();
zip.file(alasql("SELECT * INTO XLSX('Summary.xlsx',{headers:true}) FROM ? ", [newExcelData]));
zip.generateAsync({ type: "blob" })
.then(function (content) {
saveAs(content, "example.zip");
});
PS: I'm able to make it work for generating the .xls file.
Please refer to the code below:
var newExcelData = {'Name':'abc', 'Age':'12'};
var zip = new JSZip();
zip.file("test.xls", [newExcelData]);
zip.generateAsync({ type: "blob" })
.then(function (content) {
saveAs(content, "example.zip");
});
But although the excel sheet is generated, it is blank when opened.
Please help!!
Hi, here's an update :-
I've tried using the js-xlsx library - https://github.com/SheetJS/js-xlsx - to generate the xls file and then zip it. Please refer to the code below.
function Create_Zip() {
function datenum(v, date1904) {
if (date1904) v += 1462;
var epoch = Date.parse(v);
return (epoch - new Date(Date.UTC(1899, 11, 30))) / (24 * 60 * 60 * 1000);
}
function sheet_from_array_of_arrays(data, opts) {
var ws = {};
var range = { s: { c: 10000000, r: 10000000 }, e: { c: 0, r: 0 } };
for (var R = 0; R != data.length; ++R) {
for (var C = 0; C != data[R].length; ++C) {
if (range.s.r > R) range.s.r = R;
if (range.s.c > C) range.s.c = C;
if (range.e.r < R) range.e.r = R;
if (range.e.c < C) range.e.c = C;
var cell = { v: data[R][C] };
if (cell.v === null) continue;
var cell_ref = XLSX.utils.encode_cell({ c: C, r: R });
if (typeof cell.v === 'number') cell.t = 'n';
else if (typeof cell.v === 'boolean') cell.t = 'b';
else if (cell.v instanceof Date) {
cell.t = 'n'; cell.z = XLSX.SSF._table[14];
cell.v = datenum(cell.v);
}
else cell.t = 's';
ws[cell_ref] = cell;
}
}
if (range.s.c < 10000000) ws['!ref'] = XLSX.utils.encode_range(range);
return ws;
}
var data = [[1, 2, 3], [true, false, null, "sheetjs"], ["foo", "bar", new Date("2014-02-19T14:30Z"), "0.3"], ["baz", null, "qux"]];
var ws_name = "SheetJS";
function Workbook() {
if (!(this instanceof Workbook)) return new Workbook();
this.SheetNames = [];
this.Sheets = {};
}
var wb = new Workbook(), ws = sheet_from_array_of_arrays(data);
/* add worksheet to workbook */
wb.SheetNames.push(ws_name);
wb.Sheets[ws_name] = ws;
var wbout = XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });
function s2ab(s) {
var buf = new ArrayBuffer(s.length);
var view = new Uint8Array(buf);
for (var i = 0; i != s.length; ++i) view[i] = s.charCodeAt(i) & 0xFF;
return buf;
}
var jsonse = JSON.stringify([s2ab(wbout)]);
var testblob = new Blob([jsonse], { type: "application/json" });
console.log(testblob);
var zip = new JSZip();
zip.file("trial.xls", testblob);
var downloadFile = zip.generateAsync({ type: "blob" });
saveAs(downloadFile, 'test.zip');
}
But the problem here is that I keep getting this error in the console: 'The data of 'trial.xls' is in an unsupported format!' :(. Is there any way I can make this work?
I'm at my wit's end now :(

Not an answer (see below) but an explanation of what's going on:
To add a file, JSZip needs its binary content (as a Blob, Uint8Array, etc). The line zip.file("test.xls", [newExcelData]); can't work, for example: [newExcelData] is not binary content but an array containing a JS object.
What you need to figure out is how to get the content of the xlsx file. SELECT * INTO XLSX('Summary.xlsx') will trigger a download and return 1; that's not what you want. I searched on my side but can't find a way to do it with alasql.
Once/if you find the solution, the JSZip part looks correct.
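For illustration, these are the kinds of values JSZip is happy with (a minimal sketch):
var zip = new JSZip();
zip.file("readme.txt", "plain text content");    // a string
zip.file("data.bin", new Uint8Array([1, 2, 3])); // raw bytes
// what does NOT work: a plain JS object (or an array of objects),
// which is what zip.file("test.xls", [newExcelData]) was passing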
Edit, following your switch to js-xlsx:
You use JSZip v2 (required by js-xlsx), which doesn't support Blob inputs. However, wbout is a binary string, which is supported:
zip.file("trial.xls", wbout, {binary: true});
Then, replace zip.generateAsync (which was only added in JSZip v3) with the synchronous generate:
var downloadFile = zip.generate({type: "blob" });
saveAs(downloadFile, 'test.zip');
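Putting the two changes together, the tail end of the question's Create_Zip function would look roughly like this (a sketch, assuming JSZip v2 and FileSaver's saveAs are available as in the question):
var wbout = XLSX.write(wb, { bookType: 'xlsx', bookSST: true, type: 'binary' });
var zip = new JSZip();
// wbout is a binary string, so tell JSZip v2 to treat it as raw bytes
zip.file("trial.xlsx", wbout, { binary: true });
// JSZip v2 has no generateAsync(); generate() is synchronous
var blob = zip.generate({ type: "blob" });
saveAs(blob, "test.zip");
Naming the entry trial.xlsx rather than trial.xls also matters here, since the content written with bookType: 'xlsx' is an xlsx workbook.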

Here is the solution I found using JSZip, XLSX and File Saver libraries.
Import:
import * as XLSX from "xlsx";
import JSZip from 'jszip';
import { saveAs } from 'file-saver';
Here is an example of compressing a .xlsx inside a zip:
let zip = new JSZip();
const jsonData = [
  {
    "Product": "Red Velvet Cupcake",
    "Price": "6",
    "GluttenFree": "Yes",
  },
  {
    "Product": "Cheesecake",
    "Price": "15",
    "GluttenFree": "No",
  }
];
const workBook: XLSX.WorkBook = XLSX.utils.book_new();
const workSheet: XLSX.WorkSheet = XLSX.utils.json_to_sheet(jsonData);
XLSX.utils.book_append_sheet(workBook, workSheet, 'Bakery');
const workBookBuffer = XLSX.write(workBook, { bookType: 'xlsx', type: 'array' });
const fileData = new Blob([workBookBuffer], { type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet;charset=UTF-8' });
zip.file('Products.xlsx', fileData);
zip.generateAsync({ type: "blob" }).then(function (blob) {
  saveAs(blob, "WorkBooks.zip");
});
This code generates a zip file named 'WorkBooks.zip' that contains the file 'Products.xlsx'; when opened, the sheet 'Bakery' holds the two product rows from jsonData.
Some file-saver examples: https://www.tabnine.com/code/javascript/modules/file-saver.
Here is the JSZip method used:
https://stuk.github.io/jszip/documentation/api_jszip/file_data.html
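If you need several workbooks (or a folder inside the archive), the same pattern extends naturally; a minimal sketch reusing jsonData and the imports from above:
const zip = new JSZip();
const folder = zip.folder('reports'); // entries added here end up under reports/ in the zip
['Bakery', 'Beverages'].forEach((sheetName) => {
  const wb = XLSX.utils.book_new();
  XLSX.utils.book_append_sheet(wb, XLSX.utils.json_to_sheet(jsonData), sheetName);
  folder.file(`${sheetName}.xlsx`, XLSX.write(wb, { bookType: 'xlsx', type: 'array' }));
});
zip.generateAsync({ type: 'blob' }).then((blob) => saveAs(blob, 'WorkBooks.zip'));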

Related

Protractor: define Allure reporter resultsDir to be located elsewhere

I'm using Protractor and jasmine-allure-reporter. I'm running Protractor from a bash script, and the problem is with resultsDir, because I want the results to be generated in a specific folder. Currently they are generated in the ~/e2e/project_name/conf/allure-results/ folder. What I need is to have them generated in ~/e2e/reports/project_name/allure_results/. Simply entering the full path resultsDir: '/home/e2e/reports/project_name/allure-results' in the resultsDir parameter changes nothing. How can I solve this?
Current setup in conf.js file:
browser.manage().timeouts().implicitlyWait(15000);
var AllureReporter = require('jasmine-allure-reporter');
jasmine.getEnv().addReporter(new AllureReporter({
allureReport: {
resultsDir: 'allure-results'
}
}));
Desired setup in conf.js file:
browser.manage().timeouts().implicitlyWait(15000);
var AllureReporter = require('jasmine-allure-reporter');
jasmine.getEnv().addReporter(new AllureReporter({
allureReport: {
resultsDir: '~/e2e/reports/project_name/allure_results/allure-results'
}
}));
I found an answer for you:
There is a file named Jasmine2AllureReporter.js under \node_modules\jasmine-allure-reporter\src\.
Open that file and change the lines below; I've used D:\\K\\allure-results as an example.
Change the path in both pluginConfig.resultsDir and var outDir, and it will work.
function Jasmine2AllureReporter(userDefinedConfig, allureReporter) {
  var Status = {PASSED: 'passed', FAILED: 'failed', BROKEN: 'broken', PENDING: 'pending'};
  this.allure = allureReporter || allure;
  this.configure = function(userDefinedConfig) {
    var pluginConfig = {};
    userDefinedConfig = userDefinedConfig || {};
    pluginConfig.resultsDir = 'D:\\K\\allure-results';
    //pluginConfig.resultsDir = userDefinedConfig.resultsDir || 'allure-results';
    pluginConfig.basePath = userDefinedConfig.basePath || '.';
    // var outDir = path.resolve(pluginConfig.basePath, pluginConfig.resultsDir);
    var outDir = 'D:\\K\\allure-results';
    this.allure.setOptions({targetDir: outDir});
  };
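Applied to the paths from the question, the same in-place edit would look like this (a sketch; note that Node does not expand ~, so the home directory has to be spelled out):
// node_modules/jasmine-allure-reporter/src/Jasmine2AllureReporter.js
pluginConfig.resultsDir = '/home/e2e/reports/project_name/allure-results';
// ...
var outDir = '/home/e2e/reports/project_name/allure-results';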

How to Modify the Filename Before Uploading When Using the Meteor edgee:slingshot Package

I am trying to modify the filename of a file selected by a user before uploading it to Amazon S3 using the edgee:slingshot package. I can upload the file just fine; the problem is how to modify the filename.
I modified the name on the client by saving it into a variable. My problem now is how to access that variable, declared and assigned on the client, from the server environment. I just can't seem to wrap my head around it.
'change .js-submitTeamPaper' : function(event , template){
event.preventDefault();
let paper = template.paperDetails.get();
newFilename = paper[0].paper_name + "_"
_.map(paper[0].member , (member)=>{
newFilename += "_" + member.regnum + "_"
});
newFilename += paper[0]._id;
let file = event.target.value;
let fileArray = file.split(".");
let ext = fileArray[fileArray.length - 1];
newFilename += "." + ext;
studentFileUpload(event , template , 'submitTeamTermPaper' , 'divProgress');
}
The code to upload the file.
let _collectfile = (event , template) =>{
let file = event.target.files[0]
return file
}
let _showProgressBar = (div) => {
let _div = document.getElementById(div);
_div.classList.remove("hide");
}
let _closeProgressBar = (div) => {
let _div = document.getElementById(div);
_div.classList.add("hide");
}
let _slingShotUploadConfigure = (event , template , folder ,div) => {
let _upload = new Slingshot.Upload(folder);
let _file = _collectfile(event , template);
_showProgressBar(div);
_upload.send(_file , (error , downloadUrl) => {
template.uploader.set();
if (error){
//throw new Meteor.Error('500' , error.reason);
event.target.value = '';
sAlert.error(error.reason , {effect: 'bouncyflip',
position: 'bottom-right', timeout: 3000, onRouteClose: false, stack: false, offset: '150px'});
_closeProgressBar(div);
}
else{
sAlert.success('File was uploaded successfully' , {effect: 'genie',
position: 'bottom-right', timeout: 3000, onRouteClose: false, stack: false, offset: '150px'});
event.target.value = '';
template.downloadUrl.set(downloadUrl);
_closeProgressBar(div);
//return downloadUrl;
}
});
template.uploader.set(_upload);
}
export default function(event , template , folder ,div , progress){
return _slingShotUploadConfigure(event , template , folder,div , progress)
}
I then imported the module as studentFileUpload from '../../modules/handle-fileuploads';
Below is the meteor-slingshot code to do the upload
Slingshot.createDirective("submitTeamTermPaper", Slingshot.S3Storage, {
bucket: Meteor.settings.BucketName,
AWSAccessKeyId : Meteor.settings.AWSAccessKeyId,
AWSSecretAccessKey : Meteor.settings.AWSSecretAccessKey,
acl: "public-read",
authorize: function () {
// do some validation
// e.g. deny uploads if user is not logged in.
if (this.userId) {
return true;
}
},
key: function (file) {
//file here is the file to be uploaded; how do I get the modified filename (newFilename) that I defined on the client in here?
let timeStamp = + new Date;
//let newFilename = file.name.replace(/_/g , "-");
return 'Team_Term_Papers/' + timeStamp + '_' + '_' + newFilename;
}
});
In my code, newFilename is the variable that holds the modified filename. How do I access it from the server environment? Any help is really appreciated. Thanks.
You can pass extra information through to slingshot using metacontext:
metacontext = {newName: "foo"};
let _upload = new Slingshot.Upload(folder,metacontext);
Then you can access that metacontext in your key function:
key: function (file, metacontext) {
  let timeStamp = + new Date;
  let newFilename = metacontext ? metacontext.newName : file.name;
  newFilename = newFilename.replace(/_/g , "-");
  return 'Team_Term_Papers/' + timeStamp + '_' + '_' + newFilename;
}
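In the question's code that means threading the name through the upload helper, roughly like this (a sketch; the extra metacontext parameter on studentFileUpload and _slingShotUploadConfigure is an assumption, since the current helpers don't accept one):
// client event handler: pass the computed name along when starting the upload
studentFileUpload(event, template, 'submitTeamTermPaper', 'divProgress', { newName: newFilename });

// handle-fileuploads module: accept and forward the meta context to Slingshot
let _slingShotUploadConfigure = (event, template, folder, div, metacontext) => {
  let _upload = new Slingshot.Upload(folder, metacontext);
  // ...rest of the function unchanged
};
export default function (event, template, folder, div, metacontext) {
  return _slingShotUploadConfigure(event, template, folder, div, metacontext);
}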

Handling forking on different levels of Tasks

I'm really stuck on handling different levels of Tasks in Ramda. I'm trying to build a script that parses LESS files for comments and builds a pattern library site from the data in those comments plus inline HTML from an example file. It's all working (with a lot of help from SO) except for inlining the example file contents.
const target = path.join(__dirname, 'app/dist/templates/');
const source = path.join(__dirname, 'source/');
const stylesSource = path.join(__dirname, 'source/less/');
const template = path.join(__dirname, 'app/src/templates/page-template.html');
const writeTemplate = function(data) {
var rs = fs.createReadStream(template);
var ws = fs.createWriteStream(path.join(target, R.toLower(R.concat(data.name, ".html"))));
rs
.pipe(replaceStream("{{name}}", data.name))
.pipe(replaceStream("{{description}}", data.description))
.pipe(replaceStream("{{example}}", data.example))
.pipe(ws);
}
const inlineExample = function(data) {
return readFile(path.join(source, data.example));
}
// parseFile :: String -> { name :: String
// , description :: String
// , example :: String }
const parseFile = function parseFile(data) {
return {
name: R.trim(R.nth(1, R.match(/[$]name:(.*)/, data))),
description: R.trim(R.nth(1, R.match(/[$]description:(.*)/, data))),
example: R.trim(R.nth(1, R.match(/[$]example:(.*)/, data)))
};
};
// readDirectories :: String -> Task [String]
const readDirectories = function readDirectories(dir) {
return new Task(function (reject, resolve) {
glob(path.join(dir, "/**/*.less"), function (err, files) {
err == null ? resolve(files) : reject(err);
})
});
};
// readFile :: String -> Task String
const readFile = function readFile(filename) {
return new Task(function (reject, resolve) {
fs.readFile(path.normalize(filename), 'utf8', function (err, data) {
err == null ? resolve(data) : reject(err);
});
});
};
// dirs :: Task [String]
const dirs = readDirectories(stylesSource);
// files :: Task [Task String]
const files = R.map(R.map(readFile), dirs);
// commuted :: Task (Task [String])
const commuted = R.map(R.commute(Task.of), files);
// unnested :: Task [String]
const unnested = R.unnest(commuted);
// parsed :: Task [{ name :: String
// , description :: String
// , example :: String }]
const parsed = R.map(R.map(parseFile), unnested);
const inlined = R.map(R.chain(inlineExample), parsed);
inlined.fork(err => {
process.stderr.write(err.message);
},
data => {
R.map(writeTemplate, data);
util.log(R.concat('Library successfully generated at: ', target));
});
I can go through, read the directory, open the files (returning a new Task) and extract the template path from the comments. I'm then running (I think) inlineExample on the parsed record, which returns another Task, and that's where it's failing. I'm struggling to get that example-template Task forked so I can use its contents.
I'm open to any suggestions, but I suspect the problem is somewhere in R.map(writeTemplate, data) in the original success fork; I'm a bit out of my depth now.
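For reference, one untested direction that reuses the commute trick from above: read each example file, put its contents back onto the parsed record, and only then sequence the array of Tasks, so writeTemplate keeps receiving complete records (inlineRecord here is a hypothetical replacement for inlineExample):
// inlineRecord :: Record -> Task Record  (example path replaced by file contents)
const inlineRecord = data =>
  R.map(contents => R.assoc('example', contents, data),
        readFile(path.join(source, data.example)));

// inlined :: Task [Record]
const inlined = R.chain(
  records => R.commute(Task.of, R.map(inlineRecord, records)),
  parsed
);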

How to avoid blocking while uploading a file using a Meteor method

I've created a Meteor method to upload a file. It works well, but until the file is fully uploaded I cannot move around; all subscriptions seem to wait for the upload to finish... is there a way to avoid that?
Here is the code on the server :
Meteor.publish('product-photo', function (productId) {
return Meteor.photos.find({productId: productId}, {limit: 1});
});
Meteor.methods({
/**
* Creates a photo
* @param obj
* @return {*}
*/
createPhoto: function (obj) {
check(obj, Object);
// Filter attributes
obj = filter(obj, [
'name',
'productId',
'size',
'type',
'url'
]);
// Check user
if (!this.userId) {
throw new Meteor.Error('not-connected');
}
// Check file name
if (typeof obj.name !== 'string' || obj.name.length > 255) {
throw new Meteor.Error('invalid-file-name');
}
// Check file type
if (typeof obj.type !== 'string' || [
'image/gif',
'image/jpg',
'image/jpeg',
'image/png'
].indexOf(obj.type) === -1) {
throw new Meteor.Error('invalid-file-type');
}
// Check file url
if (typeof obj.url !== 'string' || obj.url.length < 1) {
throw new Meteor.Error('invalid-file-url');
}
// Check file size
if (typeof obj.size !== 'number' || obj.size <= 0) {
throw new Meteor.Error('invalid-file-size');
}
// Check file max size
if (obj.size > 1024 * 1024) {
throw new Meteor.Error('file-too-large');
}
// Check if product exists
if (!obj.productId || Meteor.products.find({_id: obj.productId}).count() !== 1) {
throw new Meteor.Error('product-not-found');
}
// Limit the number of photos per user
if (Meteor.photos.find({productId: obj.productId}).count() >= 3) {
throw new Meteor.Error('max-photos-reached');
}
// Resize the photo if the data is in base64
if (typeof obj.url === 'string' && obj.url.indexOf('data:') === 0) {
obj.url = resizeImage(obj.url, 400, 400);
obj.size = obj.url.length;
obj.type = 'image/png';
}
// Add info
obj.createdAt = new Date();
obj.userId = this.userId;
return Meteor.photos.insert(obj);
}
});
And the code on the client :
Template.product.events({
'change [name=photo]': function (ev) {
var self = this;
readFilesAsDataURL(ev, function (event, file) {
var photo = {
name: file.name,
productId: self._id,
size: file.size,
type: file.type,
url: event.target.result
};
Session.set('uploadingPhoto', true);
// Save the file
Meteor.call('createPhoto', photo, function (err, photoId) {
Session.set('uploadingPhoto', false);
if (err) {
displayError(err);
} else {
notify(i18n("Transfert terminé pour {{name}}", photo));
}
});
});
}
});
I finally found the solution myself.
Explanation: the code I used was blocking the subscriptions because it transferred the whole file, from the first byte to the last, in a single method call, which blocks the thread (I think the one reserved for each user on the server) until the transfer is complete.
Solution: I split the file into small chunks and sent it chunk by chunk; this way the thread (or whatever was blocking the subscriptions) is freed after each chunk transfer.
The final working solution is in this post: How to write a file from an ArrayBuffer in JS
Client Code
// data comes from file.readAsArrayBuffer();
var total = data.byteLength;
var offset = 0;
var upload = function() {
  var length = 4096; // chunk size
  // adjust the last chunk size
  if (offset + length > total) {
    length = total - offset;
  }
  // I am using Uint8Array to create the chunk
  // because it can be passed to the Meteor.method natively
  var chunk = new Uint8Array(data, offset, length);
  if (offset < total) {
    // Send the chunk to the server and tell it what file to append to
    Meteor.call('uploadFileData', fileId, chunk, function (err, length) {
      if (!err) {
        offset += length;
        upload();
      }
    });
  }
};
upload();
Server code
var fs = Npm.require('fs');
var Future = Npm.require('fibers/future');
Meteor.methods({
  uploadFileData: function(fileId, chunk) {
    var fut = new Future();
    var path = '/uploads/' + fileId;
    // I tried that with no success
    chunk = String.fromCharCode.apply(null, chunk);
    // how to write the chunk that is an Uint8Array to the disk ?
    fs.appendFile(path, new Buffer(chunk), function (err) {
      if (err) {
        fut.throw(err);
      } else {
        fut.return(chunk.length);
      }
    });
    return fut.wait();
  }
});
Improving @Karl's code:
Client
This function breaks the file into chunks and sends them to the server one by one.
function uploadFile(file) {
  const reader = new FileReader();
  let _offset = 0;
  let _total = file.size;
  return new Promise((resolve, reject) => {
    function readChunk() {
      var length = 10 * 1024; // chunk size
      // adjust the last chunk size
      if (_offset + length > _total) {
        length = _total - _offset;
      }
      if (_offset < _total) {
        const slice = file.slice(_offset, _offset + length);
        reader.readAsArrayBuffer(slice);
      } else {
        // EOF
        setProgress(100); // setProgress: your own progress-bar helper
        resolve(true);
      }
    }
    reader.onload = function readerOnload() {
      let buffer = new Uint8Array(reader.result); // convert to binary
      Meteor.call('fileUpload', file.name, buffer, _offset,
        (error, length) => {
          if (error) {
            console.log('Oops, unable to import!');
            return false;
          } else {
            _offset += length;
            readChunk();
          }
        }
      );
    };
    reader.onloadend = function readerOnloadend() {
      setProgress(100 * _offset / _total);
    };
    readChunk();
  });
}
Server
The server then writes a new file when the offset is zero, or appends to its end otherwise; the write/append is done with an asynchronous function (wrapped in a Future) so it doesn't block the client.
if (Meteor.isServer) {
  var fs = require('fs');
  var Future = require('fibers/future');
}
Meteor.methods({
  // Upload file from client to server
  fileUpload(
    fileName: string,
    fileData: Uint8Array,
    offset: number) {
    check(fileName, String);
    check(fileData, Uint8Array);
    check(offset, Number);
    console.log(`[x] Received file ${fileName} data length: ${fileData.length}`);
    if (Meteor.isServer) {
      const fut = new Future();
      const filePath = '/tmp/' + fileName;
      const buffer = new Buffer(fileData);
      // write a new file when the offset is 0, append otherwise
      const jot = offset === 0 ? fs.writeFile : fs.appendFile;
      jot(filePath, buffer, 'binary', (err) => {
        if (err) {
          fut.throw(err);
        } else {
          fut.return(buffer.length);
        }
      });
      return fut.wait();
    }
  }
});
Usage
uploadFile(file)
.then(() => {
/* do your stuff */
});
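For completeness, a sketch of wiring the helper to a file input in a Blaze template (the template and selector names are made up; setProgress is assumed to be your own progress indicator, and note that the promise above only resolves on success). On recent Node versions, Buffer.from(fileData) would also be preferred over new Buffer(fileData) on the server.
// client: start an upload when the user picks a file
Template.uploadForm.events({
  'change .js-upload'(event) {
    const file = event.target.files[0];
    if (!file) return;
    uploadFile(file).then(() => console.log('Upload finished:', file.name));
  }
});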

Replace all occurrences of a text with a specified replacement using grunt replace

I have a .html file which contains id="fixedtext", and I want to replace every occurrence of this id with id="uniquetext".
grunt-text-replace just replaces the first id it finds and does not parse the entire text.
Any idea how I can make either grunt-text-replace (https://github.com/yoniholmes/grunt-text-replace) or grunt-replace (https://www.npmjs.com/package/grunt-replace) do this for the entire document and not just the first occurrence?
replace: {
dist: {
options:{
patterns:[{
match:'id="fixedtext"',
replacement: 'id="'+something[i++] +'"'
}],
files:[
{
expand: true,
src:['./source.html'],
dest:'./dest.html'
}
]
}
}
},
This is what can be done if unique ids are to be added, assuming you already have a list of all the ids you want to insert when you run your task. In this case the ids are the file names.
Create your own wrapper task:
var path = require('path');
var fs = require('fs');
grunt.initConfig({
  wrap: {
    html: {
      header: '<script type="text/ng-template" ',
      footer: '</script>',
      src: './yourPathToFile/',
      dest: './yourPathToDest/'
    }
  }
});
grunt.registerMultiTask('wrap', 'wrap header and footer with custom id', function(){
  var data = this.data;
  getListOfFiles(data.src);
  function getListOfFiles(expand_path){
    var listOfFiles = fs.readdirSync(expand_path);
    for (var i = 0; i < listOfFiles.length; i++) {
      var completePath = expand_path + listOfFiles[i];
      var extension = path.extname(completePath);
      if (fs.lstatSync(completePath).isDirectory()) {
        var newDirPath = completePath + '/';
        console.log('true------ : \n', newDirPath);
        getListOfFiles(newDirPath);
      }
      else if (extension == '.html') {
        console.log('F:\n', completePath);
        var fullSrcPath = path.resolve(completePath);
        var content = grunt.file.read(fullSrcPath);
        var scriptId = 'id="' + listOfFiles[i] + '">';
        var header = grunt.template.process(data.header);
        var footer = grunt.template.process(data.footer);
        var wholeFile = header + scriptId + content + footer;
        grunt.file.write(fullSrcPath, wholeFile);
      }
    }
  }
});
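To run it, the multi-task just needs to be part of a registered task chain, for example (standard Grunt APIs):
// run with "grunt wrap:html" for the html target only, or "grunt" for the default chain
grunt.registerTask('default', ['wrap']);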
