I was using @firebase/testing on Jest, but since it's deprecated and we are instructed to use the new one, I decided to move to @firebase/rules-unit-testing.
Here is the code where I just switched them.
// NOTE(review): the npm scope separator was mangled to "#" in the paste —
// the package is "@firebase/rules-unit-testing"; restored below.
import { initializeAdminApp } from "@firebase/rules-unit-testing";
import "babel-polyfill";

// Smoke test against the Firestore emulator: an admin app bypasses security
// rules, so the write/read round-trip should always succeed.
it("is ok", async () => {
  const admin = initializeAdminApp({ projectId: "my-project" });
  try {
    const doc = admin.firestore().collection("items").doc("item-1");
    await doc.set({ name: "Item 1" });
    const ss = await doc.get();
    expect(ss.data()?.name).toBe("Item 1");
  } finally {
    // Always tear the app down so Jest can exit cleanly.
    await admin.delete();
  }
});
When I run this test with the emulator, it results in a "Cannot find module 'firebase-admin' from 'index.cjs.js'" error.
Cannot find module 'firebase-admin' from 'index.cjs.js'
3 |
4 | it("is ok", async () => {
> 5 | const admin = initializeAdminApp({ projectId: "my-project" });
| ^
It passes if it is the old @firebase/testing.
What did I miss?
Node.js v14.8.0
Jest 25.5.4
firebase 7.21.1
firebase-tools 8.11.2
@firebase/testing 0.20.11
@firebase/rules-unit-testing 1.0.4
firebase-admin is an npm package. Just installing it made everything work.
$ npm install -D firebase-admin
I started to use firebase-admin@^10.0.1 and I had this error when running jest tests. It couldn't map "firebase-admin/app" to "firebase-admin/lib/app" as expected. So I have mapped this manually in jest.config.ts:
import { pathsToModuleNameMapper } from 'ts-jest/utils';
import { compilerOptions } from './tsconfig.json';
...
export default {
...
moduleNameMapper: pathsToModuleNameMapper(
{
...compilerOptions.paths,
'firebase-admin/*': ['node_modules/firebase-admin/lib/*'],
},
{
prefix: '<rootDir>',
},
),
...
}
That worked for me.
And just to note, my tsconfig.json is like this:
{
"compilerOptions": {
...
"baseUrl": ".",
"paths": {
"modules/*": [
"src/modules/*"
],
"shared/*": [
"src/shared/*"
],
}
}
}
I have a function that will find a zip file in the directory and unzip the file, and it works fine. However, I'm wondering how I can run a test on this function using Jest.
const fs = require("fs");
const unzipper = require("unzipper");
// NOTE(review): the original required pngjs twice (and PNG is unused in this
// snippet); the duplicate assignment has been removed.
const PNG = require("pngjs").PNG;

const dir = __dirname + "/";

/**
 * Extracts a zip archive located next to this module.
 *
 * @param {string} fileName - name of the zip file.
 * @param {string} outputPath - directory the archive is extracted into.
 * @returns {Promise<string>} resolves with outputPath once extraction
 *   finishes; rejects when the file does not exist.
 */
function unzipFile(fileName, outputPath) {
  return new Promise((resolve, reject) => {
    // NOTE(review): the existence check uses `fileName` but the stream reads
    // `dir + fileName` — confirm both are meant to refer to the same path.
    if (fs.existsSync(fileName) !== true) {
      // Bug fix: without this `return` the code fell through and still tried
      // to create a read stream for the missing file.
      return reject("there is no zip file");
    }
    const createdFile = dir + fileName;
    const stream = fs
      .createReadStream(createdFile)
      .pipe(unzipper.Extract({ path: outputPath }));
    stream.on("finish", () => {
      console.log("file unzipped");
      resolve(outputPath);
    });
  });
}
I have read through the docs and tried to use .then, but I'm not sure what my expected output should look like when using Jest.
Here is the unit test solution:
index.js:
const fs = require("fs");
const unzipper = require("./unzipper");

const dir = __dirname + "/";

/**
 * Extracts a zip archive located next to this module.
 *
 * @param {string} fileName - name of the zip file.
 * @param {string} outputPath - directory the archive is extracted into.
 * @returns {Promise<string>} resolves with outputPath once the unzip stream
 *   emits "finish"; rejects when the file does not exist.
 */
function unzipFile(fileName, outputPath) {
  return new Promise((resolve, reject) => {
    if (fs.existsSync(fileName) !== true) {
      // Bug fix: `return` prevents falling through to createReadStream
      // after rejecting for a missing file.
      return reject("there is no zip file");
    }
    const createdFile = dir + fileName;
    const stream = fs
      .createReadStream(createdFile)
      .pipe(unzipper.Extract({ path: outputPath }));
    stream.on("finish", () => {
      console.log("file unzipped");
      resolve(outputPath);
    });
  });
}

module.exports = unzipFile;
index.spec.js:
// Unit test for unzipFile: all fs interaction is mocked, so no real files
// are touched and the stream "finish" event is fired by hand.
const unzipFile = require("./");
const fs = require("fs");
const unzipper = require("./unzipper");
// Replace the fs module with jest mocks. createReadStream returns the mock
// object itself (mockReturnThis) so `.pipe` can be chained on it.
jest.mock("fs", () => {
return {
existsSync: jest.fn(),
createReadStream: jest.fn().mockReturnThis(),
pipe: jest.fn()
};
});
describe("unzipFile", () => {
afterEach(() => {
// Clear call history and stubbed return values between tests.
jest.restoreAllMocks();
jest.resetAllMocks();
});
it("should unzip file correctly", async () => {
const filename = "go.pdf";
const outputPath = "workspace";
// Captures handlers registered via stream.on(event, handler) so the test
// can trigger "finish" manually below.
const eventHandlerMap = {};
const mStream = {
on: jest.fn().mockImplementation((event, handler) => {
eventHandlerMap[event] = handler;
})
};
const logSpy = jest.spyOn(console, "log");
fs.existsSync.mockReturnValueOnce(true);
// The piped stream is replaced by mStream so `.on` is observable.
fs.createReadStream().pipe.mockReturnValueOnce(mStream);
jest.spyOn(unzipper, "Extract").mockReturnValueOnce({});
const pending = unzipFile(filename, outputPath);
// Simulate the stream finishing, which resolves the pending promise.
eventHandlerMap["finish"]();
const actual = await pending;
expect(actual).toEqual(outputPath);
expect(fs.existsSync).toBeCalledWith(filename);
expect(fs.createReadStream).toBeCalled();
expect(fs.createReadStream().pipe).toBeCalledWith({});
expect(logSpy).toBeCalledWith("file unzipped");
expect(mStream.on).toBeCalledWith("finish", eventHandlerMap["finish"]);
});
});
Unit test result with coverage report:
PASS src/stackoverflow/59027031/index.spec.js
unzipFile
✓ should unzip file correctly (10ms)
console.log node_modules/jest-mock/build/index.js:860
file unzipped
-------------|----------|----------|----------|----------|-------------------|
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
-------------|----------|----------|----------|----------|-------------------|
All files | 92.31 | 50 | 75 | 92.31 | |
index.js | 91.67 | 50 | 100 | 91.67 | 8 |
unzipper.js | 100 | 100 | 0 | 100 | |
-------------|----------|----------|----------|----------|-------------------|
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 0 total
Time: 5.618s, estimated 9s
Source code: https://github.com/mrdulin/jest-codelab/tree/master/src/stackoverflow/59027031
I'm currently using TypeORM and Sinonjs in my project. But I'm not sure how to write the unit test in the right way. Especially how to stub a chained function call, like this
/**
 * Loads a single User by id via TypeORM's query builder.
 * Resolves with the matched entity; when no row matches, getOne yields
 * an empty result (undefined/null depending on TypeORM version — confirm).
 */
async find(id: number): Promise<User> {
const user = await this.connection
.getRepository(User)
.createQueryBuilder("user")
.where("user.id = :id", { id: id })
.getOne();
return user;
}
My test file
// NOTE(review): this arrangement is why the test fails with
// "createQueryBuilder is not a function" (see error below): getRepository is
// stubbed to return the `createQueryBuilder` stub *function itself*, not an
// object exposing a createQueryBuilder method. Also, callsArg(0) invokes the
// first argument as a callback rather than returning the next link in the
// chain. Each stub should instead return an object carrying the next method
// (or the real methods should be stubbed with returnsThis(), as the
// accepted answer shows).
it('should return a data from db', async () => {
let user = {
id: 1,
name: 'my name'
};
const getOne = Sinon.stub().resolves(user);
const where = Sinon.stub().callsArg(0);
const createQueryBuilder = Sinon.stub().callsArg(0);
const connection = {
getRepository: Sinon.stub()
};
connection.getRepository.withArgs(User).returns(createQueryBuilder);
createQueryBuilder.withArgs('user').returns(where);
where.withArgs('user.id = :id', { id: user.id }).returns(getOne);
});
I always got this error
TypeError: this.connection.getRepository(...).createQueryBuilder is not a function
Any advice is welcome!
Thank you very much!
You should use sinon.stub(obj, 'method').returnsThis() to
Causes the stub to return its this value.
E.g.
index.ts:
// Self-contained stand-in for a TypeORM connection: every chain step returns
// `this`, mirroring the fluent QueryBuilder API, so find() can be exercised
// and stubbed without a database.
type User = any;
export const model = {
connection: {
getRepository(model) {
return this;
},
createQueryBuilder(model) {
return this;
},
where(query, bindings) {
return this;
},
getOne() {
console.log('get one');
}
},
// Looks a user up via the chained query-builder calls; in the real app
// `connection` would be an actual TypeORM Connection.
async find(id: number): Promise<User> {
const user = await this.connection
.getRepository('User')
.createQueryBuilder('user')
.where('user.id = :id', { id: id })
.getOne();
return user;
}
};
index.spec.ts:
import { model } from './';
import sinon from 'sinon';
import { expect } from 'chai';
describe('model', () => {
describe('#find', () => {
afterEach(() => {
// Restore the stubbed connection methods after each test.
sinon.restore();
});
it('should find user', async () => {
let mUser = {
id: 1,
name: 'my name'
};
// returnsThis() keeps the fluent chain alive: each stub returns the
// connection object itself, so the next call in the chain resolves.
const getRepository = sinon.stub(model.connection, 'getRepository').returnsThis();
const createQueryBuilder = sinon.stub(model.connection, 'createQueryBuilder').returnsThis();
const where = sinon.stub(model.connection, 'where').returnsThis();
// Terminal step of the chain resolves with the fake user.
const getOne = sinon.stub(model.connection, 'getOne').resolves(mUser);
const user = await model.find(1);
expect(user).to.be.eql(mUser);
// Verify each link of the chain received the expected arguments.
expect(getRepository.calledWith('User')).to.be.true;
expect(createQueryBuilder.calledWith('user')).to.be.true;
expect(where.calledWith('user.id = :id', { id: 1 })).to.be.true;
expect(getOne.calledOnce).to.be.true;
});
});
});
Unit test result with coverage report:
model
#find
✓ should find user
1 passing (13ms)
---------------|----------|----------|----------|----------|-------------------|
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
---------------|----------|----------|----------|----------|-------------------|
All files | 86.21 | 100 | 55.56 | 84.62 | |
index.spec.ts | 100 | 100 | 100 | 100 | |
index.ts | 50 | 100 | 20 | 42.86 | 6,9,12,15 |
---------------|----------|----------|----------|----------|-------------------|
Source code: https://github.com/mrdulin/mocha-chai-sinon-codelab/tree/master/src/stackoverflow/57843415
I am getting the following error while trying to setup a component in my jest test :
Invariant Violation: Could not find "store" in either the context or
props of "Connect(TestComponent)". Either wrap the root component in a
&lt;Provider&gt;, or explicitly pass "store" as a prop to
"Connect(TestComponent)".
My test looks like this :
import React from 'react';
import { shallow } from 'enzyme';
import { Map } from 'immutable';
import { createStore } from 'redux';
// NOTE(review): trailing space inside the module path looks accidental.
import TestComponent from '../TestComponent ';
import { Provider } from 'react-redux';
describe('test ', () => {
test(' testing', () => {
const state = { blah: 1 };
const reducer = s => s; //dummy reducer
const store = createStore(reducer, state);
const component = (
<Provider store={store}>
<TestComponent />
</Provider>
);
// NOTE(review): shallow() renders only one level deep; .html() then fully
// renders the subtree, which is presumably where the connected child fails
// to find the store — try mount(), or pass `store` as a prop (see answer).
const wrapper = shallow(component);
let json = wrapper.html();
expect(json).toMatchSnapshot();
});
});
and the component being tested looks like this :
import React, { Component } from 'react';
import { connect } from 'react-redux';
class TestComponent extends Component {
// NOTE(review): this render returns undefined — React requires render to
// return a node (or at least null), so this is likely a second problem
// beyond the missing store.
render = () => {};
}
// The whole redux state is mapped onto the `blah` prop.
function mapStateToProps(state) {
return { blah: state };
}
export default connect(
mapStateToProps,
null
)(TestComponent);
I'm not exactly sure what is wrong with this. It all looks kosher to me. When calling html() it cannot find the store.
Try making your wrapper component a function instead of variable:
// Render-time wrapper: builds the Provider tree when called, so the store is
// attached when the test renders rather than at module-definition time.
// NOTE(review): `store` is not defined in this snippet — it must exist in the
// enclosing scope (e.g. from createStore or redux-mock-store).
export const testWrapper = Component => {
return (
<Provider store={store}>
{Component}
</Provider>
);
};
const wrapper = shallow(testWrapper(<TestComponent/>));
let json = wrapper.html();
expect(json).toMatchSnapshot();
Also I'd recommend looking into redux-mock-store for testing.
Here is the solution: you need to create a mocked store and pass it to the component wrapped by the connect function.
index.ts:
import React, { Component } from 'react';
import { connect } from 'react-redux';

interface ITestComponentProps {
  blah: any;
  // Lets tests pass the store directly to the connected component,
  // avoiding the need for a <Provider> wrapper.
  store: any;
}

/**
 * Unconnected component, exported separately so tests can find it inside the
 * connect() wrapper and assert on the props derived from state.
 * NOTE: the constructor that only called super(props) was redundant and has
 * been removed — behavior is unchanged.
 */
export class TestComponent extends Component<ITestComponentProps> {
  public render() {
    return <div>{this.props.blah}</div>;
  }
}

// The whole redux state is mapped onto the `blah` prop.
function mapStateToProps(state) {
  return { blah: state };
}

export default connect(mapStateToProps)(TestComponent);
Unit test:
import React from 'react';
import { shallow } from 'enzyme';
import ConnectedTestComponent, { TestComponent } from './';
import configureMockStore from 'redux-mock-store';
// Minimal fake state: mapStateToProps maps the whole state onto `blah`.
const state = 1;
const mockStore = configureMockStore();
const store = mockStore(state);
describe('test', () => {
it('t1', () => {
// Passing `store` as a prop satisfies connect() without a <Provider>.
const connetedTestComponentWrapper = shallow(<ConnectedTestComponent store={store} />);
const testComponentWrapper = connetedTestComponentWrapper.find(TestComponent);
expect(testComponentWrapper.prop('blah')).toBe(state);
expect(testComponentWrapper.html()).toMatchSnapshot();
});
});
Unit test result with 100% coverage:
PASS src/stackoverflow/57290601/index.spec.tsx
test
✓ t1 (54ms)
-----------|----------|----------|----------|----------|-------------------|
File | % Stmts | % Branch | % Funcs | % Lines | Uncovered Line #s |
-----------|----------|----------|----------|----------|-------------------|
All files | 100 | 100 | 100 | 100 | |
index.tsx | 100 | 100 | 100 | 100 | |
-----------|----------|----------|----------|----------|-------------------|
Test Suites: 1 passed, 1 total
Tests: 1 passed, 1 total
Snapshots: 1 passed, 1 total
Time: 3.089s, estimated 5s
snapshot:
// Jest Snapshot v1
exports[`test t1 1`] = `"<div>1</div>"`;
Here is the completed demo: https://github.com/mrdulin/jest-codelab/tree/master/src/stackoverflow/57290601
With ES6, I can import several exports from a file like this:
import {ThingA, ThingB, ThingC} from 'lib/things';
However, I like the organization of having one module per file. I end up with imports like this:
import ThingA from 'lib/things/ThingA';
import ThingB from 'lib/things/ThingB';
import ThingC from 'lib/things/ThingC';
I would love to be able to do this:
import {ThingA, ThingB, ThingC} from 'lib/things/*';
or something similar, with the understood convention that each file contains one default export, and each module is named the same as its file.
Is this possible?
I don't think this is possible, but afaik the resolution of module names is up to module loaders, so there might be a loader implementation that does support this.
Until then, you could use an intermediate "module file" at lib/things/index.js that just contains
// Re-export everything from each sibling module. Bug fix: the specifiers
// need a leading "./" — a bare 'ThingA' would resolve as an npm package
// name, not as the local file lib/things/ThingA.
export * from './ThingA';
export * from './ThingB';
export * from './ThingC';
and it would allow you to do
import {ThingA, ThingB, ThingC} from 'lib/things';
Just a variation on the theme already provided in the answer, but how about this:
In a Thing,
export default function ThingA () {}
In things/index.js,
export {default as ThingA} from './ThingA'
export {default as ThingB} from './ThingB'
export {default as ThingC} from './ThingC'
Then to consume all the things elsewhere,
import * as things from './things'
things.ThingA()
Or to consume just some of things,
import {ThingA,ThingB} from './things'
The current answers suggest a workaround but it's bugged me why this doesn't exist, so I've created a babel plugin which does this.
Install it using:
npm i --save-dev babel-plugin-wildcard
then add it to your .babelrc with:
{
"plugins": ["wildcard"]
}
see the repo for detailed install info
This allows you to do this:
import * as Things from './lib/things';
// Do whatever you want with these :D
Things.ThingA;
Things.ThingB;
Things.ThingC;
again, the repo contains further information on what exactly it does, but doing it this way avoids creating index.js files and also happens at compile-time to avoid doing readdirs at runtime.
Also with a newer version you can do exactly like your example:
import { ThingsA, ThingsB, ThingsC } from './lib/things/*';
works the same as the above.
You can now use async import():
import fs = require('fs');
and then:
fs.readdir('./someDir', (err, files) => {
files.forEach(file => {
const module = import('./' + file).then(m =>
m.callSomeMethod();
);
// or const module = await import('file')
});
});
Great gugly muglys! This was harder than it needed to be.
Export one flat default
This is a great opportunity to use spread (... in { ...Matters, ...Contacts } below:
// imports/collections/Matters.js
export default { // default export
hello: 'World',
something: 'important',
};
// imports/collections/Contacts.js
export default { // default export
hello: 'Moon',
email: 'hello#example.com',
};
// imports/collections/index.js
import Matters from './Matters'; // import default export as var 'Matters'
import Contacts from './Contacts';
export default { // default export
...Matters, // spread Matters, overwriting previous properties
...Contacts, // spread Contacts, overwriting previosu properties
};
// imports/test.js
import collections from './collections'; // import default export as 'collections'
console.log(collections);
Then, to run babel compiled code from the command line (from project root /):
$ npm install --save-dev @babel/core @babel/cli @babel/preset-env @babel/node
(trimmed)
$ npx babel-node --presets @babel/preset-env imports/test.js
{ hello: 'Moon',
something: 'important',
email: 'hello#example.com' }
Export one tree-like default
If you'd prefer to not overwrite properties, change:
// imports/collections/index.js
import Matters from './Matters'; // import default as 'Matters'
import Contacts from './Contacts';
export default { // export default
Matters,
Contacts,
};
And the output will be:
$ npx babel-node --presets @babel/preset-env imports/test.js
{ Matters: { hello: 'World', something: 'important' },
Contacts: { hello: 'Moon', email: 'hello#example.com' } }
Export multiple named exports w/ no default
If you're dedicated to DRY, the syntax on the imports changes as well:
// imports/collections/index.js
// export default as named export 'Matters'
export { default as Matters } from './Matters';
export { default as Contacts } from './Contacts';
This creates 2 named exports w/ no default export. Then change:
// imports/test.js
import { Matters, Contacts } from './collections';
console.log(Matters, Contacts);
And the output:
$ npx babel-node --presets @babel/preset-env imports/test.js
{ hello: 'World', something: 'important' } { hello: 'Moon', email: 'hello#example.com' }
Import all named exports
// imports/collections/index.js
// export default as named export 'Matters'
export { default as Matters } from './Matters';
export { default as Contacts } from './Contacts';
// imports/test.js
// Import all named exports as 'collections'
import * as collections from './collections';
console.log(collections); // interesting output
console.log(collections.Matters, collections.Contacts);
Notice the destructuring import { Matters, Contacts } from './collections'; in the previous example.
$ npx babel-node --presets @babel/preset-env imports/test.js
{ Matters: [Getter], Contacts: [Getter] }
{ hello: 'World', something: 'important' } { hello: 'Moon', email: 'hello#example.com' }
In practice
Given these source files:
/myLib/thingA.js
/myLib/thingB.js
/myLib/thingC.js
Creating a /myLib/index.js to bundle up all the files defeats the purpose of import/export. It would be easier to make everything global in the first place, than to make everything global via import/export via index.js "wrapper files".
If you want a particular file, import thingA from './myLib/thingA'; in your own projects.
Creating a "wrapper file" with exports for the module only makes sense if you're packaging for npm or on a multi-year multi-team project.
Made it this far? See the docs for more details.
Also, yay for Stackoverflow finally supporting three `s as code fence markup.
Similar to the accepted answer but it allows you to scale without the need of adding a new module to the index file each time you create one:
./modules/moduleA.js
export const example = 'example';
export const anotherExample = 'anotherExample';
./modules/index.js
// require all modules on the path and with the pattern defined
const req = require.context('./', true, /.js$/);
const modules = req.keys().map(req);
// export all modules
module.exports = modules;
./example.js
import { example, anotherExample } from './modules'
If you are using webpack. This imports files automatically and exports as api namespace.
So no need to update on every file addition.
// Bug fix: `camelCase` is a *named* export of lodash-es; the original
// default import bound the whole lodash object, so camelCase(...) would not
// be the function (the TypeScript variant below already imports it named).
import { camelCase } from "lodash-es";

// webpack-only: require.context scans this directory for .js modules at
// build time, so new files are picked up without editing this index.
const requireModule = require.context("./", false, /\.js$/);
const api = {};

requireModule.keys().forEach(fileName => {
  // Skip this index file itself.
  if (fileName === "./index.js") return;
  // "./userApi.js" -> "userApi"
  const moduleName = camelCase(fileName.replace(/(\.\/|\.js)/g, ""));
  api[moduleName] = {
    ...requireModule(fileName).default
  };
});

export default api;
For Typescript users;
// webpack-only: require.context enumerates the folder's .ts modules at
// build time and exposes each module's default export under a camelCased
// key derived from its file name.
import { camelCase } from "lodash-es"
const requireModule = require.context("./folderName", false, /\.ts$/)
// Index signature so modules can be attached under dynamic keys.
interface LooseObject {
[key: string]: any
}
const api: LooseObject = {}
requireModule.keys().forEach(fileName => {
// Skip this index file itself.
if (fileName === "./index.ts") return
// "./userApi.ts" -> "userApi"
const moduleName = camelCase(fileName.replace(/(\.\/|\.ts)/g, ""))
api[moduleName] = {
...requireModule(fileName).default,
}
})
export default api
I've used them a few times (in particular for building massive objects splitting the data over many files (e.g. AST nodes)), in order to build them I made a tiny script (which I've just added to npm so everyone else can use it).
Usage (currently you'll need to use babel to use the export file):
$ npm install -g folder-module
$ folder-module my-cool-module/
Generates a file containing:
export {default as foo} from "./module/foo.js"
export {default as default} from "./module/default.js"
export {default as bar} from "./module/bar.js"
...etc
Then you can just consume the file:
import * as myCoolModule from "my-cool-module.js"
myCoolModule.foo()
Just another approach to @Bergi's answer
// lib/things/index.js
import ThingA from './ThingA';
import ThingB from './ThingB';
import ThingC from './ThingC';
// NOTE(review): this exports a single default *object*; importing it as
// `import {ThingA} from './lib/things'` only works through CommonJS/Babel
// interop, not in strict ES modules — named re-exports are the safer form.
export default {
ThingA,
ThingB,
ThingC
}
Uses
import {ThingA, ThingB, ThingC} from './lib/things';
Node.js? Do it like this:
Create a folder with index.js, in index file, add this:
var GET = require('./GET');
var IS = require('./IS');
var PARSE = require('./PARSE');
module.exports = { ...GET, ...IS, ...PARSE};
And, in file GET.js, or IS.js export as normal:
module.exports = { /* something as you like */}
And now, you only need to include index.js, like this:
const Helper = require('./YourFolder');
Helper will include all of the functions in YourFolder.
Good day!
This is not exactly what you asked for but, with this method, I can iterate through componentsList in my other files and use functions such as componentsList.map(...), which I find pretty useful!
import StepOne from './StepOne';
import StepTwo from './StepTwo';
import StepThree from './StepThree';
import StepFour from './StepFour';
import StepFive from './StepFive';
import StepSix from './StepSix';
import StepSeven from './StepSeven';
import StepEight from './StepEight';
// Returns the wizard steps in display order; each entry pairs the step with
// a stable key suitable for React list rendering.
// NOTE(review): StepOne() etc. are invoked as plain functions here, not
// rendered as React elements (<StepOne />) — confirm this is intended.
const componentsList= () => [
{ component: StepOne(), key: 'step1' },
{ component: StepTwo(), key: 'step2' },
{ component: StepThree(), key: 'step3' },
{ component: StepFour(), key: 'step4' },
{ component: StepFive(), key: 'step5' },
{ component: StepSix(), key: 'step6' },
{ component: StepSeven(), key: 'step7' },
{ component: StepEight(), key: 'step8' }
];
export default componentsList;
You can use require as well:
// Collected controller modules, in the order they were loaded.
const moduleHolder = [];

/**
 * Recursively walks `path`: directories are descended into depth-first, and
 * every file found is require()d with its export pushed onto moduleHolder.
 * Relies on `fs` and `pathModule` (node:path) being in scope.
 *
 * @param {string} path - file or directory to load modules from.
 */
function loadModules(path) {
  const stat = fs.lstatSync(path);
  if (stat.isDirectory()) {
    // Directory: recurse into each entry (idiomatic for...of replaces the
    // original index loop with comma-chained declarations).
    for (const entry of fs.readdirSync(path)) {
      loadModules(pathModule.join(path, entry));
    }
  } else {
    // File: load it and keep the exported controller.
    const controller = require(path);
    moduleHolder.push(controller);
  }
}
Then use your moduleHolder with dynamically loaded controllers:
loadModules(DIR)
for (const controller of moduleHolder) {
controller(app, db)
}
I was able to take from user atilkan's approach and modify it a bit:
For Typescript users;
// webpack-only: dynamically import every .ts module in the folder and expose
// each on window under its file name, then instantiate it.
// NOTE(review): '#/folder/with/modules' looks like a mangled '@/…' webpack
// path alias — confirm against the project's resolver config. Also note the
// keys are './name.ts'-style strings, so mod[fileName] presumably relies on
// matching export names — verify.
require.context('#/folder/with/modules', false, /\.ts$/).keys().forEach((fileName => {
import('#/folder/with/modules' + fileName).then((mod) => {
(window as any)[fileName] = mod[fileName];
const module = new (window as any)[fileName]();
// use module
});
}));
if you don't export default in A, B, C but just export {} then it's possible to do so
// things/A.js
export function A() {}
// things/B.js
export function B() {}
// things/C.js
export function C() {}
// foo.js
// Bug fix: the module specifier must be a quoted string, and the files above
// live under "things", not "thing".
// NOTE(review): a namespace import reads ONE module — for this to expose
// A, B and C, things/index.js must re-export them (e.g. export * from each).
import * as Foo from './things';
Foo.A();
Foo.B();
Foo.C();