Unit and integration testing of an Express REST API and a multer single-file upload middleware

Introduction
Hello everybody,
I'm pretty new to unit and integration testing. The REST API I'm currently working on involves file uploads and the file system. Let me explain what this API does in a few sentences. Imagine a system like Microsoft Word: there are only users, and users have documents. A user's documents are just JSON files, and users can upload a JSON file to add a document. My API currently has 3 routes and 2 middlewares.
Routes:
auth.js (authorization route)
documents.js (document centered CRUD operations)
users.js
Middlewares:
auth.js (To check if there is valid JSON web token to continue)
uploadFile.js (To upload single file using multer)
I have been able to unit/integration test the auth.js and users.js routes and the auth.js middleware. These routes and middlewares only involve small amounts of data I/O, so they were pretty easy for me. But the documents.js router and the uploadFile.js middleware are proving pretty hard for me to get through.
Let me share my problems.
Source codes
documents.js Router
.
.
.
router.post('/mine', [auth, uploadFile], async (req, res) => {
    const user = await User.findById(req.user._id);

    user.leftDiskSpace(function(err, leftSpace) {
        if(err) {
            return res.status(400).send(createError(err.message, 400));
        } else {
            if(leftSpace < 0) {
                fs.access(req.file.path, (err) => {
                    if(err) {
                        res.status(403).send(createError('Your plan\'s disk space is exceeded.', 403));
                    } else {
                        fs.unlink(req.file.path, (err) => {
                            if(err) res.status(500).send('The document to be deleted could not be removed from disk.');
                            else res.status(403).send(createError('Your plan\'s disk space is exceeded.', 403));
                        });
                    }
                });
            } else {
                let document = new Document({
                    filename: req.file.filename,
                    path: `/uploads/${req.user.username}/${req.file.filename}`,
                    size: req.file.size
                });

                document.save()
                    .then((savedDocument) => {
                        user.documents.push(savedDocument._id);
                        user.save()
                            .then(() => res.send(savedDocument));
                    });
            }
        }
    });
});
.
.
.
uploadFile.js Middleware
const fs = require('fs');
const path = require('path');
const createError = require('./../helpers/createError');
const jsonFileFilter = require('./../helpers/jsonFileFilter');
const multer = require('multer');

const storage = multer.diskStorage({
    destination: function(req, file, cb) {
        console.log('file: ', file);
        if(!req.user.username) return cb(new Error('No name has been specified for the folder the document will be uploaded to.'), null);

        let uploadDestination = path.join(process.cwd(), 'uploads', req.user.username);
        fs.access(uploadDestination, (err) => {
            if(err) {
                // Directory with username doesn't exist in uploads folder, so create one
                fs.mkdir(uploadDestination, (err) => {
                    if(err) return cb(err, null);
                    cb(null, uploadDestination);
                });
            } else {
                // Directory with username exists
                cb(null, uploadDestination);
            }
        });
    },
    filename: function(req, file, cb) {
        cb(null, `${file.originalname.replace('.json', '')}--${Date.now()}.json`);
    }
});

module.exports = function(req, res, next) {
    multer({ storage: storage, fileFilter: jsonFileFilter }).single('document')(req, res, function(err) {
        if(req.fileValidationError) return res.status(400).send(createError(req.fileValidationError.message, 400));
        else if(!req.file) return res.status(400).send(createError('No document was selected.', 400));
        else if(err instanceof multer.MulterError) return res.status(500).send(createError(err.message, 500));
        else if(err) return res.status(500).send(createError(err, 500));
        else next();
    });
}
Questions
1. How can I test user.leftDiskSpace(function(err, leftSpace) { ... });, which takes a callback and internally uses Node.js fs methods that also take callbacks?
I want to reach the branches and statements inside the user.leftDiskSpace() callback. I thought of using mock functions to mock it out, but I don't know how to do so.
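Something along these lines is what I have in mind, placed inside the existing describe('POST /mine') block of my test file below (only a sketch: it assumes leftDiskSpace is defined via schema.methods, so that it is available on User.prototype, and it reuses the exec() helper and the test.json fixture):
let leftDiskSpaceMock;

beforeEach(() => {
    // Pretend the plan's disk space is exceeded so the route's 403 branch is reached
    leftDiskSpaceMock = jest
        .spyOn(User.prototype, 'leftDiskSpace')
        .mockImplementation(function(callback) {
            callback(null, -1);
        });
});

afterEach(() => {
    leftDiskSpaceMock.mockRestore();
});

it('should return 403 if the plan\'s disk space is exceeded', async () => {
    const res = await exec();
    expect(res.status).toBe(403);
});

it('should return 400 if leftDiskSpace reports an error', async () => {
    leftDiskSpaceMock.mockImplementation(function(callback) {
        callback(new Error('disk check failed'));
    });
    const res = await exec();
    expect(res.status).toBe(400);
});
Is spying on the model prototype like this the right way to do it?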
2. How can I change multer's disk storage upload destination to a dedicated testing folder?
Currently my API uploads the test documents to the development/production uploads destination. What is the best way to change the upload destination during testing? I thought of checking the NODE_ENV environment variable to see whether the API is under test and switching the destination inside the uploadFile.js middleware, but I'm not sure that's a good solution to this problem.
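What I imagined is something roughly like this in uploadFile.js (only a sketch: the uploads-test folder name is made up, and it relies on Jest setting NODE_ENV to 'test' by default):
const uploadsRoot = process.env.NODE_ENV === 'test'
    ? path.join(process.cwd(), 'uploads-test')  // a separate folder the test suite can safely wipe
    : path.join(process.cwd(), 'uploads');      // the normal development/production destination

const storage = multer.diskStorage({
    destination: function(req, file, cb) {
        if(!req.user.username) return cb(new Error('No name has been specified for the folder the document will be uploaded to.'), null);
        let uploadDestination = path.join(uploadsRoot, req.user.username);
        // ... the same fs.access / fs.mkdir / cb logic as above, rooted at uploadsRoot ...
    },
    filename: function(req, file, cb) {
        cb(null, `${file.originalname.replace('.json', '')}--${Date.now()}.json`);
    }
});
Is checking NODE_ENV like this considered acceptable, or what should I do instead?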
Current documents.test.js file
const request = require('supertest');
const { Document } = require('../../../models/document');
const { User } = require('../../../models/user');
const mongoose = require('mongoose');
const path = require('path');

let server;

describe('/api/documents', () => {
    beforeEach(() => { server = require('../../../bin/www'); });
    afterEach(async () => {
        server.close();
        await User.deleteMany({});
        await Document.deleteMany({});
    });
    .
    .
    .
    describe('POST /mine', () => {
        let user;
        let token;
        let file;

        const exec = async () => {
            return await request(server)
                .post('/api/documents/mine')
                .set('x-auth-token', token)
                .attach('document', file);
        }

        beforeEach(async () => {
            user = new User({
                username: 'user',
                password: '1234'
            });
            await user.save();

            token = user.generateAuthToken();
            file = path.join(process.cwd(), 'tests', 'integration', 'files', 'test.json');
        });

        it('should return 400 if no documents attached', async () => {
            file = undefined;
            const res = await exec();
            expect(res.status).toBe(400);
        });

        it('should return 400 if a non-JSON document attached', async () => {
            file = path.join(process.cwd(), 'tests', 'integration', 'files', 'test.png');
            const res = await exec();
            expect(res.status).toBe(400);
        });
    });
});

Related

Error [ERR_HTTP_HEADERS_SENT]: Can't figure out the multiple requests

I have this error: Error [ERR_HTTP_HEADERS_SENT]: Cannot set headers after they are sent to the client. From my understanding, the problem is that I am trying to send more than one response to the same HTTP request. My instinct tells me that it's this part that messes things up:
catch (err) {
    res.status(400).json(err);
}
Because if no user/password is found in the DB, we have already sent a status(400). Am I right? More importantly (and that's what drives me crazy), I am following a YouTube tutorial and his code is exactly like mine, yet his seems to work without any problem.
My code :
const router = require("express").Router();
const User = require("../models/Users");
const bcrypt = require("bcrypt");
//LOGIN
router.post("/login", async (req, res) => {
try {
const user = await User.findOne({ username: req.body.username });
!user && res.status(400).json("Wrong credentials!");
const validated = await bcrypt.compare(req.body.password, user.password);
!validated && res.status(400).json("Wrong credentiaaaals!");
const { password, ...others } = user._doc;
res.status(200).json(others);
} catch (err) {
res.status(500).json(err);
}
});
module.exports = router;
His code :
//LOGIN
router.post("/login", async (req, res) => {
    try {
        const user = await User.findOne({ username: req.body.username });
        !user && res.status(400).json("Wrong credentials!");
        const validated = await bcrypt.compare(req.body.password, user.password);
        !validated && res.status(400).json("Wrong credentials!");
        const { password, ...others } = user._doc;
        res.status(200).json(others);
    } catch (err) {
        res.status(500).json(err);
    }
});

module.exports = router;
Am I doing something wrong? Is my reasoning off? Thanks!
You are right, your code is trying to send data to the client multiple times. The issue is that once the call to .json("Wrong credentials!") completes, the write stream to the client is closed, and you will not be able to send any other data to the client. The framework detects this and shows you the bug.
In your code, after the .json("Wrong credentials!") method finishes its execution, your program continues and tries to execute the next lines.
You just need to add return, so the program exits the current flow after it sends the response to the client.
const router = require("express").Router();
const User = require("../models/Users");
const bcrypt = require("bcrypt");
//LOGIN
router.post("/login", async (req, res) => {
try {
const user = await User.findOne({ username: req.body.username });
if (!user) {
return res.status(400).json("Wrong credentials!"); // without return the code will continue to execute next lines
}
const validated = await bcrypt.compare(req.body.password, user.password);
if (!validated) {
return res.status(400).json("Wrong credentiaaaals!"); // without return the code will continue to execute next lines
}
const { password, ...others } = user._doc;
res.status(200).json(others); // return is not necessary, because there is no cod which will be executed after we back from the json method
} catch (err) {
res.status(500).json(err); // return is not necessary, because there is no cod which will be executed after we back from the json method
}
});
module.exports = router;

Express and MongoDB without Mongoose

This is not so much a question as a request for advice. I couldn't find resources to validate my approach, so I would like to hear MongoDB experts' opinions.
I was playing around with MongoDB and came up with this middleware to pass a client to my routes. I have this Express middleware:
const addClientToRequest = async (req, _, next) => {
    const client = new MongoClient(uri);
    await client.connect();
    req.client = client;
    next();
};

app.use(addClientToRequest);
After that, I use req.client in my routes to access my database.
app.get("/:id", async (req, res) => {
const client = req.client;
const id = req.params.id;
try {
const data = await client.db("mydb").collection("mycollection").findOne({ id });
if (data) return res.status(200).json(data);
} catch (error) {
return res
.status(500)
.json({ message: "Error fetching requested data", error });
}
return res.status(404).json({ message: "Requested data cannot be found" });
});
Would there be a problem with this approach? Is it okay to use the MongoDB client like this?
In my experience, we have always defined a separate utility that loads a connection pool at app startup and then reuses those connections.
In your approach, you seem to be creating a new connection for every HTTP request that is made and then never terminating (or closing) it. This can be expensive for a large app.
db.util.js
const { MongoClient } = require("mongodb");
const uri = `mongodb://${process.env.DB_USER}:${process.env.DB_PASSWORD}#localhost:27017/${process.env.DATABASE}?maxPoolSize=2-&w=majority`;
const client = new MongoClient(uri);
const init = async () => {
try {
await client.connect();
console.log("Connected");
} catch (error) {
console.log(error);
}
};
const getClient = () => {
return client;
};
module.exports.init = init;
module.exports.getClient = getClient;
app.js
// Import modules
require("dotenv").config({ path: __dirname + "/.env" });
const express = require("express");
const dogRoutes = require("./routes/dog.routes");
const db = require("./utils/db.util");

// Define PORT for HTTP Server
const PORT = 9900;

// Initialize Express
const app = express();
app.use(express.json());
app.use(dogRoutes);

(async () => {
    await db.init();
    app.listen(PORT, (err) => {
        console.log(`Server is up at localhost ${PORT}`);
    });
})();
I think what you could do is move the client outside of the middleware, so you don't redefine it and reconnect each time a request comes in.
To do so, simply define it and connect before the middleware, and in the middleware set the client as req.mongoClient or whatever you want to name it.
const client = new MongoClient(uri);
await client.connect(); // if this is outside of an async function, either wrap it in an async IIFE like (async () => { ...script... })(), or define a variable isClientReady and set it to true once the promise resolves

const addClientToRequest = (req, _, next) => {
    req.client = client;
    next();
};

app.use(addClientToRequest);
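For example, a minimal sketch of that IIFE approach (the uri, port, and route wiring here are placeholders, not taken from the question):
const express = require("express");
const { MongoClient } = require("mongodb");

const app = express();
const client = new MongoClient("mongodb://localhost:27017"); // placeholder uri

const addClientToRequest = (req, _, next) => {
    req.client = client;
    next();
};

(async () => {
    await client.connect();       // connect once, before the server starts accepting requests
    app.use(addClientToRequest);  // every request now reuses the same connected client
    app.get("/:id", async (req, res) => {
        // ... same handler as in the question, using req.client ...
        res.sendStatus(200);
    });
    app.listen(3000, () => console.log("Server ready"));
})().catch(console.error);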

Why are my tests occasionally passing and occasionally failing? (Jest, Mongoose, MongoDB)

I have a setup file for my tests that looks like this:
const mongoose = require('mongoose');
mongoose.set('useCreateIndex', true);
mongoose.promise = global.Promise;

async function removeAllCollections() {
    const collections = Object.keys(mongoose.connection.collections);
    for (const collectionName of collections) {
        const collection = mongoose.connection.collections[collectionName];
        await collection.deleteMany();
    }
}

async function dropAllCollections() {
    const collections = Object.keys(mongoose.connection.collections);
    for (const collectionName of collections) {
        const collection = mongoose.connection.collections[collectionName];
        try {
            await collection.drop();
        } catch (error) {
            // Sometimes this error happens, but you can safely ignore it
            if (error.message === 'ns not found') return;
            // This error occurs when you use it.todo. You can
            // safely ignore this error too
            if (error.message.includes('a background operation is currently running')) return;
            console.log(error.message);
        }
    }
}

export default function setupDB(databaseName) {
    // Connect to Mongoose
    beforeAll(async () => {
        const url = `mongodb://127.0.0.1/${databaseName}`;
        await mongoose.connect(
            url,
            {
                useNewUrlParser: true,
                useCreateIndex: true,
                useUnifiedTopology: true
            },
            err => {
                if (err) {
                    console.error(err);
                    process.exit(1);
                }
            }
        );
    });

    // Cleans up database between each test
    afterEach(async () => {
        await removeAllCollections();
    });

    // Disconnect Mongoose
    afterAll(async () => {
        await dropAllCollections();
        await mongoose.connection.close();
    });
}
I am then writing tests like this:
import User from 'db/models/User';
import setupDB from 'utils/setupDbForTesting';

setupDB('mongoose_bcrypt_test');

it('correctly hashes and salts passwords', async done => {
    // create a new user
    const newUser = new User({
        username: 'jmar777',
        password: 'Password123'
    });
    await newUser.save(function (err) {
        if (err) {
            console.log(err);
        }
    });

    const user = await User.findOne({ username: 'jmar777' });

    user.comparePassword('Password123', function (err, isMatch) {
        if (err) throw err;
        expect(isMatch).toBeTruthy();
    });
    user.comparePassword('123Password', function (err, isMatch) {
        if (err) throw err;
        expect(isMatch).toBeFalsy();
    });

    done();
});
However, every other time I run these tests, they pass (or fail): if the tests pass on run T, they fail on run T + 1. My question is: why?
The tests fail because user (in the callback for User.findOne) is null, even though the user has been saved.
I think the issue lies in the tearing down of the database, but I really can't see any problems. Any help would be appreciated, thanks.
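While looking at this, I also noticed that done() is called before the comparePassword callbacks run, so the assertions inside them are not reliably awaited. A promisified version of that part (only a sketch, assuming comparePassword follows the standard (err, isMatch) Node callback convention) would look like:
import { promisify } from 'util';

it('correctly hashes and salts passwords', async () => {
    const newUser = new User({ username: 'jmar777', password: 'Password123' });
    await newUser.save();

    const user = await User.findOne({ username: 'jmar777' });
    const comparePassword = promisify(user.comparePassword.bind(user));

    // Both assertions are awaited, so the test cannot finish before they run
    await expect(comparePassword('Password123')).resolves.toBeTruthy();
    await expect(comparePassword('123Password')).resolves.toBeFalsy();
});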

delete and create folder before uploading files into the folder using Multer

I am trying to upload files into a folder using multer and it is working fine.
Now my requirement is that, before uploading a file into the 'uploads' folder, it should delete the folder first, recreate it, and then upload the file.
I only want to operate on the uploaded file, not on the previously stored data.
Code:
const fs = require("fs-extra");
const path = require("path");
const uploadPath = path.resolve(__dirname, "uploads");
const multer = require("multer");
const storage = multer.diskStorage({
destination: "./uploads/",
filename: function(req, file, cb) {
cb(null, file.originalname);
}
});
const upload = multer({ storage: storage });
router.post("/fileupload", upload.array("docs", 10), async function(
req,
res,
next
) {
let result = {};
try {
if (fs.existsSync(uploadPath)) {
fs.removeSync(uploadPath);
console.log("dir removed");
fs.ensureDirSync(uploadPath);
console.log("directory created");
} else {
fs.ensureDirSync(uploadPath);
console.log("directory created");
}
const uploadObj = util.promisify(upload.any());
await uploadObj(req, res);
result.message = "Upload successful";
res.send(result);
} catch (e) {
console.error(e);
console.error("Upload error");
}
});
I tried to make the code async as well, but then it does not upload any file at all. What I understand is that upload.array is middleware, so it runs first whenever the POST request is called, and the rest runs after it. So multer uploads the data into the existing folder, and then, once execution reaches the handler, fs deletes and recreates the folder.
How can I make it work?
Thanks
I found a way to make it work.
Since multer is a middleware, it executes before the rest of the code. So I moved that middleware into the route handler instead of the route header. Below is the full code.
const create_upload_dir = () => {
    if (fs.existsSync(uploadPath)) {
        fs.removeSync(uploadPath);
        console.log("dir removed");
        fs.ensureDirSync(uploadPath);
        console.log("directory created");
    } else {
        fs.ensureDirSync(uploadPath);
        console.log("directory created");
    }
    return Promise.resolve("Success");
};

const multer = require("multer");

const upload_documents = () => {
    const storage = multer.diskStorage({
        destination: "./uploads/",
        filename: function(req, file, cb) {
            cb(null, file.originalname);
        }
    });
    const upload = multer({ storage: storage });
    return upload;
};

router.post("/fileupload", async function(req, res, next) {
    let result = {};
    try {
        await create_upload_dir();
        const upload = upload_documents();
        upload.array("docs", 10);
        const uploadObj = util.promisify(upload.any());
        await uploadObj(req, res);
        console.log("upload successful");
        res.send("Upload successful");
    } catch (e) {
        console.error(e);
        console.error("Upload error");
    }
});
Notice that inside the try block I call
upload.array("docs", 10);
instead of registering it as route middleware with
router.post("/fileupload", upload.array("docs", 10), async function(req, res, next)

Puppeteer Generate PDF from multiple HTML strings

I am using Puppeteer to generate PDF files from HTML strings.
Reading the documentation, I found two ways of generating the PDF files:
First, passing a URL and calling the goto method as follows:
page.goto('https://example.com');
page.pdf({format: 'A4'});
The second, which is my case, is calling the setContent method as follows:
page.setContent('<p>Hello, world!</p>');
page.pdf({format: 'A4'});
The thing is that I have 3 different HTML strings sent from the client, and I want to generate a single PDF file with 3 pages (one page per HTML string).
I wonder whether there is a way of doing this with Puppeteer? I'm open to other suggestions, but I need to use headless Chrome.
I was able to do this by doing the following:
Generate 3 different PDFs with Puppeteer. You have the option of saving the file locally or storing it in a variable.
I saved the files locally, because all the PDF merge plugins I found only accept file paths and don't accept buffers, for instance. After generating the PDFs locally one after the other, I merged them using PDF Easy Merge.
The code is like this:
const path = require('path');
const puppeteer = require('puppeteer');
const pdfMerge = require('easy-pdf-merge'); // the "PDF Easy Merge" package mentioned above

const page1 = '<h1>HTML from page1</h1>';
const page2 = '<h1>HTML from page2</h1>';
const page3 = '<h1>HTML from page3</h1>';

(async () => {
    const browser = await puppeteer.launch();
    const tab = await browser.newPage();

    await tab.setContent(page1);
    await tab.pdf({ path: './page1.pdf' });

    await tab.setContent(page2);
    await tab.pdf({ path: './page2.pdf' });

    await tab.setContent(page3);
    await tab.pdf({ path: './page3.pdf' });

    await browser.close();

    pdfMerge([
        './page1.pdf',
        './page2.pdf',
        './page3.pdf',
    ], path.join(__dirname, `./mergedFile.pdf`), async (err) => {
        if (err) return console.log(err);
        console.log('Successfully merged!');
    });
})();
I was able to generate multiple PDFs from multiple URLs with the code below:
package.json
{
    ............
    ............
    "dependencies": {
        "puppeteer": "^1.1.1",
        "easy-pdf-merge": "0.1.3"
    }
    ..............
    ..............
}
index.js
const puppeteer = require('puppeteer');
const merge = require('easy-pdf-merge');

var pdfUrls = ["http://www.google.com", "http://www.yahoo.com"];

(async () => {
    const browser = await puppeteer.launch();
    const page = await browser.newPage();

    var pdfFiles = [];
    for (var i = 0; i < pdfUrls.length; i++) {
        await page.goto(pdfUrls[i], { waitUntil: 'networkidle2' });
        var pdfFileName = 'sample' + (i + 1) + '.pdf';
        pdfFiles.push(pdfFileName);
        await page.pdf({ path: pdfFileName, format: 'A4' });
    }

    await browser.close();
    await mergeMultiplePDF(pdfFiles);
})();

const mergeMultiplePDF = (pdfFiles) => {
    return new Promise((resolve, reject) => {
        merge(pdfFiles, 'samplefinal.pdf', function(err) {
            if (err) {
                console.log(err);
                reject(err);
            }
            console.log('Success');
            resolve();
        });
    });
};
RUN Command: node index.js
pdf-merger-js is another option. page.setContent should work just the same as a drop-in replacement for page.goto below:
const PDFMerger = require("pdf-merger-js"); // 3.4.0
const puppeteer = require("puppeteer"); // 14.1.1
const urls = [
"https://news.ycombinator.com",
"https://en.wikipedia.org",
"https://www.example.com",
// ...
];
const filename = "merged.pdf";
let browser;
(async () => {
browser = await puppeteer.launch();
const [page] = await browser.pages();
const merger = new PDFMerger();
for (const url of urls) {
await page.goto(url);
merger.add(await page.pdf());
}
await merger.save(filename);
})()
.catch(err => console.error(err))
.finally(() => browser?.close())
;
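For the original use case (HTML strings instead of URLs), the same loop should work with page.setContent as the drop-in replacement mentioned above; a rough sketch, with the three strings below standing in for whatever the client sends:
const PDFMerger = require("pdf-merger-js");
const puppeteer = require("puppeteer");

const htmlPages = [
    "<h1>HTML from page1</h1>",
    "<h1>HTML from page2</h1>",
    "<h1>HTML from page3</h1>",
];

let browser;

(async () => {
    browser = await puppeteer.launch();
    const [page] = await browser.pages();
    const merger = new PDFMerger();

    for (const html of htmlPages) {
        await page.setContent(html);                  // load the HTML string instead of navigating
        merger.add(await page.pdf({ format: "A4" })); // append this page's PDF to the merger
    }

    await merger.save("merged.pdf");
})()
    .catch(err => console.error(err))
    .finally(() => browser?.close());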