Jest mongo concurrency error - mongodb

Update This may be a bug in Jest - watching this issue here.
I am running Jest tests with Mongo as instructed in the Jest docs
When I run more than one test file at a time, I get this error:
TypeError: Cannot read property 'getConnectionString' of undefined
3 | class MongoEnvironment extends NodeEnvironment {
4 | async setup () {
> 5 | this.global.__MONGO_URI__ = await global.__MONGOD__.getConnectionString()
6 | this.global.__MONGO_DB_NAME__ = global.__MONGO_DB_NAME__
7 |
8 | await super.setup()
at MongoEnvironment.setup (test/mongo-environment.js:5:57)
The only way I can run without error is with the --runInBand or --maxWorkers 1 option.
It doesn't matter what my tests are - I can put dummy tests in each file like:
test('Yo', () => {
})
So let's say I add this content to foo.spec.js and bar.spec.js in the test/ dir and run jest test/ - it will yield the error.
My jest config:
module.exports = {
  globalSetup: './test/setup.js',
  globalTeardown: './test/teardown.js',
  testEnvironment: './test/mongo-environment.js',
  setupTestFrameworkScriptFile: './test/bootstrap.js',
  testPathIgnorePatterns: ['/node_modules/', '/config/']
}
The setup, teardown and mongo-environment files are the same as in the Jest docs. My bootstrap file contains the before/after hooks:
const { MongoClient } = require('mongodb')

let connection
let db

beforeAll(async () => {
  connection = await MongoClient.connect(global.__MONGO_URI__)
  db = await connection.db(global.__MONGO_DB_NAME__)
})

afterAll(async () => {
  await connection.close()
  await db.close()
})
Any idea what is going on?
Here are the other files for completeness in case I made a booboo:
// setup.js
const MongodbMemoryServer = require('mongodb-memory-server')

const MONGO_DB_NAME = 'jest'
const mongod = new MongodbMemoryServer.default({ // eslint-disable-line new-cap
  instance: {
    dbName: MONGO_DB_NAME
  },
  binary: {
    version: '3.2.19'
  }
})

module.exports = function () {
  global.__MONGOD__ = mongod
  global.__MONGO_DB_NAME__ = MONGO_DB_NAME
}
// teardown.js
module.exports = async function () {
  await global.__MONGOD__.stop()
}
// mongo-environment.js
const NodeEnvironment = require('jest-environment-node')

class MongoEnvironment extends NodeEnvironment {
  async setup () {
    this.global.__MONGO_URI__ = await global.__MONGOD__.getConnectionString()
    this.global.__MONGO_DB_NAME__ = global.__MONGO_DB_NAME__

    await super.setup()
  }

  async teardown () {
    await super.teardown()
  }

  runScript (script) {
    return super.runScript(script)
  }
}

module.exports = MongoEnvironment
Note that the error is not 100% consistent - perhaps some kind of timing issue.
This is not just a Mongo thing. If I dump the globals in my MongoEnvironment setup() method, I can see that when running more than one test file a whole bunch of Jest globals are missing.
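For what it's worth, here is a minimal sketch of the file-based workaround used by presets such as @shelf/jest-mongodb (the temp-file path and the mongo-environment.js snippet below are my own assumptions, not code from the question): since globalSetup runs in Jest's main process while each test environment runs in a worker, the connection string has to reach the workers through something other than global, for example a file on disk.

// setup.js - sketch: write the connection details to a temp file
const fs = require('fs')
const os = require('os')
const path = require('path')
const MongodbMemoryServer = require('mongodb-memory-server')

const globalConfigPath = path.join(os.tmpdir(), 'jest-mongo-config.json')
const mongod = new MongodbMemoryServer.default() // eslint-disable-line new-cap

module.exports = async function () {
  const mongoConfig = {
    mongoUri: await mongod.getConnectionString(),
    mongoDBName: 'jest'
  }
  // Workers cannot see this process's globals, but they can read this file
  fs.writeFileSync(globalConfigPath, JSON.stringify(mongoConfig))
  // Keep a reference for teardown.js, which runs in this same main process
  global.__MONGOD__ = mongod
}

// mongo-environment.js - read the file instead of touching global.__MONGOD__
// (globalConfigPath would need to be computed the same way in this file)
async setup () {
  const config = JSON.parse(fs.readFileSync(globalConfigPath, 'utf-8'))
  this.global.__MONGO_URI__ = config.mongoUri
  this.global.__MONGO_DB_NAME__ = config.mongoDBName
  await super.setup()
}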


trying to get uploads saving in MongoDB

I currently have the following code, which saves the temp file to public/files. I have tried to understand the MongoDB GridFS documentation, but with no success.
I am wondering how I get the files to save inside MongoDB GridFS instead of my public/files directory.
I am aware I am missing the part where I need to send the uploaded file to MongoDB - this is the part I don't know how to do.
In the MongoDB example they say to do something like:
fs.createReadStream('./myFile').pipe(
  bucket.openUploadStream('myFile', {
    chunkSizeBytes: 1048576,
    metadata: { field: 'myField', value: 'myValue' },
  })
);
However, I am not using fs - or do I need to upload the file to a temp location first and then use fs?
import formidable from 'formidable';
import { MongoClient, ObjectId } from 'mongodb';
var Grid = require('gridfs-stream');

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const uri = process.env.MONGODB_URI;
  let client;
  let clientPromise;
  const options = {};
  client = new MongoClient(uri, options);
  clientPromise = client.connect();
  const clients = await clientPromise;
  const database = clients.db('AdStitchr');

  var gfs = Grid(database, client);
  gfs.collection('uploads');

  const form = new formidable.IncomingForm();
  form.uploadDir = 'public/files';
  form.keepExtensions = true;

  form.parse(req, (err, fields, files) => {
    var file = files.file;
    console.log(JSON.stringify(file));
    try {
      const newFile = File.create({
        name: `files${file.newFilename}.mp3`,
      });
      res.status(200).json({ status: 'success' });
    } catch (error) {
      res.send(error);
    }
  });
};
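Not an authoritative answer, but here is a minimal sketch of how the formidable temp file could be piped into GridFS using the driver's GridFSBucket instead of gridfs-stream (it assumes formidable v2, where the temp path is file.filepath, and a bucket named 'uploads' - adjust the names to your setup):

import fs from 'fs';
import formidable from 'formidable';
import { MongoClient, GridFSBucket } from 'mongodb';

export const config = {
  api: {
    bodyParser: false,
  },
};

export default async (req, res) => {
  const client = new MongoClient(process.env.MONGODB_URI);
  await client.connect();
  const db = client.db('AdStitchr');

  // GridFSBucket is the driver's built-in replacement for gridfs-stream
  const bucket = new GridFSBucket(db, { bucketName: 'uploads' });

  const form = new formidable.IncomingForm({ keepExtensions: true });
  form.parse(req, (err, fields, files) => {
    if (err) return res.status(500).send(err);

    const file = files.file;
    // formidable has already written the upload to a temp path, so fs can read it from there
    fs.createReadStream(file.filepath) // file.path in formidable v1
      .pipe(bucket.openUploadStream(file.originalFilename))
      .on('error', (error) => res.status(500).send(error))
      .on('finish', () => res.status(200).json({ status: 'success' }));
  });
};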

Is there a mongodb server for Cypress to be able to query inside my tests

I need to query Mongo inside my Cypress tests to basically see if my POST is updating some fields, but I don't see an npm package for it like there is for SQL Server. Googling it, I only see documentation and examples on how to seed the db.
Any thoughts, comments?
Thank you
Take a look at this post: https://glebbahmutov.com/blog/testing-mongo-with-cypress/
The gist of it:
-- plugins/index.js
/// <reference types="cypress" />
const { connect } = require('../../db')

module.exports = async (on, config) => {
  const db = await connect()
  const pizzas = db.collection('pizzas')

  on('task', {
    async clearPizzas() {
      console.log('clear pizzas')
      await pizzas.remove({})
      return null
    },
  })
}
-- db.js
const { MongoClient } = require('mongodb')

const uri = process.env.MONGO_URI
if (!uri) {
  throw new Error('Missing MONGO_URI')
}

const client = new MongoClient(uri)

async function connect() {
  // Connect the client to the server
  await client.connect()
  return client.db('foods')
}

async function disconnect() {
  // Ensures that the client will close when you finish/error
  await client.close()
}

module.exports = { connect, disconnect }
Change the line await pizzas.remove({}) to whatever query you want to run; I'll assume you know how to get the result of the query and assert it.
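As a rough illustration (the task name, endpoint and fields below are made up, not from the blog post), you could add a read task next to clearPizzas and assert on whatever it returns via cy.task in the spec:

// plugins/index.js - a task that returns a document so the test can assert on it
on('task', {
  async getPizza(name) {
    // whatever a task returns is what cy.task() yields in the test
    return pizzas.findOne({ name })
  },
})

// in a spec - check that the POST updated the fields in Mongo
cy.request('POST', '/api/pizzas', { name: 'margherita', size: 'large' })
cy.task('getPizza', 'margherita').then((pizza) => {
  expect(pizza).to.not.be.null
  expect(pizza.size).to.equal('large')
})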

sails model is getting unrecognized datastore error on nested await method call

I just can't figure out why this fails where it does.
This code is in a Sails model called Job.js.
The Init() is called from bootstrap.js.
This is all based on the ORM adapter sails-disk.
Init: async function() {
  _jobRunner();
},

_jobRunner: function() {
  async function doJobRunner() {
    let job = await Job.findOne({state: 'inactive'});
    let updated = await Job.Update(job);
  }
  setInterval(doJobRunner, 3000);
},

Update: async function(job) {
  let query = {id: job.id}
  let updates, updated;
  try {
    let origJob = await Job.findOne(query);             // <-- success
    updates = {status: 'active'}                        // <-- added for illustration
    updated = await Job.updateOne(query).set(updates);  // <----- this fails into catch (why?)
    Job.publish(job.id, updated); // notify
  } catch(err) {
    sails.log.error(err);
  }
  return updated;
},
The error (err) is:
Unexpected error from database adapter: Unrecognized datastore: `default`, It doesn't seem to have been registered with this adapter (sails-disk).'
modelIdentity:'job'
**Node version**: v14.17.4
**Sails version** _(sails)_: 1.4.4
**ORM hook version** _(sails-hook-orm)_: 3.0.2
**sails-disk version**: 2.1.0

MongoDB and Mocha

Has anyone ever used async/await in their mocha tests?
I'm creating this simple test to check if my code successfully saves an object to the database:
const mocha = require('mocha');
const assert = require('assert');
const marioChar = require('../models/mariochar');

async function saveAMarioChar(paramname, paramweight) {
  var char = new marioChar({
    name: paramname,
    weight: paramweight
  });
  const saveresult = await char.save()
  return !saveresult.isNew;
}

describe('saving record', () => {
  it('Save a mariochar', async () => {
    const result = await saveAMarioChar('luigi', 64);
    assert(result)
  })
})
Sorry for the trouble guys, this code is working fine - I had used "mongooose" with a triple "o" in my schema creation.

Next JS connection with Apollo and MongoDB

I am new to Next.js and using this example from Next.js https://github.com/zeit/next.js/tree/master/examples/api-routes-apollo-server-and-client.
However, the example is silent on MongoDB integration (and I could not find any other example covering it). I have been able to make the database connection but NOT able to use it in resolvers.
My Code
pages/api/graphql.js
import { ApolloServer } from 'apollo-server-micro'
import { schema } from '../../apollo/schema'

const MongoClient = require('mongodb').MongoClient;

let db

const apolloServer = new ApolloServer({
  schema,
  context: async () => {
    if (!db) {
      try {
        const client = await MongoClient.connect(uri)
        db = await client.db('dbName')
        const post = await Posts.findOne()
        console.log(post)
        // It's working fine here
      }
      catch (e) {
        // handle any errors
      }
    }
    return { db }
  },
})

export const config = {
  api: {
    bodyParser: false,
  },
}

export default apolloServer.createHandler({ path: '/api/graphql' })
apollo/schema.js
import {makeExecutableSchema} from 'graphql-tools';
import {typeDefs} from './type-defs';
import {resolvers} from './resolvers';

export const schema = makeExecutableSchema({
  typeDefs,
  resolvers
});
apollo/resolvers.js
const Items = require('./connector').Items;

export const resolvers = {
  Query: {
    viewer(_parent, _args, _context, _info) {
      // want to populate values here, using the database connection
      return { id: 1, name: 'John Smith', status: 'cached' }
    },
    ...
  }
}
I am stuck on the resolvers.js part. I don't know how to get the cached database connection inside resolvers.js. If I create a new database connection file, top-level await is not supported there, so how do I proceed?
If context is a function, whatever you return from the function will be available as the context parameter in your resolver. So if you're returning { db }, that's what your context parameter will be -- in other words, you can access it as context.db inside your resolver.
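To make that concrete, here is a small sketch of what the resolver could look like (the collection name and query are placeholders, not from the question):

// apollo/resolvers.js - the third resolver argument is whatever the context function returned
export const resolvers = {
  Query: {
    async viewer(_parent, _args, context, _info) {
      // context.db is the cached Db instance created in pages/api/graphql.js
      return context.db.collection('viewers').findOne({ id: 1 })
    },
  },
}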