db.open(function(err, db) {
    // handle error
    db.collection("book", function(err, collection) {
        // handle error
        collection.doSomething1(..., function(err, result) {
            // handle error
            collection.doSomething2(..., function(err, result) {
                ...
            });
        });
    });
});
But we don't want to write db.open every time we want to do something, even though we must make sure the db has been opened before we use it. We also don't want to repeat the same error-handling code everywhere, and we would like to reuse the collection. What we want is something like this:
errorHandledDB.doSomething1("book", ..., function(result) {
    errorHandledDB.doSomething2("book", ..., function(result) {
        ...
    });
});
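To illustrate, here is a rough sketch of what I imagine such a wrapper would look like, assuming the classic node-mongodb-native callback API. The names ErrorHandledDB, withCollection and onError are made up for illustration; a real implementation would also need to queue calls that arrive before open() has completed:

var ErrorHandledDB = function(db, onError) {
    this.db = db;             // an already-constructed Db instance
    this.onError = onError;   // every error ends up here, once
    this.collections = {};    // cache, so collections are reused
    this.db.open(function(err) {
        if (err) onError(err);
    });
};

ErrorHandledDB.prototype.withCollection = function(name, fn) {
    var self = this;
    if (self.collections[name]) return fn(self.collections[name]);
    self.db.collection(name, function(err, collection) {
        if (err) return self.onError(err);
        self.collections[name] = collection;
        fn(collection);
    });
};

ErrorHandledDB.prototype.doSomething1 = function(name, query, callback) {
    var self = this;
    self.withCollection(name, function(collection) {
        collection.find(query, function(err, cursor) {
            if (err) return self.onError(err);
            cursor.toArray(function(err, results) {
                if (err) return self.onError(err);
                callback(results); // callbacks only ever see successful results
            });
        });
    });
};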
I implemented a server application that uses MongoDB for logging. I implemented data access using some provider classes, as shown in the example.
provider.filelog.js
var Db = require('mongodb/db').Db,
    ObjectID = require('mongodb/bson/bson').ObjectID,
    Server = require('mongodb/connection').Server,
    log = require('lib/common').log;

FilelogProvider = function (host, port, database) {
    this.db = new Db(database, new Server(host, port, {auto_reconnect: true}, {}));
    this.db.open(function() {}); // open once up front; auto_reconnect handles reconnects
};
FilelogProvider.prototype.getCollection = function(callback) {
    this.db.collection('filelogs', function(error, log_collection) {
        if (error) callback(error);
        else {
            log_collection.ensureIndex([['created', 1]], false, function(err, indexName) {
                if (err) callback(err);
                else callback(null, log_collection);
            });
        }
    });
};
FilelogProvider.prototype.findAll = function(callback) {
    this.getCollection(function(error, log_collection) {
        if (error) callback(error);
        else {
            log_collection.find(function(error, cursor) {
                if (error) callback(error);
                else {
                    cursor.toArray(function(error, results) {
                        if (error) callback(error);
                        else callback(null, results);
                    });
                }
            });
        }
    });
};
Since I use Grasshopper as my HTTP middleware, I can easily inject the providers using the DI functionality provided by gh:
server.js
gh.addToContext({
    providers: {
        filelog: new FilelogProvider(conf.mongodb_host, conf.mongodb_port, conf.mongodb_database),
        status: new ServerstatusProvider(conf.mongodb_host, conf.mongodb_port, conf.mongodb_database)
    },
    log: log
});
Accessing the providers in every controller function is now a breeze:
gh.get('/serve', function() {
    this.providers.filelog.findAll(function(err, res) {
        // access data here
    });
});
This implementation is pretty specific to Grasshopper (as it's using DI), but I think you'll get the idea. I also implemented a solution using Express and Mongoose; you can find it here. That solution is a bit cleaner than using the native driver, as it exposes models to use against the database.
Update
Just for the sake of it: if you really want to stick to the DRY principle, stop tinkering with an ORM implementation yourself and use Mongoose. If you need special functionality like map/reduce, you can still use the native driver (on which Mongoose is built).
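For comparison, the provider above shrinks to a few lines with Mongoose. This is only a sketch; the connection string and field names are assumptions mirroring the filelog example:

var mongoose = require('mongoose');
mongoose.connect('mongodb://localhost/logs'); // hypothetical connection string

// schema and field names mirror the filelog example above, but are assumptions
var Filelog = mongoose.model('Filelog', new mongoose.Schema({
    message: String,
    created: { type: Date, default: Date.now, index: true }
}));

// connection state, index creation and collection access are handled for you
Filelog.find({}, function(err, logs) {
    if (err) return console.error(err);
    console.log(logs);
});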
Answering my own question: because there were no good options, I did it myself. I started a project to simplify this; check out node-mongoskin.
I'm talking theoretically here, with no regard to Mongo.
I would recommend you try building a wrapper of some kind: a data access layer, or at least models. It all depends on your architecture and needs, and that's on your side.
Just wrap the access to MongoDB with a layer of abstract commands, then write an abstract model object. All other model objects will inherit from it, and it will automatically set up all the getters and setters for the attributes of the record you pulled from MongoDB. For updating, you just give it a save method that iterates over the changes made to it and saves them.
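For example, the abstract model could look roughly like this. This is a hedged sketch only; BaseModel and its internals are invented names, not an existing library, and it assumes the old callback-style driver API:

function BaseModel(record, collection) {
    this._record = record;         // the raw document pulled from MongoDB
    this._collection = collection;
    this._dirty = {};              // tracks attributes changed since loading
    var self = this;
    Object.keys(record).forEach(function(key) {
        Object.defineProperty(self, key, {
            get: function() { return self._record[key]; },
            set: function(value) {
                self._record[key] = value;
                self._dirty[key] = value; // remember the change for save()
            }
        });
    });
}

// save() writes back only the attributes that were actually changed
BaseModel.prototype.save = function(callback) {
    this._collection.update({ _id: this._record._id },
                            { $set: this._dirty },
                            callback);
};

Concrete models would then inherit from BaseModel and add their own behaviour on top.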
Since MongoDB is not relational, and I don't know if this is well suited to your design, the model may not be useful here.
Hope this helps. Good luck!
Related
userSchema.pre('save', async function(next) {
    // hash the password before saving the user to the database
    next();
});
Hey guys, I'm trying to understand the concept of middleware in Mongoose. Assume I have a userSchema on which I run the pre hook to hash the password before saving the user to the database. On the surface, as far as I understand, the code above will hash the password (not the important part for this question) and then call next() to signal that the function is done. However, I am trying to understand how things work under the hood. What is next()? Can you walk me through an example of how everything works together under the hood from start to end once the code gets executed, or somehow help me get a better understanding of this concept? Thanks
Short version: with the pre method you can register listeners for certain events of your schemas. So pre('save', callback) will fire whenever you save a document of said model. pre means it executes before the event, so it can be used (for example) to hash a password before saving it to the document.
However, you have several options for defining them; see below.
Combining an async callback function with the next parameter is not necessary. You can either:
use a normal callback with the next parameter
The next parameter is a function provided to you by Mongoose as a way out, to tell Mongoose you are done and that it should continue with the next step in the execution chain. It is also possible to pass an Error to next; that will stop the execution chain.
schema.pre('save', function(next) {
    // do stuff
    if (error) { return next(new Error("something went wrong")); }
    return next(null);
});
or use an async callback
Here the execution chain will continue once your async callback has finished. If there is an error and you want to break/stop the execution chain, you just throw it:
schema.pre('save', async function() {
    // do stuff
    await doStuff();
    await doMoreStuff();
    if (error) { throw new Error("something went wrong"); }
    return;
});
Straight from the docs : https://mongoosejs.com/docs/middleware.html#pre
Example
const { Schema, model } = require('mongoose');

const SomeSchema = new Schema({
    name: { type: String }
});

SomeSchema.pre('save', function (next) {
    console.log('pre save fired!');
    return next();
});

const Some = model('Some', SomeSchema);

console.log('before create');
const doc = new Some({ name: 'test' });
doc.save((err) => {
    console.log('after saved');
});
This will output
before create
pre save fired!
after saved
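To get a feel for what happens under the hood, here is a stripped-down sketch of how such a pre-hook chain can be executed. Mongoose actually delegates this to the kareem library and also binds this inside each hook to the document being saved; the code below is a simplification for illustration, not Mongoose's real implementation:

function runPreHooks(hooks, done) {
    var i = 0;
    function next(err) {
        if (err) return done(err);    // an error stops the chain
        var hook = hooks[i++];
        if (!hook) return done(null); // all hooks ran: proceed with the save
        if (hook.length >= 1) {
            hook(next);               // callback style: the hook calls next()
        } else {
            // async/promise style: continue when the returned promise settles
            Promise.resolve(hook()).then(function() { next(); }, next);
        }
    }
    next();
}

// usage sketch:
runPreHooks([
    function(next) { console.log('hash the password here'); next(); },
    async function() { console.log('another pre hook'); }
], function(err) {
    if (err) return console.error(err);
    console.log('all pre hooks done, now actually write to the database');
});

So next() is simply the "continue" function of this chain runner: calling it hands control to the next registered hook, and only after the last one does the actual save happen.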
I am just starting to use Sails.js and it's an amazing framework. But I've run into a situation for which I cannot find a solution on Google, so I came here for help.
I have a controller that connects to another remote service with a very old API design, full of XML responses and inconsistencies, and wraps that service in simple and clean APIs. So I have some routes like:
list: function(req, res) {
    var params = {
        ...
    };
    FooService.request(data, function(error, response) {
        res.send(response);
    });
    process.once('uncaughtException', function(err) {
        res.send(500, '[Foo] ' + err);
    });
},
The process.once call is for async exceptions which may be raised in the FooService.request process. I know this is bad code, and my question is: how do I handle such situations in a more Sails.js way?
In Node.js we have Domain and connect-domain, which are designed for such problems. Because Sails.js is basically Express, which works with connect-domain very well, I think there may be an idiomatic way to do this.
I've tried adding this in config/local.js:
module.exports = {
    ...
    express: {
        customMiddleware: function(app) {
            console.log('Custom middleware config called');
            var domain = require('connect-domain');
            app.use(domain())
                .use(function(err, req, res, next) {
                    console.log('Error caught!');
                    res.send(500, '[Foo] ' + err);
                });
        }
    }
};
When an uncaught exception occurs, it does not crash the server, and error 500 is returned to the client side (so app.use(domain()) works). But the custom error handler never gets called. I have no idea why.
If you're in control of the FooService code, then the best option is to handle all errors that happen there by calling the FooService.request callback early with the error, and then using res.serverError or some other response in your controller:
FooService.request(data, function(error, response) {
    if (error) { return res.serverError(error); }
    res.send(response);
});
If FooService is using packages that you don't control, which may themselves throw errors inside of async code that they aren't catching (bad code!), then another good option is to use Node's error domains. See this answer for an example of someone writing a quick wrapper that uses domains to catch errors in asynchronous code.
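For illustration, such a wrapper could look roughly like this. This is a sketch; safeRequest is an invented name, FooService comes from the question, and note that the domain module has since been deprecated in Node:

var domain = require('domain');

// Runs the service call inside a domain so that exceptions thrown in
// async code it doesn't catch become regular errors in our callback.
function safeRequest(data, callback) {
    var d = domain.create();
    d.on('error', function(err) {
        callback(err); // an async throw surfaces as a normal error
    });
    d.run(function() {
        FooService.request(data, function(error, response) {
            callback(error, response);
        });
    });
}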
I am an absolute Node.js beginner and want to create a simple REST web service with Express and Mongoose.
What's the best practice for handling Mongoose errors in one central place?
When a database error occurs anywhere, I want to return an HTTP 500 error page with an error message:
if (error) {
    res.writeHead(500, {'Content-Type': 'application/json'});
    res.write(JSON.stringify({ error: String(error) }));
    res.end();
}
In the old tutorial http://blog-next-stage.learnboost.com/mongoose/ I read about a global error listener:
Mongoose.addListener('error', function(errObj, scope_of_error));
But this doesn't seem to work, and I cannot find anything about this listener in the official Mongoose documentation. Do I have to check for errors after every Mongo request?
If you're using Express, errors are typically handled either directly in your route or within an API built on top of Mongoose, forwarding the error along to next.
app.get('/tickets', function (req, res, next) {
    PlaneTickets.find({}, function (err, tickets) {
        if (err) return next(err);
        // or if no tickets are found, maybe
        if (0 === tickets.length) return next(new NotFoundError());
        ...
    });
});
The NotFoundError could be sniffed in your error-handler middleware to provide customized messaging.
Some abstraction is possible, but you'll still require access to the next method in order to pass the error down the route chain.
PlaneTickets.search(term, next, function (tickets) {
    // I don't like this b/c it hides what's going on and changes the (err, result) callback convention of node
});
As for centrally handling Mongoose errors, there's not really one place to handle them all. Errors can be handled at several different levels:
Connection errors are emitted on the connection your models are using, so:
mongoose.connect(..);
mongoose.connection.on('error', handler);
// or if using separate connections
var conn = mongoose.createConnection(..);
conn.on('error', handler);
For typical queries/updates/removes, the error is passed to your callback:
PlaneTickets.find({..}, function (err, tickets) {
if (err) ...
If you don't pass a callback, the error is emitted on the Model if you are listening for it:
PlaneTickets.on('error', handler); // note the loss of access to the `next` method from the request!
ticket.save(); // no callback passed
If you do not pass a callback and are not listening for errors at the model level, they will be emitted on the model's connection.
The key take-away here is that you want access to next somehow, to pass the error along.
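For completeness, here is roughly what that error-handler middleware could look like. This is a sketch: NotFoundError is a custom error type you would define yourself, not something Express or Mongoose ships with, and it assumes an Express version with res.status:

// custom error type (an assumption, not a built-in)
function NotFoundError(message) {
    this.name = 'NotFoundError';
    this.message = message || 'not found';
}
NotFoundError.prototype = Object.create(Error.prototype);

// Express error-handling middleware: the four-argument signature marks it as such
app.use(function (err, req, res, next) {
    if (err instanceof NotFoundError) {
        return res.status(404).json({ error: err.message });
    }
    res.status(500).json({ error: 'internal server error' });
});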
Hey, this is the simplest way I found:
try {
    // ... your Mongoose operation here
} catch (error) {
    console.log(error);
    // check for validation errors
    if (error.name === "ValidationError") {
        const message = Object.values(error.errors).map(value => value.message);
        return res.status(400).json({
            error: message
        });
    }
    res.status(400).json(error.message);
}
Just copy and paste.
I have been searching for an example of how I can stream the result of a MongoDB query to a Node.js client. All the solutions I have found so far seem to read the query result at once and then send it back to the client.
Instead, I would (obviously) like to supply a callback to the query method and have MongoDB call it when the next chunk of the result set is available.
I have been looking at Mongoose; should I perhaps use a different driver?
Jan
node-mongodb-native (the underlying driver that every MongoDB client uses in Node.js) has, besides the cursor API that others mentioned, a nice stream API (#458). Unfortunately I did not find it documented anywhere else.
Update: there are docs.
It can be used like this:
var stream = collection.find().stream()
stream.on('error', function (err) {
    console.error(err)
})
stream.on('data', function (doc) {
    console.log(doc)
})
It actually implements the ReadableStream interface, so it has all the goodies (pause/resume etc)
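For instance, pause/resume lets you throttle how fast documents are read while you do slow per-document work. A sketch, where doSlowWork is a made-up placeholder:

stream.on('data', function (doc) {
    stream.pause()                // stop the cursor while we work
    doSlowWork(doc, function () {
        stream.resume()           // ask the cursor for the next doc
    })
})
stream.on('end', function () {
    console.log('all documents processed')
})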
Streaming in Mongoose became available in version 2.4.0, which appeared three months after you posted this question:
Model.where('created').gte(twoWeeksAgo).stream().pipe(writeStream);
More elaborated examples can be found on their documentation page.
Mongoose is not really a "driver"; it's actually an ORM wrapper around the MongoDB driver (node-mongodb-native).
To do what you're doing, take a look at the driver's .find and .each methods. Here's some code from the examples:
// Find all records. find() returns a cursor
collection.find(function(err, cursor) {
    sys.puts("Printing docs from Cursor Each");
    cursor.each(function(err, doc) {
        if (doc != null) sys.puts("Doc from Each " + sys.inspect(doc));
    });
});
To stream the results, you're basically replacing that sys.puts with your "stream" function. I'm not sure how you plan to stream the results; I think you can do response.write() + response.flush(), but you may also want to check out socket.io.
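For example, a rough sketch of the response.write() approach, with collection assumed to be an open collection as in the snippet above (note that plain http.ServerResponse has no flush() method, so this relies on write() alone):

var http = require('http');

http.createServer(function(req, res) {
    res.writeHead(200, { 'Content-Type': 'text/plain' });
    collection.find(function(err, cursor) {
        if (err) { res.end(); return; }
        cursor.each(function(err, doc) {
            if (doc != null) {
                res.write(JSON.stringify(doc) + "\n"); // one doc per chunk
            } else {
                res.end(); // a null doc means the cursor is exhausted
            }
        });
    });
}).listen(8000);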
Here is the solution I found (please correct me, anyone, if this is the wrong way to do it):
(Also excuse the bad coding - it's too late for me now to prettify this.)
var sys = require('sys');
var http = require("http");
var Db = require('/usr/local/src/npm/node_modules/mongodb/lib/mongodb').Db,
    Connection = require('/usr/local/src/npm/node_modules/mongodb/lib/mongodb').Connection,
    Collection = require('/usr/local/src/npm/node_modules/mongodb/lib/mongodb').Collection,
    Server = require('/usr/local/src/npm/node_modules/mongodb/lib/mongodb').Server;

var db = new Db('test', new Server('localhost', Connection.DEFAULT_PORT, {}));
var products;

db.open(function (error, client) {
    if (error) throw error;
    products = new Collection(client, 'products');
});
function ProductReader(collection) {
    this.collection = collection;
}

ProductReader.prototype = new process.EventEmitter();

ProductReader.prototype.do = function() {
    var self = this;
    this.collection.find(function(err, cursor) {
        if (err) {
            self.emit('e1');
            return;
        }
        sys.puts("Printing docs from Cursor Each");
        self.emit('start');
        cursor.each(function(err, doc) {
            if (err) {
                self.emit('e2');
                self.emit('end');
                return;
            }
            if (doc != null) {
                sys.puts("doc:" + doc.name);
                self.emit('doc', doc);
            } else {
                self.emit('end');
            }
        });
    });
};
http.createServer(function(req, res) {
    var pr = new ProductReader(products);
    pr.on('e1', function() {
        sys.puts("E1");
        res.writeHead(400, {"Content-Type": "text/plain"});
        res.write("e1 occurred\n");
        res.end();
    });
    pr.on('e2', function() {
        sys.puts("E2");
        res.write("ERROR\n");
    });
    pr.on('start', function() {
        sys.puts("START");
        res.writeHead(200, {"Content-Type": "text/plain"});
        res.write("<products>\n");
    });
    pr.on('doc', function(doc) {
        sys.puts("A DOCUMENT" + doc.name);
        res.write("<product><name>" + doc.name + "</name></product>\n");
    });
    pr.on('end', function() {
        sys.puts("END");
        res.write("</products>");
        res.end();
    });
    pr.do();
}).listen(8000);
I have been studying MongoDB streams myself; while I do not have the entire answer you are looking for, I do have part of it.
You can set up a socket.io stream.
This uses socket.io and socket.io-stream, available on npm, and MongoDB for the database, because using a 40-year-old database that has issues is incorrect; time to modernize. Also, the 40-year-old DB is SQL, and SQL doesn't do streams, to my knowledge.
So although you only asked about data going from the server to the client, I also want to cover client to server in my answer, because I can never find it anywhere when I search, and I wanted to set up one place with both the send and receive elements via streams so everyone could get the hang of it quickly.
Client side, sending data to the server via streaming:
var stream = ss.createStream();
var blobstream = ss.createBlobReadStream(data);
blobstream.pipe(stream);
ss(socket).emit('data.stream', stream, {}, function(err, successful_db_insert_id) {
    // if you get back the id, it went into the db and everything worked
});
Server side, receiving the stream from the client and replying when done:
ss(socket).on('data.stream', function(stream, o, c) {
    var buffer = [];
    stream.on('data', function(chunk) { buffer.push(chunk); });
    stream.on('end', function() {
        buffer = Buffer.concat(buffer);
        db.insert(buffer, function(err, res) {
            // pass the inserted id back through the acknowledgement callback
            // (the exact result shape depends on your driver version)
            c(err, res);
        });
    });
});
This is the other half: fetching the data and streaming it to the client.
Client side, requesting and receiving stream data from the server:
var stream = ss.createStream();
var binarystring = '';
stream.on('data', function(chunk) {
    for (var i = 0; i < chunk.length; i++) {
        binarystring += String.fromCharCode(chunk[i]);
    }
});
stream.on('end', function() { data = window.btoa(binarystring); c(null, data); });
ss(socket).emit('data.stream.get', stream, o, c);
Server side, replying to the request for streaming data:
ss(socket).on('data.stream.get', function(stream, o, c) {
    stream.on('end', function() {
        c(null, true);
    });
    db.find().stream().pipe(stream);
});
The very last one there is the only one where I am kind of just pulling it out of my butt, because I have not yet tried it, but it should work. I actually do something similar, but I write the file to the hard drive and then use fs.createReadStream to stream it to the client. So I'm not 100% sure, but from what I've read it should work; I'll get back to you once I test it.
P.S. If anyone wants to bug me about my colloquial way of talking: I'm Canadian, and I love saying "eh". Come at me with your hugs and hits, bros/sis' :D
Everything I can find for rendering a page with Mongoose results says to do it like this:
users.find({}, function(err, docs) {
    res.render('profile/profile', {
        users: docs
    });
});
How could I return the results from the query, more like this?
var a_users = users.find({}); //non-working example
So that I could get multiple results to publish on the page?
like:
/* non-working example */
var a_users = users.find({});
var a_articles = articles.find({});
res.render('profile/profile', {
    users: a_users,
    articles: a_articles
});
Can this be done?
You're trying to force a synchronous paradigm. It just doesn't work. Node.js is single-threaded, for the most part -- when IO is done, the execution context is yielded. Signaling is managed with a callback. This means that you either have nested callbacks, named functions, or a flow-control library to make things nicer looking.
https://github.com/caolan/async#parallel
async.parallel([
    function(cb) {
        users.find({}, cb);
    },
    function(cb) {
        articles.find({}, cb);
    }
], function(err, results) {
    // results contains both users and articles
});
I'll play the necromancer here, as I still see another, better way to do it.
Use the wonderful promise library Bluebird and its promisifyAll() method:
var Promise = require('bluebird');
var mongoose = require('mongoose');
Promise.promisifyAll(mongoose); // key part - promisification

var users, articles; // load mongoose models "users" and "articles" here

Promise.props({
    users: users.find().execAsync(),
    articles: articles.find().execAsync()
})
.then(function(results) {
    res.render('profile/profile', results);
})
.catch(function(err) {
    res.send(500); // oops - we're even handling errors!
});
Key parts are as follows:
Promise.promisifyAll(mongoose);
This makes all Mongoose methods (including those of its models) available as functions returning promises, with an Async suffix (.exec() becomes .execAsync(), and so on). The .promisifyAll() method is nearly universal in the Node.js world; you can use it on anything that provides asynchronous functions taking a callback as their last argument.
Promise.props({
users: users.find().execAsync(),
articles: articles.find().execAsync()
})
Bluebird's .props() method takes an object with promises as its properties and returns a collective promise that resolves when both database queries (here: promises) have returned their results. The resolved value is our results object in the final function:
results.users - users found in the database by Mongoose
results.articles - articles found in the database by Mongoose (d'uh)
As you can see, we are not even getting near the callback-hell indentation. Both database queries are executed in parallel; there's no need for one of them to wait for the other. The code is short and readable, practically corresponding in length and complexity (or rather, lack of it) to the wishful "non-working example" posted in the question itself.
Promises are cool. Use them.
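For the record, on current Node and Mongoose versions (where queries are thenable) the same idea no longer even needs Bluebird. A sketch, where the route path and the Users/Articles model names are assumptions:

app.get('/profile', async function(req, res) {
    try {
        // both queries run in parallel; await the combined promise
        const [users, articles] = await Promise.all([
            Users.find({}).exec(),
            Articles.find({}).exec()
        ]);
        res.render('profile/profile', { users: users, articles: articles });
    } catch (err) {
        res.status(500).send(err.message);
    }
});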
The easy way:
var userModel = mongoose.model('users');
var articleModel = mongoose.model('articles');

userModel.find({}, function (err, db_users) {
    if (err) { /* error!!! */ }
    articleModel.find({}, function (err, db_articles) {
        if (err) { /* error!!! */ }
        res.render('profile/profile', {
            users: db_users,
            articles: db_articles
        });
    });
});
Practically every function in Node.js is asynchronous, and so is Mongoose's find. If you want to call it serially, you should use something like the Slide library.
But in your case I think the easiest way is to nest the callbacks (this allows, for example, querying articles for the previously selected users) or do it completely in parallel with the help of async libraries (see Flow control / Async goodies).
I have a function that I use quite a bit as a return helper for Node functions.
function freturn(value, callback) {
    if (callback) {
        return callback(value);
    }
    return value;
}
Then I have an optional callback parameter in all of the signatures.
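For example (getAnswer is just a made-up demo):

// works with or without a callback
function getAnswer(callback) {
    var value = 42; // computed synchronously
    return freturn(value, callback);
}

var direct = getAnswer();  // returns 42 directly
getAnswer(function(v) {    // or hands 42 to the callback
    console.log('got', v);
});

Note that this only helps when the value is available synchronously; it cannot make an asynchronous Mongoose query return its result directly.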
I was dealing with a very similar thing, but using socket.io and DB access from a client. My find was throwing the contents of my DB back to the client before the database had a chance to get the data... So, for what it's worth, I will share my findings here:
My function for retrieving the DB:
//Read Boards - complete DB
var readBoards = function() {
var callback = function() {
return function(error, data) {
if(error) {
console.log("Error: " + error);
}
console.log("Boards from Server (fct): " + data);
}
};
return boards.find({}, callback());
};
My socket event listener:
socket.on('getBoards', function() {
var query = dbConnection.readBoards();
var promise = query.exec();
promise.addBack(function (err, boards) {
if(err)
console.log("Error: " + err);
socket.emit('onGetBoards', boards);
});
});
So to solve the problem we use the promise that Mongoose gives us, and once we have received the data from the DB, the socket emits it back to the client...
For what it's worth...
You can achieve the desired result with the following code. I hope this helps.
var async = require('async');

// custom imports
var User = require('../models/user');
var Article = require('../models/article');

var List1Objects = User.find({});
var List2Objects = Article.find({});

var resourcesStack = {
    usersList: List1Objects.exec.bind(List1Objects),
    articlesList: List2Objects.exec.bind(List2Objects)
};

async.parallel(resourcesStack, function (error, resultSet) {
    if (error) {
        res.status(500).send(error);
        return;
    }
    res.render('home', resultSet);
});