Mongoose JS promises? Or how to manage batch save - mongodb

How do I manage batch save in Mongoose? I saw it may not be possible yet:
How can I save multiple documents concurrently in Mongoose/Node.js?
There's some mention of using a flow control library like Q, but I also notice there are promises in Mongoose. Can they be used? Can I do something like jQuery's Deferred/Promises:
$.when(obj1.save(), obj2.save(), obj3.save()).then ->
# do something?

Yes, you can do this with promises. If you were using the Q promise library, you could rewrite #matz3's code like this:
var tasks = [];
for (var i = 0; i < docs.length; i++) {
  tasks.push(docs[i].save());
}

Q.all(tasks)
  .then(function (results) {
    console.log(results);
  }, function (err) {
    console.log(err);
  });
We start all the operations one at a time in the loop, but we don't wait for any of them to complete, so they run in parallel. We add a promise (that acts like a placeholder for the result) to an array. We then wait for all the promises in the array of promises to complete.
Most good Promises/A+ compatible libraries have some equivalent to Q.all.
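For example, with the native ES6 Promise (no external library) the same pattern looks like this; a minimal sketch, assuming doc.save() returns a promise as it does in recent Mongoose versions:
var tasks = docs.map(function (doc) {
  return doc.save(); // returns a promise in recent Mongoose versions
});

Promise.all(tasks)
  .then(function (results) {
    console.log(results);
  }, function (err) {
    console.log(err);
  });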

Mongoose now allows you to choose which Promise implementation to use.
Here I am using the default system Promise (ES6) baked into Node.js:
var mongoose = require('mongoose');
mongoose.Promise = global.Promise; // use the system (ES6) implementation

Promise.all([obj1.save(), obj2.save(), obj3.save()])
  .then(function (resultSaves) {
    console.log('parallel promise save result:');
    console.log(resultSaves);
    mongoose.disconnect();
  }).catch(function (err) {
    console.log('ERROR on promise save:');
    console.log(err);
    mongoose.disconnect();
  });
node --version
v4.1.1
mongoose#4.1.8

Since Mongoose now supports promises, you may use Promise.all().then(), which resolves once all the promises are resolved:
Promise.all([
  obj1.save(),
  obj2.save(),
  obj3.save()
])
  .then(console.log)
  .catch(console.error)
In fact, if you're always calling the save() method you can use Array.map() here:
Promise.all([ obj1, obj2, obj3 ].map( obj => obj.save() ))
And you can also use ES6 syntax to destructure the resulting array:
Promise.all(
  [ obj1, obj2, obj3 ].map( obj => obj.save() )
)
  .then(([ savedObj1, savedObj2, savedObj3 ]) => {
    // do something with your saved objects...
  })
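On Node 8+ the same flow reads naturally with async/await; a minimal sketch, assuming this runs inside an async function:
// Equivalent sketch with async/await (Node 8+)
const [savedObj1, savedObj2, savedObj3] = await Promise.all(
  [obj1, obj2, obj3].map(obj => obj.save())
);
// do something with your saved objects...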

Try the parallel function of the async module.
var functions = [];
for (var i = 0; i < docs.length; i++) {
  functions.push((function (doc) {
    return function (callback) {
      doc.save(callback);
    };
  })(docs[i]));
}

async.parallel(functions, function (err, results) {
  console.log(err);
  console.log(results);
});
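The immediately-invoked function in the loop is only there to capture doc; with Array.map the same task list can be built more directly (a sketch of the same idea):
// Build one task function per document; map captures doc without an IIFE.
var functions = docs.map(function (doc) {
  return function (callback) {
    doc.save(callback);
  };
});

async.parallel(functions, function (err, results) {
  console.log(err);
  console.log(results);
});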

To save multiple mongoose docs in parallel, you can do something simple like this (assuming you have an array named docs of documents to save):
var count = docs.length;
docs.forEach(function (doc) {
  doc.save(function (err, result) {
    // Note: err is ignored here; see the variant below for error handling.
    if (--count === 0) {
      // All done; call the containing function's callback
      return callback();
    }
  });
});
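Note that the snippet above ignores save errors. A variant that surfaces the first error without calling the containing callback twice might look like this (a sketch, not part of the original answer):
var count = docs.length;
var failed = false;
docs.forEach(function (doc) {
  doc.save(function (err) {
    if (failed) return;        // an error was already reported
    if (err) {
      failed = true;
      return callback(err);    // report the first failure
    }
    if (--count === 0) {
      return callback();       // all saves completed successfully
    }
  });
});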

A more concise example of how to use async.parallel would be:
async.parallel([obj1.save.bind(obj1), obj2.save.bind(obj2), obj3.save.bind(obj3)], callback);
Since the callback convention is the same in Mongoose as in async (err, result), you don't need to wrap the saves in your own callbacks; just bind each save to its document so this is preserved, add them to an array, and you will get a callback when all of them have finished.

What about async.queue?
A simple example:
var queue = async.queue(function (obj, callback) {
  return obj.save(callback);
});

for (var i in objs) {
  var obj = objs[i];
  // Some changes on object obj
  queue.push(obj);
}
If you need a callback after the queue is emptied:
var emptyQueue = true;
var queue = async.queue(function(obj, callback) {
return obj.save(callback);
});
queue.drain = function() {
// Every callbacks are finished
// bigCallback();
};
for (var i in objs) {
var obj = objs[i];
// Some changes on object obj
queue.push(obj);
emptyQueue = false;
}
if (emptyQueue) {
// Call manually queue drain in case of the queue is empty
// and we need to call bigCallback() for example
return queue.drain();
}
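One caveat: in async v3 drain became a method rather than an assignable property, so on newer versions the registration would look like this (check which version you have installed):
// async v3+: register the drain handler by calling drain()
queue.drain(function () {
  // every queued save has finished
});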

@ForbesLindesay Why load an external library when you can use Mongoose's implementation of promises and create your own all?
Create a module that enhances the Mongoose promise with all:
var Promise = require("mongoose").Promise;

Promise.all = function (promises) {
  var mainPromise = new Promise();
  if (promises.length === 0) {
    mainPromise.resolve(null, promises);
  }
  var pending = 0;
  promises.forEach(function (p, i) {
    pending++;
    p.then(function (val) {
      promises[i] = val;
      if (--pending === 0) {
        mainPromise.resolve(null, promises);
      }
    }, function (err) {
      mainPromise.reject(err);
    });
  });
  return mainPromise;
};

module.exports = Promise;
Then use it with mongoose:
require('./promise')
...
var tasks = [];
for (var i = 0; i < docs.length; i++) {
  tasks.push(docs[i].save());
}

mongoose.Promise.all(tasks)
  .then(function (results) {
    console.log(results);
  }, function (err) {
    console.log(err);
  });

Related

Order of save() and find() in NodeJS with MongoDB

I'm trying to create a new record in my MongoDB ("thisPlayer") and save it to my database, then find all records in my database (including the new one) and render them.
I am having trouble understanding why my save() function actually occurs after my find() function. When this code executes, the find() function does not include my new thisPlayer record. However, after the find() runs, the save occurs -- the record is saved to the database AFTER the find() ran.
Thanks in advance!
const playerNumber = async function countPlayers() {
  return new Promise((resolve, reject) => {
    Player.count(function (err, numOfDocs) {
      err ? reject(err) : resolve(numOfDocs);
      console.log('I have ' + numOfDocs + ' documents in my collection');
    });
  });
}

async function playerProfile() {
  var count = await playerNumber();
  console.log("count already in db: " + count);
  if (count === 0) {
    teamCaptain = 1;
  } else {
    teamCaptain = 0;
  }
  count++;
  const thisPlayer = new Player({
    playerNum: count,
    playerName: Name,
  });
  thisPlayer.save();
  Player.find({}, function (err, playaz) {
    var playerOne;
    if (playaz.length > 0) {
      playerOne = playaz[0].playerName;
    } else {
      playerOne = "";
    }
    res.renderPjax("leavetakings",
      { player1: "1: " + playerOne }
    );
  });
}

playerProfile();
You need to use await on the asynchronous calls, most importantly on the save, so the find only runs after the new record is stored. For example:
await thisPlayer.save();
const playaz = await Player.find({});
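Concretely, awaiting both the save and the find inside playerProfile keeps the operations in order. A sketch of the relevant part:
async function playerProfile() {
  // ... count players and build thisPlayer as above ...
  await thisPlayer.save();               // wait for the insert to finish
  const playaz = await Player.find({});  // the new record is now visible
  const playerOne = playaz.length > 0 ? playaz[0].playerName : "";
  res.renderPjax("leavetakings", { player1: "1: " + playerOne });
}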

Waiting for meteor cursor in method

I have a large aggregate query that required me to pass "allowDiskUse: true" as an option. This would not work with the aggregate package, as described here:
https://github.com/meteorhacks/meteor-aggregate/issues/11
My Meteor method is defined below. When I call the method I need to wait for the 'data' events to complete before anything is returned to the client, but nothing I have tried allows me to get that data safely up to the front end.
Meteor.methods({
  'getSummary': function (dept, startDate, endDate, filterType) {
    f = myQuery(startdate, enddate, dayFinalGroup);
    f.on("data", Meteor.bindEnvironment(function (row) {
      // load an array or something here to return
    }));
    f.once("end", Meteor.bindEnvironment(function () {
      // tidy up, in my case end the stream
    }));
    // here I'd return the array loaded
  },
});
This is my front end.
Meteor.call(
  'getSummary', 0, Session.get('start_date'), Session.get('end_date'), 1,
  function (error, result) {
    if (error) {
      console.log(error);
    } else {
      Session.set('sumTotals', result);
    }
  }
);
Finally got it. I utilized Meteor.wrapAsync:
'getSummary': function (dept, startDate, endDate, filterType) {
  console.log(dept);
  console.log(startDate);
  console.log(endDate);
  console.log(filterType);
  var startdate = new Date(startDate);
  var enddate = new Date(endDate);
  var arr = [];
  f = myQuery(startdate, enddate, dayFinalGroup);
  var fetchCursor = Meteor.wrapAsync(function fetchCursor(cursor, cb) {
    cursor.each(function (err, doc) {
      if (err) return cb(err);
      if (!doc) return cb(null, { done: true }); // no more documents
      arr.push(doc);
    });
  });
  var myData = fetchCursor(f);
  return arr;
},

sails.js the return object from service is undefined when using a find query

I created a service called AppService.
Its function getUserPostionOptions is supposed to return an object:
getUserPostionOptions: function (user) {
  // PositionOptions.findOne({id:'53f218deed17760200778cfe'}).exec(function (err, positionOptions) {
  var positionDirectionsOptions = [1, 2, 3];
  var positionLengthsOptions = [4, 5, 6];
  var object = {
    directions: positionDirectionsOptions,
    lengths: positionLengthsOptions
  };
  return object;
  // });
}
This works, in my controller positionOptions gets populated correctly:
var positionOptions = AppService.getUserPostionOptions(user);
However, when I uncomment the findOne query, the item is found but the function returns undefined.
Thanks in advance for your help.
The Sails.js ORM (like almost all Node.js database querying methods) is non-blocking and delivers results via callback functions, so a value returned inside the callback never reaches your controller. You have to change your code to:
getUserPostionOptions: function (user, callback) {
  PositionOptions.findOne({id: '53f218deed17760200778cfe'}).exec(function (err, positionOptions) {
    var positionDirectionsOptions = [1, 2, 3];
    var positionLengthsOptions = [4, 5, 6];
    var object = {
      directions: positionDirectionsOptions,
      lengths: positionLengthsOptions
    };
    callback(null, object); // null indicates that your method has no error
  });
}
Then just use it:
AppService.getUserPostionOptions(user, function (err, options) {
  if (!err) {
    sails.log.info("Here is your received data:");
    sails.log.info(options);
  }
});
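As an alternative, recent Waterline versions also return promises from queries, so the service could return a promise instead of taking a callback; a sketch, assuming promise support in your Sails version:
// Promise-based variant (assumes your Waterline version returns
// promises from findOne, as recent versions do):
getUserPostionOptions: function (user) {
  return PositionOptions.findOne({ id: '53f218deed17760200778cfe' })
    .then(function (positionOptions) {
      return {
        directions: [1, 2, 3],
        lengths: [4, 5, 6]
      };
    });
}

// Usage in a controller:
AppService.getUserPostionOptions(user).then(function (options) {
  sails.log.info(options);
});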

Add new data from restful api to angularjs scope

I'm trying to create a list with endless scroll in angularjs. For this I need to fetch new data from an api and then append it to the existing results of a scope in angularjs. I have tried several methods, but none of them worked so far.
Currently this is my controller:
userControllers.controller('userListCtrl', ['$scope', 'User',
  function ($scope, User) {
    $scope.users = User.query();
    $scope.$watch('users');
    $scope.orderProp = 'name';

    window.addEventListener('scroll', function (event) {
      if (document.body.offsetHeight < window.scrollY +
          document.documentElement.clientHeight + 300) {
        var promise = User.query();
        $scope.users = $scope.users.concat(promise);
      }
    }, false);
  }
]);
And this is my service:
userServices.factory('User', ['$resource',
  function ($resource) {
    return $resource('api/users', {}, {
      query: {
        method: 'GET',
        isArray: true
      }
    });
  }
]);
How do I append new results to the scope instead of replacing the old ones?
I think you may need to use $scope.$apply() when the promise resolves, because it isn't part of the Angular digest loop.
Try something like:
User.query().$promise.then(function (result) {
  $scope.$apply(function () {
    // concat new users
  });
});
The following code did the trick:
$scope.fetch = function () {
  // Use User.query().$promise.then(...) to parse the results
  User.query().$promise.then(function (result) {
    for (var i in result) {
      // There is more data in the result than just the users, so check types.
      if (result[i] instanceof User) {
        // Never concat and set the results, just append them.
        $scope.users.push(result[i]);
      }
    }
  });
};

window.addEventListener('scroll', function (event) {
  if (document.body.offsetHeight < window.scrollY +
      document.documentElement.clientHeight + 300) {
    $scope.fetch();
  }
}, false);

Mongodb inserts not completing successfully (using node.js)

I've got a Node.js script that loads an XML file. It loops through each element, and the log says they're all getting inserted correctly, but when the script has completed, a check of db.collection.count() tells me that far fewer records have been inserted into the database than expected.
How can I make mongo and node.js play nicely with inserts?
GrabRss = function () {
  var http = require('http');
  var sys = require('sys');
  var xml2js = require('xml2js');
  var fs = require('fs');
  var Db = require('../lib/mongodb').Db,
      Conn = require('../lib/mongodb').Connection,
      Server = require('../lib/mongodb').Server,
      // BSON = require('../lib/mongodb').BSONPure;
      BSON = require('../lib/mongodb').BSONNative;

  var data;
  var checked = 0;
  var len = 0;

  GotResponse = function (res) {
    var ResponseBody = "";
    res.on('data', DoChunk);
    res.on('end', EndResponse);

    function DoChunk(chunk) {
      ResponseBody += chunk;
    }
    function EndResponse() {
      //console.log(ResponseBody);
      var parser = new xml2js.Parser();
      parser.addListener('end', GotRSSObject);
      parser.parseString(ResponseBody);
    }
  }

  GotError = function (e) {
    console.log("Got error: " + e.message);
  }

  GotRSSObject = function (r) {
    items = r.item;
    //console.log(sys.inspect(r));
    var db = new Db('myrssdb', new Server('localhost', 27017, {}), {native_parser: false});
    db.open(function (err, db) {
      db.collection('items', function (err, col) {
        len = items.length;
        for (i in items) {
          SaveItem(items[i], col);
        }
      });
    });
  }

  SaveItem = function (m, c) {
    /* REPLACE FROM HERE IN ANSWER */
    c.find({'id': m.id}, function (err, cursor) {
      cursor.nextObject(function (err, doc) {
        if (doc == null) {
          c.insert(m, function (err, docs) {
            docs.forEach(function (doc) {
              console.log('Saved: ' + doc.id + ' ' + doc.keywords);
            });
          });
        } else {
          console.log('Skipped: ' + m.id);
        }
        if (++checked >= len) {
          process.exit(0);
        }
      });
    });
    /* REPLACE TO HERE IN ANSWER */
  }

  //http.get(options, GotResponse).on('error', GotError);
  var x2js = new xml2js.Parser();
  fs.readFile('/home/ubuntu/myrss.rss', function (err, data) {
    x2js.parseString(data);
  });
  x2js.addListener('end', GotRSSObject);
}
GrabRss();
As requested, the code is above. The file is read locally (though it used to be an HTTP request; it's a 25 MB file now, with lots of RSS records).
I just ran the file with ~10k records in it, and a count of the items in MongoDB after the script has run is about 800 items.
As per the answer I replaced the insert code:
with:
c.update({'id': m.id}, {$set: m}, {upsert: true, safe: true}, function (err) {
  if (err) console.warn(err.message);
  else console.log(m.keywords);
  if (++checked >= len) {
    console.log(len);
    //process.exit(0);
    process.exit(0);
  }
});
By default, MongoDB writes do not check for an error.
You need to set safe:true in the options to your insert, as explained in the documentation for node-mongodb-native:
var collection = new mongodb.Collection(client, 'test_collection');
collection.insert({hello: 'world'}, {safe: true},
  function (err, objects) {
    if (err) console.warn(err.message);
    if (err && err.message.indexOf('E11000 ') !== -1) {
      // this _id was already inserted in the database
    }
  });
Otherwise your callback will not be invoked for errors and your client won't know about them.
You probably also want to look at upserts and updates, as it is incredibly inefficient to do a find and then insert-if-null in a loop.
Instead, an upsert will update the matching document if it exists, and otherwise insert a new one. An explanation of how to do this in Node is in the documentation for the driver.
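For reference, the find-then-insert in the question collapses to a single upsert call per document, mirroring the update shown in the question's edit:
// One round trip per document: update if the id exists, insert otherwise.
c.update({'id': m.id}, {$set: m}, {upsert: true, safe: true}, function (err) {
  if (err) console.warn(err.message);
});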