Bluebird Promises in waterline .native() sailsjs with sails-mongo - mongodb

According to the .native() documentation, the way to use a .native() query with sails-mongo is:
Pet.native(function(err, collection) {
if (err) return res.serverError(err);
collection.find({}, {
name: true
}).toArray(function (err, results) {
if (err) return res.serverError(err);
return res.ok(results);
});
});
How can I avoid the callbacks here and use promises instead? Note that I have to run aggregate queries, so I have to use .native().

As mentioned here, open bootstrap.js in config and monkey-patch the query method of every model with a promise, like this:
module.exports.bootstrap = function(cb) {
var Promise = require('bluebird');
Object.keys(sails.models).forEach(function (key) {
if (sails.models[key].query) {
sails.models[key].query = Promise.promisify(sails.models[key].query);
}
});
cb();
};
As a bonus, you get to use the latest version of Bluebird with all models. Hope it helps.
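For the aggregate case in the question, another option is to promisify .native() itself and chain from there. A minimal sketch, assuming Bluebird and the Pet model from the docs (the aggregation pipeline is only an illustration):
var Promise = require('bluebird');
// Resolve with the raw MongoDB collection instead of taking a callback
var getCollection = Promise.promisify(Pet.native, { context: Pet });
getCollection()
  .then(function (collection) {
    // With the native driver, toArray() returns a promise when no callback is passed
    return collection.aggregate([
      { $group: { _id: '$name', count: { $sum: 1 } } }
    ]).toArray();
  })
  .then(function (results) {
    return res.ok(results);
  })
  .catch(function (err) {
    return res.serverError(err);
  });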

Related

Loopback - How to use bulkUpdate method

I'm currently using Loopback v3 and wanted to upsert many records at once in a collection; I found the bulkUpdate method in the documentation (http://apidocs.loopback.io/loopback/#persistedmodel-bulkupdate) but I couldn't figure out how to make it work.
How can I create the updates array from the createUpdates() method mentioned in the documentation? Can anyone help me with a simple example of using this method?
There is an alternative to the bulkUpdate method, found on Stack Overflow in MongoDB aggregation on Loopback.
A mixin can easily be created and reused across models. My sample bulkUpsert mixin code is below:
Model.bulkUpsert = function(body, cb) {
try {
Model.getDataSource().connector.connect(async (err, db) => {
if (err) {
return cb(err);
}
// Define variable to hold the description of the first set of validation errors found
let validationErrors = '';
// Build array of updateOne objects used for MongoDB connector's bulkWrite method
const updateOneArray = [];
// Loop through all body content and stop the loop if a validation error is found
const hasError = body.some(row => {
// Check if it is a valid model instance
const instance = new Model(row);
if (!instance.isValid()) {
// A validation error has been found
validationErrors = JSON.stringify(instance.errors);
// By returning true we stop/break the loop
return true;
}
// Remove ID in the row
const data = Object.assign({}, row);
delete data.id;
// Push into the update array
updateOneArray.push({
updateOne: {
filter: { _id: row.id },
update: { $set: Object.assign({ _id: row.id }, data) },
upsert: true
}
});
// No validation error found
return false;
});
// Check if a validation error was found while looping through the body content
if (hasError) {
return cb(new Error(validationErrors));
}
// No validation data error was found
// Get database collection for model
const collection = db.collection(Model.name);
// Execute Bulk operation
return collection.bulkWrite(updateOneArray, {}, (err, res) => {
// Check if the process failed
if (err) {
console.error('The bulk upsert finished unsuccessfully', err);
return cb(err);
}
// Check if there were errors updating any record
if (res.hasWriteErrors()) {
console.error(`The bulk upsert had ${res.getWriteErrorCount()} errors`, res.getWriteErrors());
}
// Finished successfully, return result
return cb(null, {
received: body.length,
handled: res.upsertedCount + res.insertedCount + res.matchedCount
});
});
});
}
catch (err) {
console.error('A critical error occurred while doing bulk upsert', err);
return cb(err);
}
return null;
};
Ref: Mongodb query documentation
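A hypothetical usage sketch of the mixin (the Product model and the sample rows are assumptions, not part of the original answer):
Product.bulkUpsert(
  [
    { id: '5a7e0f5c2b1a4a0012345678', name: 'first' },   // sample ids for illustration only
    { id: '5a7e0f5c2b1a4a0012345679', name: 'second' }
  ],
  function (err, result) {
    if (err) return console.error('Bulk upsert failed', err);
    // result contains the counts returned by the mixin
    console.log('received:', result.received, 'handled:', result.handled);
  }
);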

meteor - How to update subscription documents?

I subscribe to a collection from the server. When I try to delete one document from the client side it shows remove failed: Access denied, so I tried to delete it from the server side via Meteor.call; that works fine, but the client side still shows the same number of documents.
The code below explains it better.
ClientJS:
Template.Message.onCreated(function () {
this.autorun(function () {
this.subscription = Meteor.subscribe('mymessage');
}.bind(this));
});
Template.Message.onRendered(function () {
this.autorun(function () {
if (this.subscription.ready()) {
console.log(Message.find().count()); //10
}
}.bind(this));
});
ServerJS:
Meteor.publish('mymessage', function() {
console.log(Message.find().count()); //10
return Message.find();
});
In a click event
ClientJS:
Meteor.call("deletemsg", this._id._str, function(error, result){
if(!error){
console.log(Message.find().count()); // 10, Want to update document here.
}
});
Serverjs
Meteor.methods({
deletemsg: function (delmsg) {
if(Message.remove({"_id":new Mongo.ObjectID(delmsg)})){
console.log(Message.find().count()); //9
return true;
} else {
throw new Meteor.Error("some error message");
}
}
});
Note: I am using an existing MongoDB database.
Your error is probably related to your configuration of the built-in allow and deny rules. Somewhere on your server you should have a piece of code looking like this (Message being your collection):
Message.allow({
insert: function (userId, doc) {
//allow rule
},
update: function (userId, doc, fields, modifier) {
//allow rule
},
remove: function (userId, doc) {
//allow rule
}
});
Or an equivalent with deny. It looks like your current user is not allowed to delete (i.e. remove) messages from the collection.
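For example, a minimal remove rule might look like this (the ownerId field is an assumption about how your messages store their author):
Message.allow({
  remove: function (userId, doc) {
    // Only let a logged-in user remove their own messages
    return !!userId && doc.ownerId === userId;
  }
});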
Quick sidenote: you don't need to wrap your subscriptions in an autorun. If you use iron-router, you can use its built-in functions to subscribe. In your route's options, you can add something like this:
action: function() {
if(this.isReady()) { this.render(); } else { this.render("loading");}
},
isReady: function() {
var subs = [
Meteor.subscribe("yourPublication")
];
var ready = true;
_.each(subs, function(sub) {
if(!sub.ready())
ready = false;
});
return ready;
},
You don't need to use _str, as _id is already a string.
Client JS
Meteor.call("deletemsg", this._id, function(error, result){ //Remove _str from here
if(!error){
console.log(Message.find().count()); // 10, Want to update document here.
}
});
Server JS
When you delete a document, you only need to pass the id, not an object.
Meteor.methods({
deletemsg: function (delmsg) {
if(Message.remove(delmsg)){
console.log(Message.find().count()); //9
return true;
} else {
throw new Meteor.Error("some error message");
}
}
});
Allowing CRUD operations:
If the above method doesn't work, try allowing CRUD operations for that collection in the server block. Here is the documentation.

Differences betwen findById and load?

What's the main difference between findById(id, callback) and load(id, callback)?
More details:
I'm new to MEAN stack web development, so I'm just playing with Mean.io.
This code is generated by Mean.io (controllers/article.js):
Article.load(id, function(err, article) {
if (err) return next(err);
if (!article) return next(new Error('Failed to load article ' + id));
req.article = article;
next();
});
I just wanted to achieve the same result using findById; it's well documented at Model.findById:
Article.findById(id, function (err, article){
if (err) return next(err);
if (!article) return next(new Error('Failed to load article ' + id));
req.article = article;
next();
});
It works, but I wanted to know the main differences between them; strangely, I can't find any documentation for load in the Mongoose docs.
load is a static method on the Article model; it wraps findOne (and additionally populates the user field) and is defined on the schema as follows:
ArticleSchema.statics = {
load: function (id, cb) {
this.findOne({ _id : id }).populate('user').exec(cb);
}
};
whereas the findById method also calls findOne under the hood:
Model.findById = function findById (id, fields, options, callback) {
return this.findOne({ _id: id }, fields, options, callback);
};
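So the practical difference is the populate('user') call. If you want the same behaviour with findById directly, something like this should be equivalent (a sketch, not Mean.io's own code):
Article.findById(id).populate('user').exec(function (err, article) {
  if (err) return next(err);
  if (!article) return next(new Error('Failed to load article ' + id));
  req.article = article;
  next();
});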

Drop MongoDB database before running Mocha test

If I try to drop the database using after (at the end of my tests) it works.
If I try the following:
var db = mongoose.connect('mongodb://localhost/db-test')
describe('Database', function() {
before(function (done) {
db.connection.db.dropDatabase(function(){
done()
})
})
...
it does not drop the DB. What is going on? I would prefer dropping the db before starting the tests, so that after testing I can explore the db.
Solved by connecting in another describe; not sure if it's ideal.
describe('Init', function() {
before(function (done) {
mongoose.connect('mongodb://localhost/db-test', function(){
mongoose.connection.db.dropDatabase(function(){
done()
})
})
})
describe('Database', function() {
I implemented it a bit differently.
I removed all documents in the beforeEach hook; I found it a lot faster than dropDatabase().
I used Promise.all() to make sure all documents were removed before exiting the hook.
beforeEach(function (done) {
function clearDB() {
var promises = [
Model1.remove().exec(),
Model2.remove().exec(),
Model3.remove().exec()
];
Promise.all(promises)
.then(function () {
done();
})
// Pass any removal error to Mocha instead of letting the hook time out
.catch(done);
if (mongoose.connection.readyState === 0) {
mongoose.connect(config.dbUrl, function (err) {
if (err) {
throw err;
}
return clearDB();
});
} else {
return clearDB();
}
});
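A hedged variation of the same idea that clears every registered model instead of listing them by hand (it assumes the Mongoose connection is already open, and can be called from the beforeEach hook exactly as above):
function clearDB() {
  // One remove() promise per registered model; resolve when all are done
  var promises = Object.keys(mongoose.models).map(function (name) {
    return mongoose.models[name].remove({}).exec();
  });
  return Promise.all(promises);
}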

Mongoose JS promises? Or how to manage batch save

How do I manage batch save in Mongoose? I saw it may not be possible yet:
How can I save multiple documents concurrently in Mongoose/Node.js?
There's some mention of using a flow-control library like q, but I also noticed there are promises in mongoose; can they be used? Can I do something like jQuery Deferred/Promises:
$.when(obj1.save(), obj2.save(), obj3.save()).then ->
# do something?
Yes, you can do this with promises. If you were using the Q promise library, you could re-write #matz3's code like:
var tasks = [];
for (var i=0; i < docs.length; i++) {
tasks.push(docs[i].save());
}
Q.all(tasks)
.then(function(results) {
console.log(results);
}, function (err) {
console.log(err);
});
We start all the operations one at a time in the loop, but we don't wait for any of them to complete, so they run in parallel. We add a promise (that acts like a placeholder for the result) to an array. We then wait for all the promises in the array of promises to complete.
Most good Promises/A+ compatible libraries have some equivalent to Q.all
mongoose now allows you to choose which Promise implementation to use.
Here I am using the native (ES6) Promise built into Node.js:
var mongoose = require('mongoose');
mongoose.Promise = global.Promise; // use system implementation
Promise.all([obj1.save(), obj2.save(), obj3.save()])
.then(function(resultSaves) {
console.log('parallel promise save result :');
console.log(resultSaves);
mongoose.disconnect();
}).catch(function(err) {
console.log('ERROR on promise save :');
console.log(err);
mongoose.disconnect();
});
node --version
v4.1.1
mongoose#4.1.8
Since mongoose now supports promises you may use Promise.all().then(), so it will return when all promises are resolved.
Promise.all([
obj1.save(),
obj2.save(),
obj3.save()
])
.then(console.log)
.catch(console.error)
In fact, if you're always calling the save() method you can use Array.map() here:
Promise.all([ obj1, obj2, obj3 ].map( obj => obj.save() ))
And you can also use ES6 syntax to destructure the resulting array:
Promise.all(
[ obj1, obj2, obj3 ]
.map( obj => obj.save() )
)
.then( ([ savedObj1, savedObj2, savedObj3 ]) => {
// do something with your saved objects...
})
Try the parallel function of the async module.
var functions = [];
for (var i=0; i < docs.length; i++) {
functions.push((function(doc) {
return function(callback) {
doc.save(callback);
};
})(docs[i]));
}
async.parallel(functions, function(err, results) {
console.log(err);
console.log(results);
});
To save multiple mongoose docs in parallel, you can do something simple like this (assuming you have an array named docs of documents to save):
var count = docs.length;
docs.forEach(function(doc) {
doc.save(function(err, result) {
if (--count === 0) {
// All done; call containing function's callback
return callback();
}
});
});
A refined example of how to use async.parallel would be:
async.parallel([obj1.save.bind(obj1), obj2.save.bind(obj2), obj3.save.bind(obj3)], callback);
Since the callback convention is the same in Mongoose as in async (err, result), you don't need to wrap the saves in your own callbacks; just bind them to their documents, add them to an array, and you will get a single callback when everything has finished.
What about async.queue?
A simple example:
var queue = async.queue(function(obj, callback) {
return obj.save(callback);
});
for (var i in objs) {
var obj = objs[i];
// Some changes on object obj
queue.push(obj);
}
If you need a callback after the queue is emptied:
var emptyQueue = true;
var queue = async.queue(function(obj, callback) {
return obj.save(callback);
});
queue.drain = function() {
// All callbacks have finished
// bigCallback();
};
for (var i in objs) {
var obj = objs[i];
// Some changes on object obj
queue.push(obj);
emptyQueue = false;
}
if (emptyQueue) {
// Call queue.drain manually in case the queue is empty
// and we still need to call bigCallback(), for example
return queue.drain();
}
#ForbesLindesay Why load an external library when you can use mongoose's implementation of promises and create your own all?
Create a module that enhances the mongoose Promise with an all method:
var Promise = require("mongoose").Promise;
Promise.all = function(promises) {
var mainPromise = new Promise();
if (promises.length == 0) {
mainPromise.resolve(null, promises);
}
var pending = 0;
promises.forEach(function(p, i) {
pending++;
p.then(function(val) {
promises[i] = val;
if (--pending === 0) {
mainPromise.resolve(null, promises);
}
}, function(err) {
mainPromise.reject(err);
});
});
return mainPromise;
}
module.exports = Promise;
Then use it with mongoose:
require('./promise')
...
var tasks = [];
for (var i=0; i < docs.length; i++) {
tasks.push(docs[i].save());
}
mongoose.Promise.all(tasks)
.then(function(results) {
console.log(results);
}, function (err) {
console.log(err);
});