Waiting for meteor cursor in method - mongodb

I have a large aggregate query that requires me to pass "allowDiskUse: true" as an option. This does not work with the aggregate package, as described here:
https://github.com/meteorhacks/meteor-aggregate/issues/11
My Meteor method is defined below. When I call the method I need to wait for the cursor's "data" events to finish before anything is returned to the client, but nothing I try lets me get that data up to the front end in a safe way.
Meteor.methods({
'getSummary': function (dept,startDate,endDate,filterType) {
var startdate = new Date(startDate);
var enddate = new Date(endDate);
var f = myQuery(startdate,enddate,dayFinalGroup);
f.on("data", Meteor.bindEnvironment(function(row) {
//load an array or something here to return
}));
f.once("end", Meteor.bindEnvironment(function() {
// tidy up, in my case end the stream
}));
//here I'd return the array loaded
},
});
This is my front end.
Meteor.call(
'getSummary',0,Session.get('start_date'),Session.get('end_date'),1,
function(error, result){
if(error){
console.log(error);
} else {
Session.set('sumTotals',result);
}
}
);

Finally got it. I utilized Meteor.wrapAsync:
'getSummary': function (dept,startDate,endDate,filterType) {
console.log(dept);
console.log(startDate);
console.log(endDate);
console.log(filterType);
var startdate = new Date(startDate);
var enddate = new Date(endDate);
var arr = [];
var f = myQuery(startdate,enddate,dayFinalGroup);
var fetchCursor = Meteor.wrapAsync(function fetchCursor (cursor, cb) {
cursor.each(function (err, doc) {
if (err) return cb(err);
if (!doc) return cb(null, { done: true }); // no more documents
arr.push(doc);
});
});
var myData = fetchCursor(f); // blocks until the cursor is exhausted
return arr;
},

Related

Order of save() and find() in NodeJS with MongoDB

I'm trying to create a new record in my MongoDB ("thisPlayer") and save it to my database, then find all records in my database (including the new one) and render them.
I am having trouble understanding why my save() call actually completes after my find() call. When this code executes, the find() does not include my new thisPlayer record. Only after the find() runs does the save happen -- the record is saved to the database AFTER the find() ran.
Thanks in advance!
const playerNumber = async function countPlayers() {
return new Promise((resolve, reject) => {
Player.count(function(err, numOfDocs) {
err ? reject(err) : resolve(numOfDocs);
console.log('I have '+numOfDocs+' documents in my collection');
});
});
}
async function playerProfile() {
var count = await playerNumber();
console.log("count already in db: "+ count);
if (count===0) {
teamCaptain=1;
} else {teamCaptain=0};
count++;
const thisPlayer = new Player({
playerNum: count,
playerName: Name,
});
thisPlayer.save();
Player.find({}, function(err, playaz){
var playerOne;
if (playaz.length > 0) {
playerOne = playaz[0].playerName;
} else {
playerOne = "";
}
res.renderPjax("leavetakings",
{player1: "1: " + playerOne}
);
});
}
playerProfile();
You need to use await, for example:
await Player.find({})
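The same applies to the save(): it has to finish before the find() runs. A minimal sketch of the reworked playerProfile, assuming a Mongoose version where save() and find() return promises, plus the same Player model, Name variable and res.renderPjax helper from the question:
async function playerProfile() {
  const count = await playerNumber(); // helper from the question
  teamCaptain = count === 0 ? 1 : 0;  // same implicit global as before

  const thisPlayer = new Player({
    playerNum: count + 1,
    playerName: Name,
  });

  // Wait for the insert to complete before querying...
  await thisPlayer.save();

  // ...so this find() is guaranteed to see the new record.
  const playaz = await Player.find({});
  const playerOne = playaz.length > 0 ? playaz[0].playerName : "";

  res.renderPjax("leavetakings", { player1: "1: " + playerOne });
}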

How to check if value already exists in the data received from api before inserting it into db

I am having a hard time trying to write data received from an API to the db.
I successfully got the data and now have to write it to the db. The point is to check whether the quote already exists in my collection.
The problem I am dealing with is that every value gets inserted into my collection, regardless of whether it already exists or not.
const { MongoClient } = require('mongodb')
const mongoUrl = 'mongodb://localhost/kanye_quotes'
async function connectToDb() {
const client = new MongoClient(mongoUrl, { useNewUrlParser: true })
await client.connect()
db = client.db()
}
async function addQuote(data) {
await connectToDb()
try {
const collection = db.collection('quotes')
let quotes = [];
quotes = await collection.find({}).toArray()
if (quotes = []) { // I added this piece of code because if not check for [], no values will be inserted
collection.insertOne(data, (err, result) => {
if (err) {
return
}
console.log(result.insertedId);
return
})
}
quotes.forEach(quote => {
if (quote.quote !== data.quote) { // I compare received data with data in collection, it actually works fine(the comparison works as it supposed to)
collection.insertOne(data, (err, result) => {
if (err) {
return
}
console.log(result.insertedId);
})
} else console.log('repeated value found'); // repeated value gets inserted. Why?
})
}
catch (err) {
console.log(err)
}
}
Hi, it's probably better to set a unique: true index on that field in your schema. That way you won't end up with duplicated values.
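Since the question uses the native driver rather than a Mongoose schema, the equivalent is a unique index on the quote field; a minimal sketch, assuming the same db variable, quotes collection and connectToDb() helper from the question:
async function addQuote(data) {
  await connectToDb()
  const collection = db.collection('quotes')

  // One-time setup: the server itself rejects documents with a duplicate quote.
  await collection.createIndex({ quote: 1 }, { unique: true })

  try {
    const result = await collection.insertOne(data)
    console.log(result.insertedId)
  } catch (err) {
    if (err.code === 11000) {
      // 11000 is MongoDB's duplicate key error code
      console.log('repeated value found')
    } else {
      console.log(err)
    }
  }
}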

nightwatch custom command callback

I'm trying to create a custom command in nightwatch that runs a query on a Postgres database and returns the result. The query runs just fine and outputs the result to the console but then the execution of the test stops. I don't understand how callbacks work. How can I fix this custom command?
exports.command = function(sql, callback) {
var self = this;
var pg = require('pg');
var conString = self.globals.testinfo.connectionString;
var db = new pg.Client(conString);
db.connect(function(err) {
if(err) {
console.error('could not connect', err);
}
else {
db.query(sql, function(err, result) {
if(err) {
console.log('error running query', err);
}
else {
console.log(result.rows.length);
db.end();
}
});
}
}),
function(result) {
if (typeof callback === 'function') {
callback.call(self, result);
}
}
return this;
};
I had to wrap the database connection in a perform command to get this working. I'm not sure if this is the best way to handle the callback, but it works. Here's the updated version of the custom command:
exports.command = function(sql,callback) {
var self = this;
var pg = require('pg');
var cs = self.globals.testinfo.connectionString;
self.perform(function(self,done) {
pg.connect(cs,function(err,db,done) {
if(err) {
return console.error(err);
}
db.query(sql, function(err,result) {
done();
if(err) {
return console.error(err);
}
console.log(result.rows.length);
callback(result.rows[0]);
});
});
pg.end();
done();
});
};
Here's how I call the custom command in the test:
browser.myCustomCommand('select * from table limit 1;', function(row) {
browser.assert.deepEqual(row.column,'some value');
});
Can you try this:
exports.command = function(sql, callback) {
var self = this;
var pg = require('pg');
var conString = self.globals.testinfo.connectionString;
var db = new pg.Client(conString);
var cb= function(result) {
if (typeof callback === 'function') {
callback.call(self, result);
}
};
db.connect(function(err) {
if(err) {
console.error('could not connect', err);
cb(false);
}
else {
db.query(sql, function(err, result) {
if(err) {
console.log('error running query', err);
cb(false);
}
else {
console.log(result.rows.length);
db.end();
cb(true);
}
});
}
});
return this;
};
And in your test :
'test' : function(browser){
browser.yourCommandName(sql,function(result){
console.log(result); //if connect is good result would be true and false if fail to connect.
});
}
PS: the result passed to the callback can be an object (containing rows or anything you want) instead of just a boolean as in this example.
Also, Nightwatch is meant for end-to-end testing, not database testing; I think you should find another framework to test the database connection.
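For instance, a sketch of the same query callback handing the rows back instead of true, reusing the db, sql and cb names from the snippet above:
db.query(sql, function(err, result) {
  if (err) {
    console.log('error running query', err);
    return cb(false);
  }
  db.end();
  cb(result.rows); // hand the actual rows to the test's callback
});
And in the test:
browser.yourCommandName('select * from table limit 1;', function(rows) {
  browser.assert.deepEqual(rows[0].column, 'some value');
});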

Creating new Meteor collections on the fly

Is it possible to create new Meteor collections on-the-fly? I'd like to create foo_bar or bar_bar depending on some pathname which should be a global variable I suppose (so I can access it throughout my whole application).
Something like:
var prefix = window.location.pathname.replace(/^\/([^\/]*).*$/, '$1');
var Bar = new Meteor.Collection(prefix+'_bar');
The thing here is that I need to get my prefix variable from the URL, so if I declare it outside of if (Meteor.isClient) I get an error: ReferenceError: window is not defined. Is it possible to do something like that at all?
Edit: Using the first iteration of Akshat's answer, my project js: http://pastie.org/6411287
I'm not entirely certain this will work:
You need it in two pieces: the first loads the collections you've set up before (on both the client and server)
var collections = {};
var mysettings = new Meteor.Collection('settings') //use your settings
//Startup
Collectionlist = mysettings.find({type:'collection'});
Collectionlist.forEach(function(doc) {
collections[doc.name] = new Meteor.Collection(doc.name);
});
And you need a bit to add the collections on the server:
Meteor.methods({
'create_server_col' : function(collectionname) {
mysettings.insert({type:'collection', name: collectionname});
collections[collectionname] = new Meteor.Collection(collectionname);
return true;
}
});
And you need to create them on the client:
//Create the collection:
Meteor.call('create_server_col', 'My New Collection Name', function(err,result) {
if(result) {
alert("Collection made");
}
else
{
console.log(err);
}
});
Again, this is all untested, so I'm just giving it a shot; hopefully it works.
EDIT
Perhaps the below should work; I've added a couple of checks to see if the collection exists first. Please run meteor reset before you use it, to clear out anything left over from the buggy code above:
var collections = {};
var mysettings = new Meteor.Collection('settings')
if (Meteor.isClient) {
Meteor.startup(function() {
Collectionlist = mysettings.find({type:'collection'});
Collectionlist.forEach(function(doc) {
eval("var "+doc.name+" = new Meteor.Collection("+doc.name+"));
});
});
Template.hello.greeting = function () {
return "Welcome to testColl.";
};
var collectionname=prompt("Enter a collection name to create:","collection name")
create_collection(collectionname);
function create_collection(name) {
Meteor.call('create_server_col', 'tempcoll', function(err,result) {
if(!err) {
if(result) {
//make sure name is safe
eval("var "+name+" = new Meteor.Collection('"+name+"'));
alert("Collection made");
console.log(result);
console.log(collections);
} else {
alert("This collection already exists");
}
}
else
{
alert("Error see console");
console.log(err);
}
});
}
}
if (Meteor.isServer) {
Meteor.startup(function () {
// code to run on server at startup
Collectionlist = mysettings.find({type:'collection'});
Collectionlist.forEach(function(doc) {
collections[doc.name] = new Meteor.Collection(doc.name);
});
});
Meteor.methods({
'create_server_col' : function(collectionname) {
if(!mysettings.findOne({type:'collection', name: collectionname})) {
mysettings.insert({type:'collection', name: collectionname});
collections[collectionname] = new Meteor.Collection(collectionname);
return true;
}
else
{
return false; //Collection already exists
}
}
});
}
Also make sure your names are JavaScript-escaped.
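A minimal sketch of one way to do that check before a name ever reaches eval, assuming a simple allowlist of letters, digits and underscores (the helper name is mine, not from the original answer):
function isSafeCollectionName(name) {
  // Only allow identifier-like names so the eval'd string can't be abused.
  return typeof name === 'string' && /^[A-Za-z_][A-Za-z0-9_]*$/.test(name);
}
Calling it on the entered name before the Meteor.call, and bailing out when it returns false, keeps arbitrary strings out of the eval.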
Things got much easier:
var db = MongoInternals.defaultRemoteCollectionDriver().mongo.db;
db.createCollection("COLLECTION_NAME", (err, res) => {
console.log(res);
});
Run this in your server method.
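For example, a sketch of exposing it through a server method so the client can trigger it (the method name createCollectionOnServer is mine, not from the original answer):
Meteor.methods({
  createCollectionOnServer: function (name) {
    check(name, String); // from the check package

    var db = MongoInternals.defaultRemoteCollectionDriver().mongo.db;

    // wrapAsync lets the method wait for the callback-style driver call.
    var createCollection = Meteor.wrapAsync(db.createCollection, db);
    createCollection(name);
    return true;
  }
});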

Mongoose JS promises? Or how to manage batch save

How do I manage batch save in Mongoose? I saw it may not be possible yet:
How can I save multiple documents concurrently in Mongoose/Node.js?
There's some mention of using a flow control library like q, but I also noticed there are promises in mongoose; can they be used? Can I do something like jQuery's Deferred/Promises:
$.when(obj1.save(), obj2.save(), obj3.save()).then ->
# do something?
Yes, you can do this with promises. If you were using the Q promise library, you could rewrite @matz3's code like:
var tasks = [];
for (var i=0; i < docs.length; i++) {
tasks.push(docs[i].save());
}
Q.all(tasks)
.then(function(results) {
console.log(results);
}, function (err) {
console.log(err);
});
We start all the operations one at a time in the loop, but we don't wait for any of them to complete, so they run in parallel. We add a promise (that acts like a placeholder for the result) to an array. We then wait for all the promises in the array of promises to complete.
Most good Promises/A+ compatible libraries have some equivalent to Q.all
Mongoose now allows you to choose which Promise implementation to use.
Here I am using the Node.js default system Promise (ES6) baked into Node.js:
var mongoose = require('mongoose');
mongoose.Promise = global.Promise; // use system implementation
Promise.all([obj1.save(), obj2.save(), obj3.save()])
.then(function(resultSaves) {
console.log('parallel promise save result :');
console.log(resultSaves);
mongoose.disconnect();
}).catch(function(err) {
console.log('ERROR on promise save :');
console.log(err);
mongoose.disconnect();
});
node --version
v4.1.1
mongoose#4.1.8
Since mongoose now supports promises you may use Promise.all().then(), so it will return when all promises are resolved.
Promise.all([
obj1.save(),
obj2.save(),
obj3.save()
])
.then(console.log)
.catch(console.error)
In fact, if you're always calling the save() method you can use the Array.map() here:
Promise.all([ obj1, obj2, obj3 ].map( obj => obj.save() ))
And you can also use ES6 syntax to destructure the resulting array:
Promise.all(
[ obj1, obj2, obj3 ]
.map( obj => obj.save() )
)
.then( ([ savedObj1, savedObj2, savedObj3 ]) => {
// do something with your saved objects...
})
Try the parallel function of the async module.
var functions = [];
for (var i=0; i < docs.length; i++) {
functions.push((function(doc) {
return function(callback) {
doc.save(callback);
};
})(docs[i]));
}
async.parallel(functions, function(err, results) {
console.log(err);
console.log(results);
});
To save multiple mongoose docs in parallel, you can do something simple like this (assuming you have an array named docs of documents to save):
var count = docs.length;
docs.forEach(function(doc) {
doc.save(function(err, result) {
if (--count === 0) {
// All done; call containing function's callback
return callback();
}
});
});
A refined example of how to use async.parallel would be:
async.parallel([obj1.save, obj2.save, obj3.save], callback);
Since the convention is the same in Mongoose as in async (err, callback) you don't need to wrap them in your own callbacks, just add your save calls in an array and you will get a callback when all is finished.
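One caveat: if save is passed without its document, it can lose its this binding, so a safer variant of the same idea is to bind each call explicitly (a sketch, same obj1/obj2/obj3 and callback as above):
async.parallel(
  [obj1.save.bind(obj1), obj2.save.bind(obj2), obj3.save.bind(obj3)],
  callback
);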
What about async.queue?
A simple example:
var queue = async.queue(function(obj, callback) {
return obj.save(callback);
});
for (var i in objs) {
var obj = objs[i];
// Some changes on object obj
queue.push(obj);
}
If you need a callback after the queue is emptied:
var emptyQueue = true;
var queue = async.queue(function(obj, callback) {
return obj.save(callback);
});
queue.drain = function() {
// All callbacks are finished
// bigCallback();
};
for (var i in objs) {
var obj = objs[i];
// Some changes on object obj
queue.push(obj);
emptyQueue = false;
}
if (emptyQueue) {
// Call manually queue drain in case of the queue is empty
// and we need to call bigCallback() for example
return queue.drain();
}
@ForbesLindesay Why load an external library when you can use mongoose's implementation of promises and create your own all?
Create a module that enhances the mongoose promise with all.
var Promise = require("mongoose").Promise;
Promise.all = function(promises) {
var mainPromise = new Promise();
if (promises.length === 0) {
mainPromise.resolve(null, promises);
}
var pending = 0;
promises.forEach(function(p, i) {
pending++;
p.then(function(val) {
promises[i] = val;
if (--pending === 0) {
mainPromise.resolve(null, promises);
}
}, function(err) {
mainPromise.reject(err);
});
});
return mainPromise;
}
module.exports = Promise;
Then use it with mongoose:
require('./promise')
...
var tasks = [];
for (var i=0; i < docs.length; i++) {
tasks.push(docs[i].save());
}
mongoose.Promise.all(tasks)
.then(function(results) {
console.log(results);
}, function (err) {
console.log(err);
});