I have two MongoDB collections.
First I query coll1 and get ids. Then I want to query coll2 by those ids and some other fields.
When I call the function it returns undefined.
How can I wait until the function has produced its result?
Sample coll1 document:
{
    "_id" : ObjectId(""),
    "container_id" : "56867887fdb391ff09d15e9",
    "item_state" : [
        {
            "user_id" : 1,
            "username" : "x",
            "state" : "1"
        },
        {
            "user_id" : 2,
            "username" : "y",
            "state" : "3"
        }
    ],
    "name" : "members test"
}
function listMyWorkitems(user_id, callback) {
    var user_id = 1;
    var workItemList = new Array();
    db.collection('containers').find({'start_date': {"$lt": new Date(2017, 02, 11)}}).toArray(function(err, docs) {
        console.log("doc length");
        console.log(docs.length);
        for (var i = 0; i < docs.length; i++) {
            db.collection('work_items').find({
                "$and": [
                    {'container_id': docs[i]._id.toString()},
                    {'item_state': {"$elemMatch": {'user_id': user_id, 'is_active': 1, 'state': {"$in": ["1", "2", "3"]}}}}
                ]
            }).toArray(function(err, workDocs) {
                //console.log(workDocs);
                for (var i = 0; i < workDocs.length; i++) {
                    for (var j = 0; j < workDocs[i].item_state.length; j++) {
                        var doc = workDocs[i].item_state[j];
                        workItemList.push(workDocs[i].name);
                    }
                }
            });
        }
        callback(workItemList);
    });
}
listMyWorkitems(user_id, function(err, workItemList) {
    console.log(workItemList);
});
I understand the async.auto concept, but it is still returning an empty list.
Here is the code I have tried so far. I have only done collection 1; once that is solved I can query collection 2 as well.
var async = require('async');
var mongojs = require("mongojs");
var db = mongojs("localhost/mc_dev");

async.auto({
    coll1task: function(callback) {
        var idlist = [];
        console.log("ids fetch from collection 1");
        db.collection('containers').find({'start_date': {"$lt": new Date(2017, 02, 11)}}).toArray(function(err, docs) {
            docs.forEach(function(doc) {
                console.log(doc._id);
                idlist.push(doc._id);
            });
        });
        callback(null, idlist);
    },
    finalcontrol: [
        'coll1task',
        function(results, callback) {
            console.log(results.coll1task);
        }
    ],
}, function(error, results) {
    console.log('error = ', error);
    console.log('results = ', results);
});
The best approach to doing things asynchronously in Node.js is to use a module such as async or promises.
You can visit async and get access to all the functions provided by this library.
The main functions provided by the async library are:
1. async.series => this is what you can use in your case.
2. async.parallel
3. async.auto => I suggest you use this, as it lets you mix asynchronous and sequential steps.
Furthermore, you can also use promises, which are now part of ECMAScript 6; there are also various modules you can use to get this done.
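As for why your async.auto attempt still returns an empty list: coll1task invokes its callback before toArray has delivered the documents, so idlist is passed on while it is still empty. Below is a minimal sketch of the fix, assuming the same mongojs setup and collection names as in your question; the only change is that the callback moves inside the toArray handler.

async.auto({
    coll1task: function(callback) {
        db.collection('containers').find({'start_date': {"$lt": new Date(2017, 2, 11)}}).toArray(function(err, docs) {
            if (err) return callback(err);
            // Hand the ids to async.auto only after the query has finished.
            var idlist = docs.map(function(doc) { return doc._id.toString(); });
            callback(null, idlist);
        });
    },
    finalcontrol: ['coll1task', function(results, callback) {
        // results.coll1task now holds the populated id list.
        console.log(results.coll1task);
        callback(null);
    }]
}, function(error, results) {
    console.log('error = ', error);
    console.log('results = ', results);
});

The same pattern applies to your listMyWorkitems function: call its callback only once every inner query has completed (for example by counting how many toArray callbacks have fired, or by using async.each).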
I have a MongoDB database in which there are 3 huge collections, say 'A', 'B' and 'C'.
Each collection contains about 2 million documents.
Each document has certain properties.
Each document needs to be updated based on the values of those properties, from which I can determine what the '$set' for that document should be.
Currently I am using the same approach for each collection:
find all documents in batches, collect them in memory (which I think is the culprit in the current approach), then update them all one by one.
For the first collection (which has data similar to the other collections), it takes 10 minutes to complete; the next two collections then take roughly 2 hours to finish, or the MongoDB client crashes before they do.
Something is wrong and undesirable in the current approach.
Model.collection.find({}).batchSize(BATCH).toArray(function(err, docs) {
    if (err || !docs || !docs.length)
        return afterCompleteOneCollection(err);

    var spec = function(index) {
        if (index % 1000 === 0) console.log('at index : ' + index);

        var toSet = {};
        var toUnset = {};

        var over = function() {
            var afterOver = function(err) {
                if (err) return afterCompleteOneCollection(err);
                if (index < docs.length - 1) spec(index + 1);
                else afterCompleteOneCollection(null);
            };

            var sb = Object.keys(toSet).length;
            var ub = Object.keys(toUnset).length;
            if (sb || ub) {
                var all = {};
                if (sb) all.$set = toSet;
                if (ub) all.$unset = toUnset;
                Model.collection.update({ _id: docs[index]._id }, all, {}, afterOver);
            } else afterOver(null);
        };

        forEachOfDocument(docs[index], toSet, toUnset, over);
    };
    spec(0);
});
Is there a better solution for this?
The streaming approach from http://mongodb.github.io/node-mongodb-native/api-generated/cursor.html#stream worked for me.
This is what I am doing:
var stream = Model.collection.find().stream();

stream.on('data', function(data) {
    if (data) {
        var toSet = {};
        var toUnset = {};

        var over = function() {
            var afterOver = function(err) {
                if (err) console.log(err);
            };

            var sb = Object.keys(toSet).length;
            var ub = Object.keys(toUnset).length;
            if (sb || ub) {
                var all = {};
                if (sb) all.$set = toSet;
                if (ub) all.$unset = toUnset;
                Model.collection.update({ _id: data._id }, all, {}, afterOver);
            } else afterOver(null);
        };

        forEachOfDocument(data, toSet, toUnset, over);
    }
});

stream.on('close', function() {
    afterCompleteOneCollection();
});
If I have a doc which has an array whose items represent counts for a day, perhaps like:
{
    data : [ { '20141102' : 2 }, { '20141103' : 4 } ]
}
When I do an update and I have the string '20141103', and later '20141104', I want to either increment the matching array entry or add a new entry. Is this possible with an update?
Yes, it's feasible. I tried it like this (run in the mongo shell; both client and server are v2.6.4):
function tryAndFine(coll, key, value) {
    var entry = {};
    entry[key] = value;
    var parent = 'data';
    var prefix = parent + '.';

    function incrementOnly() {
        var criteria = {};
        criteria[prefix + key] = {$exists : true};
        var update = {};
        update[prefix + "$." + key] = value;
        var result = coll.update(criteria, {$inc : update});
        // if the increment fails, try to add a new element
        if (result.nModified == 0) {
            addNewElement();
        }
    }

    function addNewElement() {
        var criteria = {};
        criteria[prefix + key] = {$exists : false};
        var update = {};
        update[parent] = entry;
        var result = coll.update(criteria, {$push : update}, {upsert : true});
        // if the element already exists, try to increment the count
        if (result.upserted == 0 && result.nModified == 0) {
            incrementOnly();
        }
    }

    // entry point
    incrementOnly();
}
// test
var c = db.c;
c.drop();
tryAndFine(c, '20141103', 1);
tryAndFine(c, '20141103', 1);
tryAndFine(c, '20141104', 1);
tryAndFine(c, '20141105', 1);
tryAndFine(c, '20141104', 1);

// output
{
    "_id" : ObjectId("54577e1a3502852bd4ad2395"),
    "data" : [
        { "20141103" : 2 },
        { "20141104" : 2 },
        { "20141105" : 1 }
    ]
}
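For reference, this is roughly what the two update statements above boil down to for the key '20141103' and value 1; the expansion is a hypothetical illustration, not part of the original function.

// increment attempt (mirrors incrementOnly)
db.c.update({ "data.20141103" : { $exists : true } },
            { $inc : { "data.$.20141103" : 1 } });
// push attempt (mirrors addNewElement)
db.c.update({ "data.20141103" : { $exists : false } },
            { $push : { data : { "20141103" : 1 } } },
            { upsert : true });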
I want to use findOne in a map/reduce. What is wrong with my code? My error is:
Command 'mapreduce' failed: exception: map invoke failed: JS Error: TypeError: user has no properties nofile_b:3 (response: { "errmsg" : "exception: map invoke failed: JS Error: TypeError: user has no properties nofile_b:3", "code" : 9014, "ok" : 0.0 })
string map = @"
    function() {
        var movie = this;
        var user = db.users.findOne({UserId : parseInt(movie.UserId)});
        emit(movie.UserId, {Name : user.Name});
    }";
string reduce = @"
    function(key, values) {
        var result = values;
        return result;
    }";
string finalize = @"
    function(key, value) {
        return value;
    }";
And the C# code:
var collection = database.GetCollection("movies");
var options = new MapReduceOptionsBuilder();
options.SetFinalize(finalize);
options.SetOutput(MapReduceOutput.Inline);
var results = collection.MapReduce(map, reduce, options);

lbResultList.Items.Clear();
foreach (var result in results.GetResults())
{
    lbResultList.Items.Add(result.ToJson());
}
I solved my problem by changing the map function:
function () {
    var user = db.users.find({UserId : this.UserId});
    var userName = '';
    var userSurName = '';
    user.forEach(function(u) {
        userName = u.Name;
        userSurName = u.SurName;
    });
    emit(
        this._id,
        {title : this.Title, category : this.Category, UserName : userName, UserSurName : userSurName}
    );
}
I don't think this is a clean solution, though; it works like a sub-query. What can I do in this case?
It looks like you're trying to do an SQL JOIN. MapReduce is the wrong tool for that. You'll actually want to break this into an aggregate operation to get an array of UserIds and a query operation to get the names of the users.
Something like this in the mongo shell:
var UserIDArray = [];
movieCollection.aggregate({$group: {_id: "$UserId"}}).forEach(function (v) {
    UserIDArray.push(v._id);
});
UserIDNamePairs = userCollection.find({_id : {$in : UserIDArray}}, {name : 1}).toArray();
Hello all, what I'm trying to do is get the count of each distinct departmentType:
fnMap = function() {
    emit(this.departments.departmentType, {typeCount : 1});
}

fnReduce = function(key, values) {
    var result = {typeCount : 0};
    values.forEach(function(value) {
        result.typeCount += value.brandCount;
    });
    return result;
};
var command = {
    mapreduce : "clients",
    query : {"departments.departmentType" : {$exists : true}},
    map : fnMap.toString(),
    reduce : fnReduce.toString(),
    //sort: {"departments.departmentType" : 1},
    out : {inline : 1}
};

mongoose.connection.db.executeDbCommand(command, function(err, dbres) {
});
When executing the command, dbres.documents[0].results contains only one item with the total number of departmentTypes, instead of one item per departmentType with its count.
Any ideas what I am doing wrong?
Also, when I uncomment the SORT line I get the error "db assertion failure: could not create cursor over...", although I believe the field name is written correctly.
Mongoose v3 now has a Model.mapReduce() function (see the docs).
The full example shown there is:
var o = {};
o.map = function () { emit(this.name, 1) }
o.reduce = function (k, vals) { return vals.length }
o.out = { replace: 'createdCollectionNameForResults' }
o.verbose = true;

User.mapReduce(o, function (err, model, stats) {
    console.log('map reduce took %d ms', stats.processtime);
    model.find().where('value').gt(10).exec(function (err, docs) {
        console.log(docs);
    });
});
As for the count, I believe the problem is that in your fnReduce() function you are summing the results instead of returning them in an array.
You can use:
db.clients.distinct("departments.departmentType")
That will give an array with all the distinct departmentType values.
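If you also need the count for each value, a hypothetical follow-up (not part of the original answer) is to pair distinct with a count() query per value:

db.clients.distinct("departments.departmentType").forEach(function (type) {
    // count the documents that contain each distinct type
    print(type + ": " + db.clients.count({"departments.departmentType" : type}));
});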
There were two problems in your map/reduce. One is brandCount in reduce rather than typeCount. But more importantly, you are trying to emit once per document, when you need to emit once per department array element. Corrected (and slightly simplified) code:
> fnMap = function () {
    this.departments.forEach(function (d) {
        emit(d.departmentType, 1);
    });
}
> fnReduce = function (key, values) {
    var result = 0;
    values.forEach(function (value) {
        result += value;
    });
    return result;
}
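To run the corrected functions with the same options as in the question, a possible invocation (reusing the query and inline output from the original command) would be:

db.clients.mapReduce(fnMap, fnReduce, {
    query : {"departments.departmentType" : {$exists : true}},
    out : {inline : 1}
});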
I have a collection users in Mongo and I execute this map reduce which I believe is the equivalent of a COUNT(*) GROUP BY origin:
> m = function() { for (i in this.membership) {
... emit( this.membership[i].platform_profile.origin, 1 );
... } }
function () {
for (i in this.membership) {
emit(this.membership[i].platform_profile.origin, 1);
}
}
> r = function( id, values ) { var result = 0;
... for ( var i = 0; i < values.length; i ++ ) { result += values[i]; }
... return result; }
function (id, values) {
var result = 0;
for (var i = 0; i < values.length; i++) {
result += values[i];
}
return result;
}
> db.users.mapReduce(m, r, {out : { inline: 1}});
{
    "results" : [
        { "_id" : 0, "value" : 15 },
        { "_id" : 1, "value" : 449 },
        ...
    ]
}
But if I try to count how many documents have this field set to a specific value like 1, I get fewer results:
db.users.count({"membership.platform_profile.origin": 1});
424
What am I missing?
Are your count queries using a sparse index by any chance? My only guess is that some other query criterion caused documents absent from the index to be ignored in the count.
I recreated your schema with some fixture data and the results between map/reduce and simple count queries are in agreement:
db.users.drop();

var map = function() {
    for (i in this.membership) {
        emit(this.membership[i].platform_profile.origin, 1);
    }
};

var reduce = function(id, values) {
    var result = 0;
    for (var i = 0; i < values.length; i++) {
        result += values[i];
    }
    return result;
};

var origins = {1: "a", 2: "b", 3: "c", 4: "d"};

for (var i = 0; i < 1000; ++i) {
    var membership = [];
    for (var o in origins) {
        if (0 == i % o) {
            membership.push({ platform_profile: { origin: origins[o] }});
        }
    }
    db.users.save({ membership: membership });
}

db.users.mapReduce(map, reduce, {out: {inline: 1}}).results.forEach(function(result) {
    print(result["_id"] + ": " + result["value"]);
});

for (var o in origins) {
    print(origins[o] + ": " + db.users.count({"membership.platform_profile.origin": origins[o]}));
}
Here's the output:
$ mongo --quiet mr_count.js
a: 1000
b: 500
c: 334
d: 250
a: 1000
b: 500
c: 334
d: 250
You can use the following map/reduce for the equivalent of COUNT(*) GROUP BY origin.
Map/reduce functions:
map = function() {
    if (!this.membership) return;
    for (i in this.membership) {
        if (!this.membership[i].platform_profile || !this.membership[i].platform_profile.origin) return;
        emit(this.membership[i].platform_profile.origin, 1);
    }
}

reduce = function(key, values) {
    var count = 0;
    for (v in values) {
        count += values[v];
    }
    return count;
}
result = db.runCommand({
    "mapreduce" : "users",
    "map" : map,
    "reduce" : reduce,
    "out" : "users_count"
});
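The output lands in the users_count collection; a quick way to inspect it from the shell (a hypothetical follow-up, not part of the original answer) is:

db.users_count.find().forEach(function (doc) {
    // each result document holds the emitted origin as _id and the reduced count as value
    print(doc._id + ": " + doc.value);
});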
I had the same issue. I replaced x.length with Array.sum(x) in the reduce function (assuming you emit 1 in the map function) and it works. I agree that x.length should work too, but I cannot explain why it does not.
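A minimal sketch of that reduce function, assuming the map function emits 1 for every membership entry (the likely reason x.length fails is that MongoDB may call reduce again on already-reduced partial counts, so values is not guaranteed to contain only 1s):

r = function (key, values) {
    // Array.sum also handles re-reduced partial counts correctly
    return Array.sum(values);
};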