get undefined value in mongodb MapReduce

I tried to use two MapReduce aggregations to get the number of unique users per month.
The first MR function produces an mr_buyer_payment collection that looks like this:
{ "_id" : { "u" : "01329f19-27b0-435b-9ca1-450984024a31", "tid" : ISODate("2013-09-01T00:00:00Z") }, "value" : { "payment" : 38, "count_pay" : 1 } }
{ "_id" : { "u" : "264dd104-b934-490b-988e-5822fd7970f6", "tid" : ISODate("2013-09-01T00:00:00Z") }, "value" : { "payment" : 4.99, "count_pay" : 1 } }
{ "_id" : { "u" : "27bb8f72-a13e-4676-862c-02f41fea1bc0", "tid" : ISODate("2013-09-01T00:00:00Z") }, "value" : { "payment" : 11.98, "count_pay" : 2 } }
The second MR function works fine with a small data set, but once the input grows beyond about 100 records it produces wrong results: some values become NaN.
The debug log shows that values such as v.payment and v.count_user become undefined inside the reduce function:
date:Sun Jun 30 2013 17:00:00 GMT-0700 (PDT) value:undefined / 162 / undefined
And the MR result info is weird:
{
    "result" : "mr_buyer_all",
    "timeMillis" : 29,
    "counts" : {
        "input" : 167,
        "emit" : 167,
        "reduce" : 6, // it should be 3, the same as the "output" number
        "output" : 3
    },
    "ok" : 1,
}
This is the 2nd MR function:
db.mr_buyer_payment.mapReduce(
    function(){
        var key = this._id.tid;
        var value = {
            payment: this.value.payment,
            count_pay: this.value.count_pay,
            count_user: 1
        };
        if (value.count_pay > 0) {
            print("date:"+key+" u:"+this._id.u+"value:"+value.payment+" / "+value.count_pay+" / "+value.count_user);
            emit(key, value);
        }
    },
    function(key, values){
        var result = {revenue: 0, count_pay: 0, user: 0};
        values.forEach(function(v){
            if (!v.count_user) {
                print("date:"+key+" "+"value:"+v.payment+" / "+v.count_pay+" / "+v.count_user);
            } else {
                result.revenue += v.payment;
                result.count_pay += v.count_pay;
                result.user += v.count_user;
            }
        });
        return result;
    },
    {
        out: { replace: "mr_buyer_all" }
    }
)

The sub-document returned by the reduce function must have the same format as the one emitted by the map function. So the solution is:
function(key, values){
    // the following keys must be the same as in the object emitted by map
    var r = {payment: 0, count_pay: 0, count_user: 0};
    values.forEach(function(v){
        r.payment += v.payment;
        r.count_pay += v.count_pay;
        r.count_user += v.count_user;
    });
    return r;
},
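For completeness, here is how the corrected reducer might be plugged back into the second pass. This is only a sketch: the map2/reduce2 names are mine, and the optional finalize step (which renames the fields to the revenue/count_pay/user naming of the original attempt) is an assumption about what is wanted, not something from the question.

// second pass: the reducer's return value has the same shape as the emitted value
var map2 = function () {
    emit(this._id.tid, {
        payment: this.value.payment,
        count_pay: this.value.count_pay,
        count_user: 1
    });
};

var reduce2 = function (key, values) {
    var r = { payment: 0, count_pay: 0, count_user: 0 };
    values.forEach(function (v) {
        r.payment += v.payment;
        r.count_pay += v.count_pay;
        r.count_user += v.count_user;
    });
    return r;
};

db.mr_buyer_payment.mapReduce(map2, reduce2, {
    out: { replace: "mr_buyer_all" },
    // optional: rename the fields once reducing is finished
    finalize: function (key, v) {
        return { revenue: v.payment, count_pay: v.count_pay, user: v.count_user };
    }
});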

Related

Incorrect response to mapReduce query in mongo-db

I have 1000 user records in a collection, of which 459 documents have gender male and the rest female.
//document structure
> db.user_details.find().pretty()
{
"_id" : ObjectId("557e610d626754910f0974a4"),
"id" : 0,
"name" : "Leanne Flinn",
"email" : "leanne.flinn#unilogic.com",
"work" : "Unilogic",
"dob" : "Fri Jun 11 1965 20:50:58 GMT+0530 (IST)",
"age" : 5,
"gender" : "female",
"salary" : 35696,
"hobbies" : "Acrobatics,Meditation,Music"
}
{
"_id" : ObjectId("557e610d626754910f0974a5"),
"id" : 1,
"name" : "Edward Young",
"email" : "edward.young#solexis.com",
"work" : "Solexis",
"dob" : "Wed Feb 12 1941 16:45:53 GMT+0530 (IST)",
"age" : 1,
"gender" : "female",
"salary" : 72291,
"hobbies" : "Acrobatics,Meditation,Music"
}
{
"_id" : ObjectId("557e610d626754910f0974a6"),
"id" : 2,
"name" : "Haydee Milligan",
"email" : "haydee.milligan#dalserve.com",
"work" : "Dalserve",
"dob" : "Tue Sep 13 1994 13:45:04 GMT+0530 (IST)",
"age" : 17,
"gender" : "male",
"salary" : 20026,
"hobbies" : "Papier-Mache"
}
{
"_id" : ObjectId("557e610d626754910f0974a7"),
"id" : 3,
"name" : "Lyle Keesee",
"email" : "lyle.keesee#terrasys.com",
"work" : "Terrasys",
"dob" : "Tue Apr 25 1922 13:39:46 GMT+0530 (IST)",
"age" : 79,
"gender" : "female",
"salary" : 48032,
"hobbies" : "Acrobatics,Meditation,Music"
}
{
"_id" : ObjectId("557e610d626754910f0974a8"),
"id" : 4,
"name" : "Shea Mercer",
"email" : "shea.mercer#pancast.com",
"work" : "Pancast",
"dob" : "Mon Apr 08 1935 06:10:30 GMT+0530 (IST)",
"age" : 51,
"gender" : "male",
"salary" : 31511,
"hobbies" : "Acrobatics,Photography,Papier-Mache"
}
Number of users in each gender
> db.user_details.find({gender:'male'}).count()
459
>
> db.user_details.find({gender:'female'}).count()
541
> db.user_details.find({name:{$ne:null}}).count()
1000
> db.user_details.find({age:{$ne:null}}).count()
1000
Map reduce code
mapper = function(){
    emit(this.gender, {name: this.name, age: this.age})
}
reducer = function(gender, users){
    var res = 0;
    users.forEach(function(user){
        res = res + 1
    })
    return res;
}
db.user_details.mapReduce(mapper, reducer, {out: {inline:1}})
Why does the map-reduce result show only 112 in total? It should contain 459 and 541 for male and female respectively, shouldn't it?
// Map reduce result
{
    "results" : [
        {
            "_id" : "female",
            "value" : 56
        },
        {
            "_id" : "male",
            "value" : 46
        }
    ],
    "timeMillis" : 45,
    "counts" : {
        "input" : 1000,
        "emit" : 1000,
        "reduce" : 20,
        "output" : 2
    },
    "ok" : 1
}
Note: I know this is not a proper way to use map reduce. I actually ran into an even stranger map-reduce problem; once I get a solution to this question I should be able to solve that one too.
Your problem here is that you have missed one of the core concepts of how mapReduce works. The relevant documentation that explains this is found here:
MongoDB can invoke the reduce function more than once for the same key. In this case, the previous output from the reduce function for that key will become one of the input values to the next reduce function invocation for that key.
And then also a bit later:
the type of the return object must be identical to the type of the value emitted by the map function
What those two statements mean is that the value emitted by the mapper and the value returned by the reducer must have the exact same structure, because the reduce function will indeed get called "multiple times".
That is how mapReduce deals with large data: it does not necessarily process all of the values for a given "key" at once, but works through them in incremental "chunks".
Therefore, if all you want in the output is a "number", then all you "emit" is just a "number" as well:
db.collection.mapReduce(
    function() {
        emit(this.gender, this.age);
    },
    function(key, values) {
        return Array.sum( values )
    },
    { "out": { "inline": 1 } }
)
Or just "count" per type:
db.collection.mapReduce(
    function() {
        emit(this.gender, 1);
    },
    function(key, values) {
        return Array.sum( values )
    },
    { "out": { "inline": 1 } }
)
The point is "you need to put out the same as what you put in", as it will likely "go back in again". So whatever data you want to collect, the output structure for both mapper and reducer must be the same.
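If you do want an object value rather than a plain number, the same rule applies: emit and return the identical shape. A minimal sketch against the user_details collection (the totalAge field here is only for illustration; it is not part of the original question):

db.user_details.mapReduce(
    function() {
        // emit the same shape the reducer will return
        emit(this.gender, { count: 1, totalAge: this.age });
    },
    function(key, values) {
        var result = { count: 0, totalAge: 0 };
        values.forEach(function(v) {
            result.count += v.count;
            result.totalAge += v.totalAge;
        });
        return result;   // identical shape, so re-reduce is safe
    },
    { out: { inline: 1 } }
)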
This is probably wrong.
users.forEach(function(user){
res = res + 1
})
Try this,
function(gender, users){
return Array.sum( users)
}
There is a mistake in the reduce function.
A MongoDB reduce function can be called multiple times for the same key, so in your reduce code the count keeps getting overridden.
Also, in the map function you are emitting documents with the structure { name, age }, but in the reduce function you are returning a plain count.
reduce = function(gender, doc) {
    reducedVal = { user: 0, age: 0 };
    for (var idx = 0; idx < doc.length; idx++) {
        reducedVal.user += 1;
        reducedVal.age += 1;
    }
    return reducedVal;
};
Please check the link below as well:
http://thejackalofjavascript.com/mapreduce-in-mongodb/
This is a proper way to use mapReduce() to display a gender-wise count of users:
db.yourCollectionName.mapReduce(
    function(){
        emit(this.gender, 1);
    },
    function(k, v){
        return Array.sum(v);
    },
    { out: "genderCount" }
);
db.genderCount.find();

mongodb fetch hundreds of data out of millions of data

In my database, I have millions of documents. Each of them has a time stamp, and some share the same time stamp. I want to get some points (a few hundred, potentially thousands) to draw a graph. I don't want all the points; out of every n points I want to pick 1. I know there's the aggregation framework and I tried that. The problem is that my data is huge: when I run the aggregation, the result easily exceeds the 16 MB maximum document size. There's also a skip function in MongoDB, but it only skips the first n documents. Are there good ways to achieve what I want? Or is there a way to make the aggregation result bigger? Thanks in advance!
I'm not sure how you can do this with either A/F or M/R. Just skipping so that you keep (for example) every 10th point is not something M/R allows you to do, unless you select each point based on a random value with a 10% chance... which is probably not what you want. But that does work:
db.so.output.drop();
db.so.find().count();
map = function() {
    // rand does 0-1, so < 0.1 means 10%
    if (Math.random() < 0.1) {
        emit(this._id, this);
    }
}
reduce = function(key, values) {
    return values;
}
db.so.mapReduce( map, reduce, { out: 'output' } );
db.output.find();
Which outputs something like:
{
    "result" : "output",
    "timeMillis" : 4,
    "counts" : {
        "input" : 23,
        "emit" : 3,
        "reduce" : 0,
        "output" : 3
    },
    "ok" : 1,
}
> db.output.find();
{ "_id" : ObjectId("51ffc4bc16473d7b84172d85"), "value" : { "_id" : ObjectId("51ffc4bc16473d7b84172d85"), "date" : ISODate("2013-08-05T15:24:45Z") } }
{ "_id" : ObjectId("51ffc75316473d7b84172d8e"), "value" : { "_id" : ObjectId("51ffc75316473d7b84172d8e") } }
{ "_id" : ObjectId("51ffc75316473d7b84172d8f"), "value" : { "_id" : ObjectId("51ffc75316473d7b84172d8f") } }
or:
> db.so.mapReduce( map, reduce, { out: 'output' } );
{
    "result" : "output",
    "timeMillis" : 19,
    "counts" : {
        "input" : 23,
        "emit" : 2,
        "reduce" : 0,
        "output" : 2
    },
    "ok" : 1,
}
> db.output.find();
{ "_id" : ObjectId("51ffc4bc16473d7b84172d83"), "value" : { "_id" : ObjectId("51ffc4bc16473d7b84172d83"), "date" : ISODate("2013-08-05T15:24:25Z") } }
{ "_id" : ObjectId("51ffc4bc16473d7b84172d86"), "value" : { "_id" : ObjectId("51ffc4bc16473d7b84172d86"), "date" : ISODate("2013-08-05T15:25:15Z") } }
Depending on a random factor.
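If you really need a deterministic "every nth document" rather than a random sample, one workaround is to filter with the $mod query operator. This is only a sketch, and it assumes you add a monotonically increasing counter field (here called seq) to each document at insert time; that field is not in the original question.

// keep roughly every 10th point in insertion order, by the hypothetical seq field
db.so.find(
    { seq: { $mod: [10, 0] } },   // seq divisible by 10
    { date: 1 }                   // project just what the graph needs
).sort({ date: 1 })

Because this returns a cursor instead of a single aggregation result document, it also sidesteps the 16 MB result-size problem.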

MongoDB MapReduce producing different results for each document

This is a follow-up from this question, where I tried to solve this problem with the aggregation framework. Unfortunately, I have to wait before I can update this particular MongoDB installation to a version that includes the aggregation framework, so I have had to use MapReduce for this fairly simple pivot operation.
I have input data in the format below, with multiple daily dumps:
"_id" : "daily_dump_2013-05-23",
"authors_who_sold_books" : [
{
"id" : "Charles Dickens",
"original_stock" : 253,
"customers" : [
{
"time_bought" : 1368627290,
"customer_id" : 9715923
}
]
},
{
"id" : "JRR Tolkien",
"original_stock" : 24,
"customers" : [
{
"date_bought" : 1368540890,
"customer_id" : 9872345
},
{
"date_bought" : 1368537290,
"customer_id" : 9163893
}
]
}
]
}
I'm after output in the following format, which aggregates all instances of each (unique) author across all daily dumps:
{
"_id" : "Charles Dickens",
"original_stock" : 253,
"customers" : [
{
"date_bought" : 1368627290,
"customer_id" : 9715923
},
{
"date_bought" : 1368622358,
"customer_id" : 9876234
},
etc...
]
}
I have written this map function...
function map() {
    for (var i in this.authors_who_sold_books)
    {
        author = this.authors_who_sold_books[i];
        emit(author.id, {customers: author.customers, original_stock: author.original_stock, num_sold: 1});
    }
}
...and this reduce function.
function reduce(key, values) {
    sum = 0
    for (i in values)
    {
        sum += values[i].customers.length
    }
    return {num_sold : sum};
}
However, this gives me the following output:
{
"_id" : "Charles Dickens",
"value" : {
"customers" : [
{
"date_bought" : 1368627290,
"customer_id" : 9715923
},
{
"date_bought" : 1368622358,
"customer_id" : 9876234
},
],
"original_stock" : 253,
"num_sold" : 1
}
}
{ "_id" : "JRR Tolkien", "value" : { "num_sold" : 3 } }
{
"_id" : "JK Rowling",
"value" : {
"customers" : [
{
"date_bought" : 1368627290,
"customer_id" : 9715923
},
{
"date_bought" : 1368622358,
"customer_id" : 9876234
},
],
"original_stock" : 183,
"num_sold" : 1
}
}
{ "_id" : "John Grisham", "value" : { "num_sold" : 2 } }
The even indexed documents have the customers and original_stock listed, but an incorrect sum of num_sold.
The odd indexed documents only have the num_sold listed, but it is the correct number.
Could anyone tell me what it is I'm missing, please?
Your problem is due to the fact that the format of the output of the reduce function must be identical to the format of the value emitted by the map function (see the requirements for the reduce function for an explanation).
You need to change the code to something like the following to fix the problem:
function map() {
    for (var i in this.authors_who_sold_books)
    {
        author = this.authors_who_sold_books[i];
        emit(author.id, {customers: author.customers, original_stock: author.original_stock, num_sold: author.customers.length});
    }
}

function reduce(key, values) {
    var result = {customers: [], num_sold: 0, original_stock: (values.length ? values[0].original_stock : 0)};
    for (i in values)
    {
        result.num_sold += values[i].num_sold;
        result.customers = result.customers.concat(values[i].customers);
    }
    return result;
}
I hope that helps.
Note the change to num_sold: author.customers.length in the map function. I think that's what you want.
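For completeness, a possible invocation is sketched below. The source collection name daily_dumps and the output collection name authors_aggregated are assumptions for illustration; they are not given in the original question.

// run the fixed map/reduce pair defined above
db.daily_dumps.mapReduce(
    map,
    reduce,
    { out: { replace: "authors_aggregated" } }
);

// one merged document per author, e.g.:
db.authors_aggregated.find({ _id: "Charles Dickens" });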

Unable to set query filter in a mongodb mapReduce command

I am trying to filter a mapReduce command with a query, but the query does not seem to be used by the mapReduce command. When I use runCommand with the same parameters, the query filter is applied. I tried this with MongoDB 2.2.1 and 2.0.1.
The query option of my mapReduce call is not used.
m = function () {
    if (this.duration > 0) {
        emit("dur", this.duration);
    }
}
r = function (key, values) {
    var index = 0;
    var sum = 0;
    for (var i = 0; i < values.length; i++) {
        sum += values[i];
        index++;
    }
    return sum / index;
}
This command doesn't work:
res = db.movies.mapReduce(m,r, {out: { inline : 1}},{query:{kinds:'Action'}});
{
    "results" : [
        {
            "_id" : "dur",
            "value" : 5148.227224559308
        }
    ],
    "timeMillis" : 1849,
    "counts" : {
        "input" : 105472,
        "emit" : 69602,
        "reduce" : 106,
        "output" : 1
    },
    "ok" : 1,
}
This command works:
res = db.runCommand({mapReduce : "movies", map : m, reduce : r, query : {kinds:'Action'}, out : {inline:1} })
{
    "results" : [
        {
            "_id" : "dur",
            "value" : 6134.118191572414
        }
    ],
    "timeMillis" : 238,
    "counts" : {
        "input" : 3577,
        "emit" : 2910,
        "reduce" : 4,
        "output" : 1
    },
    "ok" : 1
}
With runCommand the query is used. Any ideas?
You need to combine the out and query options into a single object:
res = db.movies.mapReduce(m,r, {out: { inline : 1}, query: {kinds: 'Action'} });
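More generally, everything after the map and reduce arguments is a single options document, so any other options (sort, limit, finalize, ...) go in the same object. A sketch, with the sort key and limit value made up purely for illustration:

res = db.movies.mapReduce(m, r, {
    out:   { inline: 1 },
    query: { kinds: 'Action' },
    sort:  { duration: 1 },   // note: mapReduce's sort key must be backed by an existing index
    limit: 10000
});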

MongoDB group by Functionalities

In MySQL
select a,b,count(1) as cnt from list group by a, b having cnt > 2;
I have to execute the equivalent of this GROUP BY ... HAVING query in MongoDB, but I am getting the following error. Please share your input.
In MongoDB
> res = db.list.group({key:{a:true,b:true},
... reduce: function(obj,prev) {prev.count++;},
... initial: {count:0}}).limit(10);
Sat Jan 7 16:36:30 uncaught exception: group command failed: {
    "errmsg" : "exception: group() can't handle more than 20000 unique keys",
    "code" : 10043,
    "ok" : 0
}
Once that has executed, we need to run the following on the result next:
for (i in res) {if (res[i].count>2) printjson(res[i])};
Regards,
Kumaran
MongoDB's group() is very limited in most cases; for instance:
- the result set must contain fewer than 10000 unique keys
- it will not work in sharded environments
So it's better to use map-reduce. The query would be like this:
map = function() { emit({a:true,b:true},{count:1}); }
reduce = function(k, values) {
    var result = {count: 0};
    values.forEach(function(value) {
        result.count += value.count;
    });
    return result;
}
and then
db.list.mapReduce(map,reduce,{out: { inline : 1}})
It's an untested version; let me know if it works.
EDIT:
The earlier map function was faulty; that's why you are not getting the results. It should have been:
map = function () {
    emit({a:this.a, b:this.b}, {count:1});
}
Test data:
> db.multi_group.insert({a:1,b:2})
> db.multi_group.insert({a:2,b:2})
> db.multi_group.insert({a:3,b:2})
> db.multi_group.insert({a:1,b:2})
> db.multi_group.insert({a:3,b:2})
> db.multi_group.insert({a:7,b:2})
> db.multi_group.mapReduce(map,reduce,{out: { inline : 1}})
{
"results" : [
{
"_id" : {
"a" : 1,
"b" : 2
},
"value" : {
"count" : 2
}
},
{
"_id" : {
"a" : 2,
"b" : 2
},
"value" : {
"count" : 1
}
},
{
"_id" : {
"a" : 3,
"b" : 2
},
"value" : {
"count" : 2
}
},
{
"_id" : {
"a" : 7,
"b" : 2
},
"value" : {
"count" : 1
}
}
],
"timeMillis" : 1,
"counts" : {
"input" : 6,
"emit" : 6,
"reduce" : 2,
"output" : 4
},
"ok" : 1,
}
EDIT2:
Complete solution, including applying having count >= 2:
map = function () {
    emit({a:this.a, b:this.b}, {count:1, _id:this._id});
}
reduce = function(k, values) {
    var result = {count: 0, _id: []};
    values.forEach(function(value) {
        result.count += value.count;
        result._id.push(value._id);
    });
    return result;
}
>db.multi_group.mapReduce(map,reduce,{out: { replace : "multi_result"}})
> db.multi_result.find({'value.count' : {$gte : 2}})
{ "_id" : { "a" : 1, "b" : 2 }, "value" : { "_id" : [ ObjectId("4f0adf2884025491024f994c"), ObjectId("4f0adf3284025491024f994f") ], "count" : 2 } }
{ "_id" : { "a" : 3, "b" : 2 }, "value" : { "_id" : [ ObjectId("4f0adf3084025491024f994e"), ObjectId("4f0adf3584025491024f9950") ], "count" : 2 } }
You should use MapReduce instead. Group has its limitations.
In future you'll be able to use the Aggregation Framework. But for now, use map/reduce.
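For reference, once the Aggregation Framework is available, the SQL query maps onto $group plus $match fairly directly; a sketch (untested against your data, assuming fields a and b as above):

db.list.aggregate([
    // group by the (a, b) pair and count documents per group
    { $group: { _id: { a: "$a", b: "$b" }, cnt: { $sum: 1 } } },
    // HAVING cnt > 2
    { $match: { cnt: { $gt: 2 } } }
])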
Depending on the number of your groups, you might find a simpler and faster solution than group or MapReduce by using distinct:
var res = [];
// distinct() returns an array in the shell, so iterate over it directly
db.list.distinct('a').forEach(function(a) {
    db.list.distinct('b').forEach(function(b) {
        var cnt = db.list.count({'a': a, 'b': b});
        if (cnt > 2)
            res.push({'a': a, 'b': b, 'cnt': cnt});
    });
});
It will be faster if you have an index on a and b:
db.list.ensureIndex({'a':1,'b':1})