MongoDB: get lastHour for each day

I have the following schema in MongoDB, where timestamp is the timestamp at an hourly level:
{
"day_chan1" : 54.464,
"day_chan2" : 44.141,
"day_chan3" : 44.89,
"gatewayId" : "443719005AA3",
"timestamp" : ISODate("2016-02-15T23:00:00.000Z"),
"total_curr_chan" : 5.408,
"total_day_chan" : 143.495,
"type" : 1
}
I want to be able to query the last timestamp for the day for the last 7 days and 30 days. In order to do this, I am thinking of doing something like
var d = new Date(); // today!
for(var i =1; i <= 7; i++) {
var n = i; // go back n days!
d.setDate(d.getDate() - n);
d.setHours(23,0,0);
var query = {
gatewayId: req.params.deviceId,
timestamp: { $lt: new Date(d) }
};
db
.find(query,function(resp) {
//return the data here
});
}
But this creates the problem of multiple callbacks, and I want to know if there is an easier way of doing this using aggregation or some other method.

Use the $hour operator within a $project stage to extract the hour part of the timestamp, then use $match to filter out documents that do not satisfy the given hour criterion:
var pipeline = [
{
"$project": {
"day_chan1": 1,
"day_chan2": 1,
"day_chan3": 1,
"gatewayId": 1,
"timestamp": 1,
"total_curr_chan": 1,
"total_day_chan": 1,
"type": 1,
"hour": { "$hour": "$timestamp" }
}
},
{ "$match": { "hour": 23 } }
];
collection.aggregate(pipeline, function(err, result) {
//return the data here
console.log(result);
});
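To also restrict the results to the last 7 or 30 days, you can prepend a date-range $match so only one query is needed instead of a loop. A rough sketch (the 7-day window and the reuse of req.params.deviceId are assumptions based on the question):
var since = new Date();
since.setDate(since.getDate() - 7); // or 30 for the last month
since.setHours(0, 0, 0, 0);

var pipeline = [
    {
        "$match": {
            "gatewayId": req.params.deviceId,
            "timestamp": { "$gte": since }
        }
    },
    {
        "$project": {
            "timestamp": 1,
            "total_day_chan": 1,
            "hour": { "$hour": "$timestamp" }
        }
    },
    { "$match": { "hour": 23 } }
];

collection.aggregate(pipeline, function(err, result) {
    // one callback with the last reading of each of the last 7 days
    console.log(result);
});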

For an arbitrary last hour per day it has to be a bit more complex:
db.collection.aggregate([
    {$match: {
        timestamp: {$type: "date"}
        // add date constraints here
    }},
    {$project: {
        _id: 1,
        date: {"y": {$year: "$timestamp"}, "d": {$dayOfYear: "$timestamp"}},
        doc: "$$CURRENT"
    }},
    {$group: {
        _id: "$date",
        maxtime: {$max: "$doc.timestamp"},
        doc: {$push: "$doc"}
    }},
    {$unwind: "$doc"},
    {$project: {
        latest: {$cmp: ["$maxtime", "$doc.timestamp"]},
        doc: "$doc"
    }},
    {$match: {"latest": 0}}
])
With map-reduce it should be simpler, but may be slower.
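As a rough map-reduce sketch of that idea (the collection name "collection" just follows the example above; treat this as an assumption, not a drop-in solution), you can emit a per-day key and reduce each group down to the document with the greatest timestamp:
db.collection.mapReduce(
    function() {
        // key: one bucket per UTC calendar day
        var d = this.timestamp;
        emit(d.getUTCFullYear() + "-" + (d.getUTCMonth() + 1) + "-" + d.getUTCDate(), this);
    },
    function(key, docs) {
        // keep only the document with the latest timestamp in the bucket
        return docs.reduce(function(a, b) {
            return a.timestamp > b.timestamp ? a : b;
        });
    },
    { out: { inline: 1 } }
)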

Related

$matching with field added with $dateToString doesn't work

In my MongoDB collection I have documents that contain a nested string field, containing a month and year, e.g. '04/2021'. Sample document:
{
"_id": {
"$oid": "608ba45cec43c5b24cda034b"
},
"status": "pass",
"stage": 5,
"priority": 0,
"payload": {
"company_id": "8800",
"company_name": "<MY COMPANY>",
"target_period": "04/2021"
},
"retry_count": 0,
"build_number": "101",
"job_name": "P123",
"createdAt": {
"$date": "2021-04-30T06:31:56.000Z"
},
"updatedAt": {
"$date": "2021-05-10T03:55:44.686Z"
}
}
I am trying to write an aggregation pipeline that will dynamically return documents where said field points to the past month. For example, run this month (May 2021), it would return documents labeled with '04/2021'. From this post I found the one-liner for getting the comparison string: new Date(new Date().getFullYear(), new Date().getMonth(), 1). (I understand that by virtue of getMonth returning a zero-based month index, getting the previous month works by accident and has to be solved somehow.)
This pipeline does not work:
[
{
$addFields: {
previous_month: {
$dateToString: {
'date': new Date(new Date().getFullYear(), new Date().getMonth(), 1),
'format': '%m/%G'
}
}
}
},
{
$match: {
"payload.target_period": "$previous_month"
}
}
]
With MongoDB Compass I can see that the field previous_month is populated just fine by the $addFields stage (above sample document gets value 04/2021), but the $match stage returns 0 documents. I'm running MongoDB version 4.2.12.
You should use the $expr operator when trying to self-reference another key in the document inside a $match stage; otherwise "$previous_month" is treated as a literal string rather than a field reference.
[
{
$addFields: {
previous_month: {
$dateToString: {
'date': new Date(new Date().getFullYear(), new Date().getMonth(), 1),
'format': '%m/%G'
}
}
}
},
{
$match: {
$expr: {
$eq: [ "$payload.target_period", "$previous_month" ],
},
}
}
]
Instead of doing the whole process in the query, I think you can easily prepare the input date in your client language (JS, Node.js):
I have prepared a zeroFill function that returns the number prefixed with 0 if it is less than 10;
get the previous month's date and pick the previous month;
concat both month and year.
function zeroFill(i) { return (i < 10 ? '0' : '') + i; }
var date = new Date();
date.setMonth(date.getMonth() - 1);
let searchDate = zeroFill(date.getMonth() + 1) + "/" + date.getFullYear();
console.log(searchDate); // mm/yyyy
Your query would be just:
[{ $match: { "payload.target_period": searchDate } }]
I would suggest the moment.js library; it is much simpler to use:
{ $match: { "payload.target_period": moment().startOf("months").subtract(1, "months").format("MM/YYYY") } }

How to calculate simple moving average using mongodb mapreduce?

I have time series data in the following format in a MongoDB collection:
{
"Name" : "AKBNK",
"Date" : ISODate("2009-01-02T00:00:00Z"),
"Close" : 3.256746559,
}
I want to calculate a simple moving average using MongoDB mapReduce. I tried the following, which performs window sliding, but it works slowly when the period is big.
var mapper = function() {
var i = 0, j = counter;
if (j < period) {
j = period;
i = period - counter;
}
for (; i < period && j <= limit; i++, j++) {
emit (j, this.Close);
}
counter++;
}
var reducer = function(key, values) {
return Array.sum(values);
}
var finalizer = function(key, reducedValue) {
return reducedValue / period;
}
var period = 730;
db.data.mapReduce(mapper, reducer, {finalize: finalizer, out: "smaOut", query: {Name: "AKBNK"}, sort: {Date: -1}, limit: period * 2 - 1, scope: {counter: 1, period: period, limit: period * 2 - 1}});
Any advice on how I can do this faster? How should I map the data?
You could try using the aggregation pipeline below, which at a quick glance seems to produce the correct results, but at a much higher speed:
db.data.aggregate({
$match: {
"Name": "AKBNK" // this stage will use and index if you have one on the "Name" field
}
}, {
$sort: { "Date": -1 }, // this stage will also use and index if you have one on "Date"
}, {
$group: {
"_id": null, // create one single document
"allCloseValues": { $push: "$Close" } // that shall contain an array with all "Close" values
}
}, {
$addFields: {
"copyOfAllCloseValues": "$allCloseValues" // duplicate the array
}
}, {
$unwind: {
"path": "$copyOfAllCloseValues", // flatten the created single document
"includeArrayIndex": "_id" // use the "_id" field to hold the array index
}
}, {
$project: {
avg: {
$avg: { // calculate the average of some part of the array "Close"
$slice: [ "$allCloseValues", "$_id", 730 ] // which shall start at index "_id" and take up to 730 values
}
}
}
}, {
$out: "smaOut" // write the resulting documents out to the "smaOut" collection
});
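If you happen to be on MongoDB 5.0 or newer, the same moving average can also be expressed with a $setWindowFields sliding window (a sketch assuming the same Name/Date/Close fields and a 730-value period):
db.data.aggregate([
    { $match: { "Name": "AKBNK" } },
    { $setWindowFields: {
        partitionBy: "$Name",
        sortBy: { Date: 1 },
        output: {
            sma: {
                $avg: "$Close",
                // current document plus the 729 preceding ones = 730-value window
                window: { documents: [-729, 0] }
            }
        }
    }}
])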

How do I do a 'group by' for a datetime when I want to group by just the date using $group? [duplicate]

I am working on a project in which I am tracking the number of clicks on a topic.
I am using MongoDB and I have to group the number of clicks by date (I want to group data for 15 days).
I have data stored in the following format in MongoDB:
{
"_id" : ObjectId("4d663451d1e7242c4b68e000"),
"date" : "Mon Dec 27 2010 18:51:22 GMT+0000 (UTC)",
"topic" : "abc",
"time" : "18:51:22"
}
{
"_id" : ObjectId("4d6634514cb5cb2c4b69e000"),
"date" : "Mon Dec 27 2010 18:51:23 GMT+0000 (UTC)",
"topic" : "bce",
"time" : "18:51:23"
}
I want to group the number of clicks on topic "abc" by day (for 15 days). I know how to group, but how can I group by the dates that are stored in my database?
I am looking for result in following format
[
{
"date" : "date in log",
"click" : 9
},
{
"date" : "date in log",
"click" : 19
},
]
I have written code but it only works if the dates are stored as strings (code is here http://pastebin.com/2wm1n1ix).
Please guide me on how to group it.
New answer using the Mongo aggregation framework
After this question was asked and answered, 10gen released MongoDB version 2.2 with an aggregation framework, which is now the better way to do this sort of query. This query is a little challenging because you want to group by date and the values stored are timestamps, so you have to do something to convert the timestamps to dates that match. For the purposes of example I will just write a query that gets the right counts.
db.col.aggregate(
{ $group: { _id: { $dayOfYear: "$date"},
click: { $sum: 1 } } }
)
This will return something like:
[
{
"_id" : 144,
"click" : 165
},
{
"_id" : 275,
"click" : 12
}
]
You need to use $match to limit the query to the date range you are interested in and $project to rename _id to date. How you convert the day of year back to a date is left as an exercise for the reader. :-)
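A sketch of those two additions (the 15-day window and the topic filter are assumptions taken from the question, and this assumes date is stored as an actual Date):
db.col.aggregate([
    { $match: {
        topic: "abc",
        date: { $gte: new Date(Date.now() - 1000 * 60 * 60 * 24 * 15) }
    }},
    { $group: { _id: { $dayOfYear: "$date" }, click: { $sum: 1 } } },
    { $project: { _id: 0, date: "$_id", click: 1 } }
])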
10gen has a handy SQL to Mongo Aggregation conversion chart worth bookmarking. There is also a specific article on date aggregation operators.
Getting a little fancier, you can use:
db.col.aggregate([
{ $group: {
_id: {
$add: [
{ $dayOfYear: "$date"},
{ $multiply:
[400, {$year: "$date"}]
}
]},
click: { $sum: 1 },
first: {$min: "$date"}
}
},
{ $sort: {_id: -1} },
{ $limit: 15 },
{ $project: { date: "$first", click: 1, _id: 0} }
])
which will get you the latest 15 days and return some datetime within each day in the date field. For example:
[
{
"click" : 431,
"date" : ISODate("2013-05-11T02:33:45.526Z")
},
{
"click" : 702,
"date" : ISODate("2013-05-08T02:11:00.503Z")
},
...
{
"click" : 814,
"date" : ISODate("2013-04-25T00:41:45.046Z")
}
]
There are already many answers to this question, but I wasn't happy with any of them. MongoDB has improved over the years, and there are now easier ways to do it. The answer by Jonas Tomanga gets it right, but is a bit too complex.
If you are using MongoDB 3.0 or later, here's how you can group by date. I start with the $match aggregation because the author also asked how to limit the results.
db.yourCollection.aggregate([
{ $match: { date: { $gte: ISODate("2019-05-01") } } },
{ $group: { _id: { $dateToString: { format: "%Y-%m-%d", date: "$date"} }, count: { $sum: 1 } } },
{ $sort: { _id: 1} }
])
To fetch data grouped by date in MongoDB:
db.getCollection('supportIssuesChat').aggregate([
{
$group : {
_id :{ $dateToString: { format: "%Y-%m-%d", date: "$createdAt"} },
list: { $push: "$$ROOT" },
count: { $sum: 1 }
}
}
])
Late answer, but for the record (for anyone else that comes to this page): You'll need to use the 'keyf' argument instead of 'key', since your key is actually going to be a function of the date on the event (i.e. the "day" extracted from the date) and not the date itself. This should do what you're looking for:
db.coll.group(
{
keyf: function(doc) {
var date = new Date(doc.date);
var dateKey = (date.getMonth()+1)+"/"+date.getDate()+"/"+date.getFullYear()+'';
return {'day':dateKey};
},
cond: {topic:"abc"},
initial: {count:0},
reduce: function(obj, prev) {prev.count++;}
});
For more information, take a look at MongoDB's doc page on aggregation and group: http://www.mongodb.org/display/DOCS/Aggregation#Aggregation-Group
This can help
return new Promise(function(resolve, reject) {
db.doc.aggregate(
[
{ $match: {} },
{ $group: { _id: { $dateToString: { format: "%Y-%m-%d", date: "$date" } }, count: { $sum: 1 } } },
{ $sort: { _id: 1 } }
]
).then(doc => {
/* if you need a date object */
doc.forEach(function(value, index) {
doc[index]._id = new Date(value._id);
}, this);
resolve(doc);
}).catch(reject);
});
Haven't worked that much with MongoDB yet, so I am not completely sure. But aren't you able to use full JavaScript?
So you could parse your date with the JavaScript Date class, create your date for the day out of it, and set it as a key in an "out" property. Then always add one if the key already exists, otherwise create it new with value = 1 (first click). Below is your code with an adapted reduce function (untested code!):
db.coll.group(
{
key:{'date':true},
initial: {retVal: {}},
reduce: function(doc, prev){
var date = new Date(doc.date);
var dateKey = date.getFullYear()+''+date.getMonth()+''+date.getDate();
(typeof prev.retVal[dateKey] != 'undefined') ? prev.retVal[dateKey] += 1 : prev.retVal[dateKey] = 1;
},
cond: {topic:"abc"}
}
)
Thanks @mindthief, your answer helped solve my problem today. The function below can group by day a little more easily; hope it can help others.
/**
 * Group by day
 * @param query document {key1:123,key2:456}
 */
var count_by_day = function(query){
return db.action.group(
{
keyf: function(doc) {
var date = new Date(doc.time);
var dateKey = (date.getMonth()+1)+"/"+date.getDate()+"/"+date.getFullYear();
return {'date': dateKey};
},
cond:query,
initial: {count:0},
reduce: function(obj, prev) {
prev.count++;
}
});
}
count_by_day({this:'is',the:'query'})
Another late answer, but still. If you want to do it in only one iteration and get the number of clicks grouped by date and topic, you can use the following code:
db.coll.group(
{
$keyf : function(doc) {
return { "date" : doc.date.getDate()+"/"+doc.date.getMonth()+"/"+doc.date.getFullYear(),
"topic": doc.topic };
},
initial: {count:0},
reduce: function(obj, prev) { prev.count++; }
})
Also, if you would like to optimize the query as suggested, you can use an integer value for the key date instead of the String (hint: use valueOf()), though for my examples the speed was the same.
Furthermore it's always wise to check the MongoDB docs regularly, because they keep adding new features all the time. For example with the new Aggregation framework, which will be released in the 2.2 version you can achieve the same results much easier http://docs.mongodb.org/manual/applications/aggregation/
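For example, a sketch of the same date-plus-topic grouping with the aggregation framework (assuming date is stored as an actual Date rather than a string):
db.coll.aggregate([
    { $group: {
        _id: {
            day: { $dayOfMonth: "$date" },
            month: { $month: "$date" },
            year: { $year: "$date" },
            topic: "$topic"
        },
        count: { $sum: 1 }
    }}
])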
If you want a Date object returned directly
Then instead of applying the Date Aggregation Operators, apply "Date Math" to round the date object. This can often be desirable, as all drivers represent a BSON Date in a form that is commonly used for date manipulation in all languages where that is possible:
db.datetest.aggregate([
{ "$group": {
"_id": {
"$add": [
{ "$subtract": [
{ "$subtract": [ "$date", new Date(0) ] },
{ "$mod": [
{ "$subtract": [ "$date", new Date(0) ] },
1000 * 60 * 60 * 24
]}
]},
new Date(0)
]
},
"click": { "$sum": 1 }
}}
])
Or if as is implied in the question that the grouping interval required is "buckets" of 15 days, then simply apply that to the numeric value in $mod:
db.datetest.aggregate([
{ "$group": {
"_id": {
"$add": [
{ "$subtract": [
{ "$subtract": [ "$date", new Date(0) ] },
{ "$mod": [
{ "$subtract": [ "$date", new Date(0) ] },
1000 * 60 * 60 * 24 * 15
]}
]},
new Date(0)
]
},
"click": { "$sum": 1 }
}}
])
The basic math applied is that when you $subtract two Date objects the result returned will be the milliseconds of difference numerically. So the epoch is represented by Date(0) as the base for conversion in whatever language constructor you have.
With a numeric value, the "modulo" ( $mod ) is applied to round the date ( subtract the remainder from the division ) to the required interval, being either:
1000 milliseconds * 60 seconds * 60 minutes * 24 hours = 1 day
Or
1000 milliseconds * 60 seconds * 60 minutes * 24 hours * 15 days = 15 days
So it's flexible to whatever interval you require.
By the same token from above, an $add operation between a "numeric" value and a Date object will return a Date object equivalent to the milliseconds value of both objects combined ( the epoch is 0, therefore 0 plus the difference is the converted date ).
Easily represented and reproducible in the following listing:
var now = new Date();
var bulk = db.datetest.initializeOrderedBulkOp();
for ( var x = 0; x < 60; x++ ) {
bulk.insert({ "date": new Date( now.valueOf() + ( 1000 * 60 * 60 * 24 * x ))});
}
bulk.execute();
And running the second example with 15 day intervals:
{ "_id" : ISODate("2016-04-14T00:00:00Z"), "click" : 12 }
{ "_id" : ISODate("2016-03-30T00:00:00Z"), "click" : 15 }
{ "_id" : ISODate("2016-03-15T00:00:00Z"), "click" : 15 }
{ "_id" : ISODate("2016-02-29T00:00:00Z"), "click" : 15 }
{ "_id" : ISODate("2016-02-14T00:00:00Z"), "click" : 3 }
Or a similar distribution depending on the current date when the listing is run, and of course the 15-day intervals will be consistent relative to the epoch date.
Using the "Math" method is a bit easier to tune, especially if you want to adjust time periods for different timezones in aggregation output where you can similarly numerically adjust by adding/subtracting the numeric difference from UTC.
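For instance, a sketch of that timezone adjustment for a fixed UTC+10:00 offset (the offset value here is purely an assumption for illustration): add the offset before rounding to the day, then subtract it again so the boundary lands on local midnight:
var tzOffset = 1000 * 60 * 60 * 10; // assumed +10:00 offset in milliseconds

db.datetest.aggregate([
    { "$group": {
        "_id": {
            "$subtract": [
                { "$add": [
                    { "$subtract": [
                        { "$add": [ { "$subtract": [ "$date", new Date(0) ] }, tzOffset ] },
                        { "$mod": [
                            { "$add": [ { "$subtract": [ "$date", new Date(0) ] }, tzOffset ] },
                            1000 * 60 * 60 * 24
                        ]}
                    ]},
                    new Date(0)
                ]},
                tzOffset
            ]
        },
        "click": { "$sum": 1 }
    }}
])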
Of course, that is a good solution. Aside from that, you can group dates by day as strings (as that answer proposes) or you can get the beginning of the day by projecting the date field (in aggregation) like this:
{'$project': {
'start_of_day': {'$subtract': [
'$date',
{'$add': [
{'$multiply': [{'$hour': '$date'}, 3600000]},
{'$multiply': [{'$minute': '$date'}, 60000]},
{'$multiply': [{'$second': '$date'}, 1000]},
{'$millisecond': '$date'}
]}
]},
}}
It gives you this:
{
"start_of_day" : ISODate("2015-12-03T00:00:00.000Z")
},
{
"start_of_day" : ISODate("2015-12-04T00:00:00.000Z")
}
It has some pluses: you can manipulate your days as a date type (not a number or string), it allows you to use all of the date aggregation operators in subsequent aggregation operations, and it gives you the date type in the output.

MongoDB Aggregate for a sum on a per week basis for all prior weeks

I've got a series of docs in MongoDB. An example doc would be
{
createdAt: Mon Oct 12 2015 09:45:20 GMT-0700 (PDT),
year: 2015,
week: 41
}
Imagine these span all weeks of the year and there can be many in the same week. I want to aggregate them in such a way that the resulting values are a sum of each week and all its prior weeks counting the total docs.
So if there were something like 10 in the first week of the year and 20 in the second, the result could be something like
[{ week: 1, total: 10, weekTotal: 10},
{ week: 2, total: 30, weekTotal: 20}]
Creating an aggregation to find the weekTotal is easy enough. Including a projection to show the first part
db.collection.aggregate([
{
$project: {
"createdAt": 1,
year: {$year: "$createdAt"},
week: {$week: "$createdAt"},
_id: 0
}
},
{
$group: {
_id: {year: "$year", week: "$week"},
weekTotal : { $sum : 1 }
}
},
]);
But getting past this to sum based on that week and those weeks preceding is proving tricky.
The aggregation framework is not able to do this as all operations can only effectively look at one document or grouping boundary at a time. In order to do this on the "server" you need something with access to a global variable to keep the "running total", and that means mapReduce instead:
db.collection.mapReduce(
    function() {
        // helper to get the ISO week number of a date
        Date.prototype.getWeekNumber = function(){
            var d = new Date(+this);
            d.setHours(0,0,0);
            d.setDate(d.getDate()+4-(d.getDay()||7));
            return Math.ceil((((d-new Date(d.getFullYear(),0,1))/8.64e7)+1)/7);
        };
        emit({ year: this.createdAt.getFullYear(), week: this.createdAt.getWeekNumber() }, 1);
    },
    function(key, values) {
        return Array.sum(values);
    },
    {
        out: { inline: 1 },
        scope: { total: 0 },
        finalize: function(key, value) {
            // "total" comes from scope and keeps the running sum across the sorted keys
            total += value;
            return { total: total, weekTotal: value }
        }
    }
)
If you can live with the operation occurring on the "client" then you need to loop through the aggregation result and similarly sum up the totals:
var total = 0;
db.collection.aggregate([
{ "$group": {
"_id": {
"year": { "$year": "$createdAt" },
"week": { "$week": "$createdAt" }
},
"weekTotal": { "$sum": 1 }
}},
{ "$sort": { "_id": 1 } }
]).map(function(doc) {
total += doc.weekTotal;
doc.total = total;
return doc;
});
It's all a matter of whether it makes the most sense to you for this to happen on the server or on the client. But since the aggregation pipeline has no such "globals", you probably should not be looking at it for any further processing without outputting to another collection anyway.
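If the server-side route matters and you are on MongoDB 5.0 or newer, $setWindowFields (added long after this answer) can keep the running total inside the pipeline; a rough sketch under that assumption:
db.collection.aggregate([
    { "$group": {
        "_id": {
            "year": { "$year": "$createdAt" },
            "week": { "$week": "$createdAt" }
        },
        "weekTotal": { "$sum": 1 }
    }},
    { "$setWindowFields": {
        "sortBy": { "_id": 1 },
        "output": {
            "total": {
                "$sum": "$weekTotal",
                "window": { "documents": [ "unbounded", "current" ] }
            }
        }
    }}
])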

MongoDB - Query all documents createdAt within last hours, and group by minute?

From reading various articles out there, I believe this should be possible, but I'm not sure where exactly to start.
This is what I'm trying to do:
I want to run a query that finds all documents createdAt within the last hour, groups all of them by minute, and, since each document has a tweets value like 5, 6, or 19, adds them up for each one of those minutes and provides a sum.
Here's a sample of the collection:
{
"createdAt": { "$date": 1385064947832 },
"updatedAt": null,
"tweets": 47,
"id": "06E72EBD-D6F4-42B6-B79B-DB700CCD4E3F",
"_id": "06E72EBD-D6F4-42B6-B79B-DB700CCD4E3F"
}
Is this possible to do in mongodb?
@zero323 - I first tried just grouping the last hour like so:
db.tweetdatas.group( {
key: { tweets: 1, 'createdAt': 1 },
cond: { createdAt: { $gt: new Date("2013-11-20T19:44:58.435Z"), $lt: new Date("2013-11-20T20:44:58.435Z") } },
reduce: function ( curr, result ) { },
initial: { }
} )
But that just returns all the tweets within the timeframe, which technically is what I want, but now I want to group them all by each minute, and add up the sum of tweets for each minute.
@almypal
Here is the query that I'm using, based off your suggestion:
db.tweetdatas.aggregate(
{$match:{ "createdAt":{$gt: "2013-11-22T14:59:18.748Z"}, }},
{$project: { "createdAt":1, "createdAt_Minutes": { $minute : "$createdAt" }, "tweets":1, }},
{$group:{ "_id":"$createdAt_Minutes", "sum_tweets":{$sum:"$tweets"} }}
)
However, it's displaying this response:
{ "result" : [ ], "ok" : 1 }
Update: The response from @almypal is working. Apparently, putting the date in as a string like in my example above does not work. Since I normally run this query from Node, when trying it in the shell I thought it would be easier to convert the date variable to a string and use that, but it has to be an actual Date object.
Use aggregation as below:
var lastHour = new Date();
lastHour.setHours(lastHour.getHours()-1);
db.tweetdatas.aggregate(
{$match:{ "createdAt":{$gt: lastHour}, }},
{$project: { "createdAt":1, "createdAt_Minutes": { $minute : "$createdAt" }, "tweets":1, }},
{$group:{ "_id":"$createdAt_Minutes", "sum_tweets":{$sum:"$tweets"} }}
)
and the result would be like this
{
"result" : [
{
"_id" : 1,
"sum_tweets" : 117
},
{
"_id" : 2,
"sum_tweets" : 40
},
{
"_id" : 3,
"sum_tweets" : 73
}
],
"ok" : 1
}
where _id corresponds to the specific minute and sum_tweets is the total number of tweets in that minute.
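One caveat: if the one-hour window crosses an hour boundary, grouping by the bare minute value merges minutes from different hours. A sketch that groups on hour and minute together instead (assumes MongoDB 3.0+ for $dateToString):
db.tweetdatas.aggregate(
    {$match:{ "createdAt": {$gt: lastHour} }},
    {$group:{
        "_id": { $dateToString: { format: "%H:%M", date: "$createdAt" } },
        "sum_tweets": {$sum: "$tweets"}
    }},
    {$sort:{ "_id": 1 }}
)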