I have two collections "datasets" and "users".
I did a lookup on datasets.assignedTo = users.id and that works fine. Additionally, I want to match datasets.firstBillable against the users.prices date range, i.e. firstBillable >= prices.beginDate and firstBillable <= prices.endDate, so that I get the price value that was current at that time.
For example:
cgPrices: 45
https://mongoplayground.net/p/YQps9EozlAL
Collections:
db={
users: [
{
id: 1,
name: "Aravinth",
prices: [
{
beginDate: "2022-08-24T07:29:01.639Z",
endDate: "2022-08-31T07:29:01.639Z",
price: 45
}
]
},
{
id: 2,
name: "Raja",
prices: [
{
beginDate: "2022-07-25T07:29:01.639Z",
endDate: "2022-07-30T07:29:01.639Z",
price: 55
}
]
}
],
datasets: [
{
color: "braun, rose gold",
firstBillable: "2022-08-24T07:29:01.639Z",
assignedTo: 1
},
{
color: "beige, silber",
firstBillable: "2022-07-25T07:29:01.639Z",
assignedTo: 2
}
]
}
My current implementation:
db.datasets.aggregate([
{
"$lookup": {
"from": "users",
"as": "details",
let: {
assigned_to: "$assignedTo",
first_billable: "$firstBillable"
},
pipeline: [
{
"$match": {
$expr: {
"$and": [
{
"$eq": [
"$id",
"$$assigned_to"
]
},
{
"$gte": [
"$first_billable",
"$details.prices.beginDate"
]
},
{
"$lte": [
"$first_billable",
"$details.prices.endDate"
]
}
]
}
}
}
]
}
},
{
"$addFields": {
"details": 0,
"cg": {
$first: {
"$first": "$details.prices.price"
}
}
}
}
])
The output I need:
[
{
"_id": ObjectId("5a934e000102030405000000"),
"assignedTo": 1,
"cg": 45,
"color": "braun, rose gold",
"details": 0,
"firstBillable": "2022-08-24T07:29:01.639Z"
},
{
"_id": ObjectId("5a934e000102030405000001"),
"assignedTo": 2,
"cg": 55,
"color": "beige, silber",
"details": 0,
"firstBillable": "2022-07-25T07:29:01.639Z"
}
]
Concerns:
You should compare the dates as Date values instead of strings, so you need to convert the date strings to Date (e.g. with $toDate) before comparing.
In the users collection, prices is an array. You need to deconstruct the array into multiple documents (with $unwind) before comparing the date fields in prices.
The query should be:
db.datasets.aggregate([
{
"$lookup": {
"from": "users",
"as": "details",
let: {
assigned_to: "$assignedTo",
first_billable: {
$toDate: "$firstBillable"
}
},
pipeline: [
{
$match: {
$expr: {
$eq: [
"$id",
"$$assigned_to"
]
}
}
},
{
$unwind: "$prices"
},
{
"$match": {
$expr: {
"$and": [
{
"$gte": [
"$$first_billable",
{
$toDate: "$prices.beginDate"
}
]
},
{
"$lte": [
"$$first_billable",
{
$toDate: "$prices.endDate"
}
]
}
]
}
}
}
]
}
},
{
"$addFields": {
"details": 0,
"cg": {
$first: "$details.prices.price"
}
}
}
])
Demo @ Mongo Playground
I want to filter the dataset to extract documents that were created within the last 7 days OR within the last month OR at any date, i.e. filter documents based on the createdAt field in the document.
Dataset:-
[
{
"_id": ObjectId("6257047cffd61ab62864c1ae"),
"type": "A",
"source": "B",
"user": ObjectId("622b55ff0b0af6b049c387d3"),
"createdAt": ISODate("2022-04-17T07:55:00.368Z"),
"updatedAt": ISODate("2022-04-17T07:55:00.368Z"),
},
{
"_id": ObjectId("6257047cffd61ab62864c1ad"),
"type": "B",
"source": "A",
"user": ObjectId("622b55ff0b0af6b049c387d3"),
"createdAt": ISODate("2022-04-23T07:55:00.368Z"),
"updatedAt": ISODate("2022-04-23T07:55:00.368Z"),
},
{
"_id": ObjectId("6257047cffd61ab62864c1ce"),
"type": "A",
"source": "C",
"user": ObjectId("622b55ff0b0af6b049c387d3"),
"createdAt": ISODate("2022-04-17T07:55:00.368Z"),
"updatedAt": ISODate("2022-04-17T07:55:00.368Z"),
},
{
"_id": ObjectId("6257047cffd61ab62864c1cb"),
"type": "A",
"source": "B",
"user": ObjectId("622b56250b0af6b049c387d6"),
"createdAt": ISODate("2022-04-24T07:55:00.368Z"),
"updatedAt": ISODate("2022-04-24T07:55:00.368Z"),
},
{
"_id": ObjectId("6257047cffd61ab62864c1cb"),
"type": "A",
"source": "B",
"user": ObjectId("622b56250b0af6b049c387d6"),
"createdAt": ISODate("2022-03-24T07:55:00.368Z"),
"updatedAt": ISODate("2022-03-24T07:55:00.368Z"),
},
{
"_id": ObjectId("6257047cffd61ab62864c1ce"),
"type": "A",
"source": "C",
"user": ObjectId("622b55ff0b0af6b049c387d3"),
"createdAt": ISODate("2022-03-17T07:55:00.368Z"),
"updatedAt": ISODate("2022-03-17T07:55:00.368Z"),
},
]
MongoDB aggregate query:-
db.collection.aggregate([
{
$addFields: {
paramType: "All",
paramSource: "All",
paramCreatedAt:"All",
}
},
{
$match: {
$and: [
{
user: ObjectId("622b55ff0b0af6b049c387d3")
},
{
$or: [
{
paramType: {
$eq: "All"
}
},
{
$expr: {
$eq: [
"$paramType",
"$type"
],
}
}
]
},
{
$or: [
{
paramSource: {
$eq: "All"
}
},
{
$expr: {
$eq: [
"$paramSource",
"$source"
]
}
}
]
}
]
}
},
{
$setWindowFields: {
output: {
totalCount: {
$count: {}
}
}
}
},
{
$sort: {
createdAt: -1
}
},
{
$skip: 0
},
{
$limit: 6
},
{
"$project": {
"paramSource": false,
"paramType": false,
}
}
])
How do I filter to get documents created in the last 7 days, the last 30 days, or at any date?
paramCreatedAt will take one of the following values: [All dates, 7 days ago, a month ago]
Example:-
If the "All dates" filter is applied, display all records.
If the "7 days" filter is applied, display records created between the current date (which can be any day, not necessarily a Sunday) and 7 days back.
If the "30 days" filter is applied, display records created in the last 30 days.
Your skeleton is pretty neat and you are actually quite close. For the date filtering, just use $dateDiff (available from MongoDB 5.0) to get the date difference in days and compare it with the interval you selected (i.e. 7 or 30 days) using $switch:
db.collection.aggregate([
{
$addFields: {
paramType: "All",
paramSource: "All",
paramCreatedAt: "All dates"// [All dates, 7 days ago, a month ago]
}
},
{
$match: {
$and: [
{
user: ObjectId("622b55ff0b0af6b049c387d3")
},
{
$or: [
{
paramType: {
$eq: "All"
}
},
{
$expr: {
$eq: [
"$paramType",
"$type"
],
}
}
]
},
{
$or: [
{
paramSource: {
$eq: "All"
}
},
{
$expr: {
$eq: [
"$paramSource",
"$source"
]
}
}
]
},
{
$or: [
{
paramCreatedAt: {
$eq: "All dates"
}
},
{
$expr: {
$and: [
{
"$in": [
"$paramCreatedAt",
[
"7 days ago",
"a month ago"
]
]
},
{
$lte: [
{
"$dateDiff": {
"startDate": "$createdAt",
"endDate": "$$NOW",
"unit": "day"
}
},
{
"$switch": {
"branches": [
{
"case": {
$eq: [
"$paramCreatedAt",
"7 days ago"
]
},
"then": 7
},
{
"case": {
$eq: [
"$paramCreatedAt",
"a month ago"
]
},
"then": 30
}
]
}
}
]
}
]
}
}
]
}
]
}
},
{
$setWindowFields: {
output: {
totalCount: {
$count: {}
}
}
}
},
{
$sort: {
createdAt: -1
}
},
{
$skip: 0
},
{
$limit: 6
},
{
"$project": {
"paramSource": false,
"paramType": false,
}
}
])
Here is the Mongo playground for your reference.
Here's an alternate approach using $facet. $facet is very handy because it allows you to "match and group in parallel" and create overlapping buckets of documents. A single pipeline with $group and $cond on the aggregation field works well for "if/then/elif/elif/else" constructions where overlaps are not desired and an order of precedence applies; a sketch of that pattern follows the $facet example below.
db.foo.aggregate([
// Initial filter(s):
{$match: {user: ObjectId("622b55ff0b0af6b049c387d3")}},
// Create a single version of "now" from the perspective of the
// CLIENT to use in queries to follow.
// To create such a target date from the perspective of the SERVER,
// use {$addFields: {DD: '$$NOW'}}
// Probably overkill but OK.
{$addFields: {DD: new ISODate()}},
{$facet: {
"all": [ ], // not exciting! :-)
"exactly_7_days_ago": [
{$match: {$expr:
{$eq: [7, {$floor: {$divide:[{$subtract:['$DD', '$createdAt'] }, 1000 * 60 * 60 * 24]}} ]}
}}
],
"everything_from_last_month": [
{$match: {$expr:
{$eq: [1, {$subtract:[{$month: '$DD'}, {$month: '$createdAt'} ]} ]}
}}
],
"only_one_day_from_last_month": [
{$match: {$expr:
{$and: [
{$eq: [1, {$subtract:[{$month: '$DD'}, {$month: '$createdAt'}]} ]},
{$eq: [0, {$subtract:[{$dayOfMonth: '$DD'}, {$dayOfMonth: '$createdAt'} ]} ]}
]}
}}
],
}}
]);
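For completeness, here is a minimal sketch of the non-overlapping "$group + $cond" pattern mentioned above (using $switch as the multi-branch form of $cond). This is not part of the answer above; the bucket names are made up for illustration, and the date expressions simply reuse the ones from the $facet example:

db.foo.aggregate([
  {$match: {user: ObjectId("622b55ff0b0af6b049c387d3")}},
  {$addFields: {DD: new ISODate()}},
  {$addFields: {
    bucket: {$switch: {
      branches: [
        // highest precedence: created exactly 7 days ago
        {case: {$eq: [7, {$floor: {$divide: [{$subtract: ['$DD', '$createdAt']}, 1000 * 60 * 60 * 24]}}]},
         then: 'exactly_7_days_ago'},
        // next: created in the previous calendar month
        {case: {$eq: [1, {$subtract: [{$month: '$DD'}, {$month: '$createdAt'}]}]},
         then: 'last_month'}
      ],
      default: 'other'
    }}
  }},
  // one bucket per document; buckets cannot overlap
  {$group: {_id: '$bucket', n: {$sum: 1}, docs: {$push: '$$ROOT'}}}
]);

Each document lands in exactly one group, chosen by the order of the branches, which is the precedence behaviour described above.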
I am currently trying to create an aggregation pipeline in MongoDB to group the items into incremental time intervals, but so far I have only succeeded in grouping them into disjoint time intervals.
Sample data:
{
"eventID": "abc",
"date": ISODate("2020-11-05T12:05:11.790Z"),
...........
},
{
"eventID": "xyz",
"date": ISODate("2020-11-05T12:12:11.790Z"),
...........
},
{
"eventID": "klm",
"date": ISODate("2020-11-05T12:28:11.790Z"),
...........
}
Current solution:
$group: {
"_id": {
"year": { $year: "$date" },
"dayOfYear": { $dayOfYear: "$date" },
"hour": { $hour: "$date" },
"interval": {
"$subtract": [
{ "$minute": "$date" },
{ "$mod": [{ "$minute": "$date"}, 10 ] }
]
}
},
"grouped_data": { "$push": { "eventID": "$eventID", "date": "$date" },
"count": { $sum: 1 } }
}
This returns the data grouped into 10-minute intervals, but those are disjoint intervals (time windows of 10 minutes that do not intersect).
Eg:
{
"_id": {
"year": 2020,
"dayOfYear": "314",
"hour": 12,
"interval": 0, // = interval beginning at minute 0 of 12th hour of the day
},
"grouped_data": [{ "eventID": "abc", "date": ISODate("2020-11-05T12:05:11.790Z" }],
"count": 1
},
{
"_id": {
"year": 2020,
"dayOfYear": "314",
"hour": 12,
"interval": 10, // = beginning at minute 10
},
"grouped_data": [{ "eventID": "xyz", "date": ISODate("2020-11-05T12:12:11.790Z") }],
"count": 1
},
{
"_id": {
"year": 2020,
"dayOfYear": "314",
"hour": 12,
"interval": 20, // = beginning at minute 20
},
"grouped_data": [{ "eventID": "klm", "date": ISODate("2020-11-05T12:28:11.790Z") }],
"count": 1
}
What I am actually looking for is grouping them into incremental 10-minute (or whatever length is needed) intervals, e.g. 0-9, 1-10, 2-11, etc., instead of 0-9, 10-19, 20-29, etc.
Edit:
The end goal here is to check whether a count threshold is exceeded within an interval length defined by the user.
If the user asks "Are there more than 2 events in a 10-minute time window?", then based on the sample data above and my current solution the condition is not met (1 event in the 0-9 interval and 1 event in the 10-19 interval). With incremental intervals I should be able to find that there are indeed 2 events within 10 minutes, namely in the 5-14 interval. Eg:
{
"_id": {
*whatever logic for grouping in 10minutes window*
},
"grouped_data": [
{ "eventID": "abc", "date": ISODate("2020-11-05T12:05:11.790Z") },
{ "eventID": "xyz", "date": ISODate("2020-11-05T12:12:11.790Z") }],
"count": 2
},
{
"_id": {
*whatever logic for grouping in 10minutes window*
},
"grouped_data": [
{ "eventID": "klm", "date": ISODate("2020-11-05T12:28:11.790Z") }]
"count": 1
},
It is not clear to me exactly which output you would like to get, but this aggregation pipeline builds the sliding-window groups:
db.collection.aggregate([
{
$group: {
_id: null,
data: { $push: "$$ROOT" },
min_date: { $min: "$date" },
max_date: { $max: "$date" }
}
},
{
$addFields: {
interval: {
$range: [
{ $toInt: { $divide: [{ $toLong: "$min_date" }, 1000] } },
{ $toInt: { $divide: [{ $toLong: "$max_date" }, 1000] } },
10 * 60]
}
}
},
{
$set: {
interval: {
$map: {
input: "$interval",
in: { $toDate: { $multiply: ["$$this", 1000] } }
}
}
}
},
{ $unwind: "$interval" },
{
$project: {
grouped_data: {
$filter: {
input: "$data",
cond: {
$and: [
{ $gte: ["$$this.date", "$interval"] },
{ $lt: ["$$this.date", { $add: ["$interval", 1000 * 60 * 10] }] },
]
}
}
},
interval: 1
}
}
])
The boundaries are taken from the input data, but you can also use fixed dates:
db.collection.aggregate([
{ $group: { _id: null, data: { $push: "$$ROOT" } } },
{
$addFields: {
interval: {
$range: [
{ $toInt: { $divide: [{ $toLong: ISODate("2020-01-01T00:00:00Z") }, 1000] } },
{ $toInt: { $divide: [{ $toLong: ISODate("2020-12-31T23:59:59Z") }, 1000] } },
10 * 60]
}
}
},
{
$set: {
interval: {
$map: {
input: "$interval",
in: { $toDate: { $multiply: ["$$this", 1000] } }
}
}
}
},
{ $unwind: "$interval" },
{
$project: {
grouped_data: {
$filter: {
input: "$data",
cond: {
$and: [
{ $gte: ["$$this.date", "$interval"] },
{ $lt: ["$$this.date", { $add: ["$interval", 1000 * 60 * 10] }] },
]
}
}
},
interval: 1
}
}
])
I will try to answer my own question; maybe it will help other people on the internet. The solution I came up with is based on the answer from Wernfried (thanks!).
db.getCollection("events_en").aggregate([
{
$match: { eventID: "XYZ" }
},
{
$group: {
_id: null,
events: { $push: "$$ROOT" },
limit: { $push: { $toDate: { $add: [{ $toLong: "$date" }, 1000 * 60 * 10] } } }
}
},
{ $unwind: "$limit" },
{
$project: {
events: {
$filter: {
input: "$events",
cond: {
$and: [
{ $lt: ["$$this.date", "$limit"] },
{ $gte: ["$$this.date", { $subtract: ["$limit", 1000 * 60 * 10] }] },
]
}
}
},
limit: 1,
}
},
{
$addFields: {
count: {
$size: "$events"
}
}
}
])
This creates a limit for each event, based on its date + 10 minutes (or whatever interval is needed). Afterwards it filters the events (which are duplicated for each limit via $unwind: "$limit") against that limit. The result looks something like this:
{
"_id" : null,
"limit" : ISODate("2020-11-05T12:28:27.000+0000"),
"events" : [
{
"_id" : 13,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:18:27.000+0000")
},
{
"_id" : 63,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:19:55.000+0000")
},
............................
{
"_id" : 90,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:27:57.000+0000")
}
],
"count" : 5
}
{
"_id" : null,
"limit" : ISODate("2020-11-05T12:29:55.000+0000"),
"events" : [
{
"_id" : 63,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:19:55.000+0000")
},
{
"_id" : 90,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:27:57.000+0000")
},
{
"_id" : 97,
"eventID" : "XYZ",
"date" : ISODate("2020-11-05T12:29:36.000+0000")
}
],
"count" : 3
}
As you can see, looking at the limit of each group and at the dates of the events in each group, these intervals are now incremental rather than disjoint (an event can appear in multiple groups, as long as it falls within that group's 10-minute window).
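Given the end goal from the Edit above (checking whether a count threshold is exceeded in any 10-minute window), one way to finish this off could be to append a final $match on the computed count to the pipeline above; a minimal sketch, assuming a threshold of 2 events:

{
  // keep only the sliding windows whose event count reaches the threshold
  $match: { count: { $gte: 2 } }
}

If the resulting cursor is non-empty, the threshold was exceeded in at least one window.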
I have documents that look like this
{
"_id": "5e3334cede31d9555e38dbee",
"time": 400,
"datetime": "2020-01-05T16:35:42.315Z",
"version": "2.0.30",
"hostname": "bvasilchik-lt.extron.com",
"testfile": "cards.txt",
"tests": 5,
"failures": 3,
"skips": 0,
"status": "Failed",
"__v": 0
}
I want to create a result that includes, per testfile name, the document with the highest time, so if the top 10 all had the same testfile name I'd only want to show the top one for that testfile name.
I have done this, but I also want to include another field showing the number of tests belonging to that same document. The only ways I found were to add $first, $last, $max or $min for the tests field, but that wouldn't be correct because the document with the highest time might have a different number of tests.
I am also matching results from a specific date range.
const times = await Suite.aggregate([
{
"$match": {
datetime: { "$gte": dateRange.startDate, "$lt": dateRange.endDate, }
}
},
{
"$group": {
_id: "$testfile",
time: { "$max" : "$time" },
}
},
{
"$sort": {
time: order
}
},
{
"$project": {
_id: 0,
testfile: "$_id",
time: "$time"
}
}
])
This produces these results:
[
{
"testfile": "lists.txt",
"time": 900
},
{
"testfile": "buttons.txt",
"time": 800
},
{
"testfile": "cards.txt",
"time": 400
},
{
"testfile": "popover.txt",
"time": 300
},
{
"testfile": "about-pages.neb",
"time": 76
}
]
but what I want it to return is:
[
{
"testfile": "lists.txt",
"tests": 5,
"time": 900
},
{
"testfile": "buttons.txt",
"tests": 4,
"time": 800
},
{
"testfile": "cards.txt",
"tests": 8,
"time": 400
},
{
"testfile": "popover.txt",
"tests": 1,
"time": 300
},
{
"testfile": "about-pages.neb",
"tests": 2,
"time": 76
}
]
You need to add an extra field to the $group and $project stages.
Use the $max operator for the time field, and accumulate the tests field as {time, tests} pairs with $push. In the last stage, $reduce the tests array, taking the tests value that belongs to the highest time.
{
"$group": {
_id: "$testfile",
time: {
$max: "$time"
},
tests: {
"$push": {
time: "$time",
tests: "$tests"
}
}
}
},
{
"$sort": {
time: 1
}
},
{
"$project": {
_id: 0,
testfile: "$_id",
time: "$time",
tests: {
$reduce: {
input: "$tests",
initialValue: 0,
in: {
$add: [
"$$value",
{
$cond: [
{
$and: [
{
$eq: [
"$time",
"$$this.time"
]
},
{
$gt: [
"$$this.tests",
"$$value"
]
}
]
},
{
$subtract: [
"$$this.tests",
"$$value"
]
},
0
]
}
]
}
}
}
}
}
MongoPlayground
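As a side note (this is not the approach above): because $group's $first takes its value from the first document in each group, sorting by time beforehand also lets you carry the tests count of the highest-time document. A minimal sketch, reusing the Suite model, dateRange and order variables assumed from the question:

const times = await Suite.aggregate([
  {
    "$match": {
      datetime: { "$gte": dateRange.startDate, "$lt": dateRange.endDate }
    }
  },
  // sort descending so the document with the highest time comes first per testfile
  { "$sort": { time: -1 } },
  {
    "$group": {
      _id: "$testfile",
      time: { "$first": "$time" },   // highest time
      tests: { "$first": "$tests" }  // tests count from that same document
    }
  },
  { "$sort": { time: order } },
  {
    "$project": {
      _id: 0,
      testfile: "$_id",
      time: 1,
      tests: 1
    }
  }
])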
How do I get counts grouped by every hour across 24 hours, even when no data is present for an hour, i.e. an hour with no documents should still be returned with a count of 0?
MongoDB 3.6
Input
[
{
"_id": ObjectId("5ccbb96706d1d47a4b2ced4b"),
"date": "2019-05-03T10:39:53.108Z",
"id": 166,
"update_at": "2019-05-03T02:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2ced4c"),
"date": "2019-05-03T10:39:53.133Z",
"id": 166,
"update_at": "2019-05-03T02:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2ced4d"),
"date": "2019-05-03T10:39:53.180Z",
"id": 166,
"update_at": "2019-05-03T20:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2ced7a"),
"date": "2019-05-10T10:39:53.218Z",
"id": 166,
"update_at": "2019-12-04T10:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2ced7b"),
"date": "2019-05-03T10:39:53.108Z",
"id": 166,
"update_at": "2019-05-05T10:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2cedae"),
"date": "2019-05-03T10:39:53.133Z",
"id": 166,
"update_at": "2019-05-05T10:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2cedad"),
"date": "2019-05-03T10:39:53.180Z",
"id": 166,
"update_at": "2019-05-06T10:45:36.208Z",
"type": "image"
},
{
"_id": ObjectId("5ccbb96706d1d47a4b2cedab"),
"date": "2019-05-10T10:39:53.218Z",
"id": 166,
"update_at": "2019-12-06T10:45:36.208Z",
"type": "image"
}
]
Implementation
db.collection.aggregate({
$match: {
update_at: {
"$gte": "2019-05-03T00:00:00.0Z",
"$lt": "2019-05-05T00:00:00.0Z"
},
id: {
"$in": [
166
]
}
}
},
{
$group: {
_id: {
$substr: [
"$update_at",
11,
2
]
},
count: {
"$sum": 1
}
},
},
{
$project: {
_id: 0,
hour: "$_id",
count: "$count"
}
},
{
$sort: {
hour: 1
}
})
Actual Output:
{
"count": 2,
"hour": "02"
},
{
"count": 1,
"hour": "20"
}
I expect the output to show all 24 hours, with a count of 0 (or null) for hours that have no data, and to convert the hour, for example "02" to "02 AM" and "13" to "01 PM":
Expected Output
{
"count": 0,
"hour": "01" // 01 AM
},
{
"count": 2,
"hour": "02"
},
{
"count": 0,
"hour": "03"
},
{
"count": 0,
"hour": "04"
},
{
"count": 0,
"hour": "05"
},
{
"count": 1,
"hour": "20" // to 08 pm
}
Try this solution:
Explanation
We group by hour to count how many images were uploaded.
Then we add an extra hour field to create the time intervals (if you were on v4.x, there would be a better solution).
We unwind the hour field (which creates new documents), match the first 2 digits against the counted hour, and use the last 2 digits to build the AM / PM label.
db.collection.aggregate([
{
$match: {
update_at: {
"$gte": "2019-05-03T00:00:00.0Z",
"$lt": "2019-05-05T00:00:00.0Z"
},
id: {
"$in": [
166
]
}
}
},
{
$group: {
_id: {
$substr: [
"$update_at",
11,
2
]
},
count: {
"$sum": 1
}
}
},
{
$addFields: {
hour: [
"0000",
"0101",
"0202",
"0303",
"0404",
"0505",
"0606",
"0707",
"0808",
"0909",
"1010",
"1111",
"1212",
"1301",
"1402",
"1503",
"1604",
"1705",
"1806",
"1907",
"2008",
"2109",
"2210",
"2311"
]
}
},
{
$unwind: "$hour"
},
{
$project: {
_id: 0,
hour: 1,
count: {
$cond: [
{
$eq: [
{
$substr: [
"$hour",
0,
2
]
},
"$_id"
]
},
"$count",
0
]
}
}
},
{
$group: {
_id: "$hour",
count: {
"$sum": "$count"
}
}
},
{
$sort: {
_id: 1
}
},
{
$project: {
_id: 0,
hour: {
$concat: [
{
$substr: [
"$_id",
2,
2
]
},
{
$cond: [
{
$gt: [
{
$substr: [
"$_id",
0,
2
]
},
"12"
]
},
" PM",
" AM"
]
}
]
},
count: "$count"
}
}
])
MongoPlayground
There's no "magic" solution, you'll have to hardcode it into your aggregation:
Heres an example using Mongo v3.2+ syntax with some $map and $filter magic:
db.collection.aggregate([
{
$match: {
update_at: {
"$gte": "2019-05-03T00:00:00.0Z",
"$lt": "2019-05-05T00:00:00.0Z"
},
id: {"$in": [166]}
}
},
{
$group: {
_id: {$substr: ["$update_at", 11, 2]},
count: {"$sum": 1}
}
},
{
$group: {
_id: null,
hours: {$push: {hour: "$_id", count: "$count"}}
}
},
{
$addFields: {
hours: {
$map: {
input: {
$concatArrays: [
"$hours",
{
$map: {
input: {
$filter: {
input: ["00", "01", "02", "03", "04", "05", "06", "07", "08", "09", "10", "11", "12", "13", "14", "15", "16", "17", "18", "19", "20", "21", "22", "23"],
as: "missingHour",
cond: {
$not: {
$in: [
"$$missingHour",
{
$map: {
input: "$hours",
as: "hourObj",
in: "$$hourObj.hour"
}
}
]
}
}
}
},
as: "missingHour",
in: {hour: "$$missingHour", count: 0}
}
}
]
},
as: "hourObject",
in: {
count: "$$hourObject.count",
hour: {
$cond: [
{$lt: ["$$hourObject.hour", "12"]},
{$concat: ["$$hourObject.hour", " AM"]},
{
$concat: [{
$switch: {
branches: [
{case: {$eq: ["$$hourObject.hour", "13"]}, then: "1"},
{case: {$eq: ["$$hourObject.hour", "14"]}, then: "2"},
{case: {$eq: ["$$hourObject.hour", "15"]}, then: "3"},
{case: {$eq: ["$$hourObject.hour", "16"]}, then: "4"},
{case: {$eq: ["$$hourObject.hour", "17"]}, then: "5"},
{case: {$eq: ["$$hourObject.hour", "18"]}, then: "6"},
{case: {$eq: ["$$hourObject.hour", "19"]}, then: "7"},
{case: {$eq: ["$$hourObject.hour", "20"]}, then: "8"},
{case: {$eq: ["$$hourObject.hour", "21"]}, then: "9"},
{case: {$eq: ["$$hourObject.hour", "22"]}, then: "10"},
{case: {$eq: ["$$hourObject.hour", "23"]}, then: "11"},
],
default: "None"
}
}, " PM"]
}
]
}
}
}
}
}
},
{
$unwind: "$hours"
},
{
$project: {
_id: 0,
hour: "$hours.hour",
count: "$hours.count"
}
},
{
$sort: {
hour: 1
}
}
]);
A short explanation of the $addFields stage: we first build the hours that are missing, then merge the two arrays (the originally found hours and the "new" missing hours), and finally convert each entry to the required output ("01" to "01 AM").
If you're using Mongo v4+, I recommend changing the $group _id stage to use $dateFromString, as it's more consistent:
_id: {$hour: {$dateFromString: {dateString: "$update_at"}}}
If you do that, you'll have to update the $filter and $map sections to use numbers instead of strings, and eventually use $toString to cast into the format you want, hence the v4+ requirement.
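A minimal sketch of that numeric variant (my own assumption, not part of the original answer): with the $dateFromString grouping above, $hours.hour holds numbers 0-23, so the missing hours can be generated with $range and the hardcoded string list disappears:

// Hypothetical numeric version of the missing-hours part of the $addFields stage (v4+)
{
  $addFields: {
    hours: {
      $concatArrays: [
        "$hours",
        {
          $map: {
            input: {
              $filter: {
                input: { $range: [0, 24] },          // candidate hours 0..23 as numbers
                as: "missingHour",
                cond: { $not: { $in: ["$$missingHour", "$hours.hour"] } }
              }
            },
            as: "missingHour",
            in: { hour: "$$missingHour", count: 0 }
          }
        }
      ]
    }
  }
}

The AM/PM mapping would then compare numbers (e.g. $lt: ["$$hourObject.hour", 12]) and only apply $toString when building the final label.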
You should store date values as Date objects instead of strings. I would do the formatting like this:
db.collection.aggregate(
[
{ $match: { ... } },
{
$group: {
_id: { h: { $hour: "$update_at" } },
count: { $sum: 1 }
}
},
{
$project: {
_id: 0,
hour: {
$switch: {
branches: [
{ case: { $lt: ["$_id.h", 10] }, then: { $concat: ["0", { $toString: "$_id.h" }, " AM"] } },
{ case: { $lt: ["$_id.h", 13] }, then: { $concat: [{ $toString: "$_id.h" }, " AM"] } },
{ case: { $lt: ["$_id.h", 22] }, then: { $concat: ["0", { $toString: { $subtract: ["$_id.h", 12] } }, " PM"] } },
{ case: { $lt: ["$_id.h", 24] }, then: { $concat: [{ $toString: { $subtract: ["$_id.h", 12] } }, " PM"] } }
]
}
},
hour24: "$_id.h",
count: 1
}
},
{ $sort: { hour24: 1 } }
])
As non-American I am not familiar with AM/PM rules, esp. for midnight and midday but I guess you get the principle.
Here is the query, which you can test out; it works for MongoDB 4.0+.
I will keep improving and updating the query.
const query = [{
$match: {
update_at: {
"$gte": ISODate("2019-05-03T00:00:00.0Z"),
"$lt": ISODate("2019-05-05T00:00:00.0Z")
},
id: {
"$in": [
166
]
}
}
},
{
$group: {
_id: { $hour: "$update_at" },
count: {
"$sum": 1
}
},
},
{
$addFields: {
hourStr: { $toString: { $cond: { if: { $gte: ["$_id", 12] }, then: { $subtract: [12, { $mod: [24, '$_id'] }] }, else: "$_id" } } },
}
},
{
$project: {
formated: { $concat: ["$hourStr", { $cond: { if: { $gt: ["$_id", 12] }, then: " PM", else: " AM" } }] },
count: "$count",
hour: 1,
}
}]
If you want the output in the Indian time zone (IST), then the code below works:
const query = [
{
$match: {
update_at: {
"$gte": ISODate("2019-05-03T00:00:00.0Z"),
"$lt": ISODate("2019-05-05T00:00:00.0Z")
},
id: {
"$in": [
166
]
}
}
},
{
$project: {
"h": { "$hour": { date: "$update_at", timezone: "+0530" } },
}
},
{
$group:
{
_id: { $hour: "$h" },
count: { $sum: 1 }
}
}
];