Tried limiting the Group By data on MongoDB 3.0 - mongodb

I have tried limiting the Group By data on MongoDB 3.0, but it seems there is no proper option to do this.
I want to display the 5 latest records for each day, based on the dateCreated field (which is inside the tags array), along with the corresponding record count per day.
db.entries.find().limit(1).pretty();
{
"_id" : "bd348fb4dd38dd7a2",
"className" : "com.model.Entry",
"name" : "yQLs3T5NCJocPlOPuLgyEkQ9",
"description" : "4Z09BNPQNFhFiMbjqL RWC5SMs0d0XzogqdNmjk5dx1mw9roHgRrl8ljbHo16p1WTlNYU",
"account" : DBRef("accounts", "248a3-448b-a912-6573f23d34a5"),
"iconUrl" : "gA9QTuqYv9wZq1xKM37jdL",
"userCreatedBy" : DBRef("users", "8044-45d2-8567-a6cb808ce164"),
"timezone" : "Atlantic/Faroe",
"globalAccess" : false,
"tags" : [
{
"_id" : "8926079483331",
"category" : "PPq5k",
"value" : "NdKFQq",
"description" : "uDQVnhJ2tu5XWHinb",
"origin" : "User",
"dateCreated" : ISODate("2021-07-16T18:20:41.731Z"),
"dateModified" : ISODate("2021-07-16T18:20:53.319Z"),
"externalId" : "xkblzrwE"
},
{
"_id" : "89389483331",
"category" : "PPe5k",
"value" : "NdKFQq",
"description" : "uDQVnhJ2tu5XWHinb",
"origin" : "User",
"dateCreated" : ISODate("2021-07-16T18:20:41.731Z"),
"dateModified" : ISODate("2021-07-16T18:20:53.319Z"),
"externalId" : "xkblzrwE"
}
]}
The output I'm expecting is something like this:
[2021-07-16 (Date)-> (5 Latest Entries) , 2 (total records for that day) ]
I have tried the solution below:
How to get latest N records of each group in mongodb?
but $slice is not available on Mongo 3.0, so I'm kind of stuck here.

I'm not sure whether you can use aggregation, but the aggregation below can give you the result you wanted.
db.collection.aggregate([
{
"$unwind": "$tags"
},
{
$match: {
"tags.dateCreated": {
"$gte": ISODate("2021-07-16")
}
}
},
{
"$replaceRoot": {
"newRoot": "$tags"
}
},
{
"$limit": 5
}
])
Playground
If you have any questions, feel free to ask.
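Note that $replaceRoot only became available in MongoDB 3.4, so it may not run on a 3.0 server. For what it's worth, a minimal 3.0-compatible sketch of the per-day grouping could look like the one below; the five-entries-per-day cap would still need to be applied client-side, since the aggregation $slice operator only arrived in 3.2.
db.entries.aggregate([
    { "$unwind": "$tags" },
    // newest tags first, so each pushed array starts with the latest entries
    { "$sort": { "tags.dateCreated": -1 } },
    { "$group": {
        // bucket by calendar day of dateCreated
        "_id": { "$dateToString": { "format": "%Y-%m-%d", "date": "$tags.dateCreated" } },
        "latest": { "$push": "$tags" },
        "count": { "$sum": 1 }
    } }
])
Trimming latest down to the first five elements per day would then happen in application code (or with $slice once the server is upgraded to 3.2+).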

Related

Mongodb Query to get the nth document

I need to create a query in MongoDB that returns the SECOND TO THE LAST document. I am planning to use $group for this query, but I don't know what aggregation function to use. I only know $first and $last.
I have an example collection below and have also included the expected output. Thank you!
"_id" : ObjectId("60dc27ac54b7c46bfa1b84b4"),
"auditlogs" : [
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84be"),
"userid" : ObjectId("5ffe702d59a9205db81fcb69"),
"action" : "ADDTRANSACTION"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84bd"),
"userid" : ObjectId("5ffe644f9493e05db9245192"),
"action" : "EDITPROFILE"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84bc"),
"userid" : ObjectId("5ffe64949493e05db9245197"),
"action" : "DELETETRANSACTION"
}
]
}
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b75ge2"),
"auditlogs" : [
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84bb"),
"userid" : ObjectId("5ffe64b69493e05db924519b"),
"action" : "ADDTRANSACTION"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84ba"),
"userid" : ObjectId("5ffe65419493e05db92451d4"),
"action" : "ADDTRANSACTION"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84b9"),
"userid" : ObjectId("5ffe65689493e05db92451d9"),
"action" : "CHANGEACCESS"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84b8"),
"userid" : ObjectId("5ffe65819493e05db92451dd"),
"action" : "DELETETRANSACTION"
},
{
"_id" : ObjectId("60dc27ac54b7c46bfa1b84b7"),
"userid" : ObjectId("5ffe65df9493e05db92451f3"),
"action" : "EDITPROFILE",
]
OUTPUT:
{"_id" : ObjectId("60dc27ac54b7c46bfa1b84b4"),"_id" : ObjectId("60dc27ac54b7c46bfa1b84bd"),"userid" : ObjectId("5ffe644f9493e05db9245192"),"action" : "EDITPROFILE"},
{"_id" : ObjectId("60dc27ac54b7c46bfa1b75ge2"),"_id" : ObjectId("60dc27ac54b7c46bfa1b84b8"),"userid" : ObjectId("5ffe65819493e05db92451dd"),"action" : "DELETETRANSACTION"}
You can't have two _id keys in a single object.
I've renamed the parent object's _id to _parentId; you can give it any name you want except _id.
Aggregation:
db.collection.aggregate([
{
$unwind: "$auditlogs"
},
{
"$project": {
"_parentId": "$_id",
"_id": "$auditlogs._id",
"action": "$auditlogs.action",
"userid": "$auditlogs.userid",
}
}
])
Playground
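As a side note, if only the second-to-last entry is needed, the flattening can be combined with $arrayElemAt (available since MongoDB 3.2), where a negative index counts from the end of the array. A rough sketch, with secondToLast as a made-up field name:
db.collection.aggregate([
    {
        "$project": {
            "_parentId": "$_id",
            // negative index counts from the end: -2 is the second-to-last log entry
            "secondToLast": { "$arrayElemAt": [ "$auditlogs", -2 ] }
        }
    }
])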
You can slice the array by -2 to get the last two items, then by 1 to get the first of those, so the array is left with only the second-to-last element. Finally, unwind auditlogs so it is changed from an array to an object, which is the structure you want.
db.collection.aggregate([
{
$project: { auditlogs : { $slice: [ "$auditlogs", -2 ] } }
},
{
$project: { auditlogs : { $slice: [ "$auditlogs", 1 ] } }
},
{
$unwind: "$auditlogs"
}
])
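If it helps, the two $project stages can also be collapsed into one using the three-argument form of $slice (start at position -2, take 1 element); this is just an equivalent sketch of the same idea:
db.collection.aggregate([
    {
        // take exactly one element, starting at the second-to-last position
        $project: { auditlogs: { $slice: [ "$auditlogs", -2, 1 ] } }
    },
    {
        $unwind: "$auditlogs"
    }
])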

Mongo aggregation groups and subgroup

Hi, I have a Mongo aggregation:
[
{
"$match" : {
"dateTime" : {
"$gte" : ISODate("2017-01-01T00:00:00.000+0000"),
"$lt" : ISODate("2018-01-01T00:00:00.000+0000")
}
}
},
{
"$group" : {
"_id" : "dateTime",
"totals" : {
"$sum" : "$payment.totalAmount"
},
"count" : {
"$sum" : 1.0
}
}
}
],
{
"allowDiskUse" : false
}
);
This works fine. It aggregates and sums over the date range I supplied, and I get output as follows:
{
"_id" : "dateTime",
"totals" : 2625293.825017198,
"count" : 12038.0
}
However, I also want to further refine the groupings.
I have a field called 'companyId', and I want to calculate the sum and count for each companyId within the given time range.
I would like to get an output similar to this, where I get a sum and count for each company ID in the date range I queried, not just a sum/count of all the data:
[
{
"companyId" : "Acme Co",
"totals" : 2625293.825017198,
"count" : 12038.0
},
{
"companyId" : "Beta Co",
"totals" : 162593.82198,
"count" : 138.0
},
{
"companyId" : "Cel Co",
"totals" : 593.82,
"count" : 38.0
}
]
How do I do this? I have not been able to find a good example online.
Thanks
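A minimal sketch of the company-level grouping, assuming the collection is called payments (the original post doesn't name it): grouping on the field path "$companyId" instead of a constant string gives one bucket per company.
db.payments.aggregate([
    { "$match": {
        "dateTime": {
            "$gte": ISODate("2017-01-01T00:00:00.000+0000"),
            "$lt": ISODate("2018-01-01T00:00:00.000+0000")
        }
    } },
    { "$group": {
        "_id": "$companyId",                      // field path, so one group per company
        "totals": { "$sum": "$payment.totalAmount" },
        "count": { "$sum": 1.0 }
    } },
    { "$project": { "_id": 0, "companyId": "$_id", "totals": 1, "count": 1 } }
])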

Weird behaviour in the Mongo aggregation framework

I have a collection called accountHolding, and it has the records below:
{ "_id" : ObjectId("57cfbb09e4b024be2f1bce57"),
"_class" : "com.commercestudio.domain.AccountHolding",
"accountId" : "5732933ae4b0b709443b0d1e",
"companyId" : "57223d6de4b06c4ef00415b5",
"brokerageAccountId" : "5KC05007",
"symbol" : "AGG",
"quantity" : 1.0,
"pricePaid" : 112.55,
"processDate" : ISODate("2016-09-06T00:00:00.000Z"),
"recordDate" : ISODate("2016-09-06T00:00:00.000Z"),
"createdOn" : ISODate("2016-09-07T07:00:25.479Z")
}
{ "_id" : ObjectId("57cfbb09e4b024be2f1bce5b"),
"_class" : "com.commercestudio.domain.AccountHolding",
"accountId" : "5732933ae4b0b709443b0d1e",
"companyId" : "57223d6de4b06c4ef00415b5",
"brokerageAccountId" : "5KC05007",
"symbol" : "LQD",
"quantity" : 4.0,
"pricePaid" : 123.78,
"processDate" : ISODate("2016-09-06T00:00:00.000Z"),
"recordDate" : ISODate("2016-09-06T00:00:00.000Z"),
"createdOn" : ISODate("2016-09-07T07:00:25.498Z")
}
.....
Now I apply the aggregation framework to find the latest record-date data for a particular accountId:
db.accountHolding.aggregate(
[
{
"$match": {
"accountId": "5834caf32ae7bacc527ef2f3",
"symbol": {
"$in": [
"IUSG",
"VEA",
"IEMG",
"SCHX",
"VBR",
"IUSV",
"VOE"
]
}
}
},
{
"$group": {
"_id": "$symbol",
"recordDate": {
"$last": "$recordDate"
},
"quantity": {
"$last": "$quantity"
},
"pricePaid": {
"$last": "$pricePaid"
}
}
}
])
It returns two different results in two different environments.
On my development env. it shows:
{
"_id" : "VEA",
"recordDate" : ISODate("2018-03-02T00:00:00.000Z"),
"quantity" : 22.79609, "pricePaid" : 44.14
}
{ "_id" : "IUSG",
"recordDate" : ISODate("2018-03-02T00:00:00.000Z"),
"quantity" : 8.87831,
"pricePaid" : 55.79
}
something like this, and from the production env. it shows:
{
"_id" : "VEA",
"recordDate" : ISODate("2018-02-26T00:00:00Z"),
"quantity" : 22.79609,
"pricePaid" : 45.76
}
{
"_id" : "IUSG",
"recordDate" : ISODate("2018-02-26T00:00:00Z"),
"quantity" : 8.87831,
"pricePaid" : 57.47
}
I am unable to figure out why this weird behaviour is taking place, as both environments have the same data.
My database server is deployed on an AWS instance.
Can someone help me find the root cause and a solution?
This is expected behavior.
From the docs,
Returns the value that results from applying an expression to the last
document in a group of documents that share the same group by a field.
Only meaningful when documents are in a defined order.
Add $sort before $group stage.
{$sort:{recordDate:1}}
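Putting that together with the pipeline from the question, it would look something like this (the $sort makes the document order, and therefore $last, deterministic):
db.accountHolding.aggregate([
    { "$match": {
        "accountId": "5834caf32ae7bacc527ef2f3",
        "symbol": { "$in": [ "IUSG", "VEA", "IEMG", "SCHX", "VBR", "IUSV", "VOE" ] }
    } },
    // define the order before grouping so $last picks the newest recordDate
    { "$sort": { "recordDate": 1 } },
    { "$group": {
        "_id": "$symbol",
        "recordDate": { "$last": "$recordDate" },
        "quantity": { "$last": "$quantity" },
        "pricePaid": { "$last": "$pricePaid" }
    } }
])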

MongoDB filtering out subdocuments with lookup aggregation

Our project database has a capped collection called values which gets updated every few minutes with new data from sensors. These sensors all belong to a single sensor node, and I would like to query the last data from these nodes in a single aggregation. The problem I am having is filtering out just the last of ALL the types of sensors while still having only one (efficient) query. I looked around and found the $group argument, but I can't seem to figure out how to use it correctly in this case.
The database is structured as follows:
nodes:
{
"_id": 681
"sensors": [
{
"type": "foo"
},
{
"type": "bar"
}
]
}
values:
{
"_id" : ObjectId("570cc8b6ac55850d5740784e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"type" : "foo",
"nodeid" : 681,
"value" : 10
}
{
"_id" : ObjectId("190ac8b6ac55850d5740776e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"type" : "bar",
"nodeid" : 681,
"value" : 20
}
{
"_id" : ObjectId("167bc997bb66750d5740665e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"type" : "bar",
"nodeid" : 200,
"value" : 20
}
{
"_id" : ObjectId("110cc9c6ac55850d5740784e"),
"timestamp" : ISODate("2016-04-09T12:06:46.344Z"),
"type" : "foo",
"nodeid" : 681,
"value" : 12
}
So let's imagine I want the data from node 681; I would want a structure like this:
nodes:
{
"_id": 681
"sensors": [
{
"_id" : ObjectId("570cc8b6ac55850d5740784e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"type" : "foo",
"nodeid" : 681,
"value" : 10
},
{
"_id" : ObjectId("190ac8b6ac55850d5740776e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"type" : "bar",
"nodeid" : 681,
"value" : 20
}
]
}
Notice how one value of foo is not included, because I only want to get the latest value if there is more than one (which is always going to be the case). The collection is already ordered by timestamp because it is capped.
I have this query, but it just gets all the values from the database (which is waaay too much to do in a lifetime, let alone one request of the web app), so I was wondering how I would filter it before it gets aggregated.
query:
db.nodes.aggregate(
[
{
$unwind: "$sensors"
},
{
$match:{
nodeid: 681
}
},
{
$lookup:{
from: "values", localField: "sensors.type", foreignField: "type", as: "sensors"
}
}
]
)
Try this
// Pipeline
[
// Stage 1 - sort the data collection if not already done (optional)
{
$sort: {
"timestamp":1
}
},
// Stage 2 - group by type & nodeid then get first item found in each group
{
$group: {
"_id":{type:"$type",nodeid:"$nodeid"},
"sensors": {"$first":"$$CURRENT"} //consider using $last if your collection is on reverse
}
},
// Stage 3 - project the fields in desired
{
$project: {
"_id":"$sensors._id",
"timestamp":"$sensors.timestamp",
"type":"$sensors.type",
"nodeid":"$sensors.nodeid",
"value":"$sensors.value"
}
},
// Stage 4 - group and push it to array sensors
{
$group: {
"_id":{nodeid:"$nodeid"},
"sensors": {"$addToSet":"$$CURRENT"}
}
}
]
As far as I understand the document structure, there is no need to use $lookup, as all the data is in the readings (values) collection.
Please see the proposed solution:
db.readings.aggregate([{
$match : {
nodeid : 681
}
},
{
$group : {
_id : {
type : "$type",
nodeid : "$nodeid"
},
readings : {
$push : {
timestamp : "$timestamp",
value : "$value",
id : "$_id"
}
}
}
}, {
$project : {
_id : "$_id",
readings : {
$slice : ["$readings", -1]
}
}
}, {
$unwind : "$readings"
}, {
$project : {
_id : "$readings.id",
type : "$_id.type",
nodeid : "$_id.nodeid",
timestamp : "$readings.timestamp",
value : "$readings.value",
}
}, {
$group : {
_id : "$nodeid",
sensors : {
$push : {
_id : "$_id",
timestamp : "$timestamp",
value : "$value",
type:"$type"
}
}
}
}
])
and output:
{
"_id" : 681,
"sensors" : [
{
"_id" : ObjectId("110cc9c6ac55850d5740784e"),
"timestamp" : ISODate("2016-04-09T12:06:46.344Z"),
"value" : 12,
"type" : "foo"
},
{
"_id" : ObjectId("190ac8b6ac55850d5740776e"),
"timestamp" : ISODate("2016-04-12T12:06:46.344Z"),
"value" : 20,
"type" : "bar"
}
]
}
Any comments welcome!

Slow $group in mongodb

I am working on fetching data from MongoDB using $group. I have modified my query to:
db.mydata.aggregate([{ $match: {"CreatedOn": {$lte: ISODate("2015-10-27T03:45:09Z"),
"$gte": ISODate("2015-09-09T07:37:27.526Z")}} },
{"$group" : { "_id" : "$myIP" , "total" : { "$sum" : "$SuccessCount"}}},
{ "$project" : { "myIP" : "$_id" , "_id" : 0 , "Total" : "$total"}},
{ "$sort" : { "Total" : -1}}, { "$limit" : 10}])
But it takes more than 2 minutes to execute, even for a small amount of data. I have created an index on CreatedOn. I have also created an index on myIP.
I have a document structure like:
{ "_id" : ObjectId("55d33d7045cedc287ed840a3"),
"myIP" : "10.10.10.1","SuccessCount" : 1,
"CreatedOn":ISODate("2015-10-27T03:45:09Z")
}
I want success counts for all myIPs, with the maximum on top.
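One thing worth checking is whether the initial $match stage is actually using the CreatedOn index rather than scanning the whole collection; the explain option on aggregate shows the winning plan for the initial cursor. A sketch, with the field names taken from the question:
// Look for an IXSCAN on CreatedOn (rather than a COLLSCAN) in the
// $cursor/queryPlanner section of the explain output.
db.mydata.aggregate([
    { $match: { "CreatedOn": { $gte: ISODate("2015-09-09T07:37:27.526Z"),
                               $lte: ISODate("2015-10-27T03:45:09Z") } } },
    { $group: { _id: "$myIP", total: { $sum: "$SuccessCount" } } }
], { explain: true })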