mongodb aggregate to find, count and project unique documents - mongodb

Below are the sample collections.
col1:
{
"_id" : ObjectId("5ec293782bc00b43b463b67c"),
"status" : ["running"],
"name" : "name1 ",
"dcode" : "dc001",
"address" : "address1",
"city" : "city1"
}
col2:
{
"_id" : ObjectId("5ec296182bc00b43b463b68f"),
"scode" : ObjectId("5ec2933df6079743c0a2a1f8"),
"ycode" : ObjectId("5ec293782bc00b43b463b67c"),
"city" : "city1",
"lockedDate" : ISODate("2020-05-20T00:00:00Z")
}
{
"_id" : ObjectId("5ec296182bc00b43b463688b"),
"scode" : ObjectId("5ec2933df6079743c0a2a1ff"),
"ycode" : ObjectId("5ec293782bc00b43b463b67c"),
"city" : "city1",
"lockedDate" : ISODate("2020-05-20T00:00:00Z")
}
{
"_id" : ObjectId("5ec296182bc00b43b44fc6cb"),
"scode" : null,
"ycode" : ObjectId("5ec293782bc00b43b463b67c"),
"city" : "city1",
"lockedDate" : ISODate("2020-05-20T00:00:00Z")
}
Problem statement:
I want to display the name from col1 together with the count of matching documents from col2 (matched on ycode) where scode != null.
What I tried:
db.col1.aggregate([
{'$match':{
city:'city1'
}
},
{
$lookup:
{
from: "col2",
let: {
ycode: "$_id",city:'$city'
},
pipeline: [
{
$match: {
scode:{'$ne':null},
lockedDate:ISODate("2020-05-20T00:00:00Z"),
$expr: {
$and: [
{
$eq: [
"$ycode",
"$$ycode"
]
},
{
$eq: [
"$city",
"$$city"
]
}
]
},
},
},
], as: "col2"
}
},
{'$unwind':'$col2'},
{'$count':'ycode'},
{
$project: {
name: 1,
status: 1,
}
},
])
The problem with this query is that it either displays the count or projects the name & status. If I run it in the current form it returns {}. If I remove {'$count':'ycode'} it projects the values but doesn't give the count, and if I remove $project I do get the count {ycode: 2}, but then the projection doesn't work. I want both in the result. Any suggestions?
ORM: Mongoose v5+, MongoDB v4.0

You can try the below query:
db.col1.aggregate([
{ "$match": { city: "city1" } },
{
$lookup: {
from: "col2",
let: { id: "$_id", city: "$city" }, /** Create local variables from fields of `col1` but not from `col2` */
pipeline: [
{
$match: { scode: { "$ne": null }, lockedDate: ISODate("2020-05-20T00:00:00Z"),
$expr: { $and: [ { $eq: [ "$ycode", "$$id" ] }, { $eq: [ "$city", "$$city" ] } ] }
}
},
{ $project: { _id: 1 } } // Optional: since we only need the count, not the entire doc, keeping just `_id` reduces the size of each joined doc
],
as: "col2" // will be an array either empty (If no match found) or array of objects
}
},
{
$project: { _id: 0, name: 1, countOfCol2: { $size: "$col2" } }
}
])
Test : mongoplayground
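
With the sample documents above (two col2 docs have a non-null scode for that ycode), the result should look roughly like this (a sketch of the expected shape, not an actual run):
{ "name" : "name1", "countOfCol2" : 2 }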

Related

I have these two collections, namely Inward and Outward. Both collections have similar embedded sub-documents containing product, batch and quantity fields.

Inward collection:
{"ord" : 1,
"products" : [
{
"name" : "apple",
"qty" : "10",
"batch" : "jun-2021"
},
{
"name" : "banana",
"qty" : 20,
"batch" : "jan-2021"
}
]
}
Outward collection:
{
"_id" : ObjectId("5edde5487957d9efea972a74"),
"inv" : 1,
"products" : [
{
"name" : "apple",
"qty" : 13,
"batch" : "jun-2021"
}
]
}
Now, I would like to compute the actual stock quantity for each product and batch by grouping across both collections.
You may try it this way:
Join the collections on inward.ord = outward.inv.
Flatten the products field.
Group by product name and batch to sum the qty value.
db.inward.aggregate([
{
$lookup: {
from: "outward",
let: {
ord: "$ord",
products: "$products"
},
pipeline: [
{
$match: {
$expr: {
$eq: [ "$$ord", "$inv" ]
}
}
},
{
$project: {
products: {
$concatArrays: [
"$$products",
"$products"
]
}
}
},
{
$unwind: "$products"
},
{
$replaceWith: "$products"
}
],
as: "products"
}
},
{
$unwind: "$products"
},
{
$group: {
_id: {
batch: "$products.batch",
name: "$products.name"
},
qty: {
$sum: "$products.qty"
}
}
}
])
MongoPlayground
Note: $replaceWith requires MongoDB v4.2 or newer.
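
With the sample documents above, the grouped output should be roughly the following (note that apple's inward qty is stored as the string "10", which $sum ignores, so only the numeric quantities are added):
{ "_id" : { "batch" : "jun-2021", "name" : "apple" }, "qty" : 13 }
{ "_id" : { "batch" : "jan-2021", "name" : "banana" }, "qty" : 20 }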

How to $lookup by avoiding null values in mongodb aggregate

Here I'm using $lookup to do a left join from other collections. The query works fine, but when some records are missing values it returns
errmsg : $in requires an array as a second argument, found: null
Here's the structure of the document being queried:
{
"no" : "2020921008981",
"sale" : {
"soldItems" : [
{
"itemId" : "5b55ac7f0550de00210a3b24",
},
{
"itemId" : "5b55ac7f0550de00215584re",
}
],
"bills" : [
{
"billNo" : "2020921053467",
"insurancePlanId" : "160",
},
{
"billNo" : "2020921053467",
"insurancePlanId" : "170",
}
],
"visitIds" : [
5b55ac7f0550de00210a3b24, 5b55ac7f0550de00210a3b24
]
}
}
the query :
db.case.aggregate([
{
$lookup: {
from: "insurance",
let: { ipids: "$sale.bill.insurancePlanId" },
pipeline: [
{
$unwind: "$coveragePlans"
},
{
$match: { $expr: { $in: ["$coveragePlans._id", "$$ipids"] } }
},
{
$project: { _id: 0, name: 1 }
}
],
as: "insurances"
}
},
{
$lookup: {
from: "item",
let: { iid: "$salesOrder.purchaseItems.itemRefId" },
pipeline: [
{
$match: {
$expr: {
$in: ["$_id", {
$map: {
input: "$$iid",
in: { $toObjectId: "$$this" }
}
}
]
}
}
}
],
as: "items"
}
}
])
insurance collection :
{
"_id" : ObjectId("5b55aca20550de00210a6d25"),
"name" : "HIJKL"
"coveragePlans" : [
{
"_id" : "160",
"name" : "UVWZ",
},
{
"_id" : "161",
"name" : "LMNO",
}
]
},
{
"_id" : ObjectId("5b55aca20550de00210a6d25"),
"name" : "WXYZ"
"coveragePlans" : [
{
"_id" : "169",
"name" : "5ABC",
},
{
"_id" : "170",
"name" : "4XYZ",
}
]
}
item collection :
{
"_id" : ObjectId("5b55ac7f0550de00210a3b24"),
"code" : "ABCDE"
},
{
"_id" : ObjectId("5b55ac7f0550de00215584re"),
"code" : "PQRST"
}
How do I avoid this and do null checks effectively before piping into the next stages? I tried { $match: { "fieldName": { $exists: true, $ne: null } } } but it returns a Mongo error regarding the format. If that is the way to go, please mention the stage where I should put it. Thanks in advance.
You can use the $ifNull operator:
let: { ipids: {$ifNull:["$sale.bill.insurancePlanId", [] ]} },
EDIT: To skip empty "$salesOrder.purchaseItems.itemRefId" values
let: { iid: {$filter: {input:"$salesOrder.purchaseItems.itemRefId", cond:{$ne:["$$this", ""]}}} },
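For context, here is how that guard slots into the first $lookup from the question (a sketch; the rest of the pipeline is unchanged):
db.case.aggregate([
  {
    $lookup: {
      from: "insurance",
      // fall back to an empty array when the field is missing or null,
      // so the $in inside the inner $match always receives an array
      let: { ipids: { $ifNull: ["$sale.bill.insurancePlanId", []] } },
      pipeline: [
        { $unwind: "$coveragePlans" },
        { $match: { $expr: { $in: ["$coveragePlans._id", "$$ipids"] } } },
        { $project: { _id: 0, name: 1 } }
      ],
      as: "insurances"
    }
  }
])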
You can get around that by not using $in.
It looks like this $map is executed separately for every document in the items collection. If you run the map in an $addFields stage instead, you can use the simple form of $lookup to match the added field to _id, which automatically handles missing values, nulls, and arrays.
Remove the added field with a $project stage if necessary.
db.case.aggregate([
{$lookup: {
from: "insurance",
let: { ipids: "$sale.bill.insurancePlanId" },
pipeline: [
{$unwind: "$coveragePlans"},
{$match: { $expr: { $in: ["$coveragePlans._id", "$$ipids"] } }},
{$project: { _id: 0, name: 1 }}
],
as: "insurances"
}},
{$addFields:{
// build the array of ObjectIds once per document; reference the field
// directly, since a `let` variable like $$iid only exists inside a $lookup
matchArray:{$map: {
input: "$salesOrder.purchaseItems.itemRefId",
in: { $toObjectId: "$$this" }
}}
}},
{$lookup: {
from: "item",
localField: "matchArray",
foreignField:"_id",
as: "items"
}},
{$project:{
matchArray: 0 // remove the helper field again
}}
])

How to avoid possible null error scenarios in mongodb Aggregate

I've set up a fairly long Mongo aggregate query to join several collections together and shape them into an output of string fields. The query works fine as long as all the required values (i.e. ids) exist, but it breaks when it encounters null or empty values during the $lookup.
Following is the patientFile collection that's being queried:
{
"no" : "2020921008981",
"startDateTime" : ISODate("2020-04-01T05:19:02.263+0000")
"saleId" : "5e8424464475140d19c6941b",
"patientId" : "5e8424464475140d1955941b"
}
sale collection :
{
"_id" : ObjectId("5e8424464475140d19c6941b"),
"invoices" : [
{
"billNumber" : "2020921053467",
"type" : "CREDIT",
"insurancePlanId" : "160"
},
{
"billNumber" : "2020921053469",
"type" : "DEBIT",
"insurancePlanId" : "161"
}
],
"status" : "COMPLETE"
}
insurance collection :
{
"_id" : ObjectId("5b55aca20550de00210a6d25"),
"name" : "HIJKL"
"plans" : [
{
"_id" : "160",
"name" : "UVWZ",
},
{
"_id" : "161",
"name" : "LMNO",
}
]
}
patient collection :
{
"_id" : ObjectId("5b55cc5c0550de00217ae0f3"),
"name" : "TAN NAI",
"userId" : {
"number" : "787333128H"
}
}
Here's the aggregate query:
db.getCollection("patientFile").aggregate([
{ $match: { "startDateTime": { $gte: ISODate("2020-01-01T00:00:00.000Z"),
$lt: ISODate("2020-05-01T00:00:00.000Z") } } },
{
$lookup:
{
from: "patient",
let: { pid: "$patientId" },
pipeline: [
{
$match: {
$expr: {
$eq: ["$_id", { $toObjectId: "$$pid" }]
}
}
},
{ "$project": { "name": 1, "userId.number": 1, "_id": 0 } }
],
as: "patient"
}
},
{
$lookup:
{
from: "sale",
let: { sid: "$saleId" },
pipeline: [
{
$match: {
$expr: {
$eq: ["$_id", { $toObjectId: "$$sid" }]
}
}
}
],
as: "sale"
}
},
{ $unwind: "$sale" },
{ $unwind: "$patient" },
{
$lookup: {
from: "insurance",
let: { pid: {$ifNull:["$sale.bill.insurancePlanId", [] ]} },
pipeline: [
{
$unwind: "$plans"
},
{
$match: { $expr: { $in: ["$plans._id", "$$pid"] } }
},
{
$project: { _id: 0, name: 1 }
}
],
as: "insurances"
}
},
{ $match: { "insurances.name": { $exists: true, $ne: null } } },
{
$addFields: {
invoice: {
$reduce: {
input: {$ifNull:["$sale.bill.billNumber", [] ]},
initialValue: "",
in: {
$cond: [{ "$eq": ["$$value", ""] }, "$$this", { $concat: ["$$value", "\n", "$$this"] }]
}
}
},
insurances: {
$reduce: {
input: {$ifNull:["$insurances.name", [] ]},
initialValue: "",
in: {
$cond: [{ "$eq": ["$$value", ""] }, "$$this", { $concat: ["$$value", "\n", "$$this"] }]
}
}
}
}
},
{
"$project": {
"startDateTime": 1,
"patientName": "$patient.name",
"invoice": 1,
"insurances": 1
}
}
],
{ allowDiskUse: true }
)
Error :
Unable to execute the selected commands
Mongo Server error (MongoCommandException): Command failed with error 241 (ConversionFailure): 'Failed to parse objectId '' in $convert with no onError value: Invalid string length for parsing to OID, expected 24 but found 0' on server localhost:27017.
The full response is:
{
"ok" : 0.0,
"errmsg" : "Failed to parse objectId '' in $convert with no onError value: Invalid string length for parsing to OID, expected 24 but found 0",
"code" : NumberInt(241),
"codeName" : "ConversionFailure"
}
As a solution I have used $ifNull (as suggested above), but this error keeps coming. What would be the best way to handle this scenario?
I see a couple of ways:
Instead of converting the string value to an ObjectId for the comparison, convert the ObjectId to a string:
$match: {
$expr: {
$eq: [{$toString: "$_id"}, "$$pid" ]
}
}
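Applied to the patient $lookup from the question, the first option would look like this (a sketch; only the inner $match changes):
{
  $lookup: {
    from: "patient",
    let: { pid: "$patientId" },
    pipeline: [
      {
        $match: {
          $expr: {
            // compare as strings: an empty or malformed patientId simply
            // fails to match instead of raising a ConversionFailure
            $eq: [{ $toString: "$_id" }, "$$pid"]
          }
        }
      },
      { "$project": { "name": 1, "userId.number": 1, "_id": 0 } }
    ],
    as: "patient"
  }
}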
Instead of the $toObjectId helper, use $convert and provide onError and/or onNull values:
$match: {
$expr: {
$eq: ["$_id", { $convert: {
input: "$$pid",
to: "objectId",
onError: {error:true},
onNull: {isnull:true}
}}]
}
}
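For example, the sale $lookup from the question could use this second form like so (a sketch; only the inner $match changes):
{
  $lookup: {
    from: "sale",
    let: { sid: "$saleId" },
    pipeline: [
      {
        $match: {
          $expr: {
            $eq: ["$_id", { $convert: {
              input: "$$sid",
              to: "objectId",
              onError: { error: true },   // sentinel values that never equal an ObjectId,
              onNull: { isnull: true }    // so bad or missing ids simply fail to match
            }}]
          }
        }
      }
    ],
    as: "sale"
  }
}
Documents whose saleId is empty then just end up with an empty sale array, which the later $unwind drops unless preserveNullAndEmptyArrays is set.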

$unwind, $aggregation manipulation in mongodb nodejs

Please check this query:
db.billsummaryofthedays.aggregate([
{
'$match': {
'userId': ObjectId('5e43de778b57693cd46859eb'),
'adminId': ObjectId('5e43e5cdc11f750864f46820'),
'date': ISODate("2020-02-11T16:30:00Z"),
}
},
{
$lookup:
{
from: "paymentreceivables",
let: { userId: '$userId', adminId: '$adminId' },
pipeline: [
{
$match:
{
paymentReceivedOnDate:ISODate("2020-02-11T16:30:00Z"),
$expr:
{
$and:
[
{ $eq: ["$userId", "$$userId"] },
{ $eq: ["$adminId", "$$adminId"] }
]
}
}
},
{ $project: { amount: 1, _id: 0 } }
],
as: "totalPayment"
}
}, {'$unwind':'$totalPayment'},
{ $group:
{ _id:
{ date: '$date',
userId: '$userId',
adminId: '$adminId' },
totalBill:
{
$sum: '$billOfTheDay'
},
totalPayment:
{
$sum: '$totalPayment.amount'
}
}
}
])
This is the result I am getting in the shell:
{
"_id" : {
"date" : ISODate("2020-02-11T18:30:00Z"),
"userId" : ObjectId("5e43de778b57693cd46859eb"),
"adminId" : ObjectId("5e43e5cdc11f750864f46820")
},
"totalBill" : 1595.6799999999998,
"totalPayments" : 100
}
Now this is not what I expected. I assume that due to {'$unwind':'$totalPayment'} each billsummaryofthedays document gets duplicated (once per element of the joined array), which is why the bill amounts are counted twice. When I remove {'$unwind':'$totalPayment'} the totalBill sum turns out to be correct, but totalPayment is 0.
I have tried several other ways but am not able to achieve the desired result.
Below are my collections:
// collection:billsummaryofthedays//
{
"_id" : ObjectId("5e54f784f4032c1694535c0e"),
"userId" : ObjectId("5e43de778b57693cd46859eb"),
"adminId" : ObjectId("5e43e5cdc11f750864f46820"),
"date" : ISODate("2020-02-11T16:30:00Z"),
"UID":"acex01"
"billOfTheDay" : 468,
}
{
"_id" : ObjectId("5e54f784f4032c1694535c0f"),
"UID":"bdex02"
"userId" : ObjectId("5e43de778b57693cd46859eb"),
"adminId" : ObjectId("5e43e5cdc11f750864f46820"),
"date" : ISODate("2020-02-11T16:30:00Z"),
"billOfTheDay" : 329.84,
}
// collection:paymentreceivables//
{
"_id" : ObjectId("5e43e73169fe1e3fc07eb7c5"),
"paymentReceivedOnDate" : ISODate("2020-02-11T16:30:00Z"),
"adminId" : ObjectId("5e43e5cdc11f750864f46820"),
"userId" : ObjectId("5e43de778b57693cd46859eb"),
"amount" : 20,
}
{
"_id" : ObjectId("5e43e73b69fe1e3fc07eb7c6"),
"paymentReceivedOnDate" : ISODate("2020-02-11T16:30:00Z"),
"adminId" : ObjectId("5e43e5cdc11f750864f46820"),
"userId" : ObjectId("5e43de778b57693cd46859eb"),
"amount" : 30,
}
The desired result should be totalBill: 797.84 (i.e. 468 + 329.84) and totalPayment: 50 (i.e. 20 + 30), but I am getting double the expected result, and even when one of the values is calculated correctly, the other one results in 0. How do I tackle this?
Since you have multiple documents with the same key data in the billsummaryofthedays collection, you can group first and then do the $lookup. That way the join between the two collections is 1-to-many rather than many-to-many as it is currently written. So you can try the below query for the desired output and a performance gain:
db.billsummaryofthedays.aggregate([
{
"$match": {
"userId": ObjectId("5e43de778b57693cd46859eb"),
"adminId": ObjectId("5e43e5cdc11f750864f46820"),
"date": ISODate("2020-02-11T16:30:00Z"),
}
},
{
$group: {
_id: {
date: "$date",
userId: "$userId",
adminId: "$adminId"
},
totalBill: {
$sum: "$billOfTheDay"
}
}
},
{
$lookup: {
from: "paymentreceivables",
let: {
userId: "$_id.userId",
adminId: "$_id.adminId"
},
pipeline: [
{
$match: {
paymentReceivedOnDate: ISODate("2020-02-11T16:30:00Z"),
$expr: {
$and: [
{
$eq: [
"$userId",
"$$userId"
]
},
{
$eq: [
"$adminId",
"$$adminId"
]
}
]
}
}
},
{
$project: {
amount: 1,
_id: 0
}
}
],
as: "totalPayment"
}
},
{
$addFields: {
totalPayment: {
$reduce: {
input: "$totalPayment",
initialValue: 0,
in: {
$add: [
"$$value",
"$$this.amount"
]
}
}
}
}
}
])
Test : MongoDB-Playground
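
With the sample documents above, this should produce roughly:
{
  "_id" : {
    "date" : ISODate("2020-02-11T16:30:00Z"),
    "userId" : ObjectId("5e43de778b57693cd46859eb"),
    "adminId" : ObjectId("5e43e5cdc11f750864f46820")
  },
  "totalBill" : 797.84,
  "totalPayment" : 50
}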

mongodb aggregation lookup with multiple conditions and ids

Given the following collections and the data in them:
db.a.insert([
{ "_id" : ObjectId("5b56989172ebcb00105e8f41"), "items" : [{id:ObjectId("5b56989172ebcb00105e8f41"), "instock" : 120}]},
{ "_id" : ObjectId("5b56989172ebcb00105e8f42"), "items" : [{id:ObjectId("5b56989172ebcb00105e8f42"), "instock" : 120}] },
{ "_id" : ObjectId("5b56989172ebcb00105e8f43"), "items" : [{ObjectId("5b56989172ebcb00105e8f43"), "instock" : 80}] }
])
db.b.insert([
{ "_id" : ObjectId("5b56989172ebcb00105e8f41")},
{ "_id" : ObjectId("5b56989172ebcb00105e8f42")},
{ "_id" : ObjectId("5b56989172ebcb00105e8f43")},
{ "_id" : ObjectId("5b56989172ebcb00105e8f44")},
{ "_id" : ObjectId("5b56989172ebcb00105e8f45")}
])
Executing a lookup aggregation like
db.b.aggregate([
{
$lookup:
{
from: "b",
let: { bId: "$_id", qty: 100 },
pipeline: [
{ $match:
{ $expr:
{ $and:
[
{ $eq: [ "$items.id", "$$bId" ] },
{ $gte: [ "$instock", "$$qty" ] }
]
}
}
}
],
as: "a"
}
}
])
does not bring back any results for the expected lookup operation. Is there any restriction on using ObjectId in a comparison? The official documentation does not say anything about it, and it works like a charm with any other data type, such as strings.
I am not sure whether this is a bug in MongoDB, but the query only works after adding an $unwind stage first. Because items is an array, "$items.id" resolves to an array of ids, so $eq against a single ObjectId never matches; unwinding items first makes the comparison element-wise.
db.b.aggregate([
{
$lookup:
{
from: "a",
let: { bId: "$_id", qty: 100 },
pipeline: [
{
$unwind: {
path: "$items"
}
},
{ $match:
{ $expr:
{ $and:
[
{ $eq: [ "$items.id", "$$bId" ] },
{ $gte: [ "$items.instock", "$$qty" ] },
]
}
}
}
],
as: "a"
}
}
]);
Note: join conditions and uncorrelated sub-queries in $lookup were added in MongoDB 3.6.
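
If you would rather avoid the $unwind, a filter-based match should also work (an alternative sketch I'm adding here, not part of the original answer): keep the documents in a whose items array contains at least one element matching both conditions.
db.b.aggregate([
  {
    $lookup: {
      from: "a",
      let: { bId: "$_id", qty: 100 },
      pipeline: [
        {
          $match: {
            $expr: {
              // keep a doc when at least one element of `items` satisfies both conditions
              $gt: [
                {
                  $size: {
                    $filter: {
                      input: { $ifNull: ["$items", []] },
                      as: "item",
                      cond: {
                        $and: [
                          { $eq: ["$$item.id", "$$bId"] },
                          { $gte: ["$$item.instock", "$$qty"] }
                        ]
                      }
                    }
                  }
                },
                0
              ]
            }
          }
        }
      ],
      as: "a"
    }
  }
])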