Insert documents to MongoDB when count is equal to expected value?

Is it possible to insert/upsert multiple documents in MongoDB 4.2 only if the number of documents matching a particular query is of a particular size?
Example:
Let's say I have an items collection with the following 2 documents:
{ item: "ZZZ137", type="type1"}
{ item: "ZZZ138", type="type1"}
Now I want to insert these two documents:
{ item: "ZZZ139", type="type1"}
{ item: "ZZZ140", type="type1"}
but only if there are currently 2 items of type type1 in the collection (i.e. the count of type1 is equal to 2).
Is it possible to somehow do this in MongoDB with a single command?
Update
To further illustrate my question let's imagine that insertMany had support for conditions. Then I'd like to do something like this (pseudo code that doesn't work):
db.items.insertMany({ { $count: { type: "type" } } : { $eq : 2 } }, [{ item: "ZZZ139", type: "type1" }, { item: "ZZZ140", type: "type1" }])
Where { { $count: { type: "type" } } : { $eq : 2 } } would be the query that must be fulfilled in order to insert items ZZZ139 and ZZZ140.

This can be achieved using $out or $merge if you insist on doing this in 1 call, however it's very inefficient due to the logic and restrictions of these 2 operators. I personally recommend splitting it into 2 calls:
let typeTwoCount = await db.collection.countDocuments({ type: "2" })
if (typeTwoCount === 2) {
    await db.collection.insertMany(newItems)
}
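For the exact example in the question it might look like this (a minimal sketch, assuming the collection is called items, the expected count is 2, and accepting that the count and the insert are two separate, non-atomic calls):
// the documents we want to insert, taken from the question
const newItems = [
    { item: "ZZZ139", type: "type1" },
    { item: "ZZZ140", type: "type1" }
];
const typeOneCount = await db.items.countDocuments({ type: "type1" });
if (typeOneCount === 2) {
    await db.items.insertMany(newItems);
}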
Now we can use $out, but because it rewrites the target collection we'll have to carry the entire collection through the pipeline and into the $out stage, which is ridiculous:
// "items" is the client-side array of documents you want to insert
db.collection.aggregate([
    {
        $facet: {
            typeTwo: [
                { $match: { type: "2" } },
                { $count: "doc_count" },
                {
                    $addFields: {
                        newDocs: {
                            $cond: [{ $eq: ["$doc_count", 2] }, items, []]
                        }
                    }
                },
                { $unwind: "$newDocs" },
                { $replaceRoot: { newRoot: "$newDocs" } }
            ],
            all: [
                { $match: {} }
            ]
        }
    },
    {
        $addFields: {
            merged: { $concatArrays: ["$all", "$typeTwo"] }
        }
    },
    { $unwind: "$merged" },
    { $replaceRoot: { newRoot: "$merged" } },
    { $out: "collection" }
])
Now the issue with $merge is the following restriction:
The output collection cannot be the same collection as the collection being aggregated.
So we can employ a similar tactic to the $out pipeline (using the typeTwo pipeline for the $merge), but we'll have to start the aggregation from a different, non-empty dummy collection:
// again, "items" is the client-side array of documents you want to insert
db.any_other_none_empty_collection.aggregate([
    { $limit: 1 },
    {
        $lookup: {
            from: "collection",
            let: {},
            pipeline: [
                { $match: { type: "2" } }
            ],
            as: "all"
        }
    },
    {
        $addFields: {
            doc_count: { $size: "$all" }
        }
    },
    {
        $addFields: {
            newDocs: {
                $cond: [{ $eq: ["$doc_count", 2] }, items, []]
            }
        }
    },
    { $unwind: "$newDocs" },
    { $replaceRoot: { newRoot: "$newDocs" } },
    { $merge: { into: "collection" } }
])
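If there is no convenient non-empty collection to start from, seeding a tiny throwaway one first works too (a sketch; the collection name is just the placeholder used above):
// one-off helper document; any non-empty collection can serve as the entry point
db.any_other_none_empty_collection.insertOne({ _id: "seed" })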

Related

How to remove duplicate objects with different parameters in an aggregation in mongo db

[
{ id:1,month:5,year:2020,text:"Completed" },
{ id:2,month:2,year:2021,text:"Pending" },
{ id:3,month:3,year:2020,text:"Completed" },
{ id:4,month:5,year:2020,text:"Pending" },
{ id:5,month:4,year:2022,text:"Pending" },
]
These are the documents in my collection. I need to remove the duplicate objects with the same year & month using aggregation in MongoDB, so that I get:
[
{ id:1,month:5,year:2020,text:"Completed" },
{ id:2,month:2,year:2021,text:"Pending" },
{ id:3,month:3,year:2020,text:"Completed" },
{ id:5,month:4,year:2022,text:"Pending" },
]
Maybe something like this:
db.collection.aggregate([
    {
        $group: {
            _id: { month: "$month", year: "$year" },
            cnt: { $sum: 1 },
            doc: { $push: "$$ROOT" }
        }
    },
    {
        $match: {
            cnt: { $gt: 1 }
        }
    },
    {
        $project: {
            docsTodelete: {
                $slice: ["$doc", 1, { $size: "$doc" }]
            }
        }
    },
    {
        $unwind: "$docsTodelete"
    }
]).forEach(function(doc) {
    db.backup.save(doc.docsTodelete);
    db.collection.remove({ _id: doc.docsTodelete._id });
})
Explained:
Group the documents by month-year and push the originals to the array doc
Match only the groups that have duplicates
Slice the documents array so that one document per group stays in the collection
Unwind the array of documents to be removed
Run a forEach loop to remove the duplicate documents from the collection and store the removed ones in a backup collection, just in case you have doubts later (a single-call deleteMany variant is sketched below).
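If you prefer to avoid per-document remove calls, the same pipeline can feed a single deleteMany (a sketch, reusing the collection and backup names from above):
// collect the duplicate documents (everything after the first one per month/year group)
const dupes = db.collection.aggregate([
    { $group: { _id: { month: "$month", year: "$year" }, cnt: { $sum: 1 }, doc: { $push: "$$ROOT" } } },
    { $match: { cnt: { $gt: 1 } } },
    { $project: { docsTodelete: { $slice: ["$doc", 1, { $size: "$doc" }] } } },
    { $unwind: "$docsTodelete" },
    { $replaceRoot: { newRoot: "$docsTodelete" } }
]).toArray();
if (dupes.length > 0) {
    db.backup.insertMany(dupes);                                        // keep a copy, just in case
    db.collection.deleteMany({ _id: { $in: dupes.map(d => d._id) } });  // remove all duplicates at once
}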

How to query an array and retrieve it from MongoDB

Updated:
I have a document in the database that contains a userId and a friendsArray of embedded documents, each with a lastTimestamp field.
My question is the following:
How can I retrieve the first 10 elements of friendsArray from the database, sorted ascending or descending by the lastTimestamp value?
I don't want to download all values to my API and then sort them in Python because that is wasting my resources.
I have tried it using this code (Python):
listOfUsers = db.user_relations.find_one({'userId': '123'}, {'friendsArray' : {'$orderBy': {'lastTimestamp': 1}}}).limit(10)
but it just gives me this error pymongo.errors.OperationFailure: Unknown expression $orderBy
Any answer at this point would be really helpful! Thank You!
Use aggregate:
first $unwind the friendsArray
then $sort by the timestamp
$group by _id to re-create the sorted array
use $addFields with $filter to get the first 10 items of the array
db.collection.aggregate([
    { $match: { userId: "123" } },
    { $unwind: "$friendsArray" },
    { $sort: { "friendsArray.lastTimestamp": 1 } },
    {
        $group: {
            _id: "$_id",
            friendsArray: { $push: "$friendsArray" }
        }
    },
    {
        $addFields: {
            friendsArray: {
                $filter: {
                    input: "$friendsArray",
                    as: "z",
                    cond: {
                        $lt: [
                            { $indexOfArray: ["$friendsArray", "$$z"] },
                            10 // 10 is the number of first items to keep
                        ]
                    }
                }
            }
        }
    }
])
https://mongoplayground.net/p/2Usk5sRY2L2
And for pagination use this (the $filter condition keeps a range of array indexes):
db.collection.aggregate([
    { $match: { userId: "123" } },
    { $unwind: "$friendsArray" },
    { $sort: { "friendsArray.lastTimestamp": 1 } },
    {
        $group: {
            _id: "$_id",
            friendsArray: { $push: "$friendsArray" }
        }
    },
    {
        $addFields: {
            friendsArray: {
                $filter: {
                    input: "$friendsArray",
                    as: "z",
                    cond: {
                        $and: [
                            {
                                $gte: [
                                    { $indexOfArray: ["$friendsArray", "$$z"] },
                                    10
                                ]
                            },
                            {
                                $lt: [
                                    { $indexOfArray: ["$friendsArray", "$$z"] },
                                    20
                                ]
                            }
                        ]
                    } // keeps indexes 10-19, i.e. items 11-20
                }
            }
        }
    }
])
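As a side note, the same trimming could arguably be done with the $slice expression instead of $filter + $indexOfArray (a sketch, using the same collection and field names as above):
db.collection.aggregate([
    { $match: { userId: "123" } },
    { $unwind: "$friendsArray" },
    { $sort: { "friendsArray.lastTimestamp": 1 } },
    { $group: { _id: "$_id", friendsArray: { $push: "$friendsArray" } } },
    // first 10 items; for items 11-20 use { $slice: ["$friendsArray", 10, 10] }
    { $addFields: { friendsArray: { $slice: ["$friendsArray", 10] } } }
])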
The translation of your find to an aggregation (we need $unwind, which is why aggregation is used) would be like the query below.
Test code here
Query (for descending replace 1 with -1)
db.collection.aggregate([
    { "$match": { "userId": "123" } },
    { "$unwind": { "path": "$friendsArray" } },
    { "$sort": { "friendsArray.lastTimestamp": 1 } },
    { "$limit": 10 },
    { "$replaceRoot": { "newRoot": "$friendsArray" } }
])
If you want to skip some documents before the limit, add a $skip stage as well:
{ "$skip": 10 }
to take elements 11-20, for example.
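Putting it together, the full pipeline for elements 11-20 might look like this (a sketch based on the query above):
db.collection.aggregate([
    { "$match": { "userId": "123" } },
    { "$unwind": { "path": "$friendsArray" } },
    { "$sort": { "friendsArray.lastTimestamp": 1 } },
    { "$skip": 10 },
    { "$limit": 10 },
    { "$replaceRoot": { "newRoot": "$friendsArray" } }
])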

MongoDB problem adding two values inside a nested document with a dynamic key

I wish to add currentAsset.total and longTermAsset.total for each of my child documents with dynamic keys into a new field. My current MongoDB version is 4.0.12.
My source document is as below:
{
    "_id": "5f44bc4c36ac3e2c8c6db4bd",
    "counter": "Apple",
    "balancesheet": {
        "0": {
            "currentAsset": { "total": 123.12 },
            "longTermAsset": { "total": 10.16 }
        },
        "1": {
            "currentAsset": { "total": 10.23 },
            "longTermAsset": { "total": 36.28 }
        }
    }
}
The result document I wanted to get is:
{
    "_id": "5f44bc4c36ac3e2c8c6db4bd",
    "counter": "Apple",
    "balancesheet": {
        "0": {
            "currentAsset": { "total": 123.12 },
            "longTermAsset": { "total": 10.16 },
            "totalAsset": 133.28
        },
        "1": {
            "currentAsset": { "total": 10.23 },
            "longTermAsset": { "total": 36.28 },
            "totalAsset": 46.51
        }
    }
}
I have tried a few aggregations but failed, as it gives me "errmsg" : "$add only supports numeric or date types, not array"
db.balancesheets.aggregate([
    { $match: { counter: "Apple" } },
    {
        $project: {
            bs: { $objectToArray: "$balancesheet" }
        }
    },
    {
        $addFields: {
            totalAsset: {
                $add: ["$bs.k.currentAsset.total", "$bs.k.longTermAsset.total"]
            }
        }
    }
])
As I refer to this, it seems like the version needs to be 4.2 and above. Is there any way to do this on my existing 4.0.12 version?
MongoDB Aggregation: add field from an embedded document via a dynamic field path
There are no version issues; just follow a few fixes:
the first 2 pipeline stages ($match and $project) look good
$unwind to deconstruct the bs array
$addFields corrected: you used k instead of v when accessing the total fields
$group to reconstruct the document and turn each array entry back into an object with $arrayToObject
$addFields to convert the bs array into a single object using $reduce
db.collection.aggregate([
    // $match ... stage
    // $project ... stage
    // unwind the bs array
    { $unwind: "$bs" },
    {
        $addFields: {
            "bs.v.totalAsset": { $add: ["$bs.v.currentAsset.total", "$bs.v.longTermAsset.total"] }
        }
    },
    {
        $group: {
            _id: "$_id",
            bs: { $push: { $arrayToObject: [["$bs"]] } },
            counter: { $first: "$counter" }
        }
    },
    {
        $addFields: {
            bs: {
                $reduce: {
                    input: "$bs",
                    initialValue: {},
                    in: { $mergeObjects: ["$$value", "$$this"] }
                }
            }
        }
    }
])
Playground
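If you want the combined field back under its original name balancesheet (as in the desired output above), one more stage appended to the end of that pipeline should do it (a small sketch):
{
    $project: {
        counter: 1,
        balancesheet: "$bs"
    }
}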

Join multiple collections after parallel aggregation in Mongodb

I have a collection called "Reel" which has embedded Objects.
{
    "_id": "reel_1",
    "category": [
        {
            "_id": "cat_1",
            "videos": [{ "_id": "vid_1" }, { "_id": "vid_2" }] // _id is a reference to the Video collection
        },
        {
            "_id": "cat_2",
            "videos": [{ "_id": "vid_3" }, { "_id": "vid_4" }]
        }
    ]
}
Video is another collection whose _id is referenced inside reel -> category -> videos -> _id:
{
"_id":"vid_1",
"title":"title 1",
"groups":[{"_id":"group_1"},{"_id":"group_2"}]
},
{
"_id":"vid_2",
"title":"title 2",
"groups":[{"_id":"group_1"},{"_id":"group_4"}]
},
{
"_id":"vid_3",
"title":"title 3",
"groups":[{"_id":"group_1"},{"_id":"group_2"}]
},
{
"_id":"vid_4",
"title":"title 4",
"groups":[{"_id":"group_3"},{"_id":"group_4"}]
}
The Document collection, which holds the _id of a Reel and the _id of a Category:
{
    "_id": "doc_1",
    "title": "document title",
    "assessments": [
        {
            "reel": "reel_1",       // reel reference _id
            "category": "cat_1",    // category reference _id
            "groups": [{ "_id": "group_1" }, { "_id": "group_2" }]
        }
    ]
}
I need to join and find all related embedded objects which have group_1.
I have joined the Reel collection and the Video collection, and that part is working fine:
{ $unwind: { path: '$category', preserveNullAndEmptyArrays: true } },
{ $unwind: { path: '$category.videos', preserveNullAndEmptyArrays: true } },
{
    $lookup: {
        from: 'video',
        localField: 'category.videos._id',
        foreignField: '_id',
        as: 'joinVideo'
    }
},
{ $unwind: { path: "$joinVideo", preserveNullAndEmptyArrays: true } },
{ $unwind: { path: "$joinVideo.groups", preserveNullAndEmptyArrays: true } },
{ $match: { "joinVideo.groups._id": "group_1" } },
{ $addFields: { "category.videos": "$joinVideo" } },
{
    $group: {
        _id: {
            _id: "$_id",
            category: "$category._id"
        },
        videos: {
            $addToSet: "$category.videos"
        }
    }
},
{
    $group: {
        _id: "$_id._id",
        category: {
            $addToSet: {
                "_id": "$_id.category",
                "videos": "$videos"
            }
        }
    }
}
The Document collection should be embedded inside the category object, based on the reel _id and category _id, filtered by group_1. My expected result is:
{
    "_id": "reel_1",
    "category": [
        {
            "_id": "cat_1",
            "videos": [
                {
                    "_id": "vid_1",
                    "title": "title 1",
                    "groups": [{ "_id": "group_1" }, { "_id": "group_2" }]
                },
                {
                    "_id": "vid_2",
                    "title": "title 2",
                    "groups": [{ "_id": "group_1" }, { "_id": "group_4" }]
                }
            ],
            "documents": [
                { // this document comes by reel="reel_1", category="cat_1", filtered by "group_1"
                    "_id": "doc_1",
                    "title": "document title"
                }
            ]
        },
        {
            "_id": "cat_2",
            "videos": [
                {
                    "_id": "vid_3",
                    "title": "title 3",
                    "groups": [{ "_id": "group_1" }, { "_id": "group_2" }]
                }
            ]
        }
    ]
}
I tried many ways, but since I'm new to MongoDB, I couldn't sort this out.
Since MongoDB v3.6, $lookup allows performing uncorrelated sub-queries, which lets us run non-standard queries to join two or more collections.
Note: there is an explanation of why we need to use $expr inside the $lookup pipeline.
Explanation
We apply $unwind to flatten $category
We perform a $lookup with 2 conditions: video.groups._id == 'group_1' and video._id in reel.category.videos._id
Since $reel.category.videos._id returns an array, we need to use the $in operator
Again we perform a $lookup with 2 conditions; it creates the documents field for every document
To remove the field conditionally, we use the aggregation system variable $$REMOVE, which allows us to exclude a field from the document
We perform a $group stage to transform into the desired result
db.reel.aggregate([
    {
        $unwind: {
            path: "$category",
            preserveNullAndEmptyArrays: true
        }
    },
    {
        $lookup: {
            from: "video",
            let: { videos: "$category.videos._id" },
            pipeline: [
                {
                    $match: {
                        "groups._id": "group_1",
                        $expr: { $in: ["$_id", "$$videos"] }
                    }
                }
            ],
            as: "category.videos"
        }
    },
    {
        $lookup: {
            from: "document",
            let: { reel_id: "$_id", category_id: "$category._id" },
            pipeline: [
                {
                    $match: {
                        $expr: {
                            $and: [
                                { $in: ["$$reel_id", "$assessments.reel"] },
                                { $in: ["$$category_id", "$assessments.category"] }
                            ]
                        }
                    }
                },
                { $project: { _id: 1, title: 1 } }
            ],
            as: "category.documents"
        }
    },
    {
        $addFields: {
            "category.documents": {
                $cond: [
                    { $eq: [{ $size: "$category.documents" }, 0] },
                    "$$REMOVE",
                    "$category.documents"
                ]
            }
        }
    },
    {
        $group: {
            _id: "$_id",
            category: { $push: "$category" }
        }
    }
])
MongoPlayground

How to know that aggregated group has previous/next values?

Suppose I have the following aggregation pipeline:
db.getCollection('posts').aggregate([
{ $match: { _id: { $gt: "some id" }, tag: 'some tag' } },
{ $limit: 5 },
{ $group: { _id: null, hasNextPage: {??}, hasPreviousPage: {??} } }
])
As a result, the $match and $limit stages produce a subset of all the posts with the tag some tag. How can I know whether there are posts before and after my subset?
One possible way, I guess, is to have an expression (with $let) inside hasPreviousPage and hasNextPage that would search for one post with _id less than "some id" and greater than $last: "$_id" respectively. But I'm not sure how I can reference my group as a variable in $let. Also, maybe there are other, more effective ways.
You can use the aggregation below:
db.posts.aggregate([
    { $match: { tag: 'some tag' } },
    { $sort: { _id: 1 } },
    {
        $facet: {
            data: [
                { $match: { _id: { $gt: 'some id' } } },
                { $limit: 5 }
            ],
            hasPreviousPage: [
                { $match: { _id: { $lte: 'some id' } } },
                { $count: "totalPrev" }
            ],
            hasNextPage: [
                { $match: { _id: { $gt: 'some id' } } },
                { $skip: 5 },
                { $limit: 1 }, // just to check if there's any element
                { $count: "totalNext" }
            ]
        }
    },
    { $unwind: { path: "$hasPreviousPage", preserveNullAndEmptyArrays: true } },
    { $unwind: { path: "$hasNextPage", preserveNullAndEmptyArrays: true } },
    {
        $project: {
            data: 1,
            hasPreviousPage: { $gt: [ "$hasPreviousPage.totalPrev", 0 ] },
            hasNextPage: { $gt: [ "$hasNextPage.totalNext", 0 ] }
        }
    }
])
To apply any paging you have to $sort your collection to get results in a deterministic order. On a set that's sorted and filtered by tag you can run $facet, which allows you to apply multiple sub-aggregations. The pipelines that represent the previous and next page can end with $count. Every sub-aggregation in $facet returns an array, so we run $unwind to get a nested document instead of an array for hasPreviousPage and hasNextPage. The preserveNullAndEmptyArrays option is required here because otherwise MongoDB would remove the whole document from the aggregation pipeline if there are no previous / next documents. In the last step we just convert the sub-aggregation results to boolean values.
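The single document returned by this pipeline then has roughly the following shape (a sketch; the actual values depend on your data):
{
    "data": [ /* up to 5 posts with _id greater than "some id" */ ],
    "hasPreviousPage": true,
    "hasNextPage": false
}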