Increment value in document and nested array simultaneously (with upsert) - mongodb

I want to increment a value both on document root as well as inside a nested array.
A playground example here
The schema
// Mongoose schema: a per-user points total plus a monthly breakdown array.
const UserPoints = new Schema(
{
points: {
type: Number,
},
monthly: {
type: [
new Schema(
{
year: {
type: Number,
},
month: {
type: Number,
},
points: {
type: Number,
min: 0, // monthly points are never allowed to go negative
},
},
{
_id: false, // array members are keyed by (year, month); no own _id
timestamps: false,
}
),
],
},
},
{
timestamps: false,
}
);
What I have tried
Variables used: (currentYear = 2021, currentMonth = 7, addPoints = 5)
Note: The year, month may not exist in the document yet, so I need to make it work with "upsert".
// NOTE(review): this fails with { code: 2, codeName: "BadValue" } — presumably
// because the positional "$" operator cannot be combined with upsert: on an
// insert there is no matched array element for "$" to refer to. Confirm
// against the MongoDB update-operator docs; see the pipeline answer below.
UserPoints.findOneAndUpdate(
{
_id: userId,
"monthly.year": currentYear,
"monthly.month": currentMonth,
},
{
$inc: {
points: addPoints,
"monthly.$.points": addPoints,
},
},
{
upsert: true,
new: true,
}
).exec()
This does not work. And gives out an error:
{
ok:0
code:2
codeName:"BadValue"
name:"MongoError"
}
I would appreciate if someone can point me at the right direction to increment these values in the same operation.
Update:
The only way that I could make it work is by first making a query to check if the (year, month) exists in the "monthly" array.
Depending if it exists, I either $push the new month, or $inc the existing one's values.

Pipeline update way, requires MongoDB >=4.2
You filter by _id using index, so it will be fast also.
Query
Test code here
(With id=1 it updates — adding points both to the matching member and to the root points field; id=2 does nothing; id=3 inserts the member into the empty array and updates the points; id=4 upserts a document whose array contains only this member and whose root points equals the member's points.)
Cases (this is the order checked in the $cond also)
document doesn't exist: the array will have a single member, and the root points field will have the value of the new member's points
document exists, member doesn't exist, monthly empty:
adds the member {year: ...} and increases the root points field
document exists, member exists:
increases the points inside the member and in the root points field
document exists, member doesn't exist, monthly not empty:
does nothing
// Pipeline update (requires MongoDB >= 4.2).
// Stage 1: detect an upsert (the pipeline then runs against a bare {_id} doc,
//          so "$monthly" is missing).
// Stage 2: build the replacement document for each case.
// Stage 3: promote it to the root.  Stage 4: drop the helper flag.
db.collection.update({
  "_id": 4
},
[
  {
    "$addFields": {
      "isupsert": {
        "$not": [{ "$ne": [{ "$type": "$monthly" }, "missing"] }]
      }
    }
  },
  {
    "$addFields": {
      "doc": {
        "$switch": {
          "branches": [
            {
              // Document did not exist: seed it with a single member.
              "case": "$isupsert",
              "then": {
                "_id": "$_id",
                "points": 5,
                "monthly": [{ "year": 2021, "month": 7, "points": 5 }]
              }
            },
            {
              // Document exists but "monthly" is empty: add the member.
              // NOTE(review): this sets the root points to 5 outright rather
              // than incrementing any existing value — confirm that is the
              // intended semantics for this case.
              "case": {
                "$and": [
                  { "$isArray": ["$monthly"] },
                  { "$eq": [{ "$size": "$monthly" }, 0] }
                ]
              },
              "then": {
                "_id": "$_id",
                "points": 5,
                "monthly": [{ "year": 2021, "month": 7, "points": 5 }]
              }
            }
          ],
          "default": {
            "$let": {
              "vars": {
                // found = does a (year=2021, month=7) member already exist?
                "found": {
                  "$not": [
                    {
                      "$eq": [
                        {
                          "$size": {
                            "$filter": {
                              "input": "$monthly",
                              "as": "m",
                              "cond": {
                                "$and": [
                                  { "$eq": ["$$m.year", 2021] },
                                  { "$eq": ["$$m.month", 7] }
                                ]
                              }
                            }
                          }
                        },
                        0
                      ]
                    }
                  ]
                }
              },
              "in": {
                "$cond": [
                  "$$found",
                  {
                    // Member exists: increment both the root counter and the
                    // matching member's counter.
                    // FIX: "$$ROOT" (system variable) — the original used
                    // "$ROOT", a field path to a nonexistent field, which
                    // $mergeObjects treats as missing and so silently drops
                    // every other root field from the merged document.
                    "$mergeObjects": [
                      "$$ROOT",
                      {
                        "points": { "$add": ["$points", 5] },
                        "monthly": {
                          "$map": {
                            "input": "$monthly",
                            "as": "m",
                            "in": {
                              "$cond": [
                                {
                                  "$and": [
                                    { "$eq": ["$$m.year", 2021] },
                                    { "$eq": ["$$m.month", 7] }
                                  ]
                                },
                                {
                                  "$mergeObjects": [
                                    "$$m",
                                    { "points": { "$add": ["$$m.points", 5] } }
                                  ]
                                },
                                "$$m"
                              ]
                            }
                          }
                        }
                      }
                    ]
                  },
                  // Member missing and "monthly" not empty: leave unchanged.
                  "$$ROOT"
                ]
              }
            }
          }
        }
      }
    }
  },
  {
    "$replaceRoot": {
      "newRoot": "$doc"
    }
  },
  {
    "$unset": [
      "isupsert"
    ]
  }
],
{
  "upsert": true
})

You should match the array element using $elemMatch:
// $elemMatch makes both conditions apply to the SAME array element.
// FIX: the original wrote two separate "monthly" keys in the object literal;
// in JavaScript the second key silently overwrites the first, so the year
// condition was discarded. Both criteria belong in one $elemMatch.
UserPoints.findOneAndUpdate(
{
_id: userId,
"monthly": { $elemMatch: { "year": currentYear, "month": currentMonth } },
},
{
$inc: {
points: addPoints,
"monthly.$.points": addPoints,
},
},
{
upsert: true,
new: true,
}
).exec()

Related

MongoDB - Lookup match with condition array of object with string

I have two collections "datasets" and "users".
I tried to lookup datasets.assignedTo = users.id that's working fine. Also, I want to match the field of datasets.firstBillable >= users.prices.beginDate date field are matched to get the current index price value. And also check users.prices.endDate is less than or equal to users.prices.beginDate.
For example:
cgPrices: 45
https://mongoplayground.net/p/YQps9EozlAL
Collections:
db={
users: [
{
id: 1,
name: "Aravinth",
prices: [
{
beginDate: "2022-08-24T07:29:01.639Z",
endDate: "2022-08-31T07:29:01.639Z",
price: 45
}
]
},
{
id: 2,
name: "Raja",
prices: [
{
beginDate: "2022-07-25T07:29:01.639Z",
endDate: "2022-07-30T07:29:01.639Z",
price: 55
}
]
}
],
datasets: [
{
color: "braun, rose gold",
firstBillable: "2022-08-24T07:29:01.639Z",
assignedTo: 1
},
{
color: "beige, silber",
firstBillable: "2022-07-25T07:29:01.639Z",
assignedTo: 2
}
]
}
My current implementation:
// Original attempt (kept for reference — the "Concerns" section below
// explains the fixes).
db.datasets.aggregate([
{
"$lookup": {
"from": "users",
"as": "details",
let: {
assigned_to: "$assignedTo",
first_billable: "$firstBillable"
},
pipeline: [
{
"$match": {
$expr: {
"$and": [
{
"$eq": [
"$id",
"$$assigned_to"
]
},
{
// NOTE(review): "$first_billable" reads a (missing) field of the
// joined users document — a let variable must be referenced as
// "$$first_billable".
"$gte": [
"$first_billable",
// NOTE(review): "$details" does not exist inside the lookup
// pipeline; the price fields live under "$prices" here.
"$details.prices.beginDate"
]
},
{
"$lte": [
"$first_billable",
"$details.prices.endDate"
]
}
]
}
}
}
]
}
},
{
"$addFields": {
"details": 0,
"cg": {
$first: {
"$first": "$details.prices.price"
}
}
}
}
])
Output i needed:
[
{
"_id": ObjectId("5a934e000102030405000000"),
"assignedTo": 1,
"cg": 45,
"color": "braun, rose gold",
"details": 0,
"firstBillable": "2022-08-24T07:29:01.639Z"
},
{
"_id": ObjectId("5a934e000102030405000001"),
"assignedTo": 2,
"cg": 55,
"color": "beige, silber",
"details": 0,
"firstBillable": "2022-07-25T07:29:01.639Z"
}
]
https://mongoplayground.net/p/YQps9EozlAL
Concerns:
You should compare the date as Date instead of string, hence you are required to convert the date strings to Date before comparing.
In users collection, prices is an array. You need to deconstruct the array to multiple documents first before compare the date fields in price.
The query should be:
db.datasets.aggregate([
{
"$lookup": {
"from": "users",
"as": "details",
let: {
assigned_to: "$assignedTo",
// Convert the dataset's date string once, so every comparison below
// is Date vs Date rather than string vs string.
first_billable: {
$toDate: "$firstBillable"
}
},
pipeline: [
{
// Join condition: users.id == datasets.assignedTo.
$match: {
$expr: {
$eq: [
"$id",
"$$assigned_to"
]
}
}
},
{
// prices is an array — deconstruct it so each price period can be
// checked individually.
$unwind: "$prices"
},
{
// Keep only the price period that contains firstBillable.
"$match": {
$expr: {
"$and": [
{
"$gte": [
"$$first_billable",
{
$toDate: "$prices.beginDate"
}
]
},
{
"$lte": [
"$$first_billable",
{
$toDate: "$prices.endDate"
}
]
}
]
}
}
}
]
}
},
{
"$addFields": {
"details": 0,
// After the $unwind there is at most one matching price per user, so
// "$details.prices.price" is a one-element array; $first unwraps it.
"cg": {
$first: "$details.prices.price"
}
}
}
])
Demo # Mongo Playground

MongoDB aggregation : Keep value from previous document if null or not exists

I have a collection that contains time-based metrics. I only store them if they change over time and I want to keep their previous value in the aggregation result.
Here's an extract of the collection :
{
"_id": ObjectId("6115150f01d7d0426bcd0390"),
"conf": "conference123",
"uid": "2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"log": [
{
"dt": ISODate("2021-08-12T12:33:49.782Z"),
"connection_quality": 60,
"video_bitrate": 150
},
{
"dt": ISODate("2021-08-12T12:34:19.781Z"),
"video_bitrate": 145
// connection_quality didn't change so it's not stored
},
{
"dt": ISODate("2021-08-12T12:34:30.781Z"),
"video_bitrate": 130
// connection_quality didn't change so it's not stored
},
{
"dt": ISODate("2021-08-12T12:34:49.787Z"),
"connection_quality": 100,
"video_bitrate": 150
},
{
"dt": ISODate("2021-08-12T12:35:19.789Z"),
"video_bitrate": 160
// connection_quality didn't change so it's not stored
}
]
}
I tried the following aggregation but I don't know what to put after the last stage :
[{
$match: {
conf: 'conference123',
uid: '2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642'
}
}, {
$unwind: {
path: '$log'
}
}, {
$project: {
_id: 0,
"Date": '$log.dt',
'User ID': '$uid',
// NOTE(review): the sample documents store this field as
// "connection_quality", not "cq" — confirm the real field name.
'Connection Quality': "$log.cq"
}
}]
Here's the result that I get
[
{
"Date": ISODate("2021-08-12T12:33:49.782Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":60
},
{
"Date": ISODate("2021-08-12T12:34:19.781Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642"
},
{
"Date": ISODate("2021-08-12T12:34:30.781Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642"
},
{
"Date": ISODate("2021-08-12T12:34:49.787Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":100
},
{
"Date": ISODate("2021-08-12T12:35:19.789Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642"
}
]
But this is what I want to display
[
{
"Date": ISODate("2021-08-12T12:33:49.782Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":60
},
{
"Date": ISODate("2021-08-12T12:34:19.781Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":60
},
{
"Date": ISODate("2021-08-12T12:34:30.781Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":60
},
{
"Date": ISODate("2021-08-12T12:34:49.787Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":100
},
{
"Date": ISODate("2021-08-12T12:35:19.789Z"),
"User ID":"2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642",
"Connection Quality":100
}
]
Any help would be greatly appreciated, thanks !
There is no straight way to do this operation,
$map to iterate loop of log array, check condition if connection_quality type is missing then go to select previous connection_quality otherwise return the current object
$filter to iterate loop of log and by conditions are: dt should less than and connection_quality should not missing
now we have to select the latest connection_quality from above filtered result so using $last we will select last object
$let to declare a variable and do above filter operation and return just connection_quality value
$unwind to deconstruct the log array
$project to project the result as per your requirement
db.collection.aggregate([
{
$match: {
conf: "conference123",
uid: "2dd8b4e3-9dcd-4da6-bc36-aa0988dc9642"
}
},
{
$addFields: {
log: {
$map: {
input: "$log",
as: "l",
in: {
$cond: [
// Entry carries no connection_quality -> backfill it from history.
{ $eq: [{ $type: "$$l.connection_quality" }, "missing"] },
{
dt: "$$l.dt",
connection_quality: {
$let: {
vars: {
// The latest EARLIER entry that did carry a value.
log: {
$last: {
$filter: {
input: "$log",
cond: {
$and: [
{ $lt: ["$$this.dt", "$$l.dt"] },
{
$ne: [{ $type: "$$this.connection_quality" }, "missing"]
}
]
}
}
}
}
},
in: "$$log.connection_quality"
}
}
},
// Entry already has connection_quality -> keep it unchanged.
"$$l"
]
}
}
}
}
},
{ $unwind: "$log" },
{
$project: {
_id: 0,
"Date": "$log.dt",
"User ID": "$uid",
"Connection Quality": "$log.connection_quality"
}
}
])
Playground
You can do it with reduce
The query below fills in connection_quality, where it is null or missing, with the
value of the previous member that had a connection_quality.
It starts with a default of 60, used for example if the first member is also missing the field.
db.collection.aggregate([
{
"$addFields": {
"log": {
"$arrayElemAt": [
{
"$reduce": {
"input": "$log",
// Accumulator is a pair: [last known connection_quality, logs so far].
// 60 is the fallback used if the very first entry has no value.
"initialValue": [
60,
[]
],
"in": {
"$let": {
"vars": {
"prv_value_logs": "$$value",
"log": "$$this"
},
"in": {
"$let": {
"vars": {
// Unpack the accumulator pair.
"prv_value": {
"$arrayElemAt": [
"$$prv_value_logs",
0
]
},
"logs": {
"$arrayElemAt": [
"$$prv_value_logs",
1
]
}
},
"in": {
"$cond": [
{
// Does the current entry carry a usable value?
"$and": [
{
"$ne": [
"$$log.connection_quality",
null
]
},
{
"$ne": [
{
"$type": "$$log.connection_quality"
},
"missing"
]
}
]
},
// Yes: remember its value and append the entry as-is.
[
"$$log.connection_quality",
{
"$concatArrays": [
"$$logs",
[
"$$log"
]
]
}
],
// No: append the entry with the previous value patched in.
[
"$$prv_value",
{
"$concatArrays": [
"$$logs",
[
{
"$mergeObjects": [
"$$log",
{
"connection_quality": "$$prv_value"
}
]
}
]
]
}
]
]
}
}
}
}
}
}
},
// Keep only the rebuilt log array (element 1 of the pair).
1
]
}
}
}
])
Test code here
It doesn't change the document, just adds the missing connection_quality, if you want to change it after, you can add more stages.
Solution is fast for arrays <500 members.
Edit1
The slow part is not the $reduce, it's the $concatArrays, because MongoDB
doesn't have a fast way to append a single element to the end of an array.
It's not how many arrays you have, but how big they are.
I was curious why you said you can't use reduce while map/filter worked for you (because it looks like O(n^2)), so I did a benchmark.
1000 elements (the log)
"Elapsed time: 44.408292 msecs" //reduce
"Elapsed time: 167.653179 msecs" //map and filter 3x
5000 elements
"Elapsed time: 263.549371 msecs"
"Elapsed time: 3298.880892 msecs" //10x+
10000 elements
"Elapsed time: 996.340296 msecs"
"Elapsed time: 14765.732331 msecs" //10x+
This is only for 1 document collection, so both solutions are very slow, not usable for big collections with big arrays > 500 elements.

How to filter an array of objects in mongoose by date field only selecting the most recent date

I'm trying to filter through an array of objects in a user collection on MongoDB. The structure of this particular collection looks like this:
name: "John Doe"
email: "john#doe.com"
progress: [
{
_id : ObjectId("610be25ae20ce4872b814b24")
challenge: ObjectId("60f9629edd16a8943d2cab9b")
date_unlocked: 2021-08-05T12:15:32.129+00:00
completed: true
date_completed: 2021-08-06T12:15:32.129+00:00
}
{
_id : ObjectId("611be24ae32ce4772b814b32")
challenge: ObjectId("60g6723efd44a6941l2cab81")
date_unlocked: 2021-08-06T12:15:32.129+00:00
completed: true
date_completed: 2021-08-07T12:15:32.129+00:00
}
]
date: 2021-08-04T13:06:34.129+00:00
How can I query the database using mongoose to return only the challenge with the most recent 'date_unlocked'?
I have tried: User.findById(req.user.id).select('progress.challenge progress.date_unlocked').sort({'progress.date_unlocked': -1}).limit(1);
but instead of returning a single challenge with the most recent 'date_unlocked', it is returning the whole user progress array.
Any help would be much appreciated, thank you in advance!
You can try this.
// Flatten the progress array, order by unlock date (newest first), and keep
// only the top entry.
db.collection.aggregate([
{
"$unwind": {
"path": "$progress"
}
},
{
"$sort": {
"progress.date_unlocked": -1
}
},
{
"$limit": 1
},
{
"$project": {
"_id": 0,
"latestChallenge": "$progress.challenge"
}
}
])
Test the code here
Alternative solution is to use $reduce in that array.
// Alternative: a single $reduce pass that tracks, as a
// [maxDateSoFar, challenge] pair, the element with the greatest
// date_unlocked seen so far.
db.collection.aggregate([
  {
    "$addFields": {
      "latestChallenge": {
        "$arrayElemAt": [
          {
            "$reduce": {
              "input": "$progress",
              // Start below any real value: in BSON comparison order a
              // string sorts before a Date, so the first element always
              // wins the first comparison.
              "initialValue": [
                "0",
                ""
              ],
              "in": {
                "$let": {
                  "vars": {
                    "info": "$$value",
                    "progress": "$$this"
                  },
                  "in": {
                    "$cond": [
                      {
                        "$gt": [
                          "$$progress.date_unlocked",
                          {
                            "$arrayElemAt": [
                              "$$info",
                              0
                            ]
                          }
                        ]
                      },
                      // FIX: carry the NEW element's date forward. The
                      // original kept the old accumulator date here, so the
                      // threshold never tightened and the reduce simply
                      // returned the LAST element's challenge instead of the
                      // one with the greatest date_unlocked.
                      [
                        "$$progress.date_unlocked",
                        "$$progress.challenge"
                      ],
                      "$$info"
                    ]
                  }
                }
              }
            }
          },
          // Keep only the challenge half of the pair.
          1
        ]
      }
    }
  },
  {
    "$project": {
      "_id": 0,
      "latestChallenge": 1
    }
  },
])
Test the code here
Mongoose can use raw MQL so you can use it.

MongoDb Create Aggregate Create query

I have 3 table users,shifts,temporaryShifts,
shifts:[{_id:ObjectId(2222),name:"Morning"},{_id:ObjectId(454),name:"Night"}]
users:[{_id:ObjectId(123),name:"Albert",shift_id:ObjectId(2222)}]
temporaryShifts:[
{_id:2,userId:ObjectId(123),shiftId:ObjectId(454),type:"temporary",date:"2020-02-01"},
{_id:987,userId:ObjectId(123),shiftId:ObjectId(454),type:"temporary",date:"2020-02-03"},
{_id:945,userId:ObjectId(123),shiftId:ObjectId(454),type:"temporary",date:"2020-02-08"},
{_id:23,userId:ObjectId(123),shiftId:ObjectId(454),date:"2020-02-09"}]
i want to make a mongoose aggregate query then give me result :
get result between two dates for example :2020-02-01 2020-02-05,
resullts is :
[
{_id:ObjectId(123),name:"Albert",shift:[
{_id:2,shiftId:ObjectId(454),type:"temporary",date:"2020-02-01"},
{_id:2,shiftId:ObjectId(2222),type:"permanent",date:"2020-02-02"},
{_id:2,shiftId:ObjectId(454),type:"temporary",date:"2020-02-03"},
{_id:2,shiftId:ObjectId(2222),type:"permanent",date:"2020-02-04"},
{_id:2,shiftId:ObjectId(2222),type:"permanent",date:"2020-02-05"},
]}
]
In the result, type "temporary" means a temporaryShifts document exists for that date; otherwise the type is "permanent".
MongoPlayGround You Can edit
You can first project a date range array using $range, in your example it will be like [2020-02-01, 2020-02-02, 2020-02-03, 2020-02-04, 2020-02-05], then you can use the array to perform $lookup
db.users.aggregate([
{
$limit: 1
},
{
// Inject the requested date window.
"$addFields": {
"startDate": ISODate("2020-02-01"),
"endDate": ISODate("2020-02-05")
}
},
{
// Day offsets covering the window inclusively: [0, 1, ..., diff].
"$addFields": {
"dateRange": {
"$range": [
0,
{
$add: [
{
$divide: [
{
$subtract: [
"$endDate",
"$startDate"
]
},
86400000
]
},
1
]
}
]
}
}
},
{
// Turn each offset into an actual date (86400000 ms per day).
"$addFields": {
"dateRange": {
$map: {
input: "$dateRange",
as: "increment",
in: {
"$add": [
"$startDate",
{
"$multiply": [
"$$increment",
86400000
]
}
]
}
}
}
}
},
{
// One document per day in the range.
"$unwind": "$dateRange"
},
{
"$project": {
"name": 1,
// NOTE(review): the sample users store this field as "shift_id" —
// confirm the name, otherwise "shiftId" projects as missing here.
"shiftId": 1,
"dateCursor": "$dateRange"
}
},
{
"$lookup": {
"from": "temporaryShifts",
"let": {
dateCursor: "$dateCursor",
shiftId: "$shiftId"
},
"pipeline": [
{
// temporaryShifts stores dates as "YYYY-MM-DD" strings; parse them
// so they can be compared against the Date cursor.
"$addFields": {
"parsedDate": {
"$dateFromString": {
"dateString": "$date",
"format": "%Y-%m-%d"
}
}
}
},
{
$match: {
$expr: {
$and: [
{
$eq: [
"$$dateCursor",
"$parsedDate"
]
}
]
}
}
}
],
"as": "temporaryShiftsLookup"
}
},
{
// preserveNullAndEmptyArrays keeps days with no temporary shift; those
// become "permanent" in the projection below.
"$unwind": {
path: "$temporaryShiftsLookup",
preserveNullAndEmptyArrays: true
}
},
{
$project: {
shiftId: 1,
type: {
"$ifNull": [
"$temporaryShiftsLookup.type",
"permanent"
]
},
date: "$dateCursor"
}
}
])
Here is the Mongo Playground for your reference.

Zip two array and create new array of object

hello all i'm working with a MongoDB database where each data row is like:
{
"_id" : ObjectId("5cf12696e81744d2dfc0000c"),
"contributor": "user1",
"title": "Title 1",
"userhasRate" : [
"51",
"52",
],
"ratings" : [
4,
3
],
}
and i need to change it to be like:
{
"_id" : ObjectId("5cf12696e81744d2dfc0000c"),
"contributor": "user1",
"title": "Title 1",
rate : [
{userhasrate: "51", value: 4},
{userhasrate: "52", value: 3},
]
}
I already try using this method,
// Original attempt. NOTE(review): $group without an _id collapses the entire
// collection into a single bucket, and pushing the two parallel arrays whole
// does not pair their elements — see the answers below.
db.getCollection('contens').aggregate([
{ '$group':{
'rates': {$push:{ value: '$ratings', user: '$userhasRate'}}
}
}
]);
and my result become like this
{
"rates" : [
{
"value" : [
5,
5,
5
],
"user" : [
"51",
"52",
"53"
]
}
]
}
Can someone help me to solve my problem,
Thank you
You can use $arrayToObject and $objectToArray inside $map to achieve the required output.
db.collection.aggregate([
{
"$project": {
"rate": {
"$map": {
"input": {
// $zip pairs the arrays positionally ([["51", 4], ["52", 3]]),
// $arrayToObject turns the pairs into {"51": 4, "52": 3}, and
// $objectToArray re-emits them as {k, v} documents.
// Note: $arrayToObject needs string keys and collapses duplicate
// keys — see the alternative below for that case.
"$objectToArray": {
"$arrayToObject": {
"$zip": {
"inputs": [
"$userhasRate",
"$ratings"
]
}
}
}
},
"as": "el",
"in": {
"userhasRate": "$$el.k",
"value": "$$el.v"
}
}
}
}
}
])
Alternative Method
If userhasRate contains repeated values then the first solution will not work. You can use arrayElemAt and $map along with $zip if it contains repeated values.
db.collection.aggregate([
{
"$project": {
"rate": {
"$map": {
"input": {
// $zip pairs the two arrays positionally: [["51", 4], ["52", 3]].
"$zip": {
"inputs": [
"$userhasRate",
"$ratings"
]
}
},
"as": "el",
// Each "$$el" is a two-element [user, rating] pair.
"in": {
"userhasRate": {
"$arrayElemAt": [
"$$el",
0
]
},
"value": {
"$arrayElemAt": [
"$$el",
1
]
}
}
}
}
}
}
])
Try the aggregation below. First of all, you used $group without an _id, which grouped all the documents in the collection together — set it to "$_id" instead. You also need to build two arrays from the old data, then concatenate them in a following $project stage to get the desired output:
// NOTE(review): this pairs only indexes 0 and 1 of the arrays, so it assumes
// every document has exactly two entries; longer arrays are silently
// truncated — confirm that holds for this collection.
db.getCollection('contens').aggregate([
{
$group: {
_id: "$_id",
// First (user, rating) pair from each document.
rate1: {
$push: {
userhasrate: {
$arrayElemAt: [
"$userhasRate",
0
]
},
value: {
$arrayElemAt: [
"$ratings",
0
]
}
}
},
// Second (user, rating) pair from each document.
rate2: {
$push: {
userhasrate: {
$arrayElemAt: [
"$userhasRate",
1
]
},
value: {
$arrayElemAt: [
"$ratings",
1
]
}
}
}
}
},
{
$project: {
_id: 1,
rate: {
$concatArrays: [
"$rate1",
"$rate2"
]
}
}
}
])