mongodb find oldest date of three keys in each document

I have a document schema that looks like this:
{
  status: String,
  estimateDate: Date,
  lostDate: Date,
  soldDate: Date,
  assignedDate: Date
}
With this schema, all three dates could exist, or none of them might exist. I need to check all three: if at least one exists, use the oldest date; if none exist, use today's date. With the resulting date, get the difference in days from another key (assignedDate). I have figured out how to do what I want with one date, but cannot figure out how to scale this up to include all three keys. Below is the working code I have for one key.
Within my aggregation pipeline's $project stage I do the following:
days: {
  $cond: {
    if: {
      $not: ["$estimateDate"]
    },
    then: {
      $floor: {
        $divide: [
          { $subtract: [new Date(), "$assignedDate"] },
          1000 * 60 * 60 * 24
        ]
      }
    },
    else: {
      $floor: {
        $divide: [
          { $subtract: ["$estimateDate", "$assignedDate"] },
          1000 * 60 * 60 * 24
        ]
      }
    }
  }
}

You can use the $min and $ifNull operators to get the oldest date, specifying new Date() as the default value when any of those dates does not exist:
db.col.aggregate([
  {
    $project: {
      oldest: {
        $min: [
          { $ifNull: [ "$lostDate", new Date() ] },
          { $ifNull: [ "$soldDate", new Date() ] },
          { $ifNull: [ "$assignedDate", new Date() ] }
        ]
      }
    }
  }
])
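Putting that together with the days calculation from the question (a minimal sketch, assuming estimateDate, lostDate and soldDate are the three candidate dates and assignedDate is the reference date):
db.col.aggregate([
  {
    $project: {
      days: {
        $floor: {
          $divide: [
            {
              $subtract: [
                {
                  // oldest of the three dates; missing keys fall back to today,
                  // and if all three are missing the result is simply today
                  $min: [
                    { $ifNull: ["$estimateDate", new Date()] },
                    { $ifNull: ["$lostDate", new Date()] },
                    { $ifNull: ["$soldDate", new Date()] }
                  ]
                },
                "$assignedDate"
              ]
            },
            1000 * 60 * 60 * 24
          ]
        }
      }
    }
  }
])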

Related

MongoDB query $group is returning null

I am learning MongoDB queries and my requirement is to calculate the average time between two dates. I wrote a MongoDB query with project and group stages.
{
  project: {
    OrderObject: 1,
    date: 1
  }
},
{
  group: {
    objectId: '$OrderObject.pharmacy.companyName',
    count: {
      $sum: 1
    },
    duration: {
      $avg: {
        $abs: {
          $divide: [
            {
              $subtract: [
                { $arrayElemAt: ['$date', 0] },
                { $arrayElemAt: ['$date', 1] }
              ]
            },
            60000
          ]
        }
      }
    },
    OrderIDs: {
      $addToSet: '$OrderObject.orderID'
    },
    pharmacyName: {
      $addToSet: '$OrderObject.pharmacy.companyName'
    }
  }
}
The output I get is
{
  count: 3,
  duration: 54.53004444444445,
  OrderIDs: [ 'ABCDE', 'EWQSE', 'ERTRE' ],
  pharmacyName: [ 'pharmacy business Name' ],
  objectId: null
},
Can someone please tell me why objectId is null in this case, but the value is printed in the pharmacyName field? I am using this pipeline in Parse Server as query.aggregate(pipeline, { useMasterKey: true }).
My expectation is that pharmacyName === objectId.
Most probably your nested element here has a different name, e.g.:
OrderObject.name.companyName
This is not an issue for the $group stage, because when the _id is null it simply aggregates over all documents in the collection (3 in total) and does not give you any errors.
It is also interesting why, in your output, "pharmacyName" appears simply as "name". ;)
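One way to verify that is to project the nested object and the exact path first and inspect what actually comes back (a minimal sketch in the same Parse Server pipeline syntax; the output field names here are only illustrative). If companyName comes back undefined, the path used as the group key will resolve to null:
{
  project: {
    // inspect the raw sub-document to confirm the exact path to companyName
    pharmacy: '$OrderObject.pharmacy',
    companyName: '$OrderObject.pharmacy.companyName'
  }
}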

updateOne - filter one field, push or update sub field, add/update counter

I want to track tag frequency within a period. How do I update the array entry (counter, updateDate) if it is found, and otherwise push a new entry to the field?
const date = new Date();
const beginMonth = new Date(date.getFullYear(), date.getMonth(), 1);
await Tags.updateOne(
  { tag },
  {
    $set: {
      tag,
      period: {
        // should have $cond: [{ $in: [beginMonth, "$period.createDate"] }]
        $push: {
          "createDate": beginMonth,
          "updateDate": date,
          "counter": 1
        }
      }
    }
  },
  { upsert: true }
)
It should have a condition to check: if { $in: [beginMonth, "$period.createDate"] } then { $inc: { counter: 1 } }, else $push. I have also read that upsert cannot be used like this. How do I achieve this?
Example:
If the world tag exists and createDate is in beginMonth (August 1, 2021), then update counter to 2 and updateDate.
The food tag does not exist, so it would push createDate, updateDate and counter.
Before update:
{
  tag: 'world',
  period: [{
    createDate: 2021-08-01T00:00:00.000+00:00,
    updateDate: 2021-08-28T13:16:58.508+00:00,
    counter: 1
  }]
}
After update:
[{
  tag: 'world',
  period: [{
    createDate: 2021-08-01T00:00:00.000+00:00,
    updateDate: 2021-08-28T14:16:58.508+00:00,
    counter: 2
  }]
},
{
  tag: 'food',
  period: [{
    createDate: 2021-08-01T00:00:00.000+00:00,
    updateDate: 2021-08-28T14:16:58.508+00:00,
    counter: 1
  }]
}]
createDate is probably bad naming; it should be beginningDate. I am trying to tally tag frequency by month while keeping track of the latest update for that period.
You can try an update with an aggregation pipeline, available starting in MongoDB 4.2:
$ifNull to return a blank array when the input field is null
$cond to check whether beginMonth is in the period.createDate array
if the condition is true:
$add to increment the number in counter
$map to iterate over the period array, checking each element: if beginMonth equals period.createDate then increment counter, otherwise change nothing
$mergeObjects to merge the current object with the updated counter property
if the condition is false:
$concatArrays to concatenate the current period array with the new input object
upsert will work because we have handled all the possible scenarios
const date = new Date();
const beginMonth = new Date(date.getFullYear(), date.getMonth(), 1);
await Tags.updateOne(
  { tag },
  [{
    $set: {
      period: {
        $cond: [
          { $in: [beginMonth, { $ifNull: ["$period.createDate", []] }] },
          {
            $map: {
              input: "$period",
              in: {
                $mergeObjects: [
                  "$$this",
                  {
                    $cond: [
                      { $eq: [beginMonth, "$$this.createDate"] },
                      { counter: { $add: ["$$this.counter", 1] } },
                      {}
                    ]
                  }
                ]
              }
            }
          },
          {
            $concatArrays: [
              { $ifNull: ["$period", []] },
              [
                {
                  createDate: beginMonth,
                  updateDate: date,
                  counter: 1
                }
              ]
            ]
          }
        ]
      }
    }
  }],
  { upsert: true }
)
Case 1: the tag exists and the date exists, so increment counter
Playground
Case 2: the tag exists but the date does not exist, so add a new element to the period array
Playground
Case 3: the tag does not exist, so add a new document; this is the job of upsert
Playground

Date range not working in aggregation pipeline, but works in find()

I am trying to filter data by a date range. For example, return the data that was created no more than 14 days ago.
I can do this in find with the following:
{
  $match: {
    eventTime: { $gte: startTime.toDate(), $lte: endTime.toDate() }
  }
}
eventTime is an ISO date, as are startTime and endTime.
I am using an aggregation with a $lookup and trying to implement the same thing:
{
  $lookup: {
    from: "data",
    let: { dataId: "$dataId", patientId: "$patientId" },
    pipeline: [
      {
        $match: {
          $expr: {
            $and: [
              { $eq: ["$patientId", patientId] },
              { $eq: ["$dataId", "$$dataId"] },
              { $gte: ["$eventTime", startTime.toDate()] },
              { $lte: ["$eventTime", endTime.toDate()] }
            ]
          }
        }
      }
    ],
    as: "data"
  }
}
But no data results are returned. If I remove the dates, I get all the correct data based on dataId and patientId, so the join is working, but somehow the date range is not.
Again, eventTime, startTime and endTime are all ISO dates.
Example:
let endTime = Moment(new Date());
let startTime = Moment().subtract(days, "days");
"eventTime": "2019-08-07T03:37:40.738Z"
startTime "2019-07-30T00:02:11.611Z"
endTime "2019-08-13T00:02:11.610Z"
End time is 'today'
So in this example the data time is between the two dates and should be returned.
I looked here: https://docs.mongodb.com/manual/reference/operator/aggregation/gte/
and it should work, but that is not the case.
I tried:
{eventTime: { '$gte': new Date(startTime), $lte: new Date(endTime)}}
and I get:
MongoError: An object representing an expression must have exactly one field: { $gte: new Date(1564495211043), $lte: new Date(1565704811042) }
also tried:
{ eventTime: {'$gte': new Date(startTime)}}
and get:
MongoError: Expression $gte takes exactly 2 arguments. 1 were passed in.
also tried:
{ $eventTime: {'$gte': new Date(startTime)}}, {$eventTime: {'$lte': new Date(endTime)}}
and get: MongoError: Unrecognized expression '$eventTime'
Any insight would certainly be appreciated
I was able to get it working via $toDate:
{
  $match: {
    $expr: {
      $and: [
        { $eq: ["$patientId", patientId] },
        { $eq: ["$dataId", "iraeOverallAlert"] },
        { "$gte": [ { $toDate: "$eventTime" }, startTime.toDate() ] },
        { "$lte": [ { $toDate: "$eventTime" }, endTime.toDate() ] }
      ]
    }
  }
},
Note: this was not needed in find(), but somehow it was needed in the aggregation. It makes no sense, but hooray for trial and error.
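One possible explanation (an assumption, since the stored field types are not shown): if eventTime is stored as an ISO string in the data collection, then inside $expr a string is compared against a Date by BSON type order and the range can never match, so converting with $toDate (or $dateFromString) is what makes the comparison meaningful. A sketch doing the conversion once up front inside the $lookup pipeline; the eventTimeAsDate field name is made up here:
{
  $addFields: {
    // convert the (possibly string) eventTime into a real Date once
    eventTimeAsDate: { $toDate: "$eventTime" }
  }
},
{
  $match: {
    $expr: {
      $and: [
        { $gte: ["$eventTimeAsDate", startTime.toDate()] },
        { $lte: ["$eventTimeAsDate", endTime.toDate()] }
      ]
    }
  }
}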

MongoDB get records with dates before a certain year

I have data where a year is stored as a number, and other data points are stored as dates, something like this:
{
  name: "bob",
  year_arrived: 2002,
  dob: 1995-06-06
}
If I want to get all the records where the dob is BEFORE year_arrived minus 2 (meaning I want everything before 2000 in the above example), how can I write that?
Logically it's:
WHERE $year(dob) < (year_arrived-2)
But for argument's sake, let's pick a day of the year such as July 1st and get all records where dob < 2000-07-01.
Can I do something like:
.find({ dob: { $lt: new ISOdate($concat:[(year_arrived-2),'-07-01']}) } })
I have tried that with no luck.
Assuming dob is of type Date:
db.coll.find({
  "$expr": {
    "$lt": [ { "$year": "$dob" }, { "$subtract": ["$year_arrived", 2] } ]
  }
})
This evaluates if (extract_year(dob) < (year_arrived - 2)), which is exactly what you want.
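For the sample document above, $year gives 1995 and year_arrived - 2 gives 2000, so 1995 < 2000 and the document matches. A quick shell check (a sketch, assuming dob is stored as a real Date and the collection is named coll):
// insert the sample document with dob as a Date
db.coll.insertOne({ name: "bob", year_arrived: 2002, dob: ISODate("1995-06-06") })

// returns the document, because 1995 < 2002 - 2
db.coll.find({
  "$expr": {
    "$lt": [ { "$year": "$dob" }, { "$subtract": ["$year_arrived", 2] } ]
  }
})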
You can use the below query:
db.collection.find({
  "$expr": {
    "$lt": [
      { "$year": { "$dateFromString": { "dateString": "$dob" } } },
      { "$subtract": ["$year_arrived", 2] }
    ]
  }
})
You can do that by parsing the year from DOB and comparing it with 'year_arrived'. The following query can get you the expected output.
db.data.aggregate([
  {
    $match: {
      $expr: {
        $gt: [
          {
            $subtract: [
              "$year_arrived",
              {
                $toInt: {
                  $substrBytes: [
                    "$dob",
                    0,
                    { $indexOfBytes: ["$dob", "-"] }
                  ]
                }
              }
            ]
          },
          2
        ]
      }
    }
  }
]).pretty()

mongodb find between given time irrespective of date

The query below will give all users who logged in between 1-AUG-2018 6AM and 1-AUG-2018 8AM. Is there any way to find users who logged in between, say, 6AM and 8AM irrespective of the date? In other words, all users who logged in on any date, but where the time is between 6AM and 8AM.
db.user.find({
  "log-time": { "$gte": ISODate("2018-08-01T06:00:00.000Z"), "$lt": ISODate("2018-08-01T08:00:00.000Z") }
})
 
You can use $let to define a temporary variable containing the hour extracted from the ISODate using $hour, and then apply your conditions in $match inside $expr:
db.user.aggregate([
  {
    $match: {
      $expr: {
        $let: {
          vars: { hour: { $hour: "$log-time" } },
          in: {
            $and: [
              { $gte: [ "$$hour", 6 ] },
              { $lt: [ "$$hour", 8 ] }
            ]
          }
        }
      }
    }
  }
])
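Note that $hour extracts the hour in UTC by default; if the 6AM-8AM window should be interpreted in a local timezone, $hour also accepts an explicit timezone (MongoDB 3.6+). A sketch using America/New_York purely as an example:
db.user.aggregate([
  {
    $match: {
      $expr: {
        $let: {
          // extract the hour in the given timezone instead of UTC
          vars: { hour: { $hour: { date: "$log-time", timezone: "America/New_York" } } },
          in: {
            $and: [
              { $gte: [ "$$hour", 6 ] },
              { $lt: [ "$$hour", 8 ] }
            ]
          }
        }
      }
    }
  }
])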