I have the following schema
{
f1: "test",
f2: "something",
type: "A",
date: "2018-11-01T00:00:00.000Z",
deleted: false
},
{
"f1": "check",
type: "B",
deleted: false
}
Now what I want is to get all the data and, if type = "A", add an extra condition to my match query, say comparing its date with the current date.
My current query is:
db.getCollection('myCollection').aggregate([
  { $match: {
      "deleted": false
      // I want to check if type is A then compare its date
  }}
])
You could try an $or and say "If it's not type A or the date is x":
{$match: {
  $and: [
    { deleted: false },
    { $or: [
      { type: { $ne: 'A' } },
      { date: { $gte: ISODate("2018-01-01T00:00:00.0Z"), $lt: ISODate("2018-06-01T00:00:00.0Z") } }
    ]}
  ]
}}
Use $match with $or condition.
db.getCollection('tests').aggregate([
{ $match: {
$or : [
{ "deleted": false, "type": "A", "date": ISODate("2018-11-01T00:00:00.000Z") },
{ "deleted": false, type: { $ne: "A" }}
]}
}
])
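Both answers above hard-code the date. If you literally need to compare against the current date, a minimal sketch (assuming MongoDB 4.2+ for the $$NOW aggregation variable, and keeping the field names from the question) could look like this:
db.getCollection('myCollection').aggregate([
  { $match: {
      deleted: false,
      $or: [
        // documents that are not type "A" pass unconditionally
        { type: { $ne: "A" } },
        // type "A" documents must also have a date earlier than the current datetime
        { $expr: { $lt: [ "$date", "$$NOW" ] } }
      ]
  }}
])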
I have a MongoDB model that is currently like this (this is the stripped version):
{
title: String,
type: {
type: String,
lowercase: true,
enum: ['event', 'regular', 'project'],
},
project_start_time: Date,
project_end_time: Date,
regular_start_date: Date,
regular_end_date: Date,
events: [{
id: Number,
date: Date
}]
}
Now, I want to query something like this:
Find documents where regular_end_date, project_end_time, or the date of the last element in events is lower than the provided date.
The catch is that not every document has all three of these fields; which ones exist depends on the type (sorry for the messy data, it is already there). Below is an example:
If the data type is an event, then there are events
If the data type is regular, then there are regular_start_date and regular_end_date
If the data type is a project, then there are project_start_time and project_end_time
So far, I've tried to use this:
db.data.find({
"$or": [
{
"project_end_time": {
"$lt": ISODate("2022-12-27T10:09:49.753Z")
},
},
{
"regular_end_date": {
"$lt": ISODate("2022-12-27T10:09:49.753Z")
}
},
{
"$expr": {
"$lt": [
{
"$getField": {
"field": "date",
"input": {
"$last": "$events"
}
}
},
ISODate("2022-12-27T10:09:49.753Z")
]
}
}
]
})
Also with an aggregation pipeline:
db.data.aggregate([
{
$match: {
"$or": [{
"project_end_time": {
"$lt": ISODate("2022-12-27T10:09:49.753Z")
},
},
{
"regular_end_date": {
"$lt": ISODate("2022-12-27T10:09:49.753Z")
}
},
{
"$expr": {
"$lt": [{
"$getField": {
"field": "date",
"input": {
"$last": "$events"
}
}
},
ISODate("2022-12-27T10:09:49.753Z")
]}
}]
}
}
])
But it returns all the data, as if nothing was filtered according to the criteria. Any idea where I went wrong?
FYI I am using MongoDB 5.0.2
One option is to check whether the relevant field exists before checking its value; otherwise the expression evaluates to null, which compares as less than your requested date:
db.collection.find({
$or: [
{$and: [
{project_end_time: {$exists: true}},
{project_end_time: {$lt: ISODate("2022-12-27T10:09:49.753Z")}}
]},
{$and: [
{regular_end_date: {$exists: true}},
{regular_end_date: {$lt: ISODate("2022-12-27T10:09:49.753Z")}}
]},
{$and: [
{"events.0": {$exists: true}},
{$expr: {
$lt: [
{$last: "$events.date"},
ISODate("2022-12-27T10:09:49.753Z")
]
}}
]}
]
})
See how it works on the playground example
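A hedged variation on the same idea: only the $expr branch really needs a guard, because the plain $lt query operators already skip documents where the field is missing. Using $ifNull with a far-future sentinel date (an arbitrary value chosen here for illustration) keeps documents without events from passing the comparison:
db.data.find({
  $or: [
    { project_end_time: { $lt: ISODate("2022-12-27T10:09:49.753Z") } },
    { regular_end_date: { $lt: ISODate("2022-12-27T10:09:49.753Z") } },
    { $expr: {
        $lt: [
          // substitute a far-future sentinel when there is no events array,
          // so documents without events never pass the comparison
          { $ifNull: [ { $last: "$events.date" }, ISODate("9999-01-01T00:00:00.000Z") ] },
          ISODate("2022-12-27T10:09:49.753Z")
        ]
    }}
  ]
})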
Given the following data in a Mongo collection:
{
_id: "1",
dateA: ISODate("2021-12-31T00:00:00.000Z"),
dateB: ISODate("2022-01-11T00:00:00.000Z")
},
{
_id: "2",
dateA: ISODate("2022-01-02T00:00:00.000Z"),
dateB: ISODate("2022-01-08T00:00:00.000Z")
},
{
_id: "3",
dateA: ISODate("2022-01-03T00:00:00.000Z"),
dateB: ISODate("2022-01-05T00:00:00.000Z")
},
{
_id: "4",
dateA: ISODate("2022-01-09T00:00:00.000Z"),
dateB: null
},
{
_id: "5",
dateA: ISODate("2022-01-11T00:00:00.000Z"),
dateB: ISODate("2022-01-11T00:00:00.000Z")
},
{
_id: "6",
dateA: ISODate("2022-01-12T00:00:00.000Z"),
dateB: null
}
And given the range below:
ISODate("2022-01-01T00:00.000Z") .. ISODate("2022-01-10T00:00.000Z")
I want to find all values with dateA within given range, then I want to decrease the range starting it from the max dateB value, and finally fetching all documents that doesn't contain dateB.
In resume:
I'll start with range
ISODate("2022-01-01T00:00.000Z") .. ISODate("2022-01-10T00:00.000Z")
Then change to range
ISODate("2022-01-08T00:00.000Z") .. ISODate("2022-01-10T00:00.000Z")
Then find with
dateB: null
Finally, the result would be the document with
_id: "4"
Is there a way to find the document with _id: "4" in just one aggregate?
I know how to do it programmatically using 2 queries, but the main goal is to have just one request to the database.
You can use $max to find the maxDateB first. Then perform a self $lookup to apply the $match and find doc _id: "4".
db.collection.aggregate([
{
$match: {
dateA: {
$gte: ISODate("2022-01-01"),
$lt: ISODate("2022-01-10")
}
}
},
{
"$group": {
"_id": null,
"maxDateB": {
"$max": "$dateB"
}
}
},
{
"$lookup": {
"from": "collection",
"let": {
start: "$maxDateB",
end: ISODate("2022-01-10")
},
"pipeline": [
{
$match: {
$expr: {
$and: [
{
$gte: [
"$dateA",
"$$start"
]
},
{
$lt: [
"$dateA",
"$$end"
]
},
{
$eq: [
"$dateB",
null
]
}
]
}
}
}
],
"as": "result"
}
},
{
"$unwind": "$result"
},
{
"$replaceRoot": {
"newRoot": "$result"
}
}
])
Here is the Mongo Playground for your reference.
Assuming the matched initial dateA range is not huge, here is an alternate approach that exploits $push and $filter and avoids the cost of a $lookup stage:
db.foo.aggregate([
{$match: {dateA: {$gte: new ISODate("2022-01-01"), $lt: new ISODate("2022-01-10")} }},
// Kill 2 birds with one stone here. Get the max dateB AND prep
// an array to filter later. The items array will be as large
// as the match above but the output of this stage is a single doc:
{$group: {_id: null,
maxDateB: {$max: "$dateB" },
items: {$push: "$$ROOT"}
}},
{$project: {X: {$filter: {
input: "$items",
cond: {$and: [
// Each element of 'items' is passed as $$this so use
// dot notation to get at individual fields. Note that
// all other peer fields to 'items' like 'maxDateB' are
// in scope here and addressable using '$':
{$gt: [ "$$this.dateA", "$maxDateB"]},
{$eq: [ "$$this.dateB", null ]}
]}
}}
}}
]);
This yields a single doc result (I added an additional doc _id 41 to test the null equality for more than 1 doc):
{
"_id" : null,
"X" : [
{
"_id" : "4",
"dateA" : ISODate("2022-01-09T00:00:00Z"),
"dateB" : null
},
{
"_id" : "41",
"dateA" : ISODate("2022-01-09T00:00:00Z"),
"dateB" : null
}
]
}
It is possible to $unwind and $replaceRoot after this but there is little need to do so.
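For completeness, a minimal sketch of those two extra stages, appended to the pipeline above, if you do want plain documents back:
  // flattens the filtered X array back into individual documents
  { $unwind: "$X" },
  { $replaceRoot: { newRoot: "$X" } }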
I have a collection like this:
[
{
student: "a",
package: [
{name: "one", createdAt: "2021-10-12T00:00:00", active: true},
{name: "two", createdAt: "2021-10-13T00:00:00", active: false},
{name: "three", createdAt: "2021-10-14T00:00:00", active: false}
]
},
{
student: "b",
package: [
{name: "one", createdAt: "2021-10-16T00:00:00", active: true},
{name: "two", createdAt: "2021-10-17T00:00:00", active: false},
{name: "three", createdAt: "2021-10-18T00:00:00", active: false}
]
},
{
student: "c",
package: [
{name: "one", createdAt: "2021-10-10T00:00:00", active: true},
{name: "two", createdAt: "2021-10-17T00:00:00", active: false},
{name: "three", createdAt: "2021-10-18T00:00:00", active: false}
]
}
]
I have no idea how to write a MongoDB query that sorts this collection based on the createdAt of the package entries that have active: true.
The expectation looks like this:
[
{
student: "c",
package: [
{name: "one", createdAt: "2021-10-10T00:00:00", active: true},
...
]
},
{
student: "a",
package: [
{name: "one", createdAt: "2021-10-12T00:00:00", active: true},
...
]
},
{
student: "b",
package: [
{name: "one", createdAt: "2021-10-16T00:00:00", active: true},
...
]
},
]
Could anyone help me with this? The only idea that comes to mind is to sort it in application code, but is it possible to do it with a MongoDB query?
Query
creates a sort-key for each document: the latest createdAt among the active package members (the $reduce does this, keeping the max date)
sorts by it
unsets the extra key to remove it
*For descending or ascending you can change the $gt to $lt and the sort 1 to sort -1, depending on what you need. If you use $lt, also replace the "0" with a max string like "9", or, if you have real dates, with a min or max date.
PlayMongo
aggregate(
[{"$set":
{"sort-key":
{"$reduce":
{"input": "$package",
"initialValue": "0",
"in":
{"$cond":
[{"$and":
["$$this.active", {"$gt": ["$$this.createdAt", "$$value"]}]},
"$$this.createdAt", "$$value"]}}}}},
{"$sort": {"sort-key": 1}},
{"$unset": ["sort-key"]}])
You can use this aggregation query:
First $unwind to deconstruct the array and get one document per package entry.
$set with $toDate so createdAt can be compared as a date.
Then $sort by active.
$group to rebuild the initial documents, now with the packages sorted.
And last $sort again, by createdAt.
db.collection.aggregate([
{
"$unwind": "$package"
},
{
"$set": {
"package.createdAt": {
"$toDate": "$package.createdAt"
}
}
},
{
"$sort": {
"package.active": -1
}
},
{
"$group": {
"_id": "$_id",
"student": {
"$first": "$student"
},
"package": {
"$push": "$package"
}
}
},
{
"$sort": {
"package.createdAt": 1
}
}
])
Example here
Also, for the sorting it is better if createdAt is a Date field; otherwise you should parse it to a date first, like in this example.
I am trying to add a new sub-field conditionally.
If the field already exists, I don't want to overwrite it.
If the condition is not fulfilled, I don't want to add the parent object either.
Here is my collection:
{type: "A", object: {a: "", b: "foo"}},
{type: "A", object: {a: ""}},
{type: "A"},
{type: "B"}
Here is my aggregation stage:
{
$addFields: {
"object.b": {
$cond: {
if: {$eq: ["$type","A"]},
then: {$ifNull: ["$object.b", "bar"]},
else: "$DROP"
}
}
}
}
$DROP is not an aggregation operator; I only mean that in the else case I don't want to add the new field.
The stage above will not create the b field, but the parent object remains.
Here is my current result:
{type: "A", "object": {a: "", b: "foo"}},
{type: "A", "object": {a: "", b: "bar"}},
{type: "A", "object": {b: "bar"}},
{type: "B", "object": {}},
Here is what I want:
{type: "A", object: {a: "", b: "foo"}},
{type: "A", object: {a: "", b: "bar"}},
{type: "A", object: {b: "bar"}},
{type: "B"}
Your help will be highly appreciated.
This aggregate query will give you the desired result:
db.collection.aggregate([
{
$addFields: {
object: {
$cond: {
if: { $eq: [ "$type", "A" ] },
then: {
$mergeObjects: [
"$object",
{ b: { $ifNull: [ "$object.b", "bar" ] } }
]
},
else: "$$REMOVE"
}
}
}
}
])
Note that $$REMOVE is an aggregation system variable.
When $set adds a nested path, the whole path gets created; even if you end up returning $$REMOVE, that only affects the last segment of the path, and the rest has already been added (see the example).
Query
$set with a $switch, branching on object
if object doesn't exist:
if type is "A", add {object: {"b": "bar"}}, else $$REMOVE (object isn't added at all)
if type is "A" AND "$object.b" has no value:
merge {"b": "bar"} into object (this case also covers object: null)
else (default):
keep the old value (another type, or b already had a value)
*Maybe it could be smaller, but we check many cases (see the example for all the data variations):
object exists / is null / doesn't exist
type is "A" / is not
b exists / is null / has a value
Test code here
db.collection.aggregate([
{
"$set": {
"object": {
"$switch": {
"branches": [
{
"case": {
"$eq": [
{
"$type": "$object"
},
"missing"
]
},
"then": {
"$cond": [
{
"$eq": [
"$type",
"A"
]
},
{
"b": "bar"
},
"$$REMOVE"
]
}
},
{
"case": {
"$and": [
{
"$eq": [
"$type",
"A"
]
},
{
"$or": [
{
"$eq": [
"$object.b",
null
]
},
{
"$eq": [
{
"$type": "$object.b"
},
"missing"
]
}
]
}
]
},
"then": {
"$mergeObjects": [
"$object",
{
"b": "bar"
}
]
}
}
],
"default": "$object"
}
}
}
}
])
How do I update all Mongo documents that have a single field set to null, or that don't have a value at all?
What I have, but I'm not sure if it's correct:
db.collection.update({name: {$eq: null}}, {$set: {name: 'test'}})
If the name field is not there try:
db.collection.update({"name": {"$exists": false}}, {"$set": {"name": "test"}})
$set will add a new field with the specified value, provided that the new field does not violate a type constraint.
If it is there and null, or does not have a value set:
db.collection.update({"name": null}, {"$set": {"name": "test"}})
You can combine both queries using $or. Note that update() modifies only a single document by default, so pass { multi: true } to update every match:
db.collection.update(
   {
       "$or": [
           { "name": { "$exists": false } },
           { "name": null }
       ]
   },
   { "$set": { "name": "test" } },
   { "multi": true }
)
For MongoDB 3.2 and above, use updateMany() which updates multiple documents within the collection based on the filter:
db.collection.updateMany(
{
"$or": [
{ "name": { "$exists": false } },
{ "name": null }
]
},
{ "$set": { "name": "test" } }
)
Try this (note that findAndModify modifies only a single matching document):
db.collection.findAndModify({
  query: { '$or': [ { 'name': { $eq: null } }, { 'name': { "$exists": false } }, { 'name': { "$type": 10 } } ] },
  update: { $set: { name: 'test' } },
});
Thanks