I have the two documents below in a MongoDB collection and would like to write a bson.M filter to fetch only the first one.
I tried the filter below to get the first document but got no result.
When only the first document is in the collection, the filter returns it,
but when both documents are present, I get no result. Need help.
filter := bson.M{"type": "FPF", "status": "REGISTERED", "fpfInfo.fpfInfoList.ssai.st": 1, "fpfInfo.fpfInfoList.infoList.dn": "sim"}
{
"_id" : "47f6ad68-d431-4b69-9899-f33d828f8f9c",
"type" : "FPF",
"status" : "REGISTERED",
"fpfInfo" : {
"fpfInfoList" : [
{
"ssai" : {
"st" : 1
},
"infoList" : [
{
"dn" : "sim"
}
]
}
]
}
},
{
"_id" : "347c8ed2-d9d1-4f1a-9672-7e8a232d2bf8",
"type" : "FPF",
"status" : "REGISTERED",
"fpfInfo" : {
"fpfInfoList" : [
{
"ssai" : {
"st" : 1,
"ds" : "000004"
},
"infoList" : [
{
"dn" : "sim"
}
]
}
]
}
}
db.collection.aggregate([
{
"$unwind": "$fpfInfo.fpfInfoList"
},
{
"$match": {
"fpfInfo.fpfInfoList.ssai.ds": {
"$exists": false
},
"fpfInfo.fpfInfoList.infoList.dn": "sim",
"fpfInfo.fpfInfoList.ssai.st": 1
}
}
])
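If a plain find filter is enough, a hedged alternative (not from the original answer) is to use $elemMatch, so that all three conditions must hold on the same fpfInfoList element; this keeps out the document whose element carries ssai.ds:
db.collection.find({
  "type": "FPF",
  "status": "REGISTERED",
  "fpfInfo.fpfInfoList": {
    "$elemMatch": {
      "ssai.st": 1,
      "ssai.ds": { "$exists": false },
      "infoList.dn": "sim"
    }
  }
})
The same nested structure should translate directly into bson.M values on the Go side.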
{
"dictID" : "37528e10-6344-4d93-8a57-35af0bdb6b34",
"dictVersion" : 1,
"addMeasures" : [
{
"measureId" : "f229ba18-0de8-47a1-8a87-88c95edd536a",
"userId" : "3966C3DD-8328-4F18-A061-02A01111763A",
"created" : "2017-01-07T05:47:22.512Z",
"ownerAction" : "NA"
}
],
"deleteMeasures" : [
{
"measureId" : "0b701469-1502-4de4-95de-1ee70ad6c577",
"userId" : "3966C3DD-8328-4F18-A061-02A01111763A",
"created" : "2017-01-07T05:47:35.193Z",
"ownerAction" : "NA"
},
{
"measureId" : "443d1b97-95ae-4410-9302-da3edbad4004",
"userId" : "3966C3DD-8328-4F18-A061-02A01111763A",
"created" : "2017-01-07T05:47:36.062Z",
"ownerAction" : "NA"
},
{
"measureId" : "aa1689c9-8df6-4bff-88a4-274a4f2dc2aa",
"userId" : "3966C3DD-8328-4F18-A061-02A01111763A",
"created" : "2017-01-07T05:47:37.075Z",
"ownerAction" : "NA"
}
]
}
Above is a document from my MongoDB collection, and I want to run the following query:
db.getCollection('DataDictionaryReview').update(
{
$and: [
{ dictID: "37528e10-6344-4d93-8a57-35af0bdb6b34" },
{ dictVersion: 1 },
{ "deleteMeasures.measureId": "aa1689c9-8df6-4bff-88a4-274a4f2dc2aa" },
{ "deleteMeasures.ownerAction": "NA" }
]
},
{
$set: {
"deleteMeasures.$.created": "2017-01-07T06:51:56.983Z",
"deleteMeasures.$.ownerAction": "Reject"
}
}
)
I want to update the last element of deleteMeasures, but every time the query updates the first element of deleteMeasures instead.
The query looks okay at first glance, but it needs $elemMatch to get the correct index. Without $elemMatch, the two conditions on deleteMeasures can each be satisfied by a different array element (every element here has ownerAction "NA"), so the positional $ operator can end up pointing at the first element instead of the intended one. $elemMatch forces both conditions to match the same element:
db.getCollection('DataDictionaryReview').update(
{
dictID: "37528e10-6344-4d93-8a57-35af0bdb6b34",
dictVersion: 1 ,
"deleteMeasures" : { $elemMatch : {"measureId": "aa1689c9-8df6-4bff-88a4-274a4f2dc2aa", "ownerAction": "NA" }}
},
{
$set: {
"deleteMeasures.$.created": "2017-01-07T06:51:56.983Z",
"deleteMeasures.$.ownerAction": "Reject"
}
}
)
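On MongoDB 3.6 or newer, a filtered positional operator is another option; the following is only a sketch (not part of the original answer), targeting the element by the same two conditions via arrayFilters:
db.getCollection('DataDictionaryReview').update(
    { dictID: "37528e10-6344-4d93-8a57-35af0bdb6b34", dictVersion: 1 },
    {
        $set: {
            "deleteMeasures.$[m].created": "2017-01-07T06:51:56.983Z",
            "deleteMeasures.$[m].ownerAction": "Reject"
        }
    },
    { arrayFilters: [ { "m.measureId": "aa1689c9-8df6-4bff-88a4-274a4f2dc2aa", "m.ownerAction": "NA" } ] }
)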
I have the following aggregation with a date comparison, where I only want to keep fulfillments whose $$item.date is greater than or equal to the current time.
[
{ $match : { "_id" : "57b4c5f0291ebb13110b888e" } },
{ $project : {
"fulfillments" : {
$filter : {
"input" : "$fulfillments",
"as" : "item",
"cond" : { "$gte" : ["$$item.date","new Date()"]}
}
}
}
},
...
...
]
The important part that I have a question about above is the following:
"cond" : { "$gte" : ["$$item.date","new Date()"]}
This doesn't seem to be working: I can change the new Date() to 1 or 0 or pretty much any value and it still returns every fulfillment. I need it to return only the fulfillments whose date is greater than or equal to the current date.
For example, given the following document
{
"_id": "57b4c5f0291ebb13110b888e",
"fulfillments": [
{
"_id": "582deb33bb117300010a2ae5",
"date": new Date("2016-11-23T17:00:00-0700"),
},
{
"_id": "582deb33bdf17300010a2ae5",
"date": new Date("2017-11-23T17:00:00-0700"),
}
  ]
}
Only the following fulfillment should be returned, because its date (2017-11-23) is in the future:
{
"_id": "582deb33bdf17300010a2ae5",
"date": new Date("2017-11-23T17:00:00-0700"),
}
Update
There is a question about whether I am giving an accurate document structure, so I included a screenshot below to validate this.
If you just want the current time on the machine, remove the quotes around new Date() so it is evaluated as a date rather than compared as a string; see my example below.
> db.test.find().pretty()
{
"_id" : "57b4c5f0291ebb13110b888e",
"fulfillments" : [
{
"_id" : "582deb33bb117300010a2ae5",
"date" : ISODate("2016-11-24T00:00:00Z")
},
{
"_id" : "582deb33bdf17300010a2ae5",
"date" : ISODate("2017-11-24T00:00:00Z")
}
]
}
>
> db.test.aggregate([
... { $match : { "_id" : "57b4c5f0291ebb13110b888e" } },
... { $project : {
... "fulfillments" : {
... $filter : {
... "input" : "$fulfillments",
... "as" : "item",
... "cond" : { "$gte" : ["$$item.date",new Date()]}
... }
... }
... }
... }
... ]).pretty()
{
"_id" : "57b4c5f0291ebb13110b888e",
"fulfillments" : [
{
"_id" : "582deb33bdf17300010a2ae5",
"date" : ISODate("2017-11-24T00:00:00Z")
}
]
}
>
I have something like below:
{
"_id" : "1",
"firstArray" : [
{
"_id" : "11",
"secondArray" : [ ]
},
{
"_id" : "12",
"secondArray" : [ ]
},
{
"_id" : "13",
"secondArray" : [ { "type" : "somthing" } ]
}
]
},
{
"_id" : "2",
"firstArray" : [
{
"_id" : "21",
"secondArray" : [ ]
},
{
"_id" : "22",
"secondArray" : [ ]
}
]
}
I need a MongoDB query to find documents in which ALL of the nested secondArray fields are empty. The query should return the second document and not the first one.
To solve this, we need to check the size of each secondArray, but to enable that we first need to unwind firstArray.
Please find below an aggregation framework snippet which solves this problem:
db.pmoubed.aggregate([{
$unwind : "$firstArray"
}, {
$project : {
_id : 1,
firstArray : 1,
isNotEmpty : {
$size : "$firstArray.secondArray"
}
}
}, {
$group : {
_id : "$_id",
isNotEmpty : {
$sum : "$isNotEmpty"
},
firstArray : {
$push : "$firstArray"
}
}
}, {
$match : {
"isNotEmpty" : 0
}
}
])
Any comments welcome
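For completeness, the same condition can also be expressed as a plain find query; this is only a sketch (note it would also match documents whose firstArray is empty or missing):
db.pmoubed.find({
    firstArray: {
        $not: {
            $elemMatch: { "secondArray.0": { $exists: true } }
        }
    }
})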
I have this Mongo query:
db.getCollection('Catalogos').aggregate(
{ $match: {Items: {$elemMatch: {'MarIclase': '04'} } } },
{ $unwind : "$Items" },
{ $match: { "Items.MarIclase" : "04" } },
{ $group : {
_id : "$_id",
Items : { $push : { 'MarIclase': "$Items.MarIclase", 'MarCdescrip' : '$Items.MarCdescrip' } }
}}
);
The result of this query is:
{
"result" : [
{
"_id" : "CAT_MARCAS_VU",
"Items" : [
{
"MarIclase" : "04",
"MarCdescrip" : "5500 LARSON"
},
{
"MarIclase" : "04",
"MarCdescrip" : "A LINER"
}
]
}
],
"ok" : 1.0000000000000000
}
I'd like to have this result:
{
"result" : [
{
"_id" : "CAT_MARCAS_VU",
"Items" : [
{
"04" : "5500 LARSON"
},
{
"04" : "A LINER"
}
]
}
],
"ok" : 1.0000000000000000
}
Do you know if I can do something in the $push to use the values as the field names?
I'd like to have something like this:
{ "04" : "A LINER" }
{ "04" : "5500 LARSON" }
Thank you!
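One way to get that shape (a sketch, and it assumes MongoDB 3.4.4 or newer for $arrayToObject) is to build a single-entry k/v pair inside the $push and convert it to an object:
db.getCollection('Catalogos').aggregate([
    { $match: { Items: { $elemMatch: { MarIclase: "04" } } } },
    { $unwind: "$Items" },
    { $match: { "Items.MarIclase": "04" } },
    { $group: {
        _id: "$_id",
        Items: {
            $push: {
                // turns each item into { "<MarIclase>": "<MarCdescrip>" }, e.g. { "04": "5500 LARSON" }
                $arrayToObject: [ [ { k: "$Items.MarIclase", v: "$Items.MarCdescrip" } ] ]
            }
        }
    }}
])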
An abstract document in collection md is given:
{
vals : [{
uid : string,
val : string|array
}]
}
The following, partially correct aggregation is given:
db.md.aggregate(
{ $unwind : "$vals" },
{ $match : { "vals.uid" : { $in : ["x", "y"] } } },
{
$group : {
_id : { uid : "$vals.uid" },
vals : { $addToSet : "$vals.val" }
}
}
);
That may lead to the following result:
"result" : [
{
"_id" : {
"uid" : "x"
},
"vals" : [
[
"24ad52bc-c414-4349-8f3a-24fd5520428e",
"e29dec2f-57d2-43dc-818a-1a6a9ec1cc64"
],
[
"5879b7a4-b564-433e-9a3e-49998dd60b67",
"24ad52bc-c414-4349-8f3a-24fd5520428e"
]
]
},
{
"_id" : {
"uid" : "y"
},
"vals" : [
"0da5fcaa-8d7e-428b-8a84-77c375acea2b",
"1721cc92-c4ee-4a19-9b2f-8247aa53cfe1",
"5ac71a9e-70bd-49d7-a596-d317b17e4491"
]
}
]
As x is aggregated over documents containing an array rather than a string, the vals in the result is an array of arrays. What I am looking for in this case is a flattened array (like the result for y).
To me it seems that what I want to achieve with a single aggregation call is currently not supported by any available operation, as e.g. a type conversion cannot be done and $unwind always expects an array as its input.
Is map-reduce the only option I have? If not, any hints?
Thanks!
You can use the aggregation to do the computation you want without changing your schema (though you might consider changing your schema simply to make queries and aggregations of this field easier to write).
I broke up the pipeline into multiple steps for readability. I also simplified your document slightly, again for readability.
Sample input:
> db.md.find().pretty()
{
"_id" : ObjectId("512f65c6a31a92aae2a214a3"),
"uid" : "x",
"val" : "string"
}
{
"_id" : ObjectId("512f65c6a31a92aae2a214a4"),
"uid" : "x",
"val" : "string"
}
{
"_id" : ObjectId("512f65c6a31a92aae2a214a5"),
"uid" : "y",
"val" : "string2"
}
{
"_id" : ObjectId("512f65e8a31a92aae2a214a6"),
"uid" : "y",
"val" : [
"string3",
"string4"
]
}
{
"_id" : ObjectId("512f65e8a31a92aae2a214a7"),
"uid" : "z",
"val" : [
"string"
]
}
{
"_id" : ObjectId("512f65e8a31a92aae2a214a8"),
"uid" : "y",
"val" : [
"string1",
"string2"
]
}
Pipeline stages:
> project1 = {
"$project" : {
"uid" : 1,
"val" : 1,
"isArray" : {
"$cond" : [
{
"$eq" : [
"$val.0",
[ ]
]
},
true,
false
]
}
}
}
> project2 = {
"$project" : {
"uid" : 1,
"valA" : {
"$cond" : [
"$isArray",
"$val",
[
null
]
]
},
"valS" : {
"$cond" : [
"$isArray",
null,
"$val"
]
},
"isArray" : 1
}
}
> unwind = { "$unwind" : "$valA" }
> project3 = {
"$project" : {
"_id" : 0,
"uid" : 1,
"val" : {
"$cond" : [
"$isArray",
"$valA",
"$valS"
]
}
}
}
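The group stage is not defined above; judging from the final output, it is presumably along these lines:
> group = {
    "$group" : {
        "_id" : "$uid",
        "vals" : { "$addToSet" : "$val" }
    }
}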
Final aggregation:
> db.md.aggregate(project1, project2, unwind, project3, group)
{
"result" : [
{
"_id" : "z",
"vals" : [
"string"
]
},
{
"_id" : "y",
"vals" : [
"string1",
"string4",
"string3",
"string2"
]
},
{
"_id" : "x",
"vals" : [
"string"
]
}
],
"ok" : 1
}
If you modify your schema so that the "vals.val" field is always an array (even when the record contains only one element), you can do it easily as follows:
db.test_col.insert({
vals : [
{
uid : "uuid1",
val : ["value1"]
},
{
uid : "uuid2",
val : ["value2", "value3"]
}]
});
db.test_col.insert(
{
vals : [{
uid : "uuid2",
val : ["value4", "value5"]
}]
});
Using this approach you only need two $unwind operations: one unwinds the "parent" array and the second unwinds every "vals.val" value. So, with a query like:
db.test_col.aggregate(
{ $unwind : "$vals" },
{ $unwind : "$vals.val" },
{
$group : {
_id : { uid : "$vals.uid" },
vals : { $addToSet : "$vals.val" }
}
}
);
you can obtain the expected result:
{
"result" : [
{
"_id" : {
"uid" : "uuid2"
},
"vals" : [
"value5",
"value4",
"value3",
"value2"
]
},
{
"_id" : {
"uid" : "uuid1"
},
"vals" : [
"value1"
]
}
],
"ok" : 1
}
And no, you can't execute this query using your current schema, since $unwind fails when the field isn't an array field.