How to update/add a sub-document inside an array of sub-documents in MongoDB?
I have sample data like this:
{
"baselineParty": {
"AP": [
{
"party": {
"partyId": {
"value": "12345"
}
}
},
{
"party": {
"partyId": {
"value": "12346"
}
}
},
{
"party": {
"partyId": {
"value": "12347"
}
}
}
]
}
}
I want to add an extra field "verifiedStatusYn" to each element of the "baselineParty.AP" array in the existing JSON, as a sibling of "party". The expected result would be like this:
{
"baselineParty": {
"AP": [
{
"party": {
"partyId": {
"value": "12345"
}
},
"verifiedStatusYn": {
"by": "cdd",
"updated": "22",
"value": "yes"
}
},
{
"party": {
"partyId": {
"value": "12346"
}
},
"verifiedStatusYn": {
"by": "cdd",
"updated": "22",
"value": "yes"
}
},
{
"party": {
"partyId": {
"value": "12347"
}
},
"verifiedStatusYn": {
"by": "cdd",
"updated": "22",
"value": "yes"
}
}
]
}
}
I tried using $set, but I am not getting the expected result.
Does anyone have a solution for this? Thanks!
You will need the all-positional operator $[] to update all entries in the array.
db.collection.updateMany({},
{
  $set: {
    "baselineParty.AP.$[].verifiedStatusYn": {
      "by": "cdd",
      "updated": "22",
      "value": "yes"
    }
  }
})
Here is the Mongo playground for your reference.
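If you later need to set the flag on only some array entries rather than all of them, the filtered positional operator $[<identifier>] with arrayFilters is the usual tool. A minimal sketch, assuming the same collection and that only the party with partyId 12345 should be flagged:
db.collection.updateMany({},
{
  $set: {
    "baselineParty.AP.$[elem].verifiedStatusYn": {
      "by": "cdd",
      "updated": "22",
      "value": "yes"
    }
  }
},
{
  // Only array elements matching this condition are updated
  arrayFilters: [{ "elem.party.partyId.value": "12345" }]
})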
I need to batch-update cdi_tags for specific md5 entries (pushing an item into each cdi_tags array) as follows:
db.getCollection("event").update({
"_id": {
$in: [ObjectId("6368f03e21b1ad246c84d67b"), ObjectId("6368f03f21b1ad246c84d982")]
},
"meta": {
$elemMatch: {
"key": "bags",
"value"
}
}
}, {
$addToSet: {
"meta.$.value.$[t].cdi_tags: "test_tag"
}
}, {
arrayFilters: [{
"t": {
$in: ["cc09ab29db36f85e154d2c1ae9517f57", "b6b9c266f584191b6eb2d2659948a7a9"]
}
}],
multi: true
})
but it does not work. My document is as follows:
{
"_id": ObjectId("6368f03f21b1ad246c84d982"),
"event_key": "PLAA7-194710",
"data_source": "EAP",
"name": "EP33L-AA94710",
"production": "CP",
"meta":
[
{
"key": "auto_note",
"value":
[]
},
{
"key": "bags",
"value":
{
"cc09ab29db36f85e154d2c1ae9517f57":
{
"name": "PLAA63952_event_ftp_pcar_event_20221107-194709_0.bag",
"profile": "msquare-prediction-ro",
"md5": "cc09ab29db36f85e154d2c1ae9517f57",
"cdi_tags":
[
"FDI_V1_0_0",
"from_dpp",
"epl-no-sensor",
"with_f100",
"epl-fix_video",
"epl-fix_horizon"
]
},
"361f5160cca3c3dec90cbbf93e3d7ae3":
{
"name": "PLAA63952_event_ftp_pcar_event_20221107-194709_0.bag",
"profile": "msquare-prediction-ro",
"md5": "361f5160cca3c3dec90cbbf93e3d7ae3",
"cdi_tags":
[
"FDI_V1_0_0",
"china",
"ftp_epcar_rawdata",
"from_dpp",
"trigger_type:system"
]
}
}
}
]
}
Thanks! How can I batch-update this nested document in MongoDB?
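One wrinkle worth noting: in this document the "bags" entry's value is an embedded object keyed by md5, not an array, so the filtered positional operator $[t] has nothing to iterate over. A possible workaround is a pipeline update (MongoDB 4.2+) that converts the object to an array, merges the tag in, and converts it back. A minimal sketch, assuming mongosh and the hard-coded md5 list from the question:
db.getCollection("event").updateMany(
  {
    "_id": {
      $in: [ObjectId("6368f03e21b1ad246c84d67b"), ObjectId("6368f03f21b1ad246c84d982")]
    }
  },
  [{
    $set: {
      meta: {
        $map: {
          input: "$meta",
          as: "m",
          in: {
            $cond: [
              { $ne: ["$$m.key", "bags"] },
              "$$m", // pass non-"bags" entries through unchanged
              {
                key: "$$m.key",
                value: {
                  $arrayToObject: {
                    $map: {
                      // value is an object keyed by md5, so unwrap it to {k, v} pairs
                      input: { $objectToArray: "$$m.value" },
                      as: "bag",
                      in: {
                        k: "$$bag.k",
                        v: {
                          $cond: [
                            {
                              $in: [
                                "$$bag.k",
                                ["cc09ab29db36f85e154d2c1ae9517f57", "b6b9c266f584191b6eb2d2659948a7a9"]
                              ]
                            },
                            // $setUnion gives $addToSet semantics for the new tag
                            {
                              $mergeObjects: [
                                "$$bag.v",
                                { cdi_tags: { $setUnion: ["$$bag.v.cdi_tags", ["test_tag"]] } }
                              ]
                            },
                            "$$bag.v"
                          ]
                        }
                      }
                    }
                  }
                }
              }
            ]
          }
        }
      }
    }
  }]
)
This assumes every matching bag already has a cdi_tags array, as in the sample document.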
How can I do this with a JOLT spec? This is the input array; can you please help me? Thanks a lot.
{
"results": {
"data": [
{
"name": "xx",
"typeRelationship": [
{
"relationship": "parent",
"type": {
"id": "yyyyy",
}
}
],
"id": "xxxxxxxx"
},
{
"name": "yy",
"typeRelationship": [
{
"relationshipType": "parent",
"type": {
"id": "CCCC"
}
},
{
"relationshipType": "child",
"service": {
"id": "DDDD"
}
},
{
"relationshipType": "child",
"service": {
"id": "xxxxxxxx"
}
}
],
"id": "yyyyy"
}
]
}}
This is the expected output:
{
"data" : [ {
"id" : "xxxx",
"href" : "xxxxxx",
"relation":"parent"
} ]
}
This works:
[
{
"operation": "shift",
"spec": {
"data": {
"*": {
"type": {
"id": {
"xxxx": {
"#3": "data[]"
}
}
}
}
}
}
}
]
Edit 1
The spec below moves all the values that have id=xxxx into the data array.
[
{
"operation": "shift",
"spec": {
"data": {
"*": {
"type": {
"*": {
"id": {
"xxxx": {
"#(2)": "data[]",
"#(4,relation)": "data[&3].relation"
}
}
}
}
}
}
}
}
]
This totally works. Thanks.
Can you please let me know what the 2, 3, and 4 refer to?
My array is a bit different, and when I adjust those numbers it does not work.
{
"results": {
"data": [
{
"name": "xx",
"typeRelationship": [
{
"relationship": "parent",
"type": {
"id": "yyyyy",
}
}
],
"id": "xxxxxxxx"
},
{
"name": "yy",
"typeRelationship": [
{
"relationshipType": "parent",
"type": {
"id": "CCCC"
}
},
{
"relationshipType": "child",
"service": {
"id": "DDDD"
}
},
{
"relationshipType": "child",
"service": {
"id": "xxxxxxxx"
}
}
],
"id": "yyyyy"
}
]
}
}
This is the expected output:
{
"rows" : [ {
"rowdata" : {
"relationshipType" : "child",
"Name" : "yy",
"id" : "yyyyy"
}
} ]
}
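As background on those numbers: in JOLT shift specs they count levels up the spec tree from the point where the match happens. &0 is the key matched at the current level, &1 the key one level up, @(4,relation) grabs the value of relation four levels up, and data[&3] reuses the key or array index matched three levels above, so adjusting them means counting how many levels your own input nests between the match and the value you want to reuse. A minimal sketch of the counting, assuming a simplified made-up input (a, b, c are placeholders):
Input:
{
  "a": {
    "b": {
      "c": "V"
    }
  }
}
Spec:
[
  {
    "operation": "shift",
    "spec": {
      "a": {
        "b": {
          "c": "out.&2.&1.&0"
        }
      }
    }
  }
]
Output:
{
  "out": {
    "a": {
      "b": {
        "c": "V"
      }
    }
  }
}
At the "c" match, &0 is "c", &1 is "b", and &2 is "a".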
I want to pass an event_id to Kibana/Elasticsearch and find the min and max dates from the @timestamp field for this event_id. Then I want to set the date range to these dates and show all the results. I assume this is doable.
I can get the min and max with this aggregation:
GET /filebeat-*/_search
{
"query": {
"match": {
"event_id": 1234
}
},
"aggs" : {
"min_date": {"min": {"field": "#timestamp" }},
"max_date": {"max": {"field": "#timestamp" }}
}
}
and I can get the results by searching for the specific date range:
GET /filebeat-*/_search
{
"query": {
"bool": {
"filter": {
"range": {"#timestamp": {"gte": "2020-09-11T13:35:35.000Z", "lte": "2020-09-24T20:35:07.000Z"}}
}
}
}
}
How can I combine the two so that I can just change the event_id and get an automatic date-range feature?
EDIT:
I can do this:
GET /filebeat-*/_search
{
"query": {
"bool": {
"must": {
"match": {
"event_id": 1234
}
},
"filter": {
"range": {
"#timestamp": {
"lte": "2020-09-25",
"gte": "2020-09-24"
}
}
}
}
},
"aggs": {
"min_date": {
"min": {
"field": "#timestamp"
}
},
"max_date": {
"max": {
"field": "#timestamp"
}
}
}
}
But what I would like to do is something like:
GET /filebeat-*/_search
{
"query": {
"bool": {
"must": {
"match": {
"event_id": 1234
}
},
"filter": {
"range": {
"#timestamp": {
"lte": "max_date",
"gte": "min_date"
}
}
}
}
},
"aggs": {
"min_date": {
"min": {
"field": "#timestamp"
}
},
"max_date": {
"max": {
"field": "#timestamp"
}
}
}
}
But this causes the error: "failed to parse date field [min_date]"
Is it possible to use the aggregated min and max values to define the date range?
Since you have not provided any sample index data, I am applying the range query to a date-type field.
Below is a working example with index mapping, index data, search query, and search results.
Index Mapping:
{
"mappings": {
"properties": {
"date": {
"type": "date"
}
}
}
}
Index Data:
{
"date": "2015-02-10",
"event_id":"1234"
}
{
"date": "2015-01-01",
"event_id":"1235"
}
{
"date": "2015-02-01",
"event_id":"1234"
}
{
"date": "2015-02-01",
"event_id":"1235"
}
{
"date": "2015-01-20",
"event_id":"1234"
}
Search Query:
{
"query": {
"bool": {
"must": {
"match": {
"event_id": 1234
}
},
"filter": {
"range": {
"date": {
"lte": "2015-02-15",
"gte": "2015-01-11"
}
}
}
}
},
"aggs": {
"min_date": {
"min": {
"field": "date"
}
},
"max_date": {
"max": {
"field": "date"
}
}
}
}
Search Result:
"hits": {
"total": {
"value": 3,
"relation": "eq"
},
"max_score": 0.44183272,
"hits": [
{
"_index": "stof_64127765",
"_type": "_doc",
"_id": "3",
"_score": 0.44183272,
"_source": {
"date": "2015-02-01",
"event_id": "1234"
}
},
{
"_index": "stof_64127765",
"_type": "_doc",
"_id": "1",
"_score": 0.44183272,
"_source": {
"date": "2015-02-10",
"event_id": "1234"
}
},
{
"_index": "stof_64127765",
"_type": "_doc",
"_id": "5",
"_score": 0.44183272,
"_source": {
"date": "2015-01-20",
"event_id": "1234"
}
}
]
},
"aggregations": {
"max_date": {
"value": 1.4235264E12,
"value_as_string": "2015-02-10T00:00:00.000Z"
},
"min_date": {
"value": 1.421712E12,
"value_as_string": "2015-01-20T00:00:00.000Z"
}
}
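Note that Elasticsearch cannot feed aggregation results back into the query section of the same request, which is why min_date and max_date cannot be referenced in the range filter directly. The usual pattern for the auto-date-range behavior is two requests: first an aggregation-only query to fetch the bounds, then a second query whose range is filled in from the response. A sketch of the first request, assuming the original filebeat indices:
GET /filebeat-*/_search
{
  "size": 0,
  "query": {
    "match": {
      "event_id": 1234
    }
  },
  "aggs": {
    "min_date": { "min": { "field": "@timestamp" } },
    "max_date": { "max": { "field": "@timestamp" } }
  }
}
The response's aggregations.min_date.value_as_string and aggregations.max_date.value_as_string can then be plugged into the gte/lte of the range filter in the follow-up search.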
I have a collection x, and each document of x has a subcollection y. Each document of y has a time attribute. I can't figure out how to query just that subcollection via REST (I know this feature exists in the SDK). My query so far, which is obviously wrong:
{
"structuredQuery": {
"from": [
{
"collectionId": "x",
"allDescendants": true
}
],
"where": {
"compositeFilter": {
"op": "AND",
"filters": [
{
"fieldFilter": {
"field": {
"fieldPath": "y.time"
},
"op": "GREATER_THAN_OR_EQUAL",
"value": {
"integerValue": 1577836800000
}
}
},
{
"fieldFilter": {
"field": {
"fieldPath": "y.time"
},
"op": "LESS_THAN_OR_EQUAL",
"value": {
"integerValue": 1578355200000
}
}
}
]
}
}
}
}
Sending a POST to https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents:runQuery, but I've also tried .../documents/x/ID/y:runQuery, and that's obviously wrong too.
I believe you described a collection group query for collection group y. In the REST API, this is an allDescendants query on the path projects/PROJECT/databases/(default)/documents (known as the root document):
https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents:runQuery
{
"structuredQuery": {
"from": [
{
"collectionId": "y",
"allDescendants": true
}
],
"where": {
"compositeFilter": {
"op": "AND",
"filters": [
{
"fieldFilter": {
"field": {
"fieldPath": "time"
},
"op": "GREATER_THAN_OR_EQUAL",
"value": {
"integerValue": 1577836800000
}
}
},
{
"fieldFilter": {
"field": {
"fieldPath": "time"
},
"op": "LESS_THAN_OR_EQUAL",
"value": {
"integerValue": 1578355200000
}
}
}
]
}
}
}
}
Declare the path to the subcollection in the URL:
https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents/x/documentX:runQuery
Then make the collectionId in the from clause "y" and set allDescendants to false:
{
"structuredQuery": {
"from": [
{
"collectionId": "y",
"allDescendants": false
}
],
"where": {
"compositeFilter": {
"op": "AND",
"filters": [
{
"fieldFilter": {
"field": {
"fieldPath": "y.time"
},
"op": "GREATER_THAN_OR_EQUAL",
"value": {
"integerValue": 1577836800000
}
}
},
{
"fieldFilter": {
"field": {
"fieldPath": "y.time"
},
"op": "LESS_THAN_OR_EQUAL",
"value": {
"integerValue": 1578355200000
}
}
}
]
}
}
}
}
Source: https://firebase.google.com/docs/firestore/reference/rest/v1/projects.databases.documents/runQuery#path-parameters
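For completeness, a minimal sketch of calling the endpoint with curl, assuming the query JSON is saved in query.json and an OAuth access token is available in $TOKEN (both names are placeholders):
curl -X POST \
  -H "Authorization: Bearer $TOKEN" \
  -H "Content-Type: application/json" \
  -d @query.json \
  "https://firestore.googleapis.com/v1/projects/PROJECT/databases/(default)/documents:runQuery"
Also note that range filters in a collection group query require a collection-group-scope index in Firestore; the error message returned by the API includes a link for creating it.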
I want to sum two fields in the REST API and order by the result.
This is my request:
"aggs": {
"genres": {
"terms": {
"field": "L7_PROTO_NAME.keyword",
"order": {
"sum_bytes": "desc"
}
},
"aggs": {
"in_bytes": {
"sum": {
"field": "IN_BYTES"
}
},
"out_bytes": {
"sum": {
"field": "OUT_BYTES"
}
}
}
}
}
Thank you in advance!
You need to create another sub-aggregation that sums the two fields and then order the terms aggregation by that sub-aggregation:
{
"query": {
"bool": {
"should": [
{
"term": {
"_index": "logstash-2018.01.02"
}
},
{
"term": {
"IPV4_DST_ADDR": "192.168.0.159"
}
},
{
"term": {
"IPV4_SRC_ADDR": "192.168.0.159"
}
}
]
}
},
"aggs": {
"genres": {
"terms": {
"field": "L7_PROTO_NAME.keyword",
"order": {
"sum_bytes": "desc"
}
},
"aggs": {
"in_bytes": {
"sum": {
"field": "IN_BYTES"
}
},
"out_bytes": {
"sum": {
"field": "OUT_BYTES"
}
},
"sum_bytes": {
"sum": {
"script": {
"source": "doc.IN_BYTES.value + doc.OUT_BYTES.value"
}
}
}
}
}
}
}
Since scripts are quite computation-heavy, you should sum those two fields at indexing time and index the result as a new field (SUM_BYTES below) that you can use directly in your aggregation, like this:
{
"query": {
"bool": {
"should": [
{
"term": {
"_index": "logstash-2018.01.02"
}
},
{
"term": {
"IPV4_DST_ADDR": "192.168.0.159"
}
},
{
"term": {
"IPV4_SRC_ADDR": "192.168.0.159"
}
}
]
}
},
"aggs": {
"genres": {
"terms": {
"field": "L7_PROTO_NAME.keyword",
"order": {
"sum_bytes": "desc"
}
},
"aggs": {
"in_bytes": {
"sum": {
"field": "IN_BYTES"
}
},
"out_bytes": {
"sum": {
"field": "OUT_BYTES"
}
},
"sum_bytes": {
"sum": {
"field": "SUM_BYTES"
}
}
}
}
}
}
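To produce that field at indexing time, one option is an ingest pipeline with a script processor, sketched below. The pipeline name sum-bytes is made up, and this assumes both byte fields are present on every indexed document:
PUT _ingest/pipeline/sum-bytes
{
  "description": "Precompute SUM_BYTES so aggregations can sort on it without a script",
  "processors": [
    {
      "script": {
        "source": "ctx.SUM_BYTES = ctx.IN_BYTES + ctx.OUT_BYTES"
      }
    }
  ]
}
Then index documents with ?pipeline=sum-bytes (or set it as the index's default pipeline) so every new document carries SUM_BYTES.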