Expected [START_OBJECT] under [filter]

I want to put two filters in aggs, like this:
"aggs": {
"download1" : {
"filter" : [
{ "term": { "IPV4_DST_ADDR":"192.168.0.159"}},
{ "range": { "LAST_SWITCHED": { "gte": "now-5m" } }}
],
"aggs" : {
"downlod_bytes" : { "sum" : { "field" : "IN_BYTES" } }
}
}
}
but it shows me an error:
"error": {
"root_cause": [
{
"type": "parsing_exception",
"reason": "Expected [START_OBJECT] under [filter], but got a [START_ARRAY] in [download1]",
"line": 33,
"col": 24
}
]}
How can I do this? Thank you in advance!

You need to combine both queries with a bool/filter:
{
"aggs": {
"download1": {
"filter": {
"bool": {
"filter": [
{
"term": {
"IPV4_DST_ADDR": "192.168.0.159"
}
},
{
"range": {
"LAST_SWITCHED": {
"gte": "now-5m"
}
}
}
]
}
},
"aggs": {
"downlod_bytes": {
"sum": {
"field": "IN_BYTES"
}
}
}
}
}
}


Cannot find # in OpenSearch query

I have an index that includes a field, and when a '#' is part of the value, I cannot get the query to find the '#'.
Field Data: "#3213939"
Query:
GET /invoices/_search
{
"query": {
"bool": {
"should": [
{
"match": {
"referenceNumber": {
"query": "#32"
}
}
},
{
"wildcard": {
"referenceNumber": {
"value": "*#32*"
}
}
}
]
}
}
}
"#" character drops during standard text analyzer this is why you can't find it.
POST _analyze
{
"text": ["#3213939"]
}
Response:
{
"tokens": [
{
"token": "3213939",
"start_offset": 1,
"end_offset": 8,
"type": "<NUM>",
"position": 0
}
]
}
You can update the analyzer and customize it.
https://www.elastic.co/guide/en/elasticsearch/reference/current/analysis-standard-analyzer.html
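For reference, here is a minimal sketch of such a custom analyzer (the index name invoices_custom and analyzer name keep_hash are made up for illustration; adjust to your own index). It swaps the standard tokenizer for the whitespace tokenizer, which does not strip '#':
PUT /invoices_custom
{
  "settings": {
    "analysis": {
      "analyzer": {
        "keep_hash": {
          "type": "custom",
          "tokenizer": "whitespace",
          "filter": ["lowercase"]
        }
      }
    }
  },
  "mappings": {
    "properties": {
      "referenceNumber": {
        "type": "text",
        "analyzer": "keep_hash"
      }
    }
  }
}
With this mapping, "#3213939" is indexed as a single token that keeps the '#', so a match on the full value or a wildcard such as "*#32*" can find it.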
OR
you can use the referenceNumber.keyword field:
GET test_invoices/_search
{
"query": {
"bool": {
"should": [
{
"match": {
"referenceNumber": {
"query": "#32"
}
}
},
{
"wildcard": {
"referenceNumber.keyword": {
"value": "*#32*"
}
}
}
]
}
}
}

Jolt Transformation problems with null Elements

I have a problem with my Jolt transformation but no idea how to fix it.
I get "null" Element in the array I produce:
{
"Verkaufsprodukt": [
{
"Produkt": [
{
"Elementarprodukt": [
{
"ArtID": {
"bezeichnung": "b",
"value": "0302"
},
"VersichertePerson": {
"PartnerID": "1"
}
},
{
"ArtID": {
"bezeichnung": "f"
},
"VersichertePerson": {
"PartnerID": "1"
}
},
{
"ArtID": {
"bezeichnung": "c"
},
"VersichertePerson": {
"PartnerID": "1"
}
},
{
"ArtID": {
"bezeichnung": "a",
"value": "0301"
},
"VersichertePerson": {
"PartnerID": "1"
}
}
]
}
]
}
],
"Partner": [
{
"Name": "Holgerson",
"PartnerID": "1",
"Vorname": "Nils"
}
]
}
My result:
{
"vertragsdetails" : {
"versichertePersonen" : {
"versicherungssummenOderLeistungen" : [ null, {
"kennung" : "0302"
}, null, {
"kennung" : "0301"
} ]
}
}
}
Here is my spec:
[
{
"operation": "shift",
"spec": {
"Verkaufsprodukt": {
"*": {
"Produkt": {
"*": {
"Elementarprodukt": {
"*": {
"VersichertePerson": {
"PartnerID": {
"1": {
"#(3)": {
"ArtID": {
"value": "vertragsdetails.versichertePersonen.versicherungssummenOderLeistungen[&6].kennung"
}
}
}
}
}
}
}
}
}
}
}
}
}
]
I see that the null elements come from the "ArtID" elements without a "value", but how can I get rid of them?
I tried another "shift" operation, but that also deleted other elements I want to keep.
Can somebody help? Thanks!
I found a solution. I added this to my spec:
{
"operation": "shift",
"spec": {
"vertragsdetails": {
"versichertePersonen": {
"versicherungssummenOderLeistungen": {
"*": {
"kennung": {
"#1": "vertragsdetails.versichertePersonen.versicherungssummenOderLeistungen[]"
}
}
},
"dataToKeep": "vertragsdetails.versichertePersonen.dataToKeep"
}
}
}
}

Doctrine Mongodb ODM Add Dynamic Dates in Aggregation

I'm trying to find out whether a specific motorcycle has a contract in a given date range or not.
My schema looks like:
{
"_id" : ObjectId("575b7c0b0419c906e262d54b"),
"customer" : {
"id" : ObjectId("575b7c0b0419c906e262d54b")
},
"name" : "Harley Store",
"description" : "Harley Store",
"contracts" : [
{
"_id" : ObjectId("575b7c0b0419c906e262d54b"),
"bike" : {
"id" : ObjectId("575b7c0b0419c906e262d54b")
},
"from" : ISODate("2050-01-01T00:00:00.000Z"),
"till" : ISODate("2050-01-05T00:00:00.000Z"),
"cost" : 10000,
"lapse" : [
ISODate("2050-01-01T00:00:00.000Z"),
ISODate("2050-01-02T00:00:00.000Z"),
ISODate("2050-01-03T00:00:00.000Z"),
ISODate("2050-01-04T00:00:00.000Z"),
ISODate("2050-01-05T00:00:00.000Z")
]
},
{
"_id" : ObjectId("575b7c0b0419c906e262d54c"),
"bike" : {
"id" : ObjectId("575b7c0b0419c906e262d54c")
},
"from" : ISODate("2050-01-01T00:00:00.000Z"),
"till" : ISODate("2050-01-05T00:00:00.000Z"),
"cost" : 10000,
"lapse" : [
ISODate("2050-01-06T00:00:00.000Z"),
ISODate("2050-01-07T00:00:00.000Z"),
ISODate("2050-01-08T00:00:00.000Z"),
ISODate("2050-01-09T00:00:00.000Z")
]
}
]
}
I have the following query in the mongo shell:
db.getCollection('BikeStore').aggregate([
{
$unwind:'$contracts'
},
{
$project:{
contract:'$contracts',
_id: 0
}
},
{
$match:{
'contract.bike.id': ObjectId("575b7c0b0419c906e262d54b")
}
},
{
$match:{
$or: [
{'contract.lapse': {$eq: ISODate("2049-01-31T00:00:00.000Z")}},
{'contract.lapse': {$eq: ISODate("2050-02-01T00:00:00.000Z")}},
{'contract.lapse': {$eq: ISODate("2050-02-02T00:00:00.000Z")}}
]
}
}
])
The query works fine in the mongo shell, but the dates are generated dynamically from $from to $till, and I cannot find a way to get this done using the query builder.
My query builder:
public function hasContracts(string $bikeId, \DateTime $from, \DateTime $till): bool
{
$filterDate = \DateTimeImmutable::createFromMutable($from);
$days = $from->diff($till)->days;
$qb = $this->createAggregationBuilder();
$qb->unwind('$contracts');
$qb->project()
->field('contract')
->expression('$contracts')
->field('_id')
->expression(0);
$qb->match()->field('contract.bike.id')->equals(new ObjectId($bikeId));
for ($i = 0; $i <= $days; $i++) { // $i less than or equal to $days
$qb->match()->addOr(
$qb->matchExpr()->field('contract.lapse')->equals(
new UTCDateTime(
$filterDate->add(
\DateInterval::createFromDateString(sprintf('%d day', $i)))
->setTime(0, 0)->getTimestamp() * 1000
)
)
);
}
return 0 !== $qb->execute()->count();
}
The query that the ODM generates is the following:
{
"aggregate": true,
"pipeline": [
{
"$unwind": "$contracts"
},
{
"$project": {
"contract": "$contracts",
"_id": 0
}
},
{
"$match": {
"contract.bike.id": {
"$oid": "575b7c0b0419c906e262d54b"
}
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2524780800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2524867200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2524953600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525040000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525126400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525212800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525299200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525385600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525472000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525558400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525644800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525731200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525817600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525904000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2525990400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526076800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526163200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526249600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526336000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526422400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526508800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526595200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526681600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526768000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526854400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2526940800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527027200000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527113600000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527200000000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527286400000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527372800000"
}
}
}
]
}
},
{
"$match": {
"$or": [
{
"contract.lapse": {
"$date": {
"$numberLong": "2527459200000"
}
}
}
]
}
}
],
"options": {
"cursor": true
},
"db": "store",
"collection": "BikeStore"
}
How do I add the dates dynamically into a single $match instead of duplicating the $match stage?
Thanks for your help!
Each time you call $qb->match(), you create a new $match stage. This should do:
$match = $qb->match();
for ($i = 0; $i <= $days; $i++) { // $i from 0 up to $days
$match->addOr(/* ... */);
}
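For completeness, here is a hedged sketch of the full method with that fix applied. It reuses the exact builder calls and BSON classes already shown in the question (createAggregationBuilder, matchExpr, ObjectId, UTCDateTime), so treat it as a sketch rather than a verified drop-in implementation:
public function hasContracts(string $bikeId, \DateTime $from, \DateTime $till): bool
{
    $filterDate = \DateTimeImmutable::createFromMutable($from);
    $days = $from->diff($till)->days;

    $qb = $this->createAggregationBuilder();
    $qb->unwind('$contracts');
    $qb->project()
        ->field('contract')->expression('$contracts')
        ->field('_id')->expression(0);
    $qb->match()->field('contract.bike.id')->equals(new ObjectId($bikeId));

    // Build ONE $match stage and add one $or branch per day in the range.
    $match = $qb->match();
    for ($i = 0; $i <= $days; $i++) {
        $day = $filterDate
            ->add(\DateInterval::createFromDateString(sprintf('%d day', $i)))
            ->setTime(0, 0);
        $match->addOr(
            $qb->matchExpr()->field('contract.lapse')->equals(
                new UTCDateTime($day->getTimestamp() * 1000)
            )
        );
    }

    return 0 !== $qb->execute()->count();
}
This produces a single $match containing an $or array of dates instead of one $match stage per date.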

REST API query string

I want to filter out the Sum_PKTS values that are lower than 10.
How could I merge the two query strings below? Is it possible?
BTW, the "Sum_PKTS" field is a sum over "field": "Packet".
The goal is to filter the local IPs, aggregate the "Packet" field, and finally filter out the Sum_PKTS values lower than 10.
{
"range":{
"Sum_PKTS":{
"gte": 10
}
}
}
--
GET /_search
{
"size" : 0,
"query": {
"bool": {
"should": [
{
"match":{"IPV4_DST_ADDR":"192.168.0.0/16"}
},
{
"match":{"IPV4_SRC_ADDR":"192.168.0.0/16"}
}
],
"minimum_should_match": 1,
"must":[
{
"range":{
"#timestamp":{
"gte":"now-5m"
}
}
}
]
}
},
"aggs": {
"DST_Local_IP": {
"filter": {
"bool": {
"filter": {
"match":{"IPV4_DST_ADDR":"192.168.0.0/16"}
}
}
},
"aggs": {
"genres":{
"terms" : {
"field" : "IPV4_DST_ADDR" ,
"order" : { "Sum_PKTS" : "desc" }
},
"aggs":{
"Sum_PKTS": {
"sum" : { "field" : "Packet" }
}
}
}
}
},
"SRC_Local_IP": {
"filter": {
"bool": {
"filter": {
"match":{"IPV4_SRC_ADDR":"192.168.0.0/16"}
}
}
},
"aggs": {
"genres":{
"terms" : {
"field" : "IPV4_SRC_ADDR" ,
"order" : { "Sum_PKTS" : "desc" }
},
"aggs":{
"Sum_PKTS": {
"sum" : { "field" : "Packet" }
}
}
}
}
}
}
}
Thank you in advance!
You can achieve what you want using a bucket selector pipeline aggregation (see the two Sum_PKTS_gte_10 aggregations below):
{
"size": 0,
"query": {
"bool": {
"should": [
{
"match": {
"IPV4_DST_ADDR": "192.168.0.0/16"
}
},
{
"match": {
"IPV4_SRC_ADDR": "192.168.0.0/16"
}
}
],
"minimum_should_match": 1,
"must": [
{
"range": {
"#timestamp": {
"gte": "now-5m"
}
}
}
]
}
},
"aggs": {
"DST_Local_IP": {
"filter": {
"bool": {
"filter": {
"match": {
"IPV4_DST_ADDR": "192.168.0.0/16"
}
}
}
},
"aggs": {
"genres": {
"terms": {
"field": "IPV4_DST_ADDR",
"order": {
"Sum_PKTS": "desc"
}
},
"aggs": {
"Sum_PKTS": {
"sum": {
"field": "Packet"
}
},
"Sum_PKTS_gte_10": {
"bucket_selector": {
"buckets_path": {
"sum_packets": "Sum_PKTS"
},
"script": "params.sum_packets >= 10"
}
}
}
}
}
},
"SRC_Local_IP": {
"filter": {
"bool": {
"filter": {
"match": {
"IPV4_SRC_ADDR": "192.168.0.0/16"
}
}
}
},
"aggs": {
"genres": {
"terms": {
"field": "IPV4_SRC_ADDR",
"order": {
"Sum_PKTS": "desc"
}
},
"aggs": {
"Sum_PKTS": {
"sum": {
"field": "Packet"
}
},
"Sum_PKTS_gte_10": {
"bucket_selector": {
"buckets_path": {
"sum_packets": "Sum_PKTS"
},
"script": "params.sum_packets >= 10"
}
}
}
}
}
}
}
}

filter range date elasticsearch

This is what my data looks like:
{
"name": "thename",
"openingTimes": {
"monday": [
{
"start": "10:00",
"end": "14:00"
},
{
"start": "19:00",
"end": "02:30"
}
]
}
}
I want to query this document for places open on Monday between 13:00 and 14:00.
I tried this filter, but it doesn't return my document:
{
"filter": {
"range": {
"openingTimes.monday.start": {
"lte": "13:00"
},
"openingTimes.monday.end": {
"gte": "14:00"
}
}
}
}
If I simply say open on Monday at 13:00, it works:
{
"filter": {
"range": {
"openingTimes.monday.start": {
"lte": "13:00"
}
}
}
}
Or even closing on Monday from 14:00 works too:
{
"filter": {
"range": {
"openingTimes.monday.start": {
"gte": "14:00"
}
}
}
}
But combining both of them doesn't give me anything. How can I create a filter meaning open on Monday between 13:00 and 14:00?
EDIT
This is how I mapped the openingTimes field:
{
"properties": {
"monday": {
"type": "nested",
"properties": {
"start": {"type": "date","format": "hour_minute"},
"end": {"type": "date","format": "hour_minute"}
}
}
}
}
SOLUTION (@DanTuffery)
Based on @DanTuffery's answer I changed my filter to his (which works perfectly) and added the type definition of my openingTimes attribute.
For the record, I am using Elasticsearch as my primary DB through Ruby on Rails with the following gems:
gem 'elasticsearch-rails', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'
gem 'elasticsearch-model', git: 'git://github.com/elasticsearch/elasticsearch-rails.git'
gem 'elasticsearch-persistence', git: 'git://github.com/elasticsearch/elasticsearch-rails.git', require: 'elasticsearch/persistence/model'
Here is what my openingTimes attribute's mapping looks like:
attribute :openingTimes, Hash, mapping: {
type: :object,
properties: {
monday: {
type: :nested,
properties: {
start:{type: :date, format: 'hour_minute'},
end: {type: :date, format: 'hour_minute'}
}
},
tuesday: {
type: :nested,
properties: {
start:{type: :date, format: 'hour_minute'},
end: {type: :date, format: 'hour_minute'}
}
},
...
...
}
}
And here is how I implemented his filter:
def self.openedBetween startTime, endTime, day
self.search filter: {
nested: {
path: "openingTimes.#{day}",
filter: {
bool: {
must: [
{range: {"openingTimes.#{day}.start"=> {lte: startTime}}},
{range: {"openingTimes.#{day}.end" => {gte: endTime}}}
]
}
}
}
}
end
First create your mapping with the openingTimes object at the top level.
/PUT http://localhost:9200/demo/test/_mapping
{
"test": {
"properties": {
"openingTimes": {
"type": "object",
"properties": {
"monday": {
"type": "nested",
"properties": {
"start": {
"type": "date",
"format": "hour_minute"
},
"end": {
"type": "date",
"format": "hour_minute"
}
}
}
}
}
}
}
}
Index your document
/POST http://localhost:9200/demo/test/1
{
"name": "thename",
"openingTimes": {
"monday": [
{
"start": "10:00",
"end": "14:00"
},
{
"start": "19:00",
"end": "02:30"
}
]
}
}
With a nested filter query you can search for the document with the start and end fields within boolean range queries:
/POST http://localhost:9200/demo/test/_search
{
"query": {
"filtered": {
"query": {
"match_all": {}
},
"filter": {
"nested": {
"path": "openingTimes.monday",
"filter": {
"bool": {
"must": [
{
"range": {
"openingTimes.monday.start": {
"lte": "13:00"
}
}
},
{
"range": {
"openingTimes.monday.end": {
"gte": "14:00"
}
}
}
]
}
}
}
}
}
}
}