How to pull an element in sub of sub array in MongoDB?

I want to pull an object from com_address where "add_id":111:
{
"_id" : ObjectId("5f7b6b4e327e2111883909f3"),
"FirstName" : "abc",
"LastName" : "abc",
"DateOfBirth" : "05/09/2020",
"gender" : "M",
"address" : {
"country" : "string",
"state" : "string",
"city" : [
{
"id" : 15,
"type" : "string",
"com_address" : [
{
"add_id" : 113,
"street" : "string",
"house_no" : "string",
"landmark" : "string"
},
{
"add_id" : 114,
"street" : "string",
"house_no" : "string",
"landmark" : "string"
}
]
},
{
"id" : 16,
"type" : "string",
"com_address" : [
{
"add_id" : 110,
"street" : "string",
"house_no" : "string",
"landmark" : "string"
},
{
"add_id" : 111,
"street" : "string",
"house_no" : "string",
"landmark" : "string"
}
]
}
]
}
}
This is my query:
db.getCollection('student').update({"_id" : ObjectId("5f7b6b4e327e2111883909f3")},{$pull:{"address.city":{"com_address.add_id":111}}})
With this query, the whole city element with id 16 gets deleted, instead of just the matching object inside its com_address array.
How can I pull an object from com_address by its add_id?

Try this, which uses the all-positional operator $[]:
db.getCollection('student').update({
"_id" : ObjectId("5f7b6b4e327e2111883909f3")
},
{
$pull: { "address.city.$[].com_address": { "add_id": 111 }}
})
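If you want to touch only the city element that actually contains that address, rather than having $pull scan every element of city, an arrayFilters variant should also work. A minimal sketch, assuming MongoDB 3.6+ and the same document shown above:
// Pull add_id 111 only from city elements whose com_address contains it.
// The identifier "c" is arbitrary; arrayFilters requires MongoDB 3.6+.
db.getCollection('student').update(
  { "_id": ObjectId("5f7b6b4e327e2111883909f3") },
  { $pull: { "address.city.$[c].com_address": { "add_id": 111 } } },
  { arrayFilters: [ { "c.com_address.add_id": 111 } ] }
)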

Related

mongodb $lookup return empty array

I'm new to MongoDB. In this question I have 2 collections: one is selected_date, the other is global_mobility_report. What I'm trying to do is find the entries in global_mobility_report whose date is in selected_date, so I use $lookup to join the two collections.
date_selected:
{
"_id" : ObjectId("5f60d81ba43174cf172ebfdc"),
"date" : ISODate("2020-05-22T00:00:00.000+08:00")
},
{
"_id" : ObjectId("5f60d81ba43174cf172ebfdd"),
"date" : ISODate("2020-05-23T00:00:00.000+08:00")
},
{
"_id" : ObjectId("5f60d81ba43174cf172ebfde"),
"date" : ISODate("2020-05-24T00:00:00.000+08:00")
},
{
"_id" : ObjectId("5f60d81ba43174cf172ebfdf"),
"date" : ISODate("2020-05-25T00:00:00.000+08:00")
},
{
"_id" : ObjectId("5f60d81ba43174cf172ebfe0"),
"date" : ISODate("2020-05-26T00:00:00.000+08:00")
},
{
"_id" : ObjectId("5f60d81ba43174cf172ebfe1"),
"date" : ISODate("2020-05-27T00:00:00.000+08:00")
}
global_mobility_report:
{
"_id" : ObjectId("5f49fb013acddb5eec37f99e"),
"country_region_code" : "AE",
"country_region" : "United Arab Emirates",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-15",
"retail_and_recreation_percent_change_from_baseline" : "0",
"grocery_and_pharmacy_percent_change_from_baseline" : "4",
"parks_percent_change_from_baseline" : "5",
"transit_stations_percent_change_from_baseline" : "0",
"workplaces_percent_change_from_baseline" : "2",
"residential_percent_change_from_baseline" : "1"
},
{
"_id" : ObjectId("5f49fb013acddb5eec37f99f"),
"country_region_code" : "AE",
"country_region" : "United Arab Emirates",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-16",
"retail_and_recreation_percent_change_from_baseline" : "1",
"grocery_and_pharmacy_percent_change_from_baseline" : "4",
"parks_percent_change_from_baseline" : "4",
"transit_stations_percent_change_from_baseline" : "1",
"workplaces_percent_change_from_baseline" : "2",
"residential_percent_change_from_baseline" : "1"
},
{
"_id" : ObjectId("5f49fb013acddb5eec37f9a0"),
"country_region_code" : "AE",
"country_region" : "United Arab Emirates",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-17",
"retail_and_recreation_percent_change_from_baseline" : "-1",
"grocery_and_pharmacy_percent_change_from_baseline" : "1",
"parks_percent_change_from_baseline" : "5",
"transit_stations_percent_change_from_baseline" : "1",
"workplaces_percent_change_from_baseline" : "2",
"residential_percent_change_from_baseline" : "1"
},
{
"_id" : ObjectId("5f49fb013acddb5eec37f9a1"),
"country_region_code" : "AE",
"country_region" : "United Arab Emirates",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-18",
"retail_and_recreation_percent_change_from_baseline" : "-2",
"grocery_and_pharmacy_percent_change_from_baseline" : "1",
"parks_percent_change_from_baseline" : "5",
"transit_stations_percent_change_from_baseline" : "0",
"workplaces_percent_change_from_baseline" : "2",
"residential_percent_change_from_baseline" : "1"
}
When I try to find all entries in global_mobility_report whose 'date' matches one in selected_date (I have converted the string to date format in global_mobility_report), it returns an empty array.
db.global_mobility_report.aggregate([
{$match:{country_region:"Indonesia"}},
{$addFields: {"dateconverted": {$convert: { input: "$date", to: "date", onError: "onErrorExpr", onNull: "onNullExpr"}}}},
{
$lookup:
{
from: "selected_date",
localField: "dateconverted",
foreignField: "date",
as: "selected_dates" // empty
}
}
])
The output is:
{
"_id" : ObjectId("5f49fd6a3acddb5eec4427bb"),
"country_region_code" : "ID",
"country_region" : "Indonesia",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-15",
"retail_and_recreation_percent_change_from_baseline" : "-2",
"grocery_and_pharmacy_percent_change_from_baseline" : "-2",
"parks_percent_change_from_baseline" : "-8",
"transit_stations_percent_change_from_baseline" : "1",
"workplaces_percent_change_from_baseline" : "5",
"residential_percent_change_from_baseline" : "1",
"dateconverted" : ISODate("2020-02-15T08:00:00.000+08:00"),
"selected_dates" : [ ]
},
{
"_id" : ObjectId("5f49fd6a3acddb5eec4427bc"),
"country_region_code" : "ID",
"country_region" : "Indonesia",
"sub_region_1" : "",
"sub_region_2" : "",
"metro_area" : "",
"iso_3166_2_code" : "",
"census_fips_code" : "",
"date" : "2020-02-16",
"retail_and_recreation_percent_change_from_baseline" : "-3",
"grocery_and_pharmacy_percent_change_from_baseline" : "-3",
"parks_percent_change_from_baseline" : "-7",
"transit_stations_percent_change_from_baseline" : "-4",
"workplaces_percent_change_from_baseline" : "2",
"residential_percent_change_from_baseline" : "2",
"dateconverted" : ISODate("2020-02-16T08:00:00.000+08:00"),
"selected_dates" : [ ]
}
The reason you are getting an empty array is that dateconverted does not match the date field.
The $lookup stage does an equality match between the localField and the foreignField, so basically, with an example:
db.users.insertMany([
{ email: "test#example.com", userId: 0 },
{ email: "test2#example.com", userId: 1 },
{ email: "test3#example.com", userId: 2 },
{ email: "test3#example.com", userId: 3 }
]);
db.posts.insertMany([
{ by: 0, post: "hello world" },
{ by: 0 , post: "hello earthlings" },
{ by: 3, post: "test test test"}
]);
db.posts.aggregate([
{
$lookup: {
from: "users",
localField: "by",
foreignField: "userId",
as: "list_of_post"
}
}
]).toArray();
The output will be what it is supposed to be, because the localField matched the foreignField:
[
{
"_id" : ObjectId("5f60f6859a6df3133b325eb0"),
"by" : 0,
"post" : "hello world",
"list_of_post" : [
{
"_id" : ObjectId("5f60f6849a6df3133b325eac"),
"email" : "test#example.com",
"userId" : 0
}
]
},
{
"_id" : ObjectId("5f60f6859a6df3133b325eb1"),
"by" : 0,
"post" : "hello earthlings",
"list_of_post" : [
{
"_id" : ObjectId("5f60f6849a6df3133b325eac"),
"email" : "test#example.com",
"userId" : 0
}
]
},
{
"_id" : ObjectId("5f60f6859a6df3133b325eb2"),
"by" : 3,
"post" : "test test test",
"list_of_post" : [
{
"_id" : ObjectId("5f60f6849a6df3133b325eaf"),
"email" : "test3#example.com",
"userId" : 3
}
]
}
]
Let's mimic a situation where they do not match:
db.posts.drop();
db.posts.insertMany([
{ by: 20, post: "hello world" },
{ by: 23 , post: "hello earthlings" },
{ by: 50, post: "test test test"}
]);
Running the same aggregation again, we get an empty array:
[
{
"_id" : ObjectId("5f60f83344304796ae700b4d"),
"by" : 20,
"post" : "hello world",
"list_of_post" : [ ]
},
{
"_id" : ObjectId("5f60f83344304796ae700b4e"),
"by" : 23,
"post" : "hello earthlings",
"list_of_post" : [ ]
},
{
"_id" : ObjectId("5f60f83344304796ae700b4f"),
"by" : 50,
"post" : "test test test",
"list_of_post" : [ ]
}
]
So, back to your question: the reason for the empty array is that the dateconverted field does not match the date field. Let's take a look at an example.
In the second output document, dateconverted is
ISODate("2020-02-16T08:00:00.000+08:00"), and checking the date_selected documents, there is no date field that corresponds to this value. But let's manually insert it, so you can see exactly what I am talking about.
db.date_selected.insert({
"_id" : ObjectId(),
"date": ISODate("2020-02-16T08:00:00.000+08:00")
});
Running the aggregation pipeline again will still leave selected_dates as an empty array. The other thing to note is that the month/day/year part of the ISODate values does not match any document in your question either. So you have to devise another means of running the comparison, because the $addFields stage in the aggregation pipeline will be affected by the timezone and other issues as well.
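One way around this, sketched below and not tested against your data, is to compare plain calendar days instead of exact timestamps: render the ISODate values in selected_date back into "YYYY-MM-DD" strings in the +08:00 timezone (an assumption based on your sample data) and match them against the raw date strings in global_mobility_report. This assumes MongoDB 3.6+ (for $lookup with a pipeline and the timezone option of $dateToString) and that the lookup collection really is named selected_date.
db.global_mobility_report.aggregate([
  { $match: { country_region: "Indonesia" } },
  { $lookup: {
      from: "selected_date",
      let: { day: "$date" },   // the raw "2020-02-15"-style string
      pipeline: [
        { $match: { $expr: { $eq: [
            // render the stored ISODate as a calendar day in the +08:00 zone
            { $dateToString: { format: "%Y-%m-%d", date: "$date", timezone: "+08:00" } },
            "$$day"
        ] } } }
      ],
      as: "selected_dates"
  } }
])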

Unable to load data in druid

I am a newbie with Druid. I am trying to load a very simple dataset in JSON format into Druid. The data contains just one dimension, one metric, and a timestamp. I have successfully been able to load a different dataset into Druid, but somehow I am getting errors for this one.
This is my index file:
{
"type" : "index",
"spec" : {
"dataSchema" : {
"dataSource" : "datatemplate",
"parser" : {
"type" : "string",
"parseSpec" : {
"format" : "json",
"dimensionsSpec" : {
"dimensions" : [
"Loc"
]
},
"timestampSpec" : {
"format" : "auto",
"column" : "Timestamp"
}
}
},
"metricsSpec" : [{"name" : "Qty","type" : "doubleSum","fieldName" : "Qty"}],
"granularitySpec" : {
"type" : "uniform",
"segmentGranularity" : "day",
"queryGranularity" : "none",
"intervals" : ["2016-01-01T00:00:00Z/2030-06-30T00:00:00Z"],
"rollup" : true
}
},
"ioConfig" : {
"type" : "index",
"firehose" : {
"type" : "local",
"baseDir" : "datatemplate/",
"filter" : "datatemplate.json"
},
"appendToExisting" : false
},
"tuningConfig" : {
"type" : "index",
"targetPartitionSize" : 10000000,
"maxRowsInMemory" : 40000,
"forceExtendableShardSpecs" : true
}
}
}
Also here is my dataset in JSON format:
{"Loc": "A", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "A", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "B", "Qty": "2", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "B", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}

swagger file for all possible 'properties'

I need to call an API to load data on an ongoing basis. The API returns different properties for each event. When I create the swagger file it has the properties from a sample return, but in the long run more properties can be added by the source system, and they will not be in the swagger file.
Is there any way to recreate the swagger file dynamically, before the data load, with the additional properties?
The swagger file is generated by Informatica Cloud based on a sample return while testing the connection.
The properties list has a different number of entries depending on the event type.
swagger file:
{"swagger" : "2.0",
"info" : {
"description" : null,
"version" : "1.0.0",
"title" : null,
"termsOfService" : null,
"contact" : null,
"license" : null
},
"host" : "<host>.com",
"basePath" : "/api",
"schemes" : [ "https" ],
"paths" : {
"/2.0" : {
"post" : {
"tags" : [ "events" ],
"summary" : null,
"description" : null,
"operationId" : "events",
"produces" : [ "application/json" ],
"consumes" : [ "application/json" ],
"parameters" : [ {
"name" : "script",
"in" : "query",
"description" : null,
"required" : false,
"type" : "string"
}, {
"name" : "Authorization",
"in" : "header",
"description" : null,
"required" : false,
"type" : "string"
} ],
"responses" : {
"200" : {
"description" : "successful operation",
"schema" : {
"$ref" : "#/definitions/events"
}
}
}
}
}
},
"definitions" : {
"events##properties" : {
"properties" : {
"$app_build_number" : {
"type" : "string"
},
"$app_version_string" : {
"type" : "string"
},
"$carrier" : {
"type" : "string"
},
"$lib_version" : {
"type" : "string"
},
"$manufacturer" : {
"type" : "string"
},
"$model" : {
"type" : "string"
},
"$os" : {
"type" : "string"
},
"$os_version" : {
"type" : "string"
},
"$radio" : {
"type" : "string"
},
"$region" : {
"type" : "string"
},
"$screen_height" : {
"type" : "number",
"format" : "int32"
},
"$screen_width" : {
"type" : "number",
"format" : "int32"
},
"Home Step Enabled" : {
"type" : "string"
},
"Number Of Lifetime Logins" : {
"type" : "number",
"format" : "int32"
},
"Sessions" : {
"type" : "number",
"format" : "int32"
},
"mp_country_code" : {
"type" : "string"
},
"mp_lib" : {
"type" : "string"
}
}
},
"events" : {
"properties" : {
"name" : {
"type" : "string"
},
"distinct_id" : {
"type" : "string"
},
"labels" : {
"type" : "string"
},
"time" : {
"type" : "number",
"format" : "int64"
},
"sampling_factor" : {
"type" : "number",
"format" : "int32"
},
"dataset" : {
"type" : "string"
},
"properties" : {
"$ref" : "#/definitions/events##properties"
}
}
}
}
}
sample return:
{
"name": "Session",
"distinct_id": "1234567890",
"labels": [],
"time": 1520072505000,
"sampling_factor": 1,
"dataset": "$event_data_set",
"properties": {
"$app_build_number": "900",
"$app_version_string": "1.9",
"$carrier": "AT&T",
"$lib_version": "2.0.1",
"$manufacturer": "Apple",
"$model": "iPhone10,6",
"$os": "iOS",
"$os_version": "11.2.6",
"$radio": "LTE",
"$region": "Florida",
"$screen_height": 667,
"$screen_width": 375,
"Number Of Lifetime Logins": 2,
"Session Length": "00h:00m:08s",
"Sessions": 43,
"mp_country_code": "US",
"mp_lib": "swift"
}
}

How to merge two matching objects from different array into one object?

I have a situation where I have a result from an aggregation, and I am getting data in this format:
{
"_id" : ObjectId("5a42432d69cbfed9a410e8ad"),
"bacId" : "BAC0023444",
"cardId" : "2",
"defaultCardOrder" : "2",
"alias" : "Finance",
"label" : "Finance",
"for" : "",
"cardTooltip" : {
"enable" : true,
"text" : ""
},
"dataBlocks" : [
{
"defaultBlockOrder" : "1",
"blockId" : "1",
"data" : "0"
},
{
"defaultBlockOrder" : "2",
"blockId" : "2",
"data" : "0"
},
{
"defaultBlockOrder" : "3",
"blockId" : "3",
"data" : "0"
}
],
"templateBlocks" : [
{
"blockId" : "1",
"label" : "Gross Profit",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"blockId" : "2",
"label" : "Profit Forecast",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"blockId" : "3",
"label" : "Resource Billing",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
}
]
},
{
"_id" : ObjectId("5a42432d69cbfed9a410e8ad"),
"bacId" : "BAC0023444",
"cardId" : "3",
"defaultCardOrder" : "3",
"alias" : "Staffing",
"label" : "Staffing",
"for" : "",
"cardTooltip" : {
"enable" : true,
"text" : ""
},
"dataBlocks" : [
{
"defaultBlockOrder" : "1",
"blockId" : "1",
"data" : "1212"
},
{
"defaultBlockOrder" : "2",
"blockId" : "2",
"data" : "1120"
},
{
"defaultBlockOrder" : "3",
"blockId" : "3",
"data" : "1200"
}
],
"templateBlocks" : [
{
"blockId" : "1",
"label" : "Staffing Planner",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"blockId" : "2",
"label" : "Baseline",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"blockId" : "3",
"label" : "Projected",
"quarter" : "",
"data" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
}
]
}
Now, for each row, I want to compare the two arrays of objects, in this case "dataBlocks" and "templateBlocks", based on their "blockId"s, and I want to get the result in the following format.
{
"_id" : ObjectId("5a42432d69cbfed9a410e8ad"),
"bacId" : "BAC0023444",
"cardId" : "2",
"defaultCardOrder" : "2",
"alias" : "Finance",
"label" : "Finance",
"for" : "",
"cardTooltip" : {
"enable" : true,
"text" : ""
},
"blocks" : [
{
"defaultBlockOrder" : "1",
"blockId" : "1",
"data" : "0",
"label" : "Gross Profit",
"quarter" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"defaultBlockOrder" : "2",
"blockId" : "2",
"data" : "0",
"label" : "Profit Forecast",
"quarter" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"defaultBlockOrder" : "3",
"blockId" : "3",
"data" : "0",
"label" : "Resource Billing",
"quarter" : "",
"dataType" : {
"typeId" : "2"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
}
]
},
{
"_id" : ObjectId("5a42432d69cbfed9a410e8ad"),
"bacId" : "BAC0023444",
"cardId" : "3",
"defaultCardOrder" : "3",
"alias" : "Staffing",
"label" : "Staffing",
"for" : "",
"cardTooltip" : {
"enable" : true,
"text" : ""
},
"dataBlocks" : [
{
"defaultBlockOrder" : "1",
"blockId" : "1",
"data" : "1212",
"label" : "Staffing Planner",
"quarter" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"defaultBlockOrder" : "2",
"blockId" : "2",
"data" : "1120",
"label" : "Baseline",
"quarter" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
},
{
"defaultBlockOrder" : "3",
"blockId" : "3",
"data" : "1200",
"label" : "Projected",
"quarter" : "",
"dataType" : {
"typeId" : "1"
},
"tooltip" : {
"enable" : true,
"text" : ""
}
}
]
}
Is it possible to get this done with MongoDB? I am using 3.4 and trying to achieve this using aggregation.
Thanks in advance.
You can try the below aggregation in 3.6.
The query below iterates over the dataBlocks array and merges each data block element with its template block element. The template block is looked up using $indexOfArray, which locates the array index with the matching block id, and $arrayElemAt, which accesses the element at the found index.
db.collection_name.aggregate([{"$addFields":{
"blocks":{
"$map":{
"input":"$dataBlocks",
"in":{
"$mergeObjects":[
"$$this",
{"$arrayElemAt":[
"$templateBlocks",
{"$indexOfArray":["$templateBlocks.blockId","$$this.blockId"]}
]
}
]
}
}
}
}}])
For 3.4, replace $mergeObjects with a combination of $arrayToObject, $objectToArray and $concatArrays to merge each array element from both arrays.
db.collection_name.aggregate([{"$addFields":{
"blocks":{
"$map":{
"input":"$dataBlocks",
"in":{
"$arrayToObject":{
"$concatArrays":[
{"$objectToArray":"$$this"},
{"$objectToArray":{
"$arrayElemAt":[
"$templateBlocks",
{"$indexOfArray":["$templateBlocks.blockId","$$this.blockId"]
}
]
}}
]
}
}
}
}
}}])
You can use a $project stage with exclusion as the last stage to remove the original array fields from the output.
{"$project":{"templateBlocks":0,"dataBlocks":0}}
The following query does the job:
db.merge.aggregate([
// unwind twice
{$unwind: "$templateBlocks"},
{$unwind: "$dataBlocks"},
// get rid of documents where dataBlocks.blockId and
// templateBlocks.blockId are not equal
{$redact: {$cond: [{
$eq: [
"$dataBlocks.blockId",
"$templateBlocks.blockId"
]
},
"$$KEEP",
"$$PRUNE"
]
}
},
// merge dataBlocks and templateBlocks into a single document
{$project: {
bacId: 1,
cardId: 1,
defaultCardOrder: 1,
alias: 1,
label: 1,
for: 1,
cardTooltip: 1,
dataBlocks: {
defaultBlockOrder: "$dataBlocks.defaultBlockOrder",
blockId: "$dataBlocks.blockId",
data: "$dataBlocks.data",
label: "$templateBlocks.label",
quarter: "$templateBlocks.quarter",
data: "$templateBlocks.data",
dataType: "$templateBlocks.dataType",
tooltip: "$templateBlocks.tooltip"
}
}
},
// group to put correspondent dataBlocks to an array
{$group: {
_id: {
_id: "$_id",
bacId: "$bacId",
cardId: "$cardId",
defaultCardOrder: "$defaultCardOrder",
alias: "$alias",
label: "$label",
for: "$for",
cardTooltip: "$cardTooltip"
},
dataBlocks: {$push: "$dataBlocks" }
}
},
// remove the unnecessary _id object
{$project: {
_id: "$_id._id",
bacId: "$_id.bacId",
cardId: "$_id.cardId",
defaultCardOrder: "$_id.defaultCardOrder",
alias: "$_id.alias",
label: "$_id.label",
for: "$_id.for",
cardTooltip: "$_id.cardTooltip",
dataBlocks: "$dataBlocks"
}
}
])
Take into account that performance depends on the size of your data set: the query unwinds twice, which may produce a significant number of intermediate documents (for example, a document with 3 dataBlocks and 3 templateBlocks expands into 9 documents after the two $unwind stages, of which only 3 survive the $redact).

Elasticsearch index operation fails on complex object

I am indexing a data stream to Elasticsearch and I cannot figure out how to normalize the incoming data so that it indexes without error. I have a mapping type "getdatavalues", which is a meta-data query. This meta-data query can return very different-looking responses, but I'm not seeing the difference. The error I get is:
{"index":{"_index":"ens_event-2016.03.11","_type":"getdatavalues","_id":"865800029798177_2016_03_11_03_18_12_100037","status":400,"error":"MapperParsingException[object mapping for [getdatavalues] tried to parse field [output] as object, but got EOF, has a concrete value been provided to it?]"}}
when performing:
curl -XPUT 'http://192.168.99.100:80/es/ens_event-2016.03.11/getdatavalues/865800029798177_2016_03_11_03_18_12_100037' -d '{
"type": "getDataValues",
"input": {
"deviceID": {
"IMEI": "865800029798177",
"serial-number": "64180258"
},
"handle": 644,
"exprCode": "200000010300140000080001005f00a700000000000000",
"noRollHandle": "478669308-578452",
"transactionID": 290
},
"timestamp": "2016-03-11T03:18:12.000Z",
"handle": 644,
"output": {
"noRollPubSessHandle": "478669308-578740",
"publishSessHandle": 1195,
"status": true,
"matchFilter": {
"prefix": "publicExpr.operatorDefined.commercialIdentifier.FoodSvcs.Restaurant.\"A&C Kabul Curry\".\"Rooster Street\"",
"argValues": {
"event": "InternationalEvent",
"hasEvent": "anyEvent"
}
},
"transactionID": 290,
"validFor": 50
}
}'
Here's what Elasticsearch has for the mapping:
"getdatavalues" : {
"dynamic_templates" : [ {
"strings" : {
"mapping" : {
"index" : "not_analyzed",
"type" : "string"
},
"match_mapping_type" : "string"
}
} ],
"properties" : {
"handle" : {
"type" : "long"
},
"input" : {
"properties" : {
"deviceID" : {
"properties" : {
"IMEI" : {
"type" : "string",
"index" : "not_analyzed"
},
"serial-number" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"exprCode" : {
"type" : "string",
"index" : "not_analyzed"
},
"handle" : {
"type" : "long"
},
"noRollHandle" : {
"type" : "string",
"index" : "not_analyzed"
},
"serviceVersion" : {
"type" : "string",
"index" : "not_analyzed"
},
"transactionID" : {
"type" : "long"
}
}
},
"output" : {
"properties" : {
"matchFilter" : {
"properties" : {
"argValues" : {
"properties" : {
"Interests" : {
"type" : "object"
},
"MerchantId" : {
"type" : "string",
"index" : "not_analyzed"
},
"Queue" : {
"type" : "string",
"index" : "not_analyzed"
},
"Vibe" : {
"type" : "string",
"index" : "not_analyzed"
},
"event" : {
"properties" : {
"event" : {
"type" : "string",
"index" : "not_analyzed"
},
"hasEvent" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"hasEvent" : {
"type" : "string",
"index" : "not_analyzed"
},
"interests" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
"prefix" : {
"type" : "string",
"index" : "not_analyzed"
},
"transactionID" : {
"type" : "long"
},
"validFor" : {
"type" : "long"
}
}
},
"noRollPubSessHandle" : {
"type" : "string",
"index" : "not_analyzed"
},
"publishSessHandle" : {
"type" : "long"
},
"status" : {
"type" : "boolean"
},
"transactionID" : {
"type" : "long"
},
"validFor" : {
"type" : "long"
}
}
},
"timestamp" : {
"type" : "date",
"format" : "dateOptionalTime"
},
"type" : {
"type" : "string",
"index" : "not_analyzed"
}
}
},
Looks like the argValues object doesn't quite agree with your mapping:
"argValues": {
"event": "InternationalEvent",
"hasEvent": "anyEvent"
}
Either this:
"argValues": {
"event": {
"event": "InternationalEvent"
},
"hasEvent": "anyEvent"
}
Or this:
"argValues": {
"event": {
"event": "InternationalEvent",
"hasEvent": "anyEvent"
}
}
Both would seem to be valid.