Exception thrown when inserting data into Druid via Tranquility

I'm pushing a Kafka stream into Druid via Tranquility. The Kafka version is 0.9.1, Tranquility is 0.8, and Druid is 0.10.
Tranquility starts fine when no messages are produced, but when the producer sends a message I get a JsonMappingException like this:
java.lang.IllegalArgumentException: Can not deserialize instance of java.util.ArrayList out of VALUE_STRING token
at [Source: N/A; line: -1, column: -1]
at com.fasterxml.jackson.databind.ObjectMapper._convert(ObjectMapper.java:2774) ~[com.fasterxml.jackson.core.jackson-databind-2.4.6.jar:2.4.6]
at com.fasterxml.jackson.databind.ObjectMapper.convertValue(ObjectMapper.java:2700) ~[com.fasterxml.jackson.core.jackson-databind-2.4.6.jar:2.4.6]
at com.metamx.tranquility.druid.DruidBeams$.makeFireDepartment(DruidBeams.scala:406) ~[io.druid.tranquility-core-0.8.0.jar:0.8.0]
at com.metamx.tranquility.druid.DruidBeams$.fromConfigInternal(DruidBeams.scala:291) ~[io.druid.tranquility-core-0.8.0.jar:0.8.0]
at com.metamx.tranquility.druid.DruidBeams$.fromConfig(DruidBeams.scala:199) ~[io.druid.tranquility-core-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.KafkaBeamUtils$.createTranquilizer(KafkaBeamUtils.scala:40) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.KafkaBeamUtils.createTranquilizer(KafkaBeamUtils.scala) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.writer.TranquilityEventWriter.<init>(TranquilityEventWriter.java:64) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.writer.WriterController.createWriter(WriterController.java:171) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.writer.WriterController.getWriter(WriterController.java:98) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at com.metamx.tranquility.kafka.KafkaConsumer$2.run(KafkaConsumer.java:231) ~[io.druid.tranquility-kafka-0.8.0.jar:0.8.0]
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471) [na:1.7.0_67]
at java.util.concurrent.FutureTask.run(FutureTask.java:262) [na:1.7.0_67]
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145) [na:1.7.0_67]
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615) [na:1.7.0_67]
at java.lang.Thread.run(Thread.java:745) [na:1.7.0_67]
My kafka.json is:
{
  "dataSources" : {
    "stock-index-topic" : {
      "spec" : {
        "dataSchema" : {
          "dataSource" : "stock-index-topic",
          "parser" : {
            "type" : "string",
            "parseSpec" : {
              "timestampSpec" : {
                "column" : "timestamp",
                "format" : "auto"
              },
              "dimensionsSpec" : {
                "dimensions" : ["code","name","acronym","market","tradeVolume","totalValueTraded","preClosePx","openPrice","highPrice","lowPrice","tradePrice","closePx","timestamp"],
                "dimensionExclusions" : [
                  "timestamp",
                  "value"
                ]
              },
              "format" : "json"
            }
          },
          "granularitySpec" : {
            "type" : "uniform",
            "segmentGranularity" : "DAY",
            "queryGranularity" : "none",
            "intervals" : "no"
          },
          "metricsSpec" : [
            {
              "name" : "firstPrice",
              "type" : "doubleFirst",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "lastPrice",
              "type" : "doubleLast",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "minPrice",
              "type" : "doubleMin",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "maxPrice",
              "type" : "doubleMax",
              "fieldName" : "tradePrice"
            }
          ]
        },
        "ioConfig" : {
          "type" : "realtime"
        },
        "tuningConfig" : {
          "type" : "realtime",
          "maxRowsInMemory" : "100000",
          "intermediatePersistPeriod" : "PT10M",
          "windowPeriod" : "PT10M"
        }
      },
      "properties" : {
        "task.partitions" : "1",
        "task.replicants" : "1",
        "topicPattern" : "stock-index-topic"
      }
    }
  },
  "properties" : {
    "zookeeper.connect" : "localhost:2181",
    "druid.discovery.curator.path" : "/druid/discovery",
    "druid.selectors.indexing.serviceName" : "druid/overlord",
    "commit.periodMillis" : "15000",
    "consumer.numThreads" : "2",
    "kafka.zookeeper.connect" : "localhost:2181",
    "kafka.group.id" : "tranquility-kafka"
  }
}
I used kafka-console-consumer to check the data; it looks like this:
{"code": "399982", "name": "500等权", "acronym": "500DQ", "market": "102", "tradeVolume": 0, "totalValueTraded": 0.0, "preClosePx": 0.0, "openPrice": 0.0, "highPrice": 0.0, "lowPrice": 0.0, "tradePrice": 7184.7142, "closePx": 0.0, "timestamp": "2017-05-16T09:06:39.000+08:00"}
Any idea why? Thanks.

"metricsSpec" : [
{
"name" : "firstPrice",
"type" : "doubleFirst",
"fieldName" : "tradePrice"
},{
"name" : "lastPrice",
"type" : "doubleLast",
"fieldName" : "tradePrice"
}, {
"name" : "minPrice",
"type" : "doubleMin",
"fieldName" : "tradePrice"
}, {
"name" : "maxPrice",
"type" : "doubleMax",
"fieldName" : "tradePrice"
}
]
},
it's wrong.The document said :
First and Last aggregator cannot be used in ingestion spec, and should only be specified as part of queries.
So,the issue is solved.
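For comparison, a minimal sketch of an ingestion-time metricsSpec without the first/last aggregators (dropping firstPrice/lastPrice is my reading of the fix, not spelled out in the original answer; the latest price can still be obtained at query time with a doubleLast query aggregator):
"metricsSpec" : [
  { "name" : "minPrice", "type" : "doubleMin", "fieldName" : "tradePrice" },
  { "name" : "maxPrice", "type" : "doubleMax", "fieldName" : "tradePrice" }
]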

Related

Unable to load data in druid

I am a newbie with Druid, trying to load a very simple dataset in JSON format into it. The data contains just one dimension, one metric, and a timestamp. I was able to load a different dataset into Druid successfully, but somehow I am getting errors for this one.
This is my index file:
{
  "type" : "index",
  "spec" : {
    "dataSchema" : {
      "dataSource" : "datatemplate",
      "parser" : {
        "type" : "string",
        "parseSpec" : {
          "format" : "json",
          "dimensionsSpec" : {
            "dimensions" : [
              "Loc"
            ]
          },
          "timestampSpec" : {
            "format" : "auto",
            "column" : "Timestamp"
          }
        }
      },
      "metricsSpec" : [{"name" : "Qty", "type" : "doubleSum", "fieldName" : "Qty"}],
      "granularitySpec" : {
        "type" : "uniform",
        "segmentGranularity" : "day",
        "queryGranularity" : "none",
        "intervals" : ["2016-01-01T00:00:00Z/2030-06-30T00:00:00Z"],
        "rollup" : true
      }
    },
    "ioConfig" : {
      "type" : "index",
      "firehose" : {
        "type" : "local",
        "baseDir" : "datatemplate/",
        "filter" : "datatemplate.json"
      },
      "appendToExisting" : false
    },
    "tuningConfig" : {
      "type" : "index",
      "targetPartitionSize" : 10000000,
      "maxRowsInMemory" : 40000,
      "forceExtendableShardSpecs" : true
    }
  }
}
Also here is my dataset in JSON format:
{"Loc": "A", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "A", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "B", "Qty": "2", "Timestamp": "2017-12-01T00:00:00Z"}
{"Loc": "B", "Qty": "1", "Timestamp": "2017-12-01T00:00:00Z"}

String to ISO Date

I've looked this up a few times and no answer has worked so far. I want to turn a string date into an ISODate: I don't want it converted to or from UTC, I just want the date value that is stored as a string to be stored as a date.
I don't want UTC because I want to query against, and calculate based upon, a time of day (for example, 12pm) anywhere in the world; I'm summing by hour, day, week, and month, and comparing data across many countries based upon local time.
Currently I have a date as a string which I want to add to a new document with a new name:
"transaction_date": "$object_raw_origin_data.register_sales.sale_date"
The date value, stored as a string, is shown as:
"sale_date" : "2018-03-13 20:05:46"
I want it to be: "transaction_date" : ISODate("2018-03-13 20:05:46")
The date and time must read 2018-03-13 20:05:46 not converted to UTC.
Thanks, Matt
Origin Data JSON
{
  "object_category" : "application",
  "object_type" : "register-sales-24-months",
  "object_origin" : "vend",
  "tenant_uuid" : "00000000-0000-0009-9999-999999999999",
  "party_uuid" : "8d519765-05d2-469f-ad35-d7a22fa9df2f",
  "subscription_uuid" : "0",
  "connection_uuid" : "6ed9bd79-d9c5-4296-a821-7e15b1c69e6c",
  "status" : "",
  "object_created" : ISODate("2018-03-15T21:40:57.158+0000"),
  "object_raw_origin_data" : {
    "pagination" : {
      "results" : NumberInt(75964),
      "page" : NumberInt(1),
      "page_size" : NumberInt(200),
      "pages" : NumberInt(380)
    },
    "register_sales" : {
      "id" : "776a66f2-993c-b372-11e8-26f9d3c1bdde",
      "source" : "USER",
      "source_id" : "",
      "register_id" : "02dcd191-ae55-11e6-edd8-ec8dce1d9e1c",
      "market_id" : "3",
      "customer_id" : "02d59481-b67d-11e5-f667-b08185e8f6d5",
      "customer_name" : "",
      "customer" : {
        "id" : "02d59481-b67d-11e5-f667-b08185e8f6d5",
        "name" : "",
        "customer_code" : "WALKIN",
        "customer_group_id" : "02d59481-b67d-11e5-f667-b08185e893f8",
        "customer_group_name" : "All Customers",
        "updated_at" : "2016-01-01 12:16:44",
        "deleted_at" : "",
        "balance" : "0",
        "year_to_date" : "0",
        "date_of_birth" : "",
        "sex" : "",
        "custom_field_1" : "",
        "custom_field_2" : "",
        "custom_field_3" : "",
        "custom_field_4" : "",
        "note" : "",
        "contact" : {}
      },
      "user_id" : "02d59481-b655-11e5-f667-dca974edc4ea",
      "user_name" : "Alvaro Velosa",
      "sale_date" : "2018-03-13 20:05:46",
      "created_at" : "2018-03-13 20:06:00",
      "updated_at" : "2018-03-13 20:06:00",
      "total_price" : 4.5,
      "total_cost" : 3.34,
      "total_tax" : NumberInt(0),
      "tax_name" : "No Tax",
      "note" : "",
      "status" : "CLOSED",
      "short_code" : "wqgsgi",
      "invoice_number" : "Masonic2108Temple",
      "accounts_transaction_id" : "",
      "return_for" : "",
      "register_sale_products" : [
        {
          "id" : "776a66f2-993c-b372-11e8-26f9e92cb9bc",
          "product_id" : "02dcd191-ae55-11e7-f130-9d4f4bcd91b1",
          "register_id" : "02dcd191-ae55-11e6-edd8-ec8dce1d9e1c",
          "sequence" : "0",
          "handle" : "LAGERDRAUGHT300",
          "sku" : "10287",
          "name" : "LAGER DRAUGHT £3.00",
          "quantity" : NumberInt(1),
          "price" : 3.5,
          "cost" : 2.74,
          "price_set" : NumberInt(0),
          "discount" : NumberInt(0),
          "loyalty_value" : NumberInt(0),
          "tax" : NumberInt(0),
          "tax_id" : "02d59481-b67d-11e5-f667-b08185ec2871",
          "tax_name" : "No Tax",
          "tax_rate" : NumberInt(0),
          "tax_total" : NumberInt(0),
          "price_total" : 3.5,
          "display_retail_price_tax_inclusive" : "0",
          "status" : "CONFIRMED",
          "attributes" : [
            {
              "name" : "line_note",
              "value" : ""
            }
          ]
        },
        {
          "id" : "776a66f2-993c-b372-11e8-26f9e98104e0",
          "product_id" : "02dcd191-ae55-11e7-f130-9d50e948a0b5",
          "register_id" : "02dcd191-ae55-11e6-edd8-ec8dce1d9e1c",
          "sequence" : "0",
          "handle" : "SOFTDRINK",
          "sku" : "10292",
          "name" : "SOFT DRINK",
          "quantity" : NumberInt(1),
          "price" : NumberInt(1),
          "cost" : 0.6,
          "price_set" : NumberInt(0),
          "discount" : NumberInt(0),
          "loyalty_value" : NumberInt(0),
          "tax" : NumberInt(0),
          "tax_id" : "02d59481-b67d-11e5-f667-b08185ec2871",
          "tax_name" : "No Tax",
          "tax_rate" : NumberInt(0),
          "tax_total" : NumberInt(0),
          "price_total" : NumberInt(1),
          "display_retail_price_tax_inclusive" : "0",
          "status" : "CONFIRMED",
          "attributes" : [
            {
              "name" : "line_note",
              "value" : ""
            }
          ]
        }
      ],
      "totals" : {
        "total_tax" : NumberInt(0),
        "total_price" : 4.5,
        "total_payment" : 4.5,
        "total_to_pay" : NumberInt(0)
      },
      "register_sale_payments" : [
        {
          "id" : "776a66f2-993c-b372-11e8-26f9eab2cb46",
          "payment_type_id" : "1",
          "register_id" : "02dcd191-ae55-11e6-edd8-ec8dce1d9e1c",
          "retailer_payment_type_id" : "02d59481-b655-11e5-f667-b0a23bc0e7bc",
          "name" : "Cash",
          "label" : "Account Customer",
          "payment_date" : "2018-03-13 20:05:46",
          "amount" : NumberInt(10)
        },
        {
          "id" : "776a66f2-993c-b372-11e8-26f9eab6b21a",
          "payment_type_id" : "1",
          "register_id" : "02dcd191-ae55-11e6-edd8-ec8dce1d9e1c",
          "retailer_payment_type_id" : "02d59481-b655-11e5-f667-b0a23bc0e7bc",
          "name" : "Cash",
          "label" : "Account Customer",
          "payment_date" : "2018-03-13 20:05:46",
          "amount" : -5.5
        }
      ]
    }
  }
}
Thanks for the help from @Saleem and @Krishna. However, I have tried to create an ISODate using a $concat of $substr on the string, and it isn't working:
db.Vend_raw_transactions.aggregate([{
  "$project": {
    "object_origin": "$object_origin",
    "company": "$party_uuid",
    "connection": "$connection_uuid",
    "object_creation_date": "$object_created",
    // "transaction_date": "$object_raw_origin_data.register_sales.sale_date",
    "transaction_date": {
      "$dateFromString": {
        "dateString": {
          "$concat": [
            { "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 0, 10 ] },
            "T",
            { "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 11, 9 ] }
          ]
        }
      }
    },
    "transaction_gross_value": {
      "$add": [
        "$object_raw_origin_data.register_sales.total_price",
        "$object_raw_origin_data.register_sales.total_tax"
      ]
    },
    "transaction_net_value": "$object_raw_origin_data.register_sales.total_price",
    "transaction_tax_value": "$object_raw_origin_data.register_sales.total_tax",
    "transaction_cost_value": "$object_raw_origin_data.register_sales.total_cost",
    "object_class": "goods-service-transaction",
    "object_origin_category": "point-of-sale",
    "object_type": "receipt",
    "object_origin_type": "offline",
    "transaction_reference": "$object_raw_origin_data.register_sales.invoice_number",
    "transaction_status": "$object_raw_origin_data.register_sales.status",
    "transaction_currency": "GBP",
    "party_name": "$object_raw_origin_data.register_sales.customer.customer_name",
    "party_identifier": "$object_raw_origin_data.register_sales.customer.customer_code",
    "staff_identifier": "$object_raw_origin_data.register_sales.user_id",
    "staff_name": "$object_raw_origin_data.register_sales.user_name",
    "line_items": {
      "$map": {
        "input": "$object_raw_origin_data.register_sales.register_sale_products",
        "as": "product",
        "in": {
          "item_name": "$$product.name",
          "item_system_id": "$$product.id",
          "item_identifier": "$$product.sku",
          "item_category": "sales-revenue",
          "item_quantity": "$$product.quantity",
          "item_net_unit_sale_value": "$$product.price",
          "item_net_unit_discount_value": "$$product.discount",
          "item_net_unit_member_value": "$$product.loyalty_value",
          "item_net_unit_cost_value": "$$product.cost",
          "item_unit_tax_value": "$$product.tax",
          "item_price_list_reference": "$$product.price_set",
          "item_total_sale_value": "$$product.price_total",
          "item_total_tax_value": "$$product.tax_total"
        }
      }
    }
  }
}
// ,{"$out": "9SP_Source" }
])
I cannot work out why this isn't working; it seems like it should. I'm creating a new string based upon an existing string value and adding a T in the middle, but it doesn't recognise $dateFromString. Thanks
Error Message
{
  "message" : "Unrecognized expression '$dateFromString'",
  "stack" : "MongoError: Unrecognized expression '$dateFromString'" +
    "at queryCallback (C:\Users\mattl\AppData\Local\Programs\nosqlbooster4mongo\resources\app.asar\node_modules\mongodb-core\lib\cursor.js:223:25)" +
    "at C:\Users\mattl\AppData\Local\Programs\nosqlbooster4mongo\resources\app.asar\node_modules\mongodb-core\lib\connection\pool.js:541:18" +
    "at _combinedTickCallback (internal/process/next_tick.js:131:7)" +
    "at process._tickCallback (internal/process/next_tick.js:180:9)",
  "name" : "MongoError",
  "ok" : 0,
  "errmsg" : "Unrecognized expression '$dateFromString'",
  "code" : 168,
  "codeName" : "InvalidPipelineOperator"
}
I've managed to build the new string using the script below, but $dateFromString is still unrecognised:
db.Vend_raw_transactions.aggregate([{
  "$project": {
    "origin_date": "$object_raw_origin_data.register_sales.sale_date",
    "transaction_date": {
      "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 0, 10 ]
    },
    "transaction_time": {
      "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 11, 9 ]
    },
    "new_string_date": {
      "$concat": [
        { "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 0, 10 ] },
        "T",
        { "$substr": [ "$object_raw_origin_data.register_sales.sale_date", 11, 9 ] }
      ]
    }
  }
}])
Thanks
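A note on the error: code 168 (InvalidPipelineOperator) is what a server older than MongoDB 3.6 returns here, because $dateFromString was only added in 3.6, so upgrading the server is the direct fix. As a client-side workaround sketch for older servers (assuming the collection and field names above; note that how the shell parses a timezone-less date string depends on its JavaScript engine, which matters given the no-UTC requirement):
db.Vend_raw_transactions.find(
  { "object_raw_origin_data.register_sales.sale_date": { "$type": 2 } } // 2 = string
).forEach(function (doc) {
  // "2018-03-13 20:05:46" -> "2018-03-13T20:05:46"
  var iso = doc.object_raw_origin_data.register_sales.sale_date.replace(" ", "T");
  db.Vend_raw_transactions.updateOne(
    { "_id": doc._id },
    { "$set": { "transaction_date": new Date(iso) } }
  );
});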

MongoDB shell: mongoimport not importing documents from JSON file

Can someone help me figure out what the problem is with this? I have been trying to get it to work, but all my efforts have been abortive.
mongoimport --db test --collection products --drop --file"C:\Program Files\MongoDB\Server\scripts\products.json"
Running the above in my shell keeps giving me the error below:
2018-01-13T19:55:55.019+0100 E QUERY [thread1] SyntaxError: missing ; before statement #(shell):1:14
Update:
{ "_id" : "ac3", "name" : "AC3 Phone", "brand" : "ACME", "type" : "phone", "price" : 200, "warranty_years" : 1, "available" : true }
{ "_id" : "ac7", "name" : "AC7 Phone", "brand" : "ACME", "type" : "phone", "price" : 320, "warranty_years" : 1, "available" : false }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bf9" }, "name" : "AC3 Series Charger", "type" : [ "accessory", "charger" ], "price" : 19, "warranty_years" : 0.25, "for" : [ "ac3", "ac7", "ac9" ] }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfa" }, "name" : "AC3 Case Green", "type" : [ "accessory", "case" ], "color" : "green", "price" : 12, "warranty_years" : 0 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfb" }, "name" : "Phone Extended Warranty", "type" : "warranty", "price" : 38, "warranty_years" : 2, "for" : [ "ac3", "ac7", "ac9", "qp7", "qp8", "qp9" ] }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfc" }, "name" : "AC3 Case Black", "type" : [ "accessory", "case" ], "color" : "black", "price" : 12.5, "warranty_years" : 0.25, "available" : false, "for" : "ac3" }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfd" }, "name" : "AC3 Case Red", "type" : [ "accessory", "case" ], "color" : "red", "price" : 12, "warranty_years" : 0.25, "available" : true, "for" : "ac3" }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bfe" }, "name" : "Phone Service Basic Plan", "type" : "service", "monthly_price" : 40, "limits" : { "voice" : { "units" : "minutes", "n" : 400, "over_rate" : 0.05 }, "data" : { "units" : "gigabytes", "n" : 20, "over_rate" : 1 }, "sms" : { "units" : "texts sent", "n" : 100, "over_rate" : 0.001 } }, "term_years" : 2 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15bff" }, "name" : "Phone Service Core Plan", "type" : "service", "monthly_price" : 60, "limits" : { "voice" : { "units" : "minutes", "n" : 1000, "over_rate" : 0.05 }, "data" : { "n" : "unlimited", "over_rate" : 0 }, "sms" : { "n" : "unlimited", "over_rate" : 0 } }, "term_years" : 1 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15c00" }, "name" : "Phone Service Family Plan", "type" : "service", "monthly_price" : 90, "limits" : { "voice" : { "units" : "minutes", "n" : 1200, "over_rate" : 0.05 }, "data" : { "n" : "unlimited", "over_rate" : 0 }, "sms" : { "n" : "unlimited", "over_rate" : 0 } }, "sales_tax" : true, "term_years" : 2 }
{ "_id" : { "$oid" : "507d95d5719dbef170f15c01" }, "name" : "Cable TV Basic Service Package", "type" : "tv", "monthly_price" : 50, "term_years" : 2, "cancel_penalty" : 25, "sales_tax" : true, "additional_tarriffs" : [ { "kind" : "federal tarriff", "amount" : { "percent_of_service" : 0.06 } }, { "kind" : "misc tarriff", "amount" : 2.25 } ] }

Druid: how to add numeric data to a metric without an aggregation function

The scenario is that I want to set up a stock quote server and save the quote data into Druid.
My requirement is to get the latest price of all stocks with a single query.
But I notice that Druid's query interfaces, such as timeseries, only work on metric fields, not on dimension fields.
So I am considering making the price field one of the metrics, but without aggregation.
How can I do it?
Any suggestions?
Here is my Tranquility config file:
{
  "dataSources" : {
    "stock-index-topic" : {
      "spec" : {
        "dataSchema" : {
          "dataSource" : "stock-index-topic",
          "parser" : {
            "type" : "string",
            "parseSpec" : {
              "timestampSpec" : {
                "column" : "timestamp",
                "format" : "auto"
              },
              "dimensionsSpec" : {
                "dimensions" : ["code","name","acronym","market","tradeVolume","totalValueTraded","preClosePx","openPrice","highPrice","lowPrice","latestPrice","closePx"],
                "dimensionExclusions" : [
                  "timestamp",
                  "value"
                ]
              },
              "format" : "json"
            }
          },
          "granularitySpec" : {
            "type" : "uniform",
            "segmentGranularity" : "HOUR",
            "queryGranularity" : "SECOND"
          },
          "metricsSpec" : [
            {
              "name" : "firstPrice",
              "type" : "doubleFirst",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "lastPrice",
              "type" : "doubleLast",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "minPrice",
              "type" : "doubleMin",
              "fieldName" : "tradePrice"
            },
            {
              "name" : "maxPrice",
              "type" : "doubleMax",
              "fieldName" : "tradePrice"
            }
          ]
        },
        "ioConfig" : {
          "type" : "realtime"
        },
        "tuningConfig" : {
          "type" : "realtime",
          "maxRowsInMemory" : "100000",
          "intermediatePersistPeriod" : "PT10M",
          "windowPeriod" : "PT10M"
        }
      },
      "properties" : {
        "task.partitions" : "1",
        "task.replicants" : "1",
        "topicPattern" : "stock-index-topic"
      }
    }
  },
  "properties" : {
    "zookeeper.connect" : "localhost:2181",
    "druid.discovery.curator.path" : "/druid/discovery",
    "druid.selectors.indexing.serviceName" : "druid/overlord",
    "commit.periodMillis" : "15000",
    "consumer.numThreads" : "2",
    "kafka.zookeeper.connect" : "localhost:2181",
    "kafka.group.id" : "tranquility-kafka"
  }
}
I think you should make latest_price a new numeric dimension; that would be much better from a performance and querying standpoint, considering how Druid works.
Metrics are meant to run aggregation functions at their core, so they won't be helpful in your use case.
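To illustrate (a sketch only; it assumes the Druid version in use supports typed dimension schemas, and the choice of "float" is mine, not part of the original answer), the price would be declared as a numeric dimension in the dimensionsSpec instead of appearing in metricsSpec:
"dimensionsSpec" : {
  "dimensions" : [
    "code",
    "name",
    "acronym",
    "market",
    { "name" : "latestPrice", "type" : "float" }
  ]
}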

Need help joining collections

Collection Inventory sample data:
{
  "_id" : "89011704252315531324",
  "sku" : "A2015-01-000",
  "type" : "package",
  "status" : "active",
  "lng" : "-72.789153",
  "lat" : "44.173515",
  "acq" : "28",
  "gtime" : ISODate("2017-01-11T22:27:48.000Z"),
  "qlng" : "-72.796501",
  "qlat" : "44.214783",
  "qtime" : ISODate("2016-11-27T18:21:10.000Z"),
  "timestamp" : ISODate("2017-01-12T14:43:29.000Z"),
  "modified" : Date(-62135596800000),
  "battery" : "60",
  "wearables" : [
    {
      "_id" : "0009003C100228234E45",
      "type" : "wearable",
      "status" : "active",
      "battery" : "50",
      "timestamp" : ISODate("2017-01-12T11:43:33.000Z")
    },
    {
      "_id" : "004A003F200B36634E45",
      "type" : "cradle",
      "status" : "active",
      "battery" : "64",
      "timestamp" : ISODate("2017-01-11T22:27:26.000Z")
    },
    {
      "_id" : "11223344556600000B55",
      "type" : "falldetect",
      "status" : "active",
      "battery" : "64",
      "timestamp" : ISODate("2017-01-12T08:43:29.000Z")
    }
  ],
  "company" : "ConnectAmericaProduction",
  "companies" : [],
  "remoteIp" : "172.31.45.196:53864",
  "subscriber" : "5783e20aa2c89f346e000006",
  "ring" : "90",
  "speaker" : "90",
  "mic" : "55",
  "version" : "4352",
  "cradle" : "OFF",
  "ctime" : ISODate("2017-01-11T23:13:59.000Z"),
  "csqtime" : Date(-62135596800000)
}
Collection calllog sample data:
{
  "_id" : "89011704252315531324",
  "cdr" : [
    {
      "direction" : "Outgoing",
      "duration" : 46,
      "timestamp" : ISODate("2016-11-23T03:25:06.000Z"),
      "number" : "",
      "name" : "Call Center",
      "lng" : "-71.208061",
      "lat" : "42.330265",
      "acq" : "",
      "timezone" : {
        "dstOffset" : 0.0,
        "rawOffset" : 0.0,
        "status" : "",
        "timeZoneId" : "",
        "timeZoneName" : ""
      }
    },
    {
      "direction" : "Incoming",
      "duration" : 51,
      "timestamp" : ISODate("2016-11-23T03:26:02.000Z"),
      "number" : "",
      "name" : "Call Center",
      "lng" : "-71.205727",
      "lat" : "42.333347",
      "acq" : "",
      "timezone" : {
        "dstOffset" : 0.0,
        "rawOffset" : 0.0,
        "status" : "",
        "timeZoneId" : "",
        "timeZoneName" : ""
      }
    },
    {
      "direction" : "Outgoing",
      "duration" : 49,
      "timestamp" : ISODate("2016-11-27T18:21:04.000Z"),
      "number" : "",
      "name" : "Call Center",
      "lng" : "-72.796501",
      "lat" : "44.214783",
      "acq" : "",
      "timezone" : {
        "dstOffset" : 0.0,
        "rawOffset" : -18000.0,
        "status" : "OK",
        "timeZoneId" : "America/New_York",
        "timeZoneName" : "Eastern Standard Time"
      }
    }
  ]
}
After running this aggregate function:
db.calllog.aggregate([
  { $unwind: "$cdr" },
  { $lookup: { from: "inventory", localField: "_id", foreignField: "_id", as: "wearables" } },
  { "$project": { "cdr.direction": 1, "cdr.duration": 1, "cdr.date": 1, "wearables.type": 1, "wearables.status": 1, "wearables.battery": 1 } }
])
Result:
{ "_id" : "89011704252315531324", "cdr" : { "direction" : "Outgoing", "duration" : 46 }, "wearables" : [ { "type" : "package", "status" : "active", "battery" : "60" } ] }
{ "_id" : "89011704252315531324", "cdr" : { "direction" : "Incoming", "duration" : 51 }, "wearables" : [ { "type" : "package", "status" : "active", "battery" : "60" } ] }
{ "_id" : "89011704252315531324", "cdr" : { "direction" : "Outgoing", "duration" : 49 }, "wearables" : [ { "type" : "package", "status" : "active", "battery" : "60" } ] }
I need help; I cannot get the query to show the wearables' types (wearable, cradle, falldetect).
Thanks
Did you try wearables.wearables.type? When you project wearables.type, you are actually getting the type of the inventory document. If you need the type of the wearable that is inside the inventory, you need to project inventory.wearables.type. The initial problem is that you're naming the looked-up inventory "wearables", which creates the confusion.
I would do the following:
db.calllog.aggregate([{$unwind: "$cdr"},
{$lookup:{from: "inventory", localField: "_id", foreignField: "_id", as: "inventory" }},
{$unwind: "$inventory.wearables"},
{ "$project": {
"cdr.direction": 1,
"cdr.duration": 1,
"cdr.date": 1,
"inventory.type": 1,
"inventory.status": 1,
"inventory.battery": 1,
"inventory.wearables.type":1
}}])
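As a variant sketch (my adjustment, not part of the original answer): unwinding $inventory before its embedded wearables array avoids relying on how $unwind resolves a path through the looked-up array, and pairs each cdr entry with exactly one wearable:
db.calllog.aggregate([
  { $unwind: "$cdr" },
  { $lookup: { from: "inventory", localField: "_id", foreignField: "_id", as: "inventory" } },
  { $unwind: "$inventory" },           // one inventory document per row
  { $unwind: "$inventory.wearables" }, // one wearable per row
  { "$project": {
      "cdr.direction": 1,
      "cdr.duration": 1,
      "inventory.wearables.type": 1,
      "inventory.wearables.status": 1,
      "inventory.wearables.battery": 1
  }}
])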