Iterate over a tag substring in Overpass Turbo - overpass-api

I want to use the development version of Overpass (https://olbricht.nrw/ovt/#) to create bespoke areas with the hull() function.
The data I want to build the hull() around is in the tag "ref", which for these objects is formatted as [n]-[m], and I want to group by the [n] part only.
The only solution I've found is to run a select for each value of [n], use convert to create a derived object, and then concatenate all of these together, which is very cumbersome to maintain.
My code looks like this:
[out:json];
{{geocodeArea:Gentofte Municipality}}->.a;
(
node["power"="substation"](area.a);
way["power"="substation"](area.a);
)->.trafoer;
.trafoer out;.trafoer>;out geom;
node["ref"]["power"="cable_distribution_cabinet"](area.a)->.skabe;
node.skabe["ref"~ "^11-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "11", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^13-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "13", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^15-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "15", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^21-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "21", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^43-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "43", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^132-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "132", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^198-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "198", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^241-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "241", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^254-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "254", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^357-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "357", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^424-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "424", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^441-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "441", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^444-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "444", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^499-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "499", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^686-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "686", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^733-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "733", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^820-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "820", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^821-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "821", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~ "^907-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone= "907", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1145-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1145", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1267-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1267", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1396-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1396", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1492-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1492", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1517-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1517", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1555-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1555", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1692-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1692", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^1960-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="1960", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^4029-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="4029", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^4205-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="4205", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^4531-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="4531", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^4782-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="4782", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^5009-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="5009", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^6842-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="6842", ::=::, ::geom=geom();)->.sum;
node.skabe["ref"~"^7106-"]->.ix; (.skabe; - .ix;)->.skabe; (.sum; .ix convert skab zone="7106", ::=::, ::geom=geom();)->.sum;
.skabe out;
.sum out geom;
.sum;
for (t["zone"])
{
make substation_area
substation_ref=_.val,
source=set("{"+type()+" "+id()+"}"),
::geom=hull(gcat(geom()));
out geom;
}
{{style:
node[!zone], way {
color:red; fill-color:red; width:3; text: ref;
}
node[zone], way {
color:darkgreen; fill-color:green; width:1; symbol-size:4;
}
node[power=substation], way {
color:black; fill-color:blue; width:1; dashes:5,5; symbol-size:12; text: ref;
}
area {
text: substation_ref;
}
}}
Is there a more elegant way to run the for loop directly on a substring of the tag, instead of creating the derived objects first?

Related

Printing a string with newline as text ("example \n example" in one line)

I'm working on balancing some text over multiple lines, adding manual newlines to split the text evenly. To test my code I need to see where the "\n" characters are inserted, but the only way I've found is to print the string out character by character:
["T", "i", "r", "s", "d", "a", "g", " ", "f", "i", "k", "k", " ", "1", "5", " ", "b", "a", "r", "n", " ", "i", " ", "G", "r", "a", "n", "s", "t", "u", "b", "b", "e", "n", " ", "b", "a", "r", "n", "e", "h", "a", "g", "e", " ", "h", "a", "g", "e", "t", " ", "s", "e", "g", " ", "b", "a", "m", " ", "b", "a", "m", " ", "s", "h", "a", "r", "k", " ", "w", "e", "e", "k", " ", "f", "o", "r", " ", "l", "i", "f", "e", " ", "a", "n", "d", " ", "f", "o", "r", " ", "e", "v", "e", "r"]
It's a pain to find the \n symbols in there, and I am wondering if I could see it as a string in some other way. Any suggestions?
You can split the string by newlines and join it with any visible separator you like (in the example below, ~~~):
myString.components(separatedBy: CharacterSet.newlines).joined(separator: "~~~")
Because '\' is an escape character, you can also print a literal "\n" like this:
print("something \\n something")

MongoDB search by DateTime type not working

Hi, I'm new to MongoDB and just getting started with it. I created a collection and inserted this document into it:
{
"_id" : ObjectId("574ad7c42368e58f8c07a47d"),
"id" : 10000,
"BookieId" : 1,
"EventBookieId" : null,
"LeagueId" : 214,
"LeagueNameId" : 4571,
"Team1Id" : 435,
"Team1NameId" : null,
"Team2Id" : 430,
"Team2NameId" : null,
"EventDateTime" : "2013-09-14T14:00:00",
"isNew" : true,
"CountryId" : 36,
"isCountry" : true,
"isLeague" : true,
"Gameid" : null,
"Reversed" : false,
"eventId" : 10000,
"bets" : [
{
"id" : 1,
"EventConnectionId" : 10000,
"isCorners" : false,
"DateTime" : "2013-09-10T09:06:43.333",
"IsLive" : false,
"Team1Score" : null,
"Team2Score" : null,
"Team1RedCards" : null,
"Team2RedCards" : null,
"odds" : []
},
{
"id" : 2,
"EventConnectionId" : 10000,
"isCorners" : false,
"DateTime" : "2013-09-10T19:50:26.84",
"IsLive" : false,
"Team1Score" : 0,
"Team2Score" : 0,
"Team1RedCards" : null,
"Team2RedCards" : null,
"odds" : [
{
"id" : 62282718,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968173",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 1.81,
"Odds2" : 2.1236,
"Odds3" : null,
"MaxBet" : 2000,
"Bet" : "Hcap",
"BetGamePart" : 0,
"Line" : -0.25,
"ID_IDENTITY" : 1
},
{
"id" : 62282719,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968175",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.04167,
"Odds2" : 1.86,
"Odds3" : null,
"MaxBet" : 2000,
"Bet" : "Total",
"BetGamePart" : 0,
"Line" : 2.0,
"ID_IDENTITY" : 2
},
{
"id" : 62282720,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968177",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.16,
"Odds2" : 3.45,
"Odds3" : 3.05,
"MaxBet" : 1000,
"Bet" : "X12",
"BetGamePart" : 0,
"Line" : null,
"ID_IDENTITY" : 3
},
{
"id" : 62282721,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968179",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.21951,
"Odds2" : 1.7,
"Odds3" : null,
"MaxBet" : 1000,
"Bet" : "Hcap",
"BetGamePart" : 1,
"Line" : -0.25,
"ID_IDENTITY" : 4
},
{
"id" : 62282722,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968180",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.78,
"Odds2" : 3.9,
"Odds3" : 1.99,
"MaxBet" : 500,
"Bet" : "X12",
"BetGamePart" : 1,
"Line" : null,
"ID_IDENTITY" : 5
},
{
"id" : 62282723,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "21968181",
"ScrapingDateTime" : "2013-09-13T08:39:15.81",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 1.93,
"Odds2" : 1.95,
"Odds3" : null,
"MaxBet" : 1000,
"Bet" : "Total",
"BetGamePart" : 1,
"Line" : 0.75,
"ID_IDENTITY" : 6
},
{
"id" : 62290838,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382209",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 1.81,
"Odds2" : 2.1236,
"Odds3" : null,
"MaxBet" : 2000,
"Bet" : "Hcap",
"BetGamePart" : 0,
"Line" : -0.25,
"ID_IDENTITY" : 7
},
{
"id" : 62290839,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382211",
"ScrapingDateTime" : "2013-09-13T09:27:39.6",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.05263,
"Odds2" : 1.85,
"Odds3" : null,
"MaxBet" : 2000,
"Bet" : "Total",
"BetGamePart" : 0,
"Line" : 2.0,
"ID_IDENTITY" : 8
},
{
"id" : 62290840,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382213",
"ScrapingDateTime" : "2013-09-13T10:29:22.863",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.15,
"Odds2" : 3.45,
"Odds3" : 3.1,
"MaxBet" : 1000,
"Bet" : "X12",
"BetGamePart" : 0,
"Line" : null,
"ID_IDENTITY" : 9
},
{
"id" : 62290841,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382215",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.21951,
"Odds2" : 1.7,
"Odds3" : null,
"MaxBet" : 1000,
"Bet" : "Hcap",
"BetGamePart" : 1,
"Line" : -0.25,
"ID_IDENTITY" : 10
},
{
"id" : 62290842,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382216",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.78,
"Odds2" : 3.9,
"Odds3" : 1.99,
"MaxBet" : 500,
"Bet" : "X12",
"BetGamePart" : 1,
"Line" : null,
"ID_IDENTITY" : 11
},
{
"id" : 62290843,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "14382217",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 1.93,
"Odds2" : 1.95,
"Odds3" : null,
"MaxBet" : 1000,
"Bet" : "Total",
"BetGamePart" : 1,
"Line" : 0.75,
"ID_IDENTITY" : 12
},
{
"id" : 62381139,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "20864356",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.05263,
"Odds2" : 1.85,
"Odds3" : null,
"MaxBet" : 2000,
"Bet" : "Total",
"BetGamePart" : 0,
"Line" : 2.0,
"ID_IDENTITY" : 13
},
{
"id" : 62381140,
"BetId" : 2,
"BookieId" : 1,
"BookieBetId" : "20864358",
"ScrapingDateTime" : "2013-09-13T15:38:11.303",
"MinuteInGame" : null,
"GamePart" : null,
"Odds1" : 2.15,
"Odds2" : 3.45,
"Odds3" : 3.1,
"MaxBet" : 1000,
"Bet" : "X12",
"BetGamePart" : 0,
"Line" : null,
"ID_IDENTITY" : 14
}
]
}
]
}
Now I'm trying to find by date with this query:
db.EventsBetsOdds.find( { "bets": { $elemMatch: { "DataTime": "2013-09-10T09:06:43.333" } } } );
Fetched 0 record(s) in 400ms
is what I get. What am I doing wrong?
I tried to put an index on this field, and the indexing finished.
Note that your query searches for "DataTime", while the field is actually named "DateTime". Your mongo query should be like this:
db.testes.find({
    'bets.DateTime': '2013-09-10T09:06:43.333'
}, {
    'bets': {
        $elemMatch: {
            'DateTime': '2013-09-10T09:06:43.333'
        }
    }
})
Or something like this:
db.testes.find({
    'bets.DateTime': '2013-09-10T09:06:43.333'
}, {
    'bets.$': 1
})
Helpful links:
$ (projection operator)
$elemMatch - Array of Embedded Documents
Array Field Limitations
$filter (aggregation)
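For reference, the $filter link above points to an aggregation-pipeline way to get a similar result. A minimal, untested sketch against the same EventsBetsOdds collection, assuming MongoDB 3.2+ where $filter is available:
// Match documents containing the date, then keep only the matching bets.
db.EventsBetsOdds.aggregate([
    { $match: { 'bets.DateTime': '2013-09-10T09:06:43.333' } },
    { $project: {
        bets: {
            $filter: {
                input: '$bets',
                as: 'bet',
                cond: { $eq: ['$$bet.DateTime', '2013-09-10T09:06:43.333'] }
            }
        }
    } }
])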

How to aggregate time series documents in mongodb

I have a sharded MongoDB cluster where I store data from a virtual machine monitoring system (Zabbix etc.). Now I want to get some information from the db, for example the average memfree over the last 2 days for one VM.
I have read the tutorials about aggregation and the comparison with SQL, but I don't understand how to query time series documents (stored the way MongoDB suggests in its webinars).
Example: I have a collection with many documents like this (one doc represents 1 hour, with values nested by minute and second):
"_id" : ObjectId("5558ab960e8956234285de14"),
"timestamp_hour" : ISODate("2014-10-13T23:00:00.000+02:00"),
"name" : "memfree",
"unity" : "B",
"values" : {
"0" : {
"0" : 2041004032.0000000000000000,
"1" : 2041004032.0000000000000000,
"2" : 2041004032.0000000000000000,
"3" : 2041004032.0000000000000000,
"4" : 2041004032.0000000000000000,
"5" : 2041004032.0000000000000000,
"6" : 2041004032.0000000000000000,
"7" : 2041004032.0000000000000000,
"8" : 2041004032.0000000000000000,
"9" : 2041004032.0000000000000000,
"10" : 2041004032.0000000000000000,
"11" : 2041004032.0000000000000000,
"12" : 2041004032.0000000000000000,
"13" : 2041004032.0000000000000000,
"14" : 2041004032.0000000000000000,
"15" : 2041004032.0000000000000000,
"16" : 2041004032.0000000000000000,
"17" : 2041004032.0000000000000000,
"18" : 2041004032.0000000000000000,
"19" : 2041004032.0000000000000000,
"20" : 2041004032.0000000000000000,
"21" : 2041004032.0000000000000000,
"22" : 2041004032.0000000000000000,
"23" : 2041004032.0000000000000000,
"24" : 2041004032.0000000000000000,
"25" : 2041004032.0000000000000000,
"26" : 2041004032.0000000000000000,
"27" : 2041004032.0000000000000000,
"28" : 2041004032.0000000000000000,
"29" : 2041004032.0000000000000000,
"30" : 2041004032.0000000000000000,
"31" : 2041004032.0000000000000000,
"32" : 2041004032.0000000000000000,
"33" : 2041004032.0000000000000000,
"34" : 2041004032.0000000000000000,
"35" : 2041004032.0000000000000000,
"36" : 2041004032.0000000000000000,
"37" : 2041004032.0000000000000000,
"38" : 2041004032.0000000000000000,
"39" : 2041004032.0000000000000000,
"40" : 2041004032.0000000000000000,
"41" : 2041004032.0000000000000000,
"42" : 2041004032.0000000000000000,
"43" : 2041004032.0000000000000000,
"44" : 2041004032.0000000000000000,
"45" : 2041004032.0000000000000000,
"46" : 2041004032.0000000000000000,
"47" : 2041004032.0000000000000000,
"48" : 2041004032.0000000000000000,
"49" : 2041004032.0000000000000000,
"50" : 2041004032.0000000000000000,
"51" : 2041004032.0000000000000000,
"52" : 2041004032.0000000000000000,
"53" : 2041004032.0000000000000000,
"54" : 2041004032.0000000000000000,
"55" : 2041004032.0000000000000000,
"56" : 2041004032.0000000000000000,
"57" : 2041004032.0000000000000000,
"58" : 2041004032.0000000000000000,
"59" : 2041004032.0000000000000000
},
"1" : {
"0" : 2041004032.0000000000000000,
"1" : 2041004032.0000000000000000,
"2" : 2041004032.0000000000000000,
"3" : 2041004032.0000000000000000,
"4" : 2041004032.0000000000000000,
"5" : 2041004032.0000000000000000,
"6" : 2041004032.0000000000000000,
"7" : 2041004032.0000000000000000,
"8" : 2041004032.0000000000000000,
"9" : 2041004032.0000000000000000,
"10" : 2041004032.0000000000000000,
"11" : 2041004032.0000000000000000,
"12" : 2041004032.0000000000000000,
"13" : 2041004032.0000000000000000,
"14" : 2041004032.0000000000000000,
"15" : 2041004032.0000000000000000,
"16" : 2041004032.0000000000000000,
"17" : 2041004032.0000000000000000,
"18" : 2041004032.0000000000000000,
"19" : 2041004032.0000000000000000,
"20" : 2041004032.0000000000000000,
"21" : 2041004032.0000000000000000,
"22" : 2041004032.0000000000000000,
"23" : 2041004032.0000000000000000,
"24" : 2041004032.0000000000000000,
"25" : 2041004032.0000000000000000,
"26" : 2041004032.0000000000000000,
"27" : 2041004032.0000000000000000,
"28" : 2041004032.0000000000000000,
"29" : 2041004032.0000000000000000,
"30" : 2041004032.0000000000000000,
"31" : 2041004032.0000000000000000,
"32" : 2041004032.0000000000000000,
"33" : 2041004032.0000000000000000,
"34" : 2041004032.0000000000000000,
"35" : 2041004032.0000000000000000,
"36" : 2041004032.0000000000000000,
"37" : 2041004032.0000000000000000,
"38" : 2041004032.0000000000000000,
"39" : 2041004032.0000000000000000,
"40" : 2041004032.0000000000000000,
"41" : 2041004032.0000000000000000,
"42" : 2041004032.0000000000000000,
"43" : 2041004032.0000000000000000,
"44" : 2041004032.0000000000000000,
"45" : 2041004032.0000000000000000,
"46" : 2041004032.0000000000000000,
"47" : 2041004032.0000000000000000,
"48" : 2041004032.0000000000000000,
"49" : 2041004032.0000000000000000,
"50" : 2041004032.0000000000000000,
"51" : 2041004032.0000000000000000,
"52" : 2041004032.0000000000000000,
"53" : 2041004032.0000000000000000,
"54" : 2041004032.0000000000000000,
"55" : 2041004032.0000000000000000,
"56" : 2041004032.0000000000000000,
"57" : 2041004032.0000000000000000,
"58" : 2041004032.0000000000000000,
"59" : 2041004032.0000000000000000
....
I want to know the average memfree from '2014-10-13T23:00:00.000' to '2014-10-15T23:00:00.000', so I need to sum all of the values from the 13th to the 15th (3600*24*2 values).
I think the query will be something like this, but I don't know how to express the avg over the nested values:
db.metrics.aggregate([
    { $match: {
        name: 'memfree',
        timestamp_hour: {
            $gte: ISODate("2014-10-13T23:00:00.000+02:00"),
            $lte: ISODate("2014-10-15T23:00:00.000+02:00")
        }
    } },
    {
        $group: {
            _id: "$name",
            avg: { $avg: "how can I get all the values??" }
        }
    }
])
Any advice?
Thanks.
EDIT:
The correct answer (works for one or many metrics) is:
map = function () {
    for (var min in this.values) {
        for (var sec in this.values[min]) {
            var data = {value: {}, count: {}};
            data.value[this.name] = this.values[min][sec];
            data.count[this.name] = 1;
            emit(this.name, data);
        }
    }
};
reduce = function (key, values) {
    var sum = values.reduce(function (a, b) {
        var out = {value: {}, count: {}, avg: 0};
        for (var k in b.value) {
            var incount = a.count[k] || 0;
            var invalue = a.value[k] || 0;
            out.value[k] = b.value[k] + invalue;
            out.count[k] = b.count[k] + incount;
        }
        out.avg = out.value[k] / out.count[k];
        return out;
    });
    return sum;
};
printjson(db.node0208_26608.mapReduce(map, reduce,
    {
        query: {
            name: {$in: ['ioutil', 'memtotal', 'memfree']},
            timestamp_hour: {
                $gte: ISODate("2014-09-22T00:00:00.000+02:00"),
                $lte: ISODate("2014-09-28T23:00:00.000+02:00")
            }
        },
        // to write directly to a collection instead:
        // out: {merge: "map_reduce_out"},
        out: {inline: 1},
        verbose: true
    })
)
produces this result:
{
"results" : [
{
"_id" : "ioutil",
"value" : {
"value" : {
"ioutil" : 2495762.106280909
},
"count" : {
"ioutil" : 601200
},
"avg" : 4.15130090865088
}
},
{
"_id" : "memfree",
"value" : {
"value" : {
"memfree" : 28500447903744
},
"count" : {
"memfree" : 601200
},
"avg" : 47405934.636966065
}
},
{
"_id" : "memtotal",
"value" : {
"value" : {
"memtotal" : 635834327040000
},
"count" : {
"memtotal" : 594000
},
"avg" : 1070428160
}
}
],
"counts" : {
"input" : NumberLong(499),
"emit" : NumberLong(1796400),
"reduce" : NumberLong(11),
"output" : NumberLong(3)
},
"timeMillis" : 37956,
"timing" : {
"shardProcessing" : 37948,
"postProcessing" : 8
},
"shardCounts" : {
"192.168.0.19:27017" : {
"input" : 165,
"emit" : 594000,
"reduce" : 4,
"output" : 1
},
"192.168.0.20:27017" : {
"input" : 334,
"emit" : 1202400,
"reduce" : 7,
"output" : 2
}
},
"postProcessCounts" : {
"192.168.0.21:27017" : {
"input" : NumberLong(3),
"reduce" : NumberLong(0),
"output" : NumberLong(3)
}
},
"ok" : 1
}
This will be hard to achieve using the aggregation framework, but it "works" well with MapReduce. Something along the lines of this (untested):
// collect *individual* values
map = function () {
    for (var min in this.values) {
        for (var sec in this.values[min]) {
            var data = {value: {}, count: {}};
            data.value[this.name] = this.values[min][sec];
            data.count[this.name] = 1;
            emit(null, data);
        }
    }
};
// sum values and count
reduce = function (key, values) {
    var sum = values.reduce(function (a, b) {
        var out = {value: {}, count: {}};
        for (var k in b.value) {
            var incount = a.count[k] || 0;
            var invalue = a.value[k] || 0;
            out.value[k] = b.value[k] + invalue;
            out.count[k] = b.count[k] + incount;
        }
        return out;
    });
    return sum;
};
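As an aside, newer MongoDB releases (3.4.4 and later) do make this feasible in the aggregation framework via $objectToArray. This is only a minimal, untested sketch, assuming the document shape shown above:
db.metrics.aggregate([
    { $match: {
        name: "memfree",
        timestamp_hour: {
            $gte: ISODate("2014-10-13T23:00:00.000+02:00"),
            $lte: ISODate("2014-10-15T23:00:00.000+02:00")
        }
    } },
    // turn the minute map into an array, then the second map,
    // so each individual reading becomes one document
    { $project: { name: 1, minutes: { $objectToArray: "$values" } } },
    { $unwind: "$minutes" },
    { $project: { name: 1, seconds: { $objectToArray: "$minutes.v" } } },
    { $unwind: "$seconds" },
    { $group: { _id: "$name", avg: { $avg: "$seconds.v" } } }
])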

Converting a MongoDB String attribute to an Object which can be queried on

I have the following data in mongoDB
{ "_id" : "ee477d7a-7a0c-4420-b476-c402012c74a9",
"_class" : "com.TrackingData",
"modified" : ISODate("2014-07-09T20:23:33.117Z"),
"eventtype" : "Test",
"eventdata" : "{\"QueryDate\":\"01-APR-2014\",
\"SearchQuery\":{\
"keyword\":\"Java\",\
"location\":\"Santa Clara, CA\",
\"Facet\":\"skill~java~perl|workAuth~USC\",
\"SearchAgentId\":\"4299\"
},
\"Viewed\":[
{\"ViewedID\":\"8992ade400a\",
\"Dockey\":\"3323aba3233\",
\"PID\":\"32399a\",
\"actionsTaken\":\"email|direct message|report seeker\",
\"viewDate\":\"01-APR-2014\",
\"MessageSent\":\"true\",
\"Message\":[
{\"MessageID\":\"123aca323\",
\"Delivered\":\"True\",
\"Opened\":\"True\",
\"ClickThroughRate\":\"NotBad\",
\"MessageDate\":\"02-APR-2014\",
\"Response\":[{\"ResponseId\":\"a323a9da\",\"ResponseDate\":\"23-APR-2014\"}]}]}]}",
"eventsource" : "API-Dev Test - JMachine",
"sourceip" : "myIp",
"entityid" : "Test",
"groupid" : "ice",
"datecreated" : ISODate("2014-07-09T20:23:33.112Z") }
eventdata is stored as a String. I have tried to convert it to an object through the mongo shell:
db.mydb.find( { 'eventdata' : { $type : 2 } } ).forEach( function (x) { x.eventdata = new Object(x.eventdata); db.mydb.save(x);});
However, it simply seemed to split the string into a map of individual characters:
"eventdata" : { "0" : "{", "1" : "\"", "2" : "V", "3" : "i", "4" : "e", "5" : "w", "6" : "e", "7" : "d", "8" : "\"", "9" : ":", "10" : "[", "11" : "{", "12" : "\"", "13" : "V", "14" : "i", "15" : "e", "16" : "w", "17" : "e", "18" : "d", "19" : "I", "20" : "D", "21" : "\"", "22" : ":", "23" : "\"", "24" : "8", "25" : "9", "26" : "9", "27" : "2", "28" : "a", "29" : "d", "30" : "e", "31" : "4", "32" : "0", "33" : "0", "34" : "a", "35" : "\"", "36" : ",", "37" : "\"", "38" : "D", "39" : "o", "40" : "c", "41" : "k", "42" : "e", "43" : "y", "44" : "\"", "45" : ":", "46" : "\"", "47" : "1", "48" : "7", "49" : "2", "50" : "9", "51" : "f", "52" : "7", "53" : "a", "54" : "f", "55" : "c", "56" : "d", "57" : "1", "58" : "f", "59" : "7", "60" : "6", "61" : "3", "62" : "9", "63" : "3", "64" : "a", "65" : "b", "66" : "6", "67" : "6", "68" : "d", "69" : "5", "70" : "c", "71" : "6", "72" : "4", "73" : "8", "74" : "a", "75" : "f", "76" : "3", "77" : "7", "78" : "b", "79" : "\"", "80" : ",", "81" : "\"", "82" : "P", "83" : "I", "84" : "D", "85" : "\"", "86" : ":", "87" : "\"", "88" : "\"", "89" : ",", "90" : "\"", "91" : "a", "92" : "c", "93" : "t", "94" : "i", "95" : "o", "96" : "n", "97" : "s", "98" : "T", "99" : "a", "100" : "k", "101" : "e", "102" : "n", "103" : "\"", "104" : ":", "105" : "\"", "106" : "\"", "107" : ",", "108" : "\"", "109" : "v", "110" : "i", "111" : "e", "112" : "w", "113" : "D", "114" : "a", "115" : "t", "116" : "e", "117" : "\"", "118" : ":", "119" : "\"", "120" : "0", "121" : "9", "122" : "-", "123" : "J", "124" : "U", "125" : "L", "126" : "-", "127" : "2", "128" : "0", "129" : "1", "130" : "4", "131" : " ", "132" : "2", "133" : "0", "134" : ":", "135" : "3", "136" : "1", "137" : ":", "138" : "2", "139" : "3", "140" : "\"", "141" : "}", "142" : "]", "143" : "}" },
which still does not support my nested queries, such as:
db.mydb.find({'eventdata.SearchQuery.keyword' :'keywordValue' }).skip(0).limit(20)
So, given my original data structure, what needs to be done in terms of transformation to allow me to drill down into this eventdata attribute?
In your forEach example, use JSON.parse(x.eventdata) instead of new Object(x.eventdata).
Of course you cannot query it directly with .find() - it's just a string from the MongoDB point of view. You should have stored it as BSON if you wanted to query it. Everything you can use is documented here: http://docs.mongodb.org/manual/reference/operator/query/ After the transformation from JSON string to native BSON (the forEach example) it will be possible to query it.
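A minimal sketch of that fix against the same mydb collection, assuming every eventdata value is a valid JSON string:
// Parse the JSON string into a real sub-document and save it back.
db.mydb.find({ eventdata: { $type: 2 } }).forEach(function (x) {
    x.eventdata = JSON.parse(x.eventdata);
    db.mydb.save(x);
});
// After the conversion, nested queries like this become possible:
db.mydb.find({ "eventdata.SearchQuery.keyword": "Java" }).limit(20);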

How do I insert an array of objects into mongodb using meteor?

How do I get a clean insert, without extraneous characters being added, when trying to insert an array of objects? If I do the insert manually from the mongodb shell I get the expected results; from Meteor it doesn't seem to work.
What I'm trying to achieve is the result from the mongodb shell:
db.test.insert([{name:"john"},{name:"jane"}]);
which yields:
db.test.find()
{ "_id" : ObjectId("53bb0768dc2469c1f440a3c2"), "name" : "john" }
{ "_id" : ObjectId("53bb0768dc2469c1f440a3c3"), "name" : "jane" }
But I don't get that, so I used the code snippet below to test several ways of inserting the array of objects, hoping to find the right combination:
test = new Meteor.Collection("test");
a = new Array();
a.push({name:"john"});
a.push({name:"jane"});
console.log(a);
test.insert(a);
console.log(a.toString());
test.insert(a.toString());
console.log(JSON.stringify(a));
test.insert(JSON.stringify(a));
test.insert([{name:"john"},{name:"jane"}]);
test.insert([{"name":"john"},{"name":"jane"}]);
What I get in the console:
[ { name: 'john' }, { name: 'jane' } ]
[object Object],[object Object]
[{"name":"john"},{"name":"jane"}]
What I get in the database:
db.test.find()
{ "0" : { "name" : "john" }, "1" : { "name" : "jane" }, "_id" : "SYkv79XLNQsWgkYmw" }
{ "0" : "[", "1" : "o", "2" : "b", "3" : "j", "4" : "e", "5" : "c", "6" : "t", "7" : " ", "8" : "O", "9" : "b", "10" : "j", "11" : "e", "12" : "c", "13" : "t", "14" : "]", "15" : ",", "16" : "[", "17" : "o", "18" : "b", "19" : "j", "20" : "e", "21" : "c", "22" : "t", "23" : " ", "24" : "O", "25" : "b", "26" : "j", "27" : "e", "28" : "c", "29" : "t", "30" : "]", "_id" : "SiQ3ZpGfeBqj4mXB2" }
{ "0" : "[", "1" : "{", "2" : "\"", "3" : "n", "4" : "a", "5" : "m", "6" : "e", "7" : "\"", "8" : ":", "9" : "\"", "10" : "j", "11" : "o", "12" : "h", "13" : "n", "14" : "\"", "15" : "}", "16" : ",", "17" : "{", "18" : "\"", "19" : "n", "20" : "a", "21" : "m", "22" : "e", "23" : "\"", "24" : ":", "25" : "\"", "26" : "j", "27" : "a", "28" : "n", "29" : "e", "30" : "\"", "31" : "}", "32" : "]", "_id" : "kKRiR8NjNJefBYRya" }
{ "0" : { "name" : "john" }, "1" : { "name" : "jane" }, "_id" : "RBrvkrw5xZaEGdczF" }
{ "0" : { "name" : "john" }, "1" : { "name" : "jane" }, "_id" : "2cfWJqHY4aJ6yF68s" }
I expected a simple 'test.insert(a)' to give me what I want, but it includes the array indexes. How do I build an array of objects to insert into mongodb from meteor without the array indexes? Stringify seemed to build a clean-looking serialization of the array, but apparently I just don't know how to do this. The purpose is to build a complex array of objects in memory and do a bulk insert.
Meteor only lets you store root-level documents as objects; if you give it an array, it will try to convert it to an object, which is why you're getting this weird result. You would have to adjust your document to store the array as part of a root document:
test = new Meteor.Collection("test");
a = new Array();
a.push({name: "john"});
a.push({name: "jane"});
var doc = {
    names: a
};
test.insert(doc);
It won't be possible to store a document as [].
Another approach is to send the array to a server-side method and insert the elements there:
Template.first.rendered = function () {
    var a = [];
    a.push({name: "rahul"});
    a.push({name: "vidu"});
    Meteor.call("array", a, function (error, result) {
    });
};
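The "array" server method is not shown in that answer; a hypothetical server-side counterpart might insert each element of the array as its own document, which matches the mongo shell result the question is after:
// Hypothetical server-side method for the Meteor.call above ("array" is not
// defined in the original answer); it inserts one document per array element.
if (Meteor.isServer) {
    Meteor.methods({
        array: function (items) {
            items.forEach(function (item) {
                test.insert(item);
            });
        }
    });
}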