Hi, I am posting a question here for the first time, and I am also new to MongoDB. I have searched on Google but did not find the answer. Currently the documents in my collection have a list of strings:
"keepOuts" : [
"198,168,282,115,292,180",
"356,120,466,192,330,252,346,186"
]
I need a MongoDB query that will update all the documents in my collection to look like this:
"keepOuts" : [
{
"coordinates" : "126,200,166,185,158,236",
"height" : NumberInt(0)
},
{
"coordinates" : "146,141,169,107,192,154",
"height" : NumberInt(0)
}
],
Here, for each entry in the old keepOuts, an object is created: the string goes into a coordinates key, and an additional height field is set on the same object, defaulting to 0.
I got the following query from this portal. It gives me the split array, but it does not update the collection:
db.quoteDetails1.aggregate([
    { "$match": { keepOuts: { $exists: true } } },
    { "$unwind": { path: "$keepOuts" } },
    {
        "$group": {
            "_id": "$_id",
            result: { "$push": { coordinates: "$keepOuts" } }
        }
    }
])
Thank you all.
Try this!
db.quoteDetails1.find({}).toArray().forEach(
    function (elem) {
        // wrap each string in an object and add the default height
        for (var i = 0; i < elem.keepOuts.length; ++i) {
            var obj = { "coordinates": elem.keepOuts[i], "height": NumberInt(0) };
            elem.keepOuts[i] = obj;
        }
        db.quoteDetails1.update({ _id: elem._id }, elem);
    });
I got the answer with the JavaScript below:
db.getCollection('quoteDetails').find({ keepOuts: { "$exists": true } }).forEach(
    function (doc) {
        if (doc.keepOuts.length > 0) {
            var array = doc.keepOuts;
            // only convert documents whose entries are still plain strings
            if (typeof array[0] === "string") {
                db.quoteDetails.update(
                    { "_id": doc._id },
                    { "$set": { keepOuts: arrayToObj(doc) } }
                );
            }
        }
    }
);

function arrayToObj(doc) {
    print("processing for " + doc._id);
    var array = doc.keepOuts;
    var len = array.length;
    var outerArray = [];
    for (var i = 0; i < len; i++) {
        var obj = {};
        var curVal = array[i];
        obj["coordinates"] = curVal;
        obj["height"] = 0;
        outerArray.push(obj);
    }
    return outerArray;
}
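For what it's worth, on MongoDB 4.2 or newer the same conversion can be done in a single server-side command with a pipeline update, with no client-side loop. A minimal sketch under that version assumption (note that height is written as a plain number here rather than NumberInt):

db.quoteDetails.updateMany(
    { "keepOuts.0": { $type: "string" } },  // only touch docs whose entries are still plain strings
    [
        { $set: {
            keepOuts: {
                $map: {
                    input: "$keepOuts",
                    in: { coordinates: "$$this", height: 0 }
                }
            }
        } }
    ]
)

Because the filter only matches documents whose first keepOuts entry is still a string, the command is safe to re-run.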
Related
I have a "mongodb colllenctions" and I'd like to remove the "empty strings"with keys from it.
From this:
{
"_id" : ObjectId("56323d975134a77adac312c5"),
"year" : "15",
"year_comment" : "",
}
{
"_id" : ObjectId("56323d975134a77adac312c5"),
"year" : "",
"year_comment" : "asd",
}
I'd like to gain this result:
{
"_id" : ObjectId("56323d975134a77adac312c5"),
"year" : "15",
}
{
"_id" : ObjectId("56323d975134a77adac312c5"),
"year_comment" : "asd",
}
How could I solve it?
Please try executing the following code snippet in the Mongo shell, which strips fields with empty or null values:
var result = new Array();
db.getCollection('test').find({}).forEach(function(data) {
    for (var i in data) {
        // drop keys whose value is null or an empty string
        if (data[i] == null || data[i] == '') {
            delete data[i];
        }
    }
    result.push(data);
});
print(tojson(result));
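Note that the snippet above only strips the fields from the in-memory copies and prints the result; it does not write anything back. If you also want to persist the change, a minimal sketch (assuming the same test collection) is:

db.getCollection('test').find({}).forEach(function(data) {
    var changed = false;
    for (var i in data) {
        if (data[i] == null || data[i] == '') {
            delete data[i]; // drop the empty/null field from the copy
            changed = true;
        }
    }
    // save() replaces the stored document with the cleaned copy
    if (changed) {
        db.getCollection('test').save(data);
    }
});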
I would start by getting a distinct list of all the keys in the collection, use those keys as the query basis, and do an ordered bulk update using the Bulk API operations. The update statement uses the $unset operator to remove the fields.
The mechanism for getting the distinct keys list needed to assemble the query is Map-Reduce. The following mapreduce operation will populate a separate collection with all the keys as its _id values:
mr = db.runCommand({
"mapreduce": "my_collection",
"map" : function() {
for (var key in this) { emit(key, null); }
},
"reduce" : function(key, stuff) { return null; },
"out": "my_collection" + "_keys"
})
To get a list of all the dynamic keys, run distinct on the resulting collection:
db[mr.result].distinct("_id")
// prints ["_id", "year", "year_comment", ...]
Now given the list above, you can assemble your query by creating an object that will have its properties set within a loop. Normally your query will have this structure:
var keysList = ["_id", "year", "year_comment"];
var query = keysList.reduce(function(obj, k) {
var q = {};
q[k] = "";
obj["$or"].push(q);
return obj;
}, { "$or": [] });
printjson(query); // prints {"$or":[{"_id":""},{"year":""},{"year_comment":""}]}
You can then use the Bulk API (available with MongoDB 2.6 and above) as a way of streamlining your updates for better performance with the query above. Overall, you should be able to have something working as:
var bulk = db.collection.initializeOrderedBulkOp(),
    counter = 0,
    query = { "$or": [{ "_id": "" }, { "year": "" }, { "year_comment": "" }] },
    keysList = ["_id", "year", "year_comment"];

db.collection.find(query).forEach(function(doc) {
    var emptyKeys = keysList.filter(function(k) { // keys which hold empty strings in this doc
            return doc[k] === "";
        }),
        update = emptyKeys.reduce(function(obj, k) { // build the update object
            obj[k] = "";
            return obj;
        }, {});
    bulk.find({ "_id": doc._id }).updateOne({
        "$unset": update // use the $unset operator to remove the fields
    });
    counter++;
    if (counter % 1000 == 0) {
        // Execute per 1000 operations and re-initialize every 1000 update statements
        bulk.execute();
        bulk = db.collection.initializeOrderedBulkOp();
    }
});

// Flush any remaining operations that didn't reach the 1000 threshold
if (counter % 1000 != 0) {
    bulk.execute();
}
If you need to update a single blank parameter, or if you prefer to go parameter by parameter, you can use the updateMany functionality:
db.comments.updateMany({year: ""}, { $unset : { year : 1 }})
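As an aside, if you are on MongoDB 4.2 or newer, the whole clean-up can be pushed to the server with a pipeline update, needing no key list and no client-side loop. A sketch, assuming the same comments collection and that only top-level empty strings need to go:

db.comments.updateMany(
    {},
    [
        // $$ROOT is decomposed into k/v pairs, empty-string values are
        // filtered out, and the document is rebuilt from what remains
        { $replaceWith: {
            $arrayToObject: {
                $filter: {
                    input: { $objectToArray: "$$ROOT" },
                    cond: { $ne: ["$$this.v", ""] }
                }
            }
        } }
    ]
)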
I have time data in my Mongo database. Each document equals a minute and contains 60 seconds as an object with a value for each. How do I get the average value of all the seconds in one minute?
A document looking like that:
{
"_id" : ObjectId("55575e4062771c26ec5f2287"),
"timestamp" : "2015-05-16T18:12:00.000Z",
"values" : {
"0" : "26.17",
"1" : "26.17",
"2" : "26.17",
...
"58" : "24.71",
"59" : "25.20"
}
}
You could take two approaches here:
Changing the schema and use the aggregation framework to get the average by using the $avg operator OR
Apply Map-Reduce.
Let's look at the first option. Currently as it is, the schema will not make it possible to use the aggregation framework because of the dynamic keys in the values subdocument. The ideal schema that would favour the aggregation framework would have the values field be an array which contains embedded key/value documents like this:
/* 0 */
{
"_id" : ObjectId("5559d66c9bbec0dd0344e4b0"),
"timestamp" : "2015-05-16T18:12:00.000Z",
"values" : [
{
"k" : "0",
"v" : 26.17
},
{
"k" : "1",
"v" : 26.17
},
{
"k" : "2",
"v" : 26.17
},
...
{
"k" : "58",
"v" : 24.71
},
{
"k" : "59",
"v" : 25.20
}
]
}
With MongoDB 3.6 and newer, use the aggregation framework to transform the hashmaps to an array by using the $objectToArray operator, then use $avg to calculate the average.
Consider running the following aggregate pipeline:
db.test.aggregate([
{
"$addFields": {
"values": { "$objectToArray": "$values" }
}
}
])
Armed with this new schema, you would then need to update your collection to change the string values to numbers by iterating the cursor returned from the aggregate method and using bulkWrite as follows:
var bulkUpdateOps = [],
cursor = db.test.aggregate([
{
"$addFields": {
"values": { "$objectToArray": "$values" }
}
}
]);
cursor.forEach(doc => {
const { _id, values } = doc;
let temp = values.map(item => {
item.key = item.k;
item.value = parseFloat(item.v) || 0;
delete item.k;
delete item.v;
return item;
});
bulkUpdateOps.push({
"updateOne": {
"filter": { _id },
"update": { "$set": { values: temp } },
"upsert": true
}
});
if (bulkUpdateOps.length === 1000) {
db.test.bulkWrite(bulkUpdateOps);
bulkUpdateOps = [];
}
});
if (bulkUpdateOps.length > 0) {
db.test.bulkWrite(bulkUpdateOps);
}
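If you happen to be on MongoDB 4.2 or newer, the same conversion can also be done entirely server-side with a pipeline update instead of iterating a cursor. A sketch under that assumption ($toDouble itself needs 4.0+):

db.test.updateMany(
    { "values": { $type: "object" } },  // skip docs already converted to an array
    [
        { $set: {
            values: {
                $map: {
                    input: { $objectToArray: "$values" },
                    in: { key: "$$this.k", value: { $toDouble: "$$this.v" } }
                }
            }
        } }
    ]
)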
If your MongoDB version does not support the $objectToArray operator in the aggregation framework, converting the current schema into the one above takes a bit of native JavaScript with the MongoDB find() cursor's forEach() function, as follows (assuming you have a test collection):
var bulkUpdateOps = [],
cursor = db.test.find();
cursor.forEach(doc => {
const { _id, values } = doc;
let temp = Object.keys(values).map(k => {
let obj = {};
obj.key = k;
obj.value = parseFloat(doc.values[k]) || 0;
return obj;
});
bulkUpdateOps.push({
"updateOne": {
"filter": { _id },
"update": { "$set": { values: temp } },
"upsert": true
}
});
if (bulkUpdateOps.length === 1000) {
db.test.bulkWrite(bulkUpdateOps);
bulkUpdateOps = [];
}
});
if (bulkUpdateOps.length > 0) {
db.test.bulkWrite(bulkUpdateOps);
}
or
db.test.find().forEach(function (doc){
var keys = Object.keys(doc.values),
values = keys.map(function(k){
var obj = {};
obj.key = k;
obj.value = parseFloat(doc.values[k]) || 0;
return obj;
});
doc.values = values;
db.test.save(doc);
});
The collection will now have the above schema; the following aggregation pipeline will then give you the average value for one minute:
db.test.aggregate([
    {
        "$addFields": {
            "average": { "$avg": "$values.value" }
        }
    }
])
Or for MongoDB 3.0 and lower
db.test.aggregate([
{ "$unwind": "$values" },
{
"$group": {
"_id": "$timestamp",
"average": {
"$avg": "$values.value"
}
}
}
])
For the above document, the output would be:
/* 0 */
{
"result" : [
{
"_id" : "2015-05-16T18:12:00.000Z",
"average" : 25.684
}
],
"ok" : 1
}
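As a further aside, on MongoDB 4.0 or newer you can skip the schema migration entirely and average the original string map in one pipeline, combining $objectToArray with $toDouble. A sketch:

db.test.aggregate([
    { $project: {
        timestamp: 1,
        average: {
            $avg: {
                $map: {
                    input: { $objectToArray: "$values" },  // works on the original { "0": "26.17", ... } shape
                    in: { $toDouble: "$$this.v" }
                }
            }
        }
    } }
])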
As for the other Map-Reduce option, the intuition behind the operation is that you would use JavaScript to make the necessary transformations and calculate the final average. You would need to define three functions:
Map
When you tell Mongo to MapReduce, the function you provide as the map function will receive each document as the this parameter. The purpose of the map is to exercise whatever logic you need in JavaScript and then call emit 0 or more times to produce a reducible value.
var map = function () {
    var doc = this; // capture the document; `this` is rebound inside the forEach callback
    var obj = this.values;
    Object.keys(obj).forEach(function (key) {
        var val = parseFloat(obj[key]);
        var value = { count: 1, total: val };
        emit(doc.timestamp, value);
    });
};
For each document you need to emit a key and a value. The key is the first parameter to the emit function and represents how you want to group the values (in this case you will be grouping by the timestamp). The second parameter to emit is the value, which in this case is a little object containing the count of documents (always 1) and the value of a single second within the minute.
Reduce
Next you need to define the reduce function, where Mongo will group the items you emit and pass them as an array to the reduce function. It's inside the reduce function that you do the aggregation calculations and reduce all the objects to a single object.
var reduce = function (key, values) {
    var result = { count: 0, total: 0 };
    values.forEach(function (value) {
        result.count += value.count;
        result.total += value.total;
    });
    return result;
};
This reduce function returns a single result. It's important for the return value to have the same shape as the emitted values. It's also possible for MongoDB to call the reduce function multiple times for a given key and ask you to process a partial set of values, so if you need to perform some final calculation, you can also give MapReduce a finalize function.
Finalize
The finalize function is optional, but if you need to calculate something based on a fully reduced set of data, you'll want to use a finalize function. Mongo will call the finalize function after all the reduce calls for a set are complete. This would be the place to calculate the average of all the second values in a document/timestamp:
var finalize = function (key, value) {
value.average = value.total / value.count;
return value;
};
Putting It Together
With the JavaScript in place, all that is left is to tell MongoDB to execute a MapReduce:
var map = function () {
    var doc = this; // capture the document; `this` is rebound inside the callback
    var obj = this.values;
    Object.keys(obj).forEach(function (key) {
        var val = parseFloat(obj[key]);
        emit(doc.timestamp, { count: 1, total: val });
    });
};

var reduce = function (key, values) {
    var result = { count: 0, total: 0 };
    values.forEach(function (value) {
        result.count += value.count;
        result.total += value.total;
    });
    return result;
};

var finalize = function (key, value) {
    value.average = value.total / value.count;
    return value;
};
db.collection.mapReduce(
map,
reduce,
{
out: { merge: "map_reduce_example" },
finalize: finalize
}
)
And when you query the output collection map_reduce_example, db.map_reduce_example.find(), you get the result:
/* 0 */
{
"_id" : null,
"value" : {
"count" : 5,
"total" : 128.42,
"average" : 25.684
}
}
References:
A Simple MapReduce with MongoDB and C#
MongoDB documentation on mapReduce
This kind of data structure creates lots of conflicts and is difficult to handle with Mongo operations. In this case you should either change your schema design or, if you are not able to change the schema, follow this:
Your schema has two major problems: 1) the keys are dynamic, and 2) the values of those keys are strings, so you need some programming code to calculate the average. Check the scripts below.
First, calculate the size of the values object:
Object.size = function(obj) {
    var size = 0, key;
    for (key in obj) {
        if (obj.hasOwnProperty(key)) size++;
    }
    return size;
};

db.collectionName.find().forEach(function(myDoc) {
    var objects = myDoc.values;
    var value = 0;
    // Get the size of the values object
    var size = Object.size(objects);
    for (var key in objects) {
        value = value + parseFloat(objects[key]); // parse string values to float
    }
    var avg = value / size;
    print(value);
    print(size);
    print(avg);
});
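If you also want to store the computed average back on each document rather than just printing it, a minimal extension of the loop above could look like this (the avgValue field name is illustrative, not part of the original schema):

db.collectionName.find().forEach(function(myDoc) {
    var objects = myDoc.values;
    var total = 0;
    var size = Object.size(objects); // helper defined above
    for (var key in objects) {
        total += parseFloat(objects[key]);
    }
    // persist the per-minute average (hypothetical field name)
    db.collectionName.update(
        { _id: myDoc._id },
        { $set: { avgValue: total / size } }
    );
});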
I'm stuck with two problems: first, how to loop and update in Minimongo like this, and second, how to get the _id in order to update it.
var data = [
{
"key" : "North America" ,
"values" : [
[ 1025409600000 , 23.041422681023] ,
[ 1028088000000 , 19.854291255832]
]
},
{
"key" : "Africa" ,
"values" : [
[ 1025409600000 , 7.9356392949025] ,
[ 1028088000000 , 7.4514668527298]
]
}];
And this is my function for the loop, BUT it just stops at the very first iteration:
for (var i = 0; i < data.length; i++) {
    Looping.insert({
        key: data[i].key,
        values: []
    });
    var looping_id = Looping._id;
    for (var j = 0; j < data[i].values.length; j++) {
        Looping.update(
            { _id: looping_id },
            { $addToSet: { values: data[i].values[j] } }
        );
    }
}
Assuming Looping is a collection, you could insert your items with this:
_.each(data, function(item) {
Looping.insert(item);
});
However, because you are using $addToSet, I'm guessing that the values could be non-unique. In that case you can try:
_.each(data, function(item) {
var id = Looping.insert({key: item.key});
_.each(item.values, function(value) {
Looping.update(id, {$addToSet: {values: value}});
});
});
Notes:
The id of the inserted item is returned by the call to insert; it is not held in the collection object.
If you have a lot of items to insert, you should consider de-duplicating each set of values prior to the insert so you can avoid all of the updates, as sketched below.
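A minimal de-duplication sketch along those lines, assuming the underscore _.uniq helper that Meteor bundles (the [timestamp, value] pairs are compared via JSON.stringify):

_.each(data, function(item) {
    // drop duplicate value pairs client-side so a single insert suffices
    Looping.insert({
        key: item.key,
        values: _.uniq(item.values, false, function(v) { return JSON.stringify(v); })
    });
});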
Suppose I have a single document in my mongo collection that looks like this:
{
"_id": 123,
"field_to_prune":
{
"keep_field_1": "some value",
"random_field_1": "some value",
"keep_field_2": "some value",
"random_field_2": "some value",
"random_field_3": "some value"
}
}
I want to prune that document to look like this:
{
"_id": 123,
"field_to_prune":
{
"keep_field_1": "some value",
"keep_field_2": "some value"
}
}
However, my issue is that I don't know what the "random" field names are. In Mongo, how would I $unset all fields except a couple of known fields?
I can think of a couple of ways, but I don't know the syntax. I could select all field NAMES and then unset each one of those fields, kind of like this:
[Some query to find all field names under "field_to_prune" for id 123].forEach(function(i) {
var key = "field_to_prune." + i;
print("removing field: " + key);
var mod = {"$unset": {}};
mod["$unset"][key] = "";
db.myCollection.update({ _id: "123" }, mod);
});
Another way I was thinking of doing it was to unset where the field name is not in an array of strings that i defined. not sure how to do that either. Any ideas?
If you don't care about atomicity then you may do it with save:
doc = db.myCollection.findOne({"_id": 123});
for (k in doc.field_to_prune) {
if (k === 'keep_field_1') continue;
if (k === 'keep_field_2') continue;
delete doc.field_to_prune[k];
}
db.myCollection.save(doc);
The main problem of this solution is that it's not atomic. So, any update to doc between findOne and save will be lost.
Alternative is to actually unset all unwanted fields instead of saving the doc:
doc = db.myCollection.findOne({"_id": 123});
unset = {};
for (k in doc.field_to_prune) {
if (k === 'keep_field_1') continue;
if (k === 'keep_field_2') continue;
unset['field_to_prune.'+k] = 1;
}
db.myCollection.update({_id: doc._id}, {$unset: unset});
This solution is much better because mongo runs update atomically, so no update will be lost. And you don't need another collection to do what you want.
Actually the best way to do this is to iterate over the cursor and use the $unset update operator to remove those fields in the subdocuments, except the known fields you want to keep. You also need to use "bulk" operations for maximum efficiency.
MongoDB 3.2 deprecates Bulk() and its associated methods, so you should use .bulkWrite():
var wantedFields = ["keep_field_1", "keep_field_2"];
var requests = [];
var count = 0;
db.myCollection.find().forEach(function(document) {
    var fieldToPrune = document.field_to_prune;
    var unsetOp = {};
    for (var key in fieldToPrune) {
        if ((wantedFields.indexOf(key) === -1) && Object.prototype.hasOwnProperty.call(fieldToPrune, key)) {
            unsetOp["field_to_prune." + key] = "";
        }
    }
    requests.push({
        "updateOne": {
            "filter": { "_id": document._id },
            "update": { "$unset": unsetOp }
        }
    });
    count++;
    if (count % 1000 === 0) {
        // Execute per 1000 operations and re-init
        db.myCollection.bulkWrite(requests);
        requests = [];
    }
});

// Clean up remaining queued operations
if (requests.length > 0) {
    db.myCollection.bulkWrite(requests);
}
From MongoDB 2.6 you can use the Bulk API.
var bulk = db.myCollection.initializeUnorderedBulkOp();
var count = 0;
db.myCollection.find().forEach(function(document) {
    var fieldToPrune = document.field_to_prune;
    var unsetOp = {};
    for (var key in fieldToPrune) {
        if ((wantedFields.indexOf(key) === -1) && Object.prototype.hasOwnProperty.call(fieldToPrune, key)) {
            unsetOp["field_to_prune." + key] = "";
        }
    }
    bulk.find({ "_id": document._id }).updateOne({ "$unset": unsetOp });
    count++;
    if (count % 1000 === 0) {
        // Execute per 1000 operations and re-initialize
        bulk.execute();
        bulk = db.myCollection.initializeUnorderedBulkOp();
    }
});

// Execute any remaining operations that didn't reach the 1000 threshold
if (count % 1000 > 0) {
    bulk.execute();
}
I solved this with a temporary collection. I did the following:
db.myCollection.find({"_id": "123"}).forEach(function(i) {
db.temp.insert(i);
});
db.myCollection.update(
{_id: "123"},
{ $unset: { "field_to_prune": ""}}
)
db.temp.find().forEach(function(i) {
var key1 = "field_to_prune.keep_field_1";
var key2 = "field_to_prune.keep_field_2";
var mod = {"$set": {}};
mod["$set"][key1] = i.field_to_prune.keep_field_1;
mod["$set"][key2] = i.field_to_prune.keep_field_2;
db.myCollection.update({_id: "123"}, mod)
});
db.getCollection("temp").drop();
Unfortunately all the solutions presented so far are relying on script execution and some sort of forEach invocation, which will end up handling only one document at a time. If the collection to normalize is big this is going to be impractical and take way too long.
Also the functions passed to forEach are executed on the client, meaning that if the connection to the database is lost, the operation is going to be interrupted in the middle of the process, potentially leaving the collection in inconsistent state.
Performance issues could be mitigated by using bulk operations like the one proposed by #styvane here. That's solid advice.
But we can do better. Update operations have supported the aggregation pipeline syntax since MongoDB 4.2, allowing the data normalization to be achieved by simply creating a new temporary object containing only the desired fields, unsetting the old one, and then putting the temporary one back in its place, all using the current values of the document as references:
db.theCollection.updateMany(
{field_to_prune: {$exists: true}},
[
{$set: {_temp: {
keep_field_1: '$field_to_prune.keep_field_1',
keep_field_2: '$field_to_prune.keep_field_2'
}}},
{$unset: 'field_to_prune'},
{$set: {field_to_prune: '$_temp'}},
{$unset: '_temp'}
]
)
Example:
> db.myColl.insertOne({
... _id: 123,
... field_to_prune: {
... keep_field_1: "some value",
... random_field_1: "some value",
... keep_field_2: "some value",
... random_field_2: "some value",
... random_field_3: "some value"
... }
... })
{ "acknowledged" : true, "insertedId" : 123 }
>
> db.myColl.insertOne({
... _id: 234,
... field_to_prune: {
... // keep_field_1 is absent
... random_field_1: "some value",
... keep_field_2: "some value",
... random_field_2: "some value",
... random_field_3: "some value"
... }
... })
{ "acknowledged" : true, "insertedId" : 234 }
>
> db.myColl.find()
{ "_id" : 123, "field_to_prune" : { "keep_field_1" : "some value", "random_field_1" : "some value", "keep_field_2" : "some value", "random_field_2" : "some value", "random_field_3" : "some value" } }
{ "_id" : 234, "field_to_prune" : { "random_field_1" : "some value", "keep_field_2" : "some value", "random_field_2" : "some value", "random_field_3" : "some value" } }
>
> db.myColl.updateMany(
... {field_to_prune: {$exists: true}},
... [
... {$set: {_temp: {
... keep_field_1: '$field_to_prune.keep_field_1',
... keep_field_2: '$field_to_prune.keep_field_2'
... }}},
... {$unset: 'field_to_prune'},
... {$set: {field_to_prune: '$_temp'}},
... {$unset: '_temp'}
... ]
...)
{ "acknowledged" : true, "matchedCount" : 2, "modifiedCount" : 2 }
>
> db.myColl.find()
{ "_id" : 123, "field_to_prune" : { "keep_field_1" : "some value", "keep_field_2" : "some value" } }
{ "_id" : 234, "field_to_prune" : { "keep_field_2" : "some value" } }
Here is my solution; I think it is easier than the others I read:
db.labels.find({"_id" : ObjectId("123")}).snapshot().forEach(
function (elem) {
db.labels.update({_id: elem._id},
{'field_to_prune.keep_field_1': elem.field_to_prune.keep_field_1,
'field_to_prune.keep_field_2': elem.field_to_prune.keep_field_2});
});
I'm deleting everything but the fields 'keep_field_1' and 'keep_field_2'. Note this is a full-document replacement, so any other top-level fields besides _id are removed as well.
I have the following mongodb structure
{
"_id": ObjectId("507c80a143188f9610000003"),
"date": ISODate("2012-10-15T21: 31: 13.0Z"),
"uid": NumberInt(35920),
"comp": NumberInt(770),
"fields": {
"rating": {
"parent": "rating",
"weight": NumberInt(2),
"rel_weight": 0.11,
},
"capacity": {
"parent": "capacity",
"weight": NumberInt(4),
"rel_weight": 0.89,
},
}
}
The "fields" attribute has 2 fields "rating" and "capacity" in it. But, each entry might have a different set of fields. eg. dimension, price etc.
I would like to find all entries that have "rating" under "fields" and get a sum of "weight" attribute of all such entries.
I am a newbie to MongoDB and I tried using the mapReduce function, but to no avail.
Given below is the code I used. Kindly let me know where I went wrong or if there is a better solution instead of this code.
function map() {
    emit(this._id, { weight: this.fields.rating.weight });
}
function reduce(key, value) {
    var sum = 0;
    for (var i = 0; i < value.length; i++) {
        sum += value[i].amount;
    }
    return sum;
}
res = db.collection_name.mapReduce(map, reduce, { query: { "fields.rating": { $exists: true } } });
I was finally able to figure it out and get it working. I have shared my code below.
var map = function() {
    if (this.fields.rating) {
        emit(this.fields.rating.parent, this.fields.rating.weight);
    }
};
var reduce = function(k, vals) {
    var sum = 0;
    var count = 0;
    for (var i in vals) {
        sum += vals[i];
        count++;
    }
    var avg = (sum / count);
    return avg;
};
var res = db.myCollection.mapReduce(map, reduce, { out: "myoutput" });
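For completeness, the original goal (a sum of weights rather than the average computed above) can also be had without Map-Reduce via the aggregation framework. A sketch against the same collection:

// Sums the rating weights across all documents that have one,
// grouped by the rating's parent field
db.myCollection.aggregate([
    { $match: { "fields.rating": { $exists: true } } },
    { $group: {
        _id: "$fields.rating.parent",
        totalWeight: { $sum: "$fields.rating.weight" }
    } }
])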