Remove multiple documents from array in MongoDB

My document contains an array like:
{
    "differentialDiagnosis" : "IART/Flutter",
    "explanation" : "The rhythm.",
    "fileName" : "A115a JPEG.jpg",
    "history" : "1 year old with fussiness",
    "interpretationList" : [
        {
            "interpretations" : [
                ObjectId("54efe7c8d6d5ca3d5c580a22"),
                ObjectId("54efe80bd6d5ca3d5c580a26")
            ]
        },
        {
            "interpretations" : [
                ObjectId("54efe80bd6d5ca3d5c580a26"),
                ObjectId("54efe82ad6d5ca3d5c580a28")
            ]
        }
    ]
}
and I want to remove all occurrences of ObjectId("54efe80bd6d5ca3d5c580a26"), so I wrote this query:
db.ekgs.update({ 'interpretationList.interpretations': ObjectId("54efe80bd6d5ca3d5c580a26") }, { $pull: { 'interpretationList.$.interpretations': ObjectId("54efe80bd6d5ca3d5c580a26") } })
But this removes only the first occurrence of ObjectId("54efe80bd6d5ca3d5c580a26").

The reason your query only removes the first occurrence is that, as explained in the documentation, "the positional $ operator acts as a placeholder for the first element that matches the query document".
The problem is that these updates are genuinely tricky when the schema nests arrays inside objects inside arrays. If you are able to flatten the schema, the update becomes much easier. So if instead your document looked like this:
{
    "differentialDiagnosis" : "IART/Flutter",
    "explanation" : "The rhythm.",
    "fileName" : "A115a JPEG.jpg",
    "history" : "1 year old with fussiness",
    "interpretations" : [
        ObjectId("54efe7c8d6d5ca3d5c580a22"),
        ObjectId("54efe80bd6d5ca3d5c580a26"),
        ObjectId("54efe82ad6d5ca3d5c580a28")
    ]
}
Then your query would be as simple as the one below. (Remember to add { "multi": true } as an option if you want to update multiple documents).
db.ekgs.update(
{ "interpretations": ObjectId("54efe80bd6d5ca3d5c580a26")},
{ "$pull": { "interpretations": ObjectId("54efe80bd6d5ca3d5c580a26") }}
);
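For example, with the multi option included, the same statement removes the value from every matching document in one call:
db.ekgs.update(
    { "interpretations": ObjectId("54efe80bd6d5ca3d5c580a26") },
    { "$pull": { "interpretations": ObjectId("54efe80bd6d5ca3d5c580a26") } },
    { "multi": true }
);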
But I understand that you might not be able to change the schema. In that case, you can try a solution that requires a small script. In the mongo shell, you can use the following bit of JavaScript to do the operation.
// Get cursor with documents requiring updating.
var oid = ObjectId("54efe80bd6d5ca3d5c580a26");
var c = db.ekgs.find({ "interpretationList.interpretations": oid });

// Iterate through cursor, removing oid from each subdocument in interpretationList.
while (c.hasNext()) {
    var isModified = false;
    var doc = c.next();
    var il = doc.interpretationList;
    for (var i in il) {
        var j = il[i].interpretations.length;
        while (j--) {
            // If oid to remove is present, remove it from array
            // and set flag that the document has been modified.
            if (il[i].interpretations[j].str === oid.str) {
                il[i].interpretations.splice(j, 1);
                isModified = true;
            }
        }
    }
    // If modified, update interpretationList for document.
    if (isModified) {
        db.ekgs.update({ "_id": doc._id }, { "$set": { "interpretationList": il } });
    }
}
UPDATE: Example of how it might work using the Node.js driver.
// Get cursor with documents requiring updating.
var oid = new ObjectID("54efe80bd6d5ca3d5c580a26");
var ekgs = db.collection("ekgs");
ekgs.find({ "interpretationList.interpretations": oid },
    function(err, c) {
        if (err) throw err;
        // Iterate through cursor, removing oid from each subdocument in interpretationList.
        c.each(function(err, doc) {
            if (err) throw err;
            // If doc is null then the cursor is exhausted/empty and closed.
            if (doc != null) {
                var isModified = false;
                var il = doc.interpretationList;
                for (var i in il) {
                    var j = il[i].interpretations.length;
                    while (j--) {
                        // If oid to remove is present, remove it from array
                        // and set flag that the document has been modified.
                        if (il[i].interpretations[j].equals(oid)) {
                            il[i].interpretations.splice(j, 1);
                            isModified = true;
                        }
                    }
                }
                // If modified, update interpretationList for document.
                if (isModified) {
                    ekgs.update({ "_id": doc._id },
                        { "$set": { "interpretationList": il } },
                        function(err, res) {
                            if (err) throw err;
                            // Callback.
                            console.log(res);
                        });
                }
            }
        });
    });
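For completeness, and not part of the original answers: if you are on MongoDB 3.6 or newer, the all-positional operator $[] can do this in a single statement, without flattening the schema or scripting. A minimal sketch:
// Assumes MongoDB 3.6+; $[] applies the $pull to every element of interpretationList.
db.ekgs.updateMany(
    { "interpretationList.interpretations": ObjectId("54efe80bd6d5ca3d5c580a26") },
    { "$pull": { "interpretationList.$[].interpretations": ObjectId("54efe80bd6d5ca3d5c580a26") } }
);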

Related

Add field if not exist to document in Mongo

Source Doc
{
    "_id" : "12345",
    "LastName" : "Smith",
    "FirstName" : "Fred",
    "ProfileCreated" : NumberLong(1447118831860),
    "DropOut" : false
}
New Doc
{
    "_id" : "12345",
    "LastName" : "Smith",
    "FirstName" : "Fred",
    "ProfileCreated" : NumberLong(1447118831860),
    "DropOut" : true,
    "LatestConsultation" : false
}
I have two collections which share a lot of the same document _ids and fields, but over time the new documents will have fields added to them, and/or completely new documents with new _ids will be created.
I think I know how to handle new documents using $setOnInsert and upsert = true, but I'm not sure how best to handle the addition of new fields. For documents that exist in both collections (matched on _id), I need any new fields to be added to the document without modifying the values of the existing fields, even if those values have changed, as in the example where the DropOut value differs. The resulting document I require is:
Result document
{
    "_id" : "12345",
    "LastName" : "Smith",
    "FirstName" : "Fred",
    "ProfileCreated" : NumberLong(1447118831860),
    "DropOut" : false,
    "LatestConsultation" : false
}
What is the best and most performant way to achieve this? Also, if this can somehow be combined into a single statement that also handles documents that exist in the new collection but not in the source collection, that would be amazing :-)
PS. I am using PyMongo, so a PyMongo example would be even better, but I can translate a mongo shell example.
I'm not sure this is possible with a single atomic update. However, you can combine a few operations: iterate the new collection and, for each document in it:
Use the _id field to query the old collection with findOne(), returning the old document that matches the _id from the new collection.
Merge the two: keep the old document's values and add any fields from the new document that do not exist in the old one.
Update the new collection with this merged document.
The following basic mongo shell example demonstrates the algorithm above:
function merge(from, to) {
    var obj = {};
    if (!from) {
        from = {};
    } else {
        obj = from;
    }
    for (var key in to) {
        if (!from.hasOwnProperty(key)) {
            obj[key] = to[key];
        }
    }
    return obj;
}
db.new_collection.find({}).snapshot().forEach(function(doc) {
    var old_doc = db.old_collection.findOne({ "_id": doc._id }),
        merged_doc = merge(old_doc, doc);

    db.new_collection.update(
        { "_id": doc._id },
        { "$set": merged_doc }
    );
});
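As a quick sanity check (not in the original answer), running merge() on the example documents from the question reproduces the desired result document: the old DropOut value is kept and LatestConsultation is added.
var old_doc = {
    "_id": "12345", "LastName": "Smith", "FirstName": "Fred",
    "ProfileCreated": NumberLong(1447118831860), "DropOut": false
};
var new_doc = {
    "_id": "12345", "LastName": "Smith", "FirstName": "Fred",
    "ProfileCreated": NumberLong(1447118831860), "DropOut": true,
    "LatestConsultation": false
};
// Old values win; fields present only in the new document are appended.
printjson(merge(old_doc, new_doc));
// => { ..., "DropOut": false, "LatestConsultation": false }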
For large collections, it is better to leverage the Bulk API, which offers better performance by sending the update requests to the server in batches rather than sending every update as its own request (which is slow). The method to use is bulkWrite(), which can be applied to the above example as follows:
// merge() helper as defined above.

var ops = [];
db.new_collection.find({}).snapshot().forEach(function(doc) {
    var old_doc = db.old_collection.findOne({ "_id": doc._id }),
        merged_doc = merge(old_doc, doc);

    ops.push({
        "updateOne": {
            "filter": { "_id": doc._id },
            "update": { "$set": merged_doc }
        }
    });

    // Send the queued updates to the server in batches of 1000.
    if (ops.length === 1000) {
        db.new_collection.bulkWrite(ops);
        ops = [];
    }
});

// Flush any remaining queued operations.
if (ops.length > 0) db.new_collection.bulkWrite(ops);
Or for MongoDB 2.6.x and 3.0.x releases use this version of Bulk operations:
var bulk = db.new_collection.initializeUnorderedBulkOp(),
    counter = 0;

db.new_collection.find({}).snapshot().forEach(function(doc) {
    var old_doc = db.old_collection.findOne({ "_id": doc._id }),
        merged_doc = merge(old_doc, doc);

    bulk.find({ "_id": doc._id }).updateOne({ "$set": merged_doc });
    counter++; // keep count so the queue can be flushed every 1000 operations

    if (counter % 1000 === 0) {
        bulk.execute();
        bulk = db.new_collection.initializeUnorderedBulkOp();
    }
});

// Flush the remaining operations that did not fill a batch of 1000.
if (counter % 1000 !== 0) bulk.execute();
In both cases the Bulk operations API helps reduce the I/O load on the server by sending the requests only once for every 1000 documents processed.

MongoDB Shell Script Update all field Names where there is space in field name

Using the MongoDB shell (3.2), how can I update all fields whose names contain a space, replacing the spaces with underscores?
{
    "Some Field" : "value",
    "OtherField" : "Value",
    "Another Field" : "Value"
}
Update the above document to look like this:
{
    "Some_Field" : "value",
    "OtherField" : "Value",
    "Another_Field" : "Value"
}
Renaming a field can be done with something like this:
db.CollectionName.update( { _id: 1 }, { $rename: { 'nickname': 'alias', 'cell': 'mobile' } } )
The challenging part here is the filter: how do I come up with a filter that matches field names containing a space?
This needs a two-step approach. First, you need a mechanism to get a list of all the keys with a space in your collection. Once you get the list, construct an object that maps those keys to their renamed values. You can then use that object as your $rename operator document. Consider using mapReduce to get the list of keys with spaces.
The following mapReduce operation will populate a separate collection with all the filtered keys as the _id values:
mr = db.runCommand({
    "mapreduce": "CollectionName",
    "map": function() {
        var regxp = /\s/;
        for (var key in this) {
            if (key.match(regxp)) {
                emit(key, null);
            }
        }
    },
    "reduce": function() {},
    "out": "filtered_keys"
})
To get a list of all the spaced keys, run distinct on the resulting collection:
db[mr.result].distinct("_id")
["Some Field", "Another Field"]
Now given the list above, you can assemble your update document by creating an object that will have its properties set within a loop. Normally your update document will have this structure:
var update = {
    "$rename": {
        "Some Field": "Some_Field",
        "Another Field": "Another_Field"
    }
}
Thus:
var update = { "$rename": {} };
db[mr.result].distinct("_id").forEach(function (key) {
    update["$rename"][key] = key.replace(/ /g, "_");
});
which you can then use in your update (the last two arguments are upsert and multi):
db.CollectionName.update({ }, update, false, true );
Thanks @chridam, that was an excellent answer.
I had to make small changes to get the query running; here is the full working version.
mr = db.runCommand({
    "mapreduce": "MyCollectionName",
    "map": function() {
        var regxp = /\s/;
        for (var key in this) {
            if (key.match(regxp)) {
                emit(key, null);
            }
        }
    },
    "reduce": function() {},
    "out": "filtered_keys"
})

db[mr.result].distinct("_id")

var update = { "$rename": {} };
db[mr.result].distinct("_id").forEach(function (key) {
    update["$rename"][key] = key.replace(/\s+/g, "_");
});
//print(update)
db.MyCollectionName.update({ }, update, false, true );

Complex mongodb document search

I'm attempting to write a find query where one of the keys is unknown at the time the query is run. For example, on the following document I'm interested in returning the document if "setup" is true:
{
    "a": {
        "randomstringhere": {
            "setup": true
        }
    }
}
However, I can't work out how to wildcard the "randomstringhere" field, as it changes for each document in the collection.
Can somebody help?
There is not much you can do with the current schema. But you could restructure your documents like this:
{
    "a": [
        {
            "keyName": "randomstringhere",
            "setup": true
        },
        //...
    ]
}
You can then write a query that looks like this:
{
    "a": { "$elemMatch": { "setup": true } }
}
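With that structure in place, the full query in the shell would look something like this (db.collection stands in for your collection name):
// Match documents whose "a" array contains at least one element with setup: true.
db.collection.find({ "a": { "$elemMatch": { "setup": true } } });
Since there is only a single condition here, the dot-notation form { "a.setup": true } would work just as well.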
You can't do this with a single plain query. With the current design you need a mechanism to get all the random keys first, and then assemble a query document that uses the $or operator over the variable key names.
The first part of your operation is possible using Map-Reduce. The following mapreduce operation will populate a separate collection called collectionKeys with all the random keys as the _id values:
mr = db.runCommand({
    "mapreduce": "collection",
    "map": function() {
        for (var key in this.a) { emit(key, null); }
    },
    "reduce": function() { },
    "out": "collectionKeys"
})
To get a list of all the random keys, run distinct on the resulting collection:
db[mr.result].distinct("_id")
Example output:
["randomstring_1", "randomstring_2", "randomstring_3", "randomstring_4", ...]
Now given the list above, you can assemble your query by creating an object that will have its properties set within a loop. Normally your query document will have this structure:
var query = {
    "$or": [
        { "a.randomstring_1.setup": true },
        { "a.randomstring_2.setup": true },
        { "a.randomstring_3.setup": true }
    ]
};
which you can then use in your query:
db.collection.find(query)
So using the above list of subdocument keys, you can dynamically construct the above using JavaScript's map() method:
mr = db.runCommand({
    "mapreduce": "collection", // your collection name
    "map": function() { // map function
        for (var key in this.a) { emit(key, null); }
    },
    "reduce": function() { }, // empty reducer that doesn't do anything
    "out": "collectionKeys" // output collection with results
})

var randomstringKeysList = db[mr.result].distinct("_id"),
    orOperator = randomstringKeysList.map(function (key) {
        var o = {};
        o["a." + key + ".setup"] = true;
        return o;
    }),
    query = { "$or": orOperator };

db.collection.find(query);

MongoDB - Remove Folder

I'm trying to delete all fields in MongoDB whose names contain a number higher than 10. Can you tell me how to do that?
I've been trying desperately for hours...
Thanks very much!
Robomongo
You need a mechanism to get a list of the keys in the collection first, filter that list for the ones that contain a number greater than 10, and then generate an update document that uses the $unset operator on them. Your update document should have this structure:
var update = {
    "$unset": {
        "p11": "",
        "p12": "",
        ...
    }
}
which you will use in your update as
db.collection.update({}, update, {multi: true});
You need the mapReduce() command to generate that update document. The following mapreduce operation will populate a separate collection with the document as the value:
db.collection.mapReduce(
function() {
var map = this;
for (var key in map) {
if (map.hasOwnProperty(key)){
num = parseInt(key.replace(/[^\d.]/g, '' ));
if (num > 10) emit(null, key);
}
}
},
function(key, values) {
return values.reduce(function(o, v) {
o[v] = "";
return o;
}, {});
},
{ "out": "filtered_keys" }
);
You can then run a query on the resultant collection to get the update document and do the actual update:
var update = {
"$unset": db.filtered_keys.findOne({"_id": null}).value
},
options = { "multi": true };
db.collection.update({}, update, options);
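One caveat worth noting (not from the original answer): mapReduce only calls reduce() when a key has more than one emitted value, so if exactly one field name qualifies across the whole collection, the stored value will be the bare key string rather than an object. A small guard before the update handles that case:
var val = db.filtered_keys.findOne({ "_id": null }).value,
    unsetDoc = val;
if (typeof val === "string") {
    // Single emit: wrap the bare key name into a proper $unset document.
    unsetDoc = {};
    unsetDoc[val] = "";
}
db.collection.update({}, { "$unset": unsetDoc }, { "multi": true });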

How can i remove empty string from a mongodb collection?

I have a "mongodb colllenctions" and I'd like to remove the "empty strings"with keys from it.
From this:
{
    "_id" : ObjectId("56323d975134a77adac312c5"),
    "year" : "15",
    "year_comment" : ""
}
{
    "_id" : ObjectId("56323d975134a77adac312c5"),
    "year" : "",
    "year_comment" : "asd"
}
I'd like to get this result:
{
    "_id" : ObjectId("56323d975134a77adac312c5"),
    "year" : "15"
}
{
    "_id" : ObjectId("56323d975134a77adac312c5"),
    "year_comment" : "asd"
}
How could I solve it?
Please try executing the following code snippet in the mongo shell, which strips fields with empty or null values:
var result = new Array();
db.getCollection('test').find({}).forEach(function(data) {
    for (var i in data) {
        if (data[i] == null || data[i] == '') {
            delete data[i];
        }
    }
    result.push(data);
});
print(tojson(result));
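Note that the snippet above only builds and prints an in-memory copy of the documents; it does not write anything back. A minimal sketch to actually persist the cleaned documents (assuming the same test collection) could look like this:
db.getCollection('test').find({}).forEach(function(data) {
    var modified = false;
    for (var i in data) {
        if (data[i] == null || data[i] === '') {
            delete data[i];
            modified = true;
        }
    }
    // Overwrite the document with its cleaned version only when something changed.
    if (modified) db.getCollection('test').save(data);
});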
I would start by getting a distinct list of all the keys in the collection, use those keys as the basis of your query, and do an ordered bulk update using the Bulk API. The update statement uses the $unset operator to remove the fields.
The mechanism to get the distinct list of keys that you need to assemble the query is Map-Reduce. The following mapreduce operation will populate a separate collection with all the keys as the _id values:
mr = db.runCommand({
    "mapreduce": "my_collection",
    "map": function() {
        for (var key in this) { emit(key, null); }
    },
    "reduce": function(key, stuff) { return null; },
    "out": "my_collection" + "_keys"
})
To get a list of all the dynamic keys, run distinct on the resulting collection:
db[mr.result].distinct("_id")
// prints ["_id", "year", "year_comment", ...]
Now given the list above, you can assemble your query by creating an object that will have its properties set within a loop. Normally your query will have this structure:
var keysList = ["_id", "year", "year_comment"];
var query = keysList.reduce(function(obj, k) {
    var q = {};
    q[k] = "";
    obj["$or"].push(q);
    return obj;
}, { "$or": [] });
printjson(query); // prints {"$or":[{"_id":""},{"year":""},{"year_comment":""}]}
You can then use the Bulk API (available with MongoDB 2.6 and above) as a way of streamlining your updates for better performance with the query above. Overall, you should be able to have something working as:
var bulk = db.collection.initializeOrderedBulkOp(),
    counter = 0,
    query = { "$or": [{ "_id": "" }, { "year": "" }, { "year_comment": "" }] },
    keysList = ["_id", "year", "year_comment"];

db.collection.find(query).forEach(function(doc) {
    var emptyKeys = keysList.filter(function(k) { // filter returns the keys that hold empty strings
            return doc[k] === "";
        }),
        update = emptyKeys.reduce(function(obj, k) { // build the update object from those keys
            obj[k] = "";
            return obj;
        }, {});

    bulk.find({ "_id": doc._id }).updateOne({
        "$unset": update // use the $unset operator to remove the fields
    });
    counter++;

    if (counter % 1000 == 0) {
        // Execute per 1000 operations and re-initialize every 1000 update statements.
        bulk.execute();
        bulk = db.collection.initializeOrderedBulkOp();
    }
});

// Flush the remaining operations that did not fill a batch of 1000.
if (counter % 1000 !== 0) bulk.execute();
If you need to clear a single blank field, or you prefer to go field by field, you can use updateMany:
db.comments.updateMany({ "year": "" }, { "$unset": { "year": 1 } })
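The same per-field pattern can be looped over the field names from the example (year and year_comment); a small convenience sketch:
["year", "year_comment"].forEach(function(field) {
    var query = {}, unsetDoc = {};
    query[field] = "";   // match documents where this field is an empty string
    unsetDoc[field] = 1; // and remove that field
    db.comments.updateMany(query, { "$unset": unsetDoc });
});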