MongoDB CoffeeScript find document with several fields - mongodb

How can I check if there are any documents with both name: "John" and age: 40?
This doesn't seem to be working
db.Event.findOne name: "John", age: 40, (error, result) ->
  unless result
    # document not found, so add it
    db.Event.save {name: "John", age: 40}
    inserted = true
  else
    # document found
    inserted = false

A callback to manage the inserted boolean:
data =
  'name': 'John'
  'age': 40
inserted = false
db.Event.update data,
  { '$setOnInsert': data },
  'upsert': true,
  (error, result) ->
    inserted = not error

Sounds like you need the $setOnInsert update operator. When used with the { "upsert": true } flag in an atomic update() operation, the operation inserts a new document if nothing matches the query, and $setOnInsert assigns the specified values to the fields of that new document. If the update operation does not result in an insert, $setOnInsert does nothing:
JavaScript:
var data = {
  "name": "John",
  "age": 40
}
db.Event.update(
  data,
  { "$setOnInsert": data },
  { "upsert": true }
)
CoffeeScript:
data =
  'name': 'John'
  'age': 40
db.Event.update data, { '$setOnInsert': data }, 'upsert': true
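If you also need the inserted flag from the original question, the write result reports whether the upsert actually inserted a new document. A minimal sketch using the modern Node.js driver's updateOne inside an async function (the exact result field names depend on the driver version, so treat them as assumptions):
// Sketch: determine whether the upsert inserted a new document.
// Collection and field names are taken from the question above.
const data = { name: "John", age: 40 };
const result = await db.collection("Event").updateOne(
  data,
  { $setOnInsert: data },
  { upsert: true }
);
// upsertedCount is 1 when a new document was inserted, 0 when an existing one matched
const inserted = result.upsertedCount === 1;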

Related

update specific element from nested document array mongodb where has two matches

I need to update a specific object, or create it if it does not exist: set score.b1 = 50 and total = 100 where the object matches curse = 5 and block = 2.
{
  "_id": "sad445",
  "year": 2020,
  "grade": 4,
  "seccion": "A",
  "id": 100,
  "name": "pedro",
  "notes": [
    {
      "curse": 5,
      "block": 1,
      "score": { "a1": 5, "a2": 10, "a3": 15 },
      "total": 50
    },
    {
      "curse": 5,
      "block": 2,
      "score": { "b1": 10, "b2": 20, "b3": 30 },
      "total": 20
    }
  ]
}
I can update the whole object, but I need to update or create a specific element of the score rather than all of it, and/or create the notes objects ({curse, block, score}) when notes is an empty array (notes: []).
notas.UpdateMany(
  { "$and": [{ "_id": "sad445" }, { "notes": { "$elemMatch": { "curse": 5, "block": 3 } } }] },
  { "$set": {
      "updated_at": { "$date": { "$numberLong": "1620322881360" } },
      "notes.$.score": { "vvkzo": 15, "i2z4i": 2, "i2z4i|pm": 5 },
      "notes.$.total": 100
  } },
  { "multiple": false })
Demo - https://mongoplayground.net/p/VaE28ujeOPx
Use $ (update)
The positional $ operator identifies an element in an array to update without explicitly specifying the position of the element in the array:
- the positional $ operator acts as a placeholder for the first element that matches the query document, and
- the array field must appear as part of the query document.
db.collection.update(
  {
    "notes": {
      "$elemMatch": { "block": 2, "curse": 5 }
    }
  },
  {
    $set: { "notes.$.score.b4": 40 }
  })
Read upsert: true
Optional. When true, update() either:
- Creates a new document if no documents match the query. For more details see upsert behavior.
- Updates a single document that matches the query.
If both upsert and multi are true and no documents match the query, the update operation inserts only a single document. To avoid multiple upserts, ensure that the query field(s) are uniquely indexed. See Upsert with Unique Index for an example.
Defaults to false, which does not insert a new document when no match is found.
Update
Demo - https://mongoplayground.net/p/iQQDyjG2a_B
Use $function (available from MongoDB 4.4)
db.collection.update(
  { "_id": "sad445" },
  [
    {
      $set: {
        notes: {
          $function: {
            body: function(notes) {
              var record = { curse: 5, block: 2, score: { b4: 40 } };
              if (!notes || !notes.length) { return [record]; } // create a new record and return it when there are no notes
              var updated = false;
              for (var i = 0; i < notes.length; i++) {
                if (notes[i].block == 2 && notes[i].curse == 5) { // check condition for update
                  updated = true;
                  notes[i].score.b4 = 40; // update here
                  break;
                }
              }
              if (!updated) notes.push(record); // if nothing was updated, push the record into the notes array
              return notes;
            },
            args: ["$notes"],
            lang: "js"
          }
        }
      }
    }
  ]
)
Try to add upsert: true.
Creates a new document if no documents match the query. Updates a single document that matches the query.
notas.UpdateMany(
  { "$and": [{ "_id": "sad445" }, { "notes": { "$elemMatch": { "curse": 5, "block": 3 } } }] },
  { "$set": {
      "updated_at": { "$date": { "$numberLong": "1620322881360" } },
      "notes.$.score": { "vvkzo": 15, "i2z4i": 2, "i2z4i|pm": 5 },
      "notes.$.total": 100
  } },
  { "multiple": false, "upsert": true })

MongoDB - Get IDs of inserted and existing documents after "Insert if not exist" operation on multiple documents

I have to insert multiple documents if they don't already exist, but the important thing is that in the query results I need to have IDs of both the inserted and already existing items.
I'm trying with the following bulkWrite operation:
// external_id is a unique id other than the mongo _id
let items = [
  { external_id: 123, name: "John" },
  { external_id: 456, name: "Mike" },
  { external_id: 789, name: "Joseph" }
];
db.collection("my_collection")
  .bulkWrite(
    items.map((item) => {
      return {
        updateOne: {
          filter: { external_id: item.external_id },
          update: { $setOnInsert: item },
          upsert: true,
        },
      };
    })
  );
The problem is that the BulkWriteResult returns only the _ids of the inserted items in upsertedIds, while for the existing items it returns only the nMatched count.
The other solution I have thought about is to (1) run a find over an array of ids, (2) check the results for the ones that already exist, and (3) then insertMany the new ones:
let ids = [123, 456, 789];
let items = [
  { external_id: 123, name: "John" },
  { external_id: 456, name: "Mike" },
  { external_id: 789, name: "Joseph" }
];
// STEP 1: Find already existing items
db.collection("my_collection")
  .find({ external_id: { $in: ids } })
  .toArray(function (err, existingItems) {
    // If John already exists:
    // existingItems = [{_id: ObjectId, external_id: 123, name: "John"}]
    // STEP 2: Check which items have to be created
    let itemsToBeCreated = items.filter((item) =>
      !existingItems.some((ex) => ex.external_id === item.external_id)
    );
    // STEP 3: Insert new items
    db.collection("my_collection")
      .insertMany(itemsToBeCreated, function (err, result) {
        // FINALLY HERE I GET ALL THE IDs OF THE EXISTING AND INSERTED ITEMS
      });
  });
With this solution I'm concerned about performance, because these operations are fired 100K times a day for 10 items each, and about 90% of the time the items are new. So that's 900K new items and 100K already existing.
I would like to know if there is a better way of achieving this.
Thanks in advance
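Not from the original thread, but one possible sketch: keep the single bulkWrite and recover every _id with one extra projection query (modern async Node.js driver API assumed):
// Run the upserting bulkWrite as above, then fetch all _ids in a single find.
const externalIds = items.map((item) => item.external_id);
const bulkResult = await db.collection("my_collection").bulkWrite(
  items.map((item) => ({
    updateOne: {
      filter: { external_id: item.external_id },
      update: { $setOnInsert: item },
      upsert: true,
    },
  }))
);
// bulkResult.upsertedIds maps operation index -> _id for the newly inserted documents;
// the follow-up find returns the _id of every document, both new and pre-existing.
const allIds = await db.collection("my_collection")
  .find({ external_id: { $in: externalIds } })
  .project({ _id: 1, external_id: 1 })
  .toArray();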

using mongoose to update a specific sub doc property's value [duplicate]

Is there a way to update values in an object?
{
  _id: 1,
  name: 'John Smith',
  items: [{
    id: 1,
    name: 'item 1',
    value: 'one'
  }, {
    id: 2,
    name: 'item 2',
    value: 'two'
  }]
}
Let's say I want to update the name and value fields for the item where id = 2.
I have tried the following w/ mongoose:
var update = {name: 'updated item2', value: 'two updated'};
Person.update({'items.id': 2}, {'$set': {'items.$': update}}, function(err) { ...
The problem with this approach is that it updates/sets the entire object, so in this case I lose the id field.
Is there a better way in mongoose to set certain values in an array but leave other values alone?
I have also queried for just the Person:
Person.find({...}, function(err, person) {
  person.items ..... // I might be able to search through all the items here, find the item with id 2, then update the values I want and call person.save().
});
You're close; you should use dot notation in your use of the $ update operator to do that:
Person.update({'items.id': 2}, {'$set': {
  'items.$.name': 'updated item2',
  'items.$.value': 'two updated'
}}, function(err) { ...
model.update(
  { _id: 1, "items.id": "2" },
  {
    $set: {
      "items.$.name": "yourValue",
      "items.$.value": "yourvalue",
    }
  }
)
MongoDB Document
There is a mongoose way of doing it.
const itemId = 2;
const query = {
  'items._id': itemId
};
Person.findOne(query).then(doc => {
  const item = doc.items.id(itemId);
  item["name"] = "new name";
  item["value"] = "new value";
  doc.save();
  // send response to client
}).catch(err => {
  console.log('Oh! Dark');
});
One thing to remember: when you are matching an object in the array on the basis of more than one condition, use $elemMatch:
Person.update(
  {
    _id: 5,
    grades: { $elemMatch: { grade: { $lte: 90 }, mean: { $gt: 80 } } }
  },
  { $set: { "grades.$.std": 6 } }
)
Here are the docs.
For each document, the update operator $set can set multiple values, so rather than replacing the entire object in the items array, you can set the name and value fields of the object individually.
{'$set': {'items.$.name': update.name , 'items.$.value': update.value}}
Below is an example of how to update the value in the array of objects more dynamically.
Person.findOneAndUpdate({_id: id},
  {
    "$set": { [`items.$[outer].${propertyName}`]: value }
  },
  {
    "arrayFilters": [{ "outer.id": itemId }]
  },
  function(err, response) {
    ...
  })
Note that by doing it that way, you would be able to update even deeper levels of the nested array by adding additional arrayFilters and positional operator like so:
"$set": {[`items.$[outer].innerItems.$[inner].${propertyName}`]: value}
"arrayFilters":[{ "outer.id": itemId },{ "inner.id": innerItemId }]
More usage can be found in the official docs.
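Assembled from the fragments above into one call, as a sketch (id, propertyName, value, itemId and innerItemId are placeholders):
// Update one field of a nested array element, using two filtered positional operators.
await Person.findOneAndUpdate(
  { _id: id },
  { $set: { [`items.$[outer].innerItems.$[inner].${propertyName}`]: value } },
  { arrayFilters: [{ "outer.id": itemId }, { "inner.id": innerItemId }] }
);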
A cleaner solution using findOneAndUpdate:
await Person.findOneAndUpdate(
  { _id: id, 'items.id': 2 },
  {
    $set: {
      'items.$.name': 'updated item2',
      'items.$.value': 'two updated',
    }
  },
);
In Mongoose, we can update an array value using $set with dot (.) notation to target a specific value, in the following way:
db.collection.update({"_id": args._id, "viewData._id": widgetId}, {$set: {"viewData.$.widgetData": widgetDoc.widgetData}})
I tried other solutions, which worked fine, but their pitfall is that only fields that already exist would be updated; adding upsert to them would do nothing, so I came up with this:
Person.update({'items.id': 2}, {$set: {
  'items': ["item1", "item2", "item3", "item4"]
}}, {upsert: true})
I had similar issues. Here is the cleanest way to do it.
const personQuery = {
  _id: 1
}
const itemID = 2;
Person.findOne(personQuery).then(item => {
  const audioIndex = item.items.map(i => i.id).indexOf(itemID);
  item.items[audioIndex].name = 'Name value';
  item.save();
});
Found this solution using dot-object and it helped me.
import dot from "dot-object";
const user = await User.findByIdAndUpdate(id, { ...dot.dot(req.body) });
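For reference, dot-object's dot() flattens a nested object into dot-notation keys, which is why the resulting $set updates individual sub-fields instead of replacing whole objects. A small sketch (the body shape here is made up for illustration):
import dot from "dot-object";

// Flatten a nested object into dot-notation keys.
const body = { items: { name: "updated item2", value: "two updated" } };
console.log(dot.dot(body));
// => { "items.name": "updated item2", "items.value": "two updated" }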
I needed to update an array element with dynamic key-value pairs.
By mapping the update object to new keys containing the $ update operator, I am no longer bound to know the updated keys of the array element and instead assemble a new update object on the fly.
update = {
  name: "Andy",
  newKey: "new value"
}
new_update = Object.fromEntries(
  Object.entries(update).map(
    ([k, v], i) => ["my_array.$." + k, v]
  )
)
console.log({
  "$set": new_update
})
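The dynamically built object can then be passed straight to $set; a sketch (the collection, array name and filter value are assumed):
// Apply the dynamically assembled update to the matching array element.
await Person.updateOne(
  { "my_array.id": 2 },
  { $set: new_update }
);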
In Mongoose we can update it like a simple array:
user.updateInfoByIndex(0, "test")
User.methods.updateInfoByIndex = function(index, info) {
  this.arrayField[index] = info
  this.save()
}
update(
  {_id: 1, 'items.id': 2},
  {'$set': {'items.$[]': update}},
  {new: true})
Here is the doc about $[]: https://docs.mongodb.com/manual/reference/operator/update/positional-all/#up.S[]

mongodb: add an attribute to a subdocument

Given a collection of Users:
db.users.insertMany(
  [
    {
      _id: 1,
      name: "sue",
      points: [
        { points: 85, bonus: 20 },
        { points: 85, bonus: 10 }
      ]
    },
    {
      _id: 2,
      name: "bob",
      points: [
        { points: 85, bonus: 20 },
        { points: 64, bonus: 12 }
      ]
    }
  ]);
How do I add an attribute bonus_raw to every points entry, containing a copy of the bonus value? I tried:
db.getCollection('users').update({ },
  { $set: { 'points.$.bonus_raw': 'points.$.bonus' } }, false, true)
but I get:
The positional operator did not find the match needed from the query. Unexpanded update: points.$.bonus_raw
Updating multiple items in an array is not possible as of now in MongoDB.
To get this done, you will have to query the document, loop over all of your nested documents, and then save it back to MongoDB.
In your case, this can help:-
db.users.find({ points: { $exists: true } }).forEach(function (doc) {
  doc.points.forEach(function (points) {
    points.bonus_raw = points.bonus;
  });
  db.users.save(doc);
});
Also, take care of race conditions while doing an update in this way. See this
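As a possible alternative on MongoDB 4.2 or later (not part of the original answer), an aggregation-pipeline update can copy bonus into bonus_raw server-side and avoid the client-side loop; a sketch:
// For every document, rebuild the points array, merging a bonus_raw copy into each element.
db.users.updateMany(
  { points: { $exists: true } },
  [{ $set: {
    points: {
      $map: {
        input: "$points",
        as: "p",
        in: { $mergeObjects: ["$$p", { bonus_raw: "$$p.bonus" }] }
      }
    }
  } }]
)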

Remove all fields that are null

How can I remove all fields that are null from all documents of a given collection?
I have a collection of documents such as:
{
  'property1': 'value1',
  'property2': 'value2',
  ...
}
but each document may have a null entry instead of a value entry.
I would like to save disk space by removing all null entries. The existence of the null entries does not contain any information in my case because I know the format of the JSON document a priori.
Starting Mongo 4.2, db.collection.update() can accept an aggregation pipeline, finally allowing the removal of a field based on its value:
// { _id: ObjectId("5d0e8...d2"), property1: "value1", property2: "value2" }
// { _id: ObjectId("5d0e8...d3"), property1: "value1", property2: null, property3: "value3" }
db.collection.update(
  {},
  [{ $replaceWith: {
    $arrayToObject: {
      $filter: {
        input: { $objectToArray: "$$ROOT" },
        as: "item",
        cond: { $ne: ["$$item.v", null] }
      }
    }
  } }],
  { multi: true }
)
// { _id: ObjectId("5d0e8...d2"), property1: "value1", property2: "value2" }
// { _id: ObjectId("5d0e8...d3"), property1: "value1", property3: "value3" }
In detail:
The first part {} is the match query, filtering which documents to update (in our case all documents).
The second part [{ $replaceWith: { ... } }] is the update aggregation pipeline (note the square brackets signifying the use of an aggregation pipeline):
With $objectToArray, we first transform the document to an array of key/values such as [{ k: "property1", v: "value1" }, { k: "property2", v: null }, ...].
With $filter, we filter this array of key/values by removing items for which v is null.
We then transform back the filtered array of key/values to an object using $arrayToObject.
Finally, we replace the whole document by the modified one with $replaceWith.
Don't forget { multi: true }, otherwise only the first matching document will be updated.
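The same pipeline also works with updateMany, which updates all matching documents without needing the multi flag; a sketch:
// Same null-stripping pipeline, applied to every document via updateMany.
db.collection.updateMany(
  {},
  [{ $replaceWith: {
    $arrayToObject: {
      $filter: {
        input: { $objectToArray: "$$ROOT" },
        as: "item",
        cond: { $ne: ["$$item.v", null] }
      }
    }
  } }]
)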
// run in mongo shell
var coll = db.getCollection("collectionName");
var cursor = coll.find();
while (cursor.hasNext()) {
  var doc = cursor.next();
  var keys = {};
  var hasNull = false;
  for (var x in doc) {
    if (x != "_id" && doc[x] == null) {
      keys[x] = 1;
      hasNull = true;
    }
  }
  if (hasNull) {
    coll.update({_id: doc._id}, {$unset: keys});
  }
}
This is an important question since mongodb cannot index null values (i.e. do not query for nulls or you will be waiting for a long time), so it is best to entirely avoid nulls and set default values using setOnInsert.
Here is a recursive solution to removing nulls:
/**
 * RETRIEVES A LIST OF ALL THE KEYS IN A DOCUMENT, WHERE THE VALUE IS 'NULL' OR 'UNDEFINED'
 *
 * @param doc
 * @param keyName
 * @param nullKeys
 */
function getNullKeysRecursively(doc, keyName, nullKeys)
{
    for (var item_property in doc)
    {
        // SKIP BASE-CLASS STUFF
        if (!doc.hasOwnProperty(item_property))
            continue;
        // SKIP ID FIELD
        if (item_property === "_id")
            continue;
        // FULL KEY NAME (FOR SUB-DOCUMENTS)
        var fullKeyName;
        if (keyName)
            fullKeyName = keyName + "." + item_property;
        else
            fullKeyName = item_property;
        // DEBUGGING
        // print("fullKeyName: " + fullKeyName);
        // NULL FIELDS - MODIFY THIS BLOCK TO ADD CONSTRAINTS
        if (doc[item_property] === null || doc[item_property] === undefined)
            nullKeys[fullKeyName] = 1;
        // RECURSE OBJECTS / ARRAYS
        else if (doc[item_property] instanceof Object || doc[item_property] instanceof Array)
            getNullKeysRecursively(doc[item_property], fullKeyName, nullKeys);
    }
}
/**
 * REMOVES ALL PROPERTIES WITH A VALUE OF 'NULL' OR 'UNDEFINED'.
 * TUNE THE 'LIMIT' VARIABLE TO YOUR MEMORY AVAILABILITY.
 * ONLY CLEANS DOCUMENTS THAT REQUIRE CLEANING, FOR EFFICIENCY.
 * USES bulkWrite FOR EFFICIENCY.
 *
 * @param collectionName
 */
function removeNulls(collectionName)
{
    var coll = db.getCollection(collectionName);
    var lastId = ObjectId("000000000000000000000000");
    var LIMIT = 10000;
    while (true)
    {
        // GET THE NEXT PAGE OF DOCUMENTS
        var page = coll.find({ _id: { $gt: lastId } }).limit(LIMIT);
        if (!page.hasNext())
            break;
        // BUILD BULK OPERATION
        var arrBulkOps = [];
        page.forEach(function(item_doc)
        {
            lastId = item_doc._id;
            var nullKeys = {};
            getNullKeysRecursively(item_doc, null, nullKeys);
            // ONLY UPDATE MODIFIED DOCUMENTS
            if (Object.keys(nullKeys).length > 0)
                // UNSET INDIVIDUAL FIELDS, RATHER THAN REWRITE THE ENTIRE DOC
                arrBulkOps.push(
                    { updateOne: {
                        "filter": { _id: item_doc._id },
                        "update": { $unset: nullKeys }
                    } }
                );
        });
        coll.bulkWrite(arrBulkOps, { ordered: false });
    }
}
// GO GO GO
removeNulls('my_collection');
document before:
{
  "_id": ObjectId("5a53ed8f6f7c4d95579cb87c"),
  "first_name": null,
  "last_name": "smith",
  "features": {
    "first": {
      "a": 1,
      "b": 2,
      "c": null
    },
    "second": null,
    "third": {},
    "fourth": []
  },
  "other": [
    null,
    123,
    {
      "a": 1,
      "b": "hey",
      "c": null
    }
  ]
}
document after:
{
  "_id": ObjectId("5a53ed8f6f7c4d95579cb87c"),
  "last_name": "smith",
  "features": {
    "first": {
      "a": 1,
      "b": 2
    }
  },
  "other": [
    null,
    123,
    {
      "a": 1,
      "b": "hey"
    }
  ]
}
As you can see, it removes null, undefined, empty objects and empty arrays. If you need it to be more/less aggressive, it is a matter of modifying the block "NULL FIELDS - MODIFY THIS BLOCK TO ADD CONSTRAINTS".
edits welcome, especially #stennie
You can use the mongo updateMany functionality, but you must do this by specifying the parameter you are going to update, such as the year parameter:
db.collection.updateMany({year: null}, { $unset : { year : 1 }})
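A sketch extending the same idea to several known field names (the field names here are assumed): each field is unset only in the documents where it is currently null.
// Repeat the pattern per field, since $unset needs explicit field names.
db.collection.updateMany({ year: null },  { $unset: { year: 1 } });
db.collection.updateMany({ month: null }, { $unset: { month: 1 } });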
Like this question mentioned (mongodb query without field name):
Unfortunately, MongoDB does not support any method of querying all fields with a particular value.
So, you can either iterate the documents (like Wizard's example) or do it in a non-MongoDB way.
If this is a JSON file, removing all the lines containing null with sed might work:
sed '/null/d' ./mydata.json
Update for 2022:
If you delete keys with values Null, [], "", {} from the DB, that won't reduce its size on disk.
You need to do that before you upload the data into the collection.
I tested it myself. I had 6,000,000 documents in the collection and ran Xavier Guihot's script. Before the script the collection was 7.8 GB; after the script it became 7.9 GB.
I confirm that the script does the job and removes the keys; it just doesn't reduce the DB's space allocation.
Then I deleted the collection completely and imported .json dumps that had already been formatted (all keys with values Null, [], "", {} removed). After that, the collection size was 6.1 GB, which is 22% less than the original size.
Here is the Python script I used to remove all empty keys from the json dumps:
import fileinput
import json

for line in fileinput.input(inplace=1):
    j = {k: v for k, v in json.loads(line).items() if v}
    print(line.replace(line, json.dumps(j)))
Just run the script with the file name as an argument, for example: python3 main.py dump-00001
PS: take into account that you need to wait ~200 seconds after changes to the DB, because WiredTiger keeps a history of the data for consistency after you make changes. That means that only after ~200 seconds will you see the real storage allocation of the DB. 200 seconds is the default value for that behavior.