I am reading all documents of a specific schema with Mongoose. In my program I then make some modifications to those results over time. Something like this:
var model = mongoose.model("Doc", docSchema);
model.find(function (err, result) {
  // for each result do some modifications
});
How can I send all the results back to the database to be saved? Currently I am iterating over the documents and calling save() on each one. I think there must be a better way, but so far I have only found information on updating documents IN the database without returning them, or bulk updates which apply the SAME update to every document.
You can use an update query with multi: true, which updates all matching documents in your collection.
Please find reference code below:
model.update({ "_id": id }, { $set: { "Key": "Value" } }, { multi: true }, function (err, records) {
if (err || !records) {
return res.json({ status: 500, message: "Unable to update documents." });
} else {
return res.json({ status: 200, message: "success" });
}
});
If you are trying to make the same change to each document in the results, you could do something like this:
model.update({ _id: { $in: results.map(doc=>doc._id) }}, { yourField: 'new value' }, { multi: true })
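Spelled out as a fuller sketch (here results is the array returned by model.find(), and yourField is just a placeholder field name):
var ids = results.map(function (doc) { return doc._id; });

model.update(
  { _id: { $in: ids } },                 // match every fetched document
  { $set: { yourField: 'new value' } },  // the same change for each of them
  { multi: true },                       // apply to all matches, not just the first
  function (err, raw) {
    if (err) return console.error(err);
    console.log('update result:', raw);
  }
);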
Related
I have a collection with over 1 million documents. Clearly, I don't need to search 1,000 or more documents, as the code should only be updating the latest one.
const nowplayingData = {"type":"S",
"station": req.params.stationname,
"song": data[1],
"artist": data[0],
"timeplay":npdate};
LNowPlaying.findOneAndUpdate( nowplayingData,
{ $addToSet: { history: [uuid] } },
{ upsert: true, sort: {_id:-1} },
function(err) {
if (err) {
console.log('ERROR when submitting round');
console.log(err);
}
});
I have added a sort so that it gets the latest document first, but if the document is not there yet and it is being added for the first time, the query as written will scan all documents to find that out.
Really it only needs to look at the last, say, 100 documents, or better still only at documents whose timeplay is within the last 5 minutes.
You can do something like:
const beforeFiveMin = new Date(Date.now() - 5 * 60 * 1000); // 5 minutes ago
const nowplayingData = {
  "type": "S", "station": req.params.stationname, "song": data[1], "artist": data[0],
  "timeplay": { $gte: beforeFiveMin }
};
But this will create a new document almost every time...so you will need to maintain it...
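Put together, a rough sketch of the upsert with that time-bounded filter (field names and the $addToSet value are carried over from the question; adjust as needed):
const beforeFiveMin = new Date(Date.now() - 5 * 60 * 1000);

LNowPlaying.findOneAndUpdate(
  {
    type: 'S',
    station: req.params.stationname,
    song: data[1],
    artist: data[0],
    timeplay: { $gte: beforeFiveMin }   // only consider documents from the last 5 minutes
  },
  { $addToSet: { history: [uuid] } },   // same update as in the question
  { upsert: true, sort: { _id: -1 } },  // newest match first; insert if nothing matches
  function (err) {
    if (err) {
      console.log(err);
    }
  }
);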
findOneAndUpdate updates the document but returns with the error
collection.findOneAndUpdate(
  { verificationHash: req.query.hash },
  { $unset: { verificationHash: '' } }
).then((err, user) => { /* some code */ });
In this case my document is updated, but user in the callback is always undefined and the content of the error looks like:
{
lastErrorObject: {n:1, updatedExisting: true},
value: document,
ok: 1
}
Am I missing something here, or is this how the object in the callback is returned?
When executing .findOneAndUpdate with the MongoDB driver, use { returnNewDocument: true }; if you are using Mongoose, use { new: true } instead.
Set returnNewDocument to true and you will get the updated document back.
collection.findOneAndUpdate(
  { verificationHash: req.query.hash },  // query
  { $unset: { verificationHash: '' } },  // update
  { returnNewDocument: true },           // return the updated document
  function (err, documents) {
    res.send({ error: err, affected: documents });
    db.close();
  }
);
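If you are going through Mongoose instead of the raw driver, the equivalent option is { new: true }. A minimal sketch, assuming a Mongoose model named User:
User.findOneAndUpdate(
  { verificationHash: req.query.hash },
  { $unset: { verificationHash: '' } },
  { new: true },                   // return the document as it is after the update
  function (err, user) {
    if (err) return res.status(500).send(err);
    res.send({ affected: user });  // updated document, or null if nothing matched
  }
);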
In my model I have a thread that contains an array of comments. When I try to update these comments it always updates the wrong one and I cannot figure out why.
Thread.findOneAndUpdate({
'comments._id': req.params.commentId,
'comments.created_by.user_id': user._id
},
{
$set: {
'comments.$.content': req.body.content,
}
}, { new: true }, (err, thread) => {
if(!err && thread){
res.status(200).json({success: true})
}
})
This code works, but it always updates the first comment rather than the one I am trying to update.
I'm trying to aggregate data from my Mongo collection to produce some statistics for FreeCodeCamp by making a large json file of the data to use later.
I'm running into the error in the title. There doesn't seem to be a lot of information about this, and the other posts here on SO don't have an answer. I'm using the latest version of MongoDB and drivers.
I suspect there is probably a better way to run this aggregation, but it runs fine on a subset of my collection. My full collection is ~7GB.
I'm running the script via node aggScript.js > ~/Desktop/output.json
Here is the relevant code:
var MongoClient = require('mongodb').MongoClient;
var _ = require('lodash'); // used for _.flatten below
// secrets.db holds the connection string (defined elsewhere)

MongoClient.connect(secrets.db, function(err, database) {
  if (err) {
    throw err;
  }
  database.collection('user').aggregate([
    {
      $match: {
        'completedChallenges': {
          $exists: true
        }
      }
    },
    {
      $match: {
        'completedChallenges': {
          $ne: ''
        }
      }
    },
    {
      $match: {
        'completedChallenges': {
          $ne: null
        }
      }
    },
    {
      $group: {
        '_id': 1,
        'completedChallenges': {
          $addToSet: '$completedChallenges'
        }
      }
    }
  ], {
    allowDiskUse: true
  }, function(err, results) {
    if (err) { throw err; }
    var aggData = results.map(function(camper) {
      return _.flatten(camper.completedChallenges.map(function(challenges) {
        return challenges.map(function(challenge) {
          return {
            name: challenge.name,
            completedDate: challenge.completedDate,
            solution: challenge.solution
          };
        });
      }), true);
    });
    console.log(JSON.stringify(aggData));
    process.exit(0);
  });
});
Aggregate returns a single document containing all the result data, which limits how much data can be returned to the maximum BSON document size.
Assuming that you do actually want all this data, there are two options:
Ask for a cursor instead of a single result document (for example, by passing the cursor option to aggregate, or by omitting the callback in newer Node.js driver versions so that an AggregationCursor is returned). You can then iterate over the results instead of buffering them all at once, as sketched below.
Add a $out stage as the last stage of your pipeline. This tells MongoDB to write your aggregation output to the specified collection; the aggregate command itself returns no data, and you then query that collection as you would any other.
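A minimal sketch of the cursor approach with the Node.js driver (collection and field names follow the question; the exact option shape may vary between driver versions):
// Ask the driver for a cursor instead of one big result document.
var cursor = database.collection('user').aggregate(
  [
    { $match: { completedChallenges: { $exists: true, $nin: [null, ''] } } },
    { $project: { completedChallenges: 1 } }  // carry only the field that is needed
  ],
  { cursor: { batchSize: 100 }, allowDiskUse: true }
);

// Stream the results instead of buffering them into a single document.
cursor.forEach(
  function (doc) {
    console.log(JSON.stringify(doc));  // handle each result as it arrives
  },
  function (err) {
    if (err) throw err;
    process.exit(0);                   // cursor exhausted
  }
);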
It just means that the result object you are building became too large. This kind of issue should not be affected by the version; the fix implemented for 2.5.0 only prevents the crash from occurring.
You need to filter ($match) properly so that only the data you actually need ends up in the result, and group on the proper fields. The whole result still has to fit within the maximum BSON document size, so reduce your data: $project only the fields you require in the result, not whole documents (see the $project sketch after the combined $match below).
You can combine your three $match objects into a single stage to reduce the number of pipeline stages:
{
  $match: {
    'completedChallenges': {
      $exists: true,
      $nin: [null, '']  // $nin replaces the two $ne conditions (duplicate object keys would be dropped)
    }
  }
}
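For the $project suggestion above, a minimal sketch of a stage that keeps only the fields used later in the script (field names follow the question's code); placed before the $group, it avoids dragging whole documents through the pipeline:
{
  $project: {
    'completedChallenges.name': 1,
    'completedChallenges.completedDate': 1,
    'completedChallenges.solution': 1
  }
}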
I had this issue and couldn't debug the problem, so I ended up abandoning the aggregation approach. Instead I just iterated through each entry and built up a new collection. Here's a stripped-down shell script which might help you see what I mean:
db.new_collection.ensureIndex({my_key:1}); //for performance, not a necessity
db.old_collection.find({}).noCursorTimeout().forEach(function(doc) {
db.new_collection.update(
{ my_key: doc.my_key },
{
$push: { stuff: doc.stuff, other_stuff: doc.other_stuff},
$inc: { thing: doc.thing},
},
{ upsert: true }
);
});
I don't imagine that this approach would suit everyone, but hopefully that helps anyone who was in my particular situation.
The mongoose schema looks like:
var followSchema = new Schema({
userId: {type: String, required: true},
following : []
});
The objective is to look up a user by the userId field and then add items to the following array.
The below method does not work:
var myRecord = FollowModel.findOneAndUpdate({userId: req.user.Id}, {upsert: true});
myRecord.exec(function(err, result) {
result.following.push('Test');
res.status(200).send('New Item Added to Follow List');
});
How is this to be done?
It does work; you just forgot to include the { "new": true } option, as well as a valid update statement:
FollowModel.findOneAndUpdate(
{ "userId": req.user.Id},
{ "$setOnInsert": {
}},
{ "upsert": true, "new": true },
function(err,result) {
if (err) throw err;
console.log(result);
}
);
The blank $setOnInsert here will do nothing in this case, but otherwise you would put something in there that you wanted created "on insert". This would be applied along with any fields in the "query" portion of the statement.
Other standard update operators can also apply, but generally in the case where something "is" matched that you want to update.
But it's the { "new": true } here that lets the command know to return the "modified or created" document, rather than the "original or non existing" document, which is the default without that option.
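To actually add an item to the following array, as in the original objective, the same pattern works with a standard $push in the update. A minimal sketch, using 'Test' from the question as the item to add:
FollowModel.findOneAndUpdate(
  { "userId": req.user.Id },
  { "$push": { "following": "Test" } },  // append the new item to the array
  { "upsert": true, "new": true },       // create the document if it does not exist yet
  function (err, result) {
    if (err) throw err;
    // result is the updated (or newly created) document
    res.status(200).send('New Item Added to Follow List');
  }
);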