$match not finding ObjectId in Mongoose 5.6+

I have checked various posts already made on this topic, but none of the suggestions worked for me. I have seen that those suggestions do not work in Mongoose 5.0; I am using Mongoose 5.6. I tried with find(), which works, but I need the aggregation framework for extended usage. Most of the suggestions were about converting the record id with mongoose.Types.ObjectId(), which I used, but it did not work for me. Please help.
Here is my test code:
function totalActualSale(recordId) {
  Record.aggregate([
    {
      $match: {
        '_id': mongoose.Types.ObjectId(recordId),
      }
    },
    {
      $unwind: { path: '$SalesList' }
    },
    {
      $group: {
        _id: {
          item: '$SalesList.item',
        },
        pqty: { $sum: '$SalesList.pqty' },
      }
    },
  ], function (err, purc) {
    console.log("\nStock: " + JSON.stringify(purc));
  });
}

Try this:
function totalActualSale(recordId) {
  Record.aggregate([
    {
      $match: {
        '_id': new mongoose.mongo.ObjectId(recordId),
      }
    },
    {
      $unwind: { path: '$SalesList' }
    },
    {
      $group: {
        _id: {
          item: '$SalesList.item',
        },
        pqty: { $sum: '$SalesList.pqty' },
      }
    },
  ], function (err, purc) {
    console.log("\nStock: " + JSON.stringify(purc));
  });
}

Actually, the issue was the reverse: the recordId passed into the function was already the ObjectId of a record, so for the comparison it has to be converted to a string with recordId.toString(). So the solution is:
function totalActualSale(recordId) {
  Record.aggregate([
    {
      $match: {
        '_id': recordId.toString(),
      }
    },
    {
      $unwind: { path: '$SalesList' }
    },
    {
      $group: {
        _id: {
          item: '$SalesList.item',
        },
        pqty: { $sum: '$SalesList.pqty' },
      }
    },
  ], function (err, purc) {
    console.log("\nStock: " + JSON.stringify(purc));
  });
}
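If it is unclear whether the stored _id is an ObjectId or a string, a quick check such as the following can help decide whether the incoming recordId needs converting (a minimal sketch; it only assumes the Record model used above):
// Inspect one document to see how _id is actually stored.
Record.findOne().lean().exec(function (err, doc) {
  if (err || !doc) return console.log('No document found', err);
  // An ObjectId logs as 'object'; a plain string logs as 'string'.
  console.log(typeof doc._id, doc._id);
});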

Related

Mongodb update duplicate array value

I am receiving an array of data and want to insert it into my MongoDB. I want to overwrite any duplicate values with the new one (the one I'm uploading) and, if there are no duplicates, just add it onto the current array.
I currently have:
db.cases.updateOne(
  { companyID: 218 },
  {
    $addToSet: {
      cases: [AN ARRAY OF CASES]
    },
    $currentDate: { lastModified: true }
  }
)
The collection has multiple companies, and each company document has an array of cases.
The other thing that doesn't seem to work is that lastModified (set via $currentDate) doesn't change whenever I update the cases; I'm not sure if that is down to the way I have written the query.
Thank you.
You can use $addToSet (https://docs.mongodb.com/manual/reference/operator/update/addToSet/) together with the filtered positional operator $[&lt;identifier&gt;] (https://docs.mongodb.com/manual/reference/operator/update/positional-filtered/) to accomplish this.
I scripted an example in Node.js to show you what I mean:
const { MongoClient } = require('mongodb');

async function main() {
  /**
   * Connection URI. Update <username>, <password>, and <your-cluster-url> to reflect your cluster.
   */
  const uri = "mongodb+srv://<username>:<password>@<your-cluster-url>?retryWrites=true&w=majority";

  /**
   * The Mongo Client you will use to interact with your database
   */
  const client = new MongoClient(uri, { useUnifiedTopology: true });

  try {
    // Connect to the MongoDB cluster
    await client.connect();

    // Make the appropriate DB calls
    await updateArray(client, 'id1');
  } finally {
    // Close the connection to the MongoDB cluster
    await client.close();
  }
}

main().catch(console.error);

async function updateArray(client, id) {
  const docId = "299";
  const mynewdoc = {
    id: docId,
    priority: 5,
    casenumber: 40,
    new: "field"
  };

  // Overwrite an existing case with the same id, using the filtered positional operator.
  const result = await client.db("NameOfYourDb").collection("NameOfYourCollection").updateOne(
    { _id: id },
    {
      $set: {
        "cases.$[element]": mynewdoc
      }
    },
    {
      arrayFilters: [{ "element.id": docId }]
    }
  );

  if (result) {
    console.log(result);
  } else {
    console.log(`Not found '${id}'`);
  }

  // Add the case if it is not already present ($addToSet skips exact duplicates).
  const result2 = await client.db("NameOfYourDb").collection("NameOfYourCollection").updateOne(
    { _id: id },
    {
      $addToSet: {
        "cases": mynewdoc
      }
    }
  );

  if (result2) {
    console.log(result2);
  } else {
    console.log(`Not found '${id}'`);
  }
}
See https://www.mongodb.com/blog/post/quick-start-nodejs-mongodb--how-to-get-connected-to-your-database for an explanation of how the Node.js code is structured.
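To address the second part of the question (lastModified not changing), $currentDate can simply be added to the same update document. A minimal sketch along the lines of the updates above (db/collection names are placeholders as before):
// Overwrite the matching case and bump lastModified in one update.
await client.db("NameOfYourDb").collection("NameOfYourCollection").updateOne(
  { _id: id },
  {
    $set: { "cases.$[element]": mynewdoc },
    $currentDate: { lastModified: true }
  },
  { arrayFilters: [{ "element.id": docId }] }
);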
So I have figured out a workaround, but it is not exactly what I wanted; maybe someone can use this and expand upon it to make it work better:
db.cases.aggregate([
  { $match: { 'companyID': 218 } },
  { $unwind: '$cases' },
  { $group: { "_id": "$cases.casenumber", cases: { $push: '$cases' }, "count": { $sum: 1 } } },
  { $match: { "count": { "$gt": 1 } } },
  { $project: { "cases": { $slice: ["$cases", -1, { $subtract: [{ $size: "$cases" }, 1] }] } } },
  { $project: { "cases": { $arrayElemAt: ['$cases', 0] } } },
  { $group: { _id: 1, cases: { $push: '$cases' } } },
  { $out: 'cases' }
])
The only problem is that using $out overwrites the whole cases collection, so I need to figure out a way to write the result back into the cases array inside the company document (companyID 218).
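One way around that (a sketch, not a tested drop-in: it drops the $out stage and assumes the deduplicated array should simply replace the company's cases field, keeping the last entry per casenumber) is to collect the aggregation result and write it back with updateOne:
// Deduplicate in memory, then write the result back into the company document.
var deduped = db.cases.aggregate([
  { $match: { companyID: 218 } },
  { $unwind: '$cases' },
  // Keep the last pushed case per casenumber (assumes array order reflects recency).
  { $group: { _id: '$cases.casenumber', last: { $last: '$cases' } } }
]).toArray().map(function (d) { return d.last; });

db.cases.updateOne(
  { companyID: 218 },
  { $set: { cases: deduped }, $currentDate: { lastModified: true } }
);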
Hopefully this can help someone though.
Thanks.

Mongodb $expr query is very slow

I have a Mongo 4.2.0 instance in my development environment with a simple collection of only 300 entries.
I've built some basic queue handling that juggles a few date fields.
To get a document that should be updated, I have the following $expr query, which runs very slowly in my opinion.
db.collection("myupdates").findOneAndUpdate({
$expr: {
$and: [
{ $gt: ["$shouldUpdate", "$updatedAt"] },
{ $gt: ["$shouldUpdate", "$isUpdatingAt"] },
{ $gt: ["$shouldUpdate", "$updateErroredAt"] },
]
},
}, {
$set: {
isUpdatingAt: new Date(),
},
});
This query takes around ~120 ms after warm-up on my standard 2019 laptop, whereas my other simple queries only take ~3 ms.
Although indexes hardly matter with 300 documents, I have of course tried setting them all, from single-field to compound indexes. This does not do the trick.
It's also not the findOneAndUpdate; with countDocuments I get the same slow speed.
Is this the normal speed of $expr / aggregation syntax? What did I do wrong? Is there a better way to achieve this? Do I have to use Redis for this use case?
Possible solution
As @Neil Lunn pointed out in the answers, calculated conditions do not utilize an index and should be a last resort.
So I got rid of the calculated condition by splitting the query into two queries. The first query fetches an actual value I can match against.
These two queries boil down to ~10 ms total, which is much better than 120 ms.
const shouldUpdateDateResult = await mongo.db.collection("myupdates").findOne(
  { shouldUpdate: { $exists: true } },
  { projection: { shouldUpdate: 1 } } // projection option (driver 3.x+ syntax)
);
const shouldUpdateDate = shouldUpdateDateResult && shouldUpdateDateResult.shouldUpdate;

const result = await mongo.db.collection("myupdates").findOneAndUpdate({
  $and: [
    { shouldUpdate: shouldUpdateDate },
    { $or: [
      { updatedAt: { $eq: null } },
      { updatedAt: { $exists: false } },
      { updatedAt: { $lte: shouldUpdateDate } }
    ] },
    { $or: [
      { isUpdatingAt: { $eq: null } },
      { isUpdatingAt: { $exists: false } },
      { isUpdatingAt: { $lte: shouldUpdateDate } }
    ] },
    { $or: [
      { updateErroredAt: { $eq: null } },
      { updateErroredAt: { $exists: false } },
      { updateErroredAt: { $lte: shouldUpdateDate } }
    ] },
  ],
}, {
  $set: {
    isUpdatingAt: new Date(),
  },
});
The whole idea behind this is a processing queue usable by multiple workers.
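Since the split queries now compare plain field values instead of a computed condition, they can also be backed by an ordinary index. A sketch (the exact field combination is an assumption, not something benchmarked here):
// Compound index to back the shouldUpdate match and the date comparisons.
await mongo.db.collection("myupdates").createIndex({
  shouldUpdate: 1,
  updatedAt: 1,
  isUpdatingAt: 1,
  updateErroredAt: 1
});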

Get only last created document per id when using $in

I have 10,000 readings (documents) in MongoDB and an array of deviceIds. I need to get, in Mongoose, the most recently created document for each device.
My approach:
Reading.find({
  "deviceId": { "$in": deviceIds }
}, function (err, readings) {
  callback(err, readings);
}).sort({ createdAt: -1 }).limit(1);
This obviously returns only one document. I need to get one per deviceId in the deviceIds array.
EDIT 21/02/2018: Thanks to the help of @mickl I changed my code to:
Reading.aggregate([
  { $match: { "deviceId": { "$in": finalDevices } } },
  {
    $group: {
      deviceId: "$deviceId",
      maxCreatedAt: { $max: "$createdAt" }
    }
  }
], function (err, reading) {
  if (err) {
    callback(err, null);
  } else {
    callback(null, reading);
  }
});
Now I am getting the following error: "The field 'deviceId' must be an accumulator object"
You can use the aggregation framework's $match to filter your collection to the specified devices and then use $group with $max to get the dates.
Device.aggregate([
  { $match: { "deviceId": { "$in": deviceIds } } },
  {
    $group: {
      _id: "$deviceId",
      maxCreatedAt: { $max: "$createdAt" }
    }
  }
], function (err, readings) {
  callback(err, readings);
});
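If the whole latest document is needed rather than just the timestamp, a common pattern (a sketch under the same schema assumptions, i.e. deviceId and createdAt fields on Reading) is to sort first and take the first document per group with $first:
Reading.aggregate([
  { $match: { "deviceId": { "$in": deviceIds } } },
  // Newest readings first, so $first picks the latest per device.
  { $sort: { createdAt: -1 } },
  {
    $group: {
      _id: "$deviceId",
      latest: { $first: "$$ROOT" }
    }
  }
], function (err, readings) {
  callback(err, readings);
});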

Aggregate is not a function - mongodb/meteor

I have this collection (Spieltag) with two documents in MongoDB:
{ "_id": "vkD5sMCdZdntoCFGP", "Note": 2.5, "SaisonID": 201516, "SpielerID": 105, "SpieltagID": 1, "Tore": 1 }
{ "_id": "PrqokMS47K4vx4KR4", "Note": 3, "SaisonsID": 201516, "SpielerID": 105, "SpieltagID": 1, "Tore": 0 }
I want to sum up Note (2.5 + 3) with a "where clause" on SpielerID.
This is what I have tried to use:
Spieltag.aggregate({
  $match: {
    SpielerID: { $gte: 105 }
  }
}, {
  $group: { _id: null, sum: { $sum: "$Note" } }
});
But it doesn't work, throwing Aggregate is not a function. Any idea what's wrong?
First, you need to add the aggregate package for Meteor:
meteor add meteorhacks:aggregate
Second, you must pass the pipeline to aggregate as an array, like this:
Spieltag.aggregate([{
  $match: {
    SpielerID: { $gte: 105 },
  },
}, {
  $group: {
    _id: null,
    sum: { $sum: '$Note' },
  },
}]);
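For reference, with the two documents listed in the question (both with SpielerID 105), this pipeline should return a single group summing the Note values, roughly:
// Expected result shape (2.5 + 3 = 5.5).
[ { "_id": null, "sum": 5.5 } ]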

mongodb convert array elements from int to string

I have a MongoDB collection full of records with mixed types and need to sanitize the data so it makes sense to the back-end application.
Changing the type (NumberLong to String) of a normal field is easy; just cast it to a string in JavaScript like:
db.foo.find({ field: { $type: 18 } }).forEach(function (x) {
  x.field = new String(x.field); // convert field to string
  db.foo.save(x);
});
But how do I change array elements from NumberLong to String?
For example, I have this field:
"elements" : { "top" : {"random" : [ NumberLong(12) , NumberLong(20), NumberLong(13)] } }
and I need all the elements of elements.top.random as strings.
Do I need to do "foreach" element of the array or is there any better way?
Any Javascript guru can help me out?
Before posting I could not find an answer, but now I have found something similar and adapted it, so here it is:
db.foo.find( {"elements.top.random": {$type:18}} ).forEach( function (x) {
var arr = [];
x.elements.top.random.forEach( function (e) { arr.push("" + e); } );
x.elements.top.random = arr;
db.foo.save(x);
});
We came across this issue today with a more recent version of MongoDB (v4.0.0). Since the save() API is deprecated, we had to use updateOne(), resulting in the following code:
db.foo.find( {"elements.top.random": {$type:18}} ).forEach(function (x) {
var stringValues = [];
x.elements.top.random.forEach(function(e) {
stringValues.push("" + e);
});
db.foo.updateOne(
{ _id: x._id },
{ $set: { "elements.top.random" : stringValues } }
);
});
With MongoDB v4.2+, you can do an update with an aggregation pipeline. Use $reduce and $toString to do the string conversion and string concatenation.
db.collection.update({
  "elements.top.random": {
    $type: 18
  }
},
[
  {
    $set: {
      "elements.top.random": {
        "$reduce": {
          "input": "$elements.top.random",
          "initialValue": "",
          "in": {
            "$concat": [
              "$$value",
              { $toString: "$$this" }
            ]
          }
        }
      }
    }
  }
],
{
  multi: true
})
Here is the Mongo playground for your reference.
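Note that the $reduce/$concat expression above joins the converted values into a single string. If the goal is to keep an array of strings (as in the earlier answers), a sketch of the same 4.2+ style of update using $map instead would be:
db.collection.update(
  { "elements.top.random": { $type: 18 } },
  [
    {
      $set: {
        "elements.top.random": {
          // Convert each element to a string, keeping the array shape.
          $map: {
            input: "$elements.top.random",
            in: { $toString: "$$this" }
          }
        }
      }
    }
  ],
  { multi: true }
)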
With a slightly older version of MongoDB (v4.0+), $toString is still available and you can use the old "aggregate-then-update" approach ($addFields here is the pre-4.2 name for the $set stage):
db.collection.aggregate([
  {
    $match: {
      "elements.top.random": {
        $type: 18
      }
    }
  },
  {
    // $addFields is used instead of the $set stage, which only exists from 4.2 on.
    $addFields: {
      "elements.top.random": {
        "$reduce": {
          "input": "$elements.top.random",
          "initialValue": "",
          "in": {
            "$concat": [
              "$$value",
              { $toString: "$$this" }
            ]
          }
        }
      }
    }
  }
]).forEach(agg => {
  db.collection.update(
    { _id: agg._id },
    { $set: { "elements.top.random": agg.elements.top.random } }
  )
})