I have a collection with data like this:
[{
  _id: 1,
  address: '1/23 Fake Street'
},
{
  _id: 2,
  address: '5/20 Whatever Lane'
},
{
  _id: 3,
  address: '10 Foo Avenue'
}]
I'd like to perform a Mongo bulk update query which does the following:
- Transforms the address field to lowercase
- Creates a new field, 'buildingAddress', which splits the address at the slash (if present, as with the first two items) and uses the text after it to populate the new field
In Node, I'd do something like this:
const cursor = db.items.find({});
for await (const item of cursor) {
  try {
    await pageMapper(item);
  } catch (e) {
    console.error(e);
  }
}

async function pageMapper(item) {
  const newAddress = item.address.toLowerCase();
  const buildingAddress = newAddress.split('/')[1];
  return db.items.updateOne({ _id: item._id }, {
    $set: {
      address: newAddress,
      buildingAddress
    }
  });
}
I'm wondering if there's a way to do this in the MongoDB shell itself, passing in a function to db.collection.update? Or should I stick to the node driver for doing more complex update operations?
If you are using MongoDB 4.2+, you can use aggregation or the pipeline form of update to accomplish that.
- $toLower to convert the string to lower case
- $split to split the field at the slash
- $slice or $arrayElemAt to pick the element(s) to keep
One possible way to do that with update:
db.items.updateMany({}, [
  // 1. lowercase the address
  { $addFields: {
      address: { $toLower: "$address" }
  }},
  // 2. take the text after the "/" (or the whole address if there is no slash)
  { $addFields: {
      buildingAddress: {
        $arrayElemAt: [
          { $split: ["$address", "/"] },
          -1
        ]
      }
  }}
])
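With the sample documents from the question, the updated collection would end up looking roughly like this (note that when there is no slash, $arrayElemAt with -1 simply returns the whole lowercased address):

[{ _id: 1, address: '1/23 fake street', buildingAddress: '23 fake street' },
 { _id: 2, address: '5/20 whatever lane', buildingAddress: '20 whatever lane' },
 { _id: 3, address: '10 foo avenue', buildingAddress: '10 foo avenue' }]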
Env:
MongoDB (3.2.0) with Mongoose
Collection:
users
Text Index creation:
BasicDBObject keys = new BasicDBObject();
keys.put("name","text");
BasicDBObject options = new BasicDBObject();
options.put("name", "userTextSearch");
options.put("unique", Boolean.FALSE);
options.put("background", Boolean.TRUE);
userCollection.createIndex(keys, options); // using MongoTemplate
Document:
{"name":"LEONEL"}
Queries:
db.users.find( { "$text" : { "$search" : "LEONEL" } } ) => FOUND
db.users.find( { "$text" : { "$search" : "leonel" } } ) => FOUND (search caseSensitive is false)
db.users.find( { "$text" : { "$search" : "LEONÉL" } } ) => FOUND (search with diacriticSensitive is false)
db.users.find( { "$text" : { "$search" : "LEONE" } } ) => FOUND (Partial search)
db.users.find( { "$text" : { "$search" : "LEO" } } ) => NOT FOUND (Partial search)
db.users.find( { "$text" : { "$search" : "L" } } ) => NOT FOUND (Partial search)
Any idea why I get 0 results when using "LEO" or "L" as the query?
Regex combined with a text index search is not allowed:
db.getCollection('users')
.find( { "$text" : { "$search" : "/LEO/i",
"$caseSensitive": false,
"$diacriticSensitive": false }} )
.count() // 0 results
db.getCollection('users')
.find( { "$text" : { "$search" : "LEO",
"$caseSensitive": false,
"$diacriticSensitive": false }} )
.count() // 0 results
MongoDB Documentation:
Text Search
$text
Text Indexes
Improve Text Indexes to support partial word match
As of MongoDB 3.4, the text search feature is designed to support case-insensitive searches on text content with language-specific rules for stopwords and stemming. Stemming rules for supported languages are based on standard algorithms which generally handle common verbs and nouns but are unaware of proper nouns.
There is no explicit support for partial or fuzzy matches, but terms that stem to a similar result may appear to be working as such. For example: "taste", "tastes", and "tasteful" all stem to "tast". Try the Snowball Stemming Demo page to experiment with more words and stemming algorithms.
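As a rough shell illustration of that stemming behaviour (the collection and field names here are made up):

db.reviews.createIndex({ comment: "text" })
db.reviews.insertMany([
  { comment: "taste" },
  { comment: "tastes" },
  { comment: "tasteful" }
])
// all three documents match, because each term stems to "tast"
db.reviews.find({ $text: { $search: "taste" } })
// but a shorter fragment is not a stem of these words, so nothing matches
db.reviews.find({ $text: { $search: "tas" } })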
Your results that match are all variations on the same word "LEONEL", and vary only by case and diacritic. Unless "LEONEL" can be stemmed to something shorter by the rules of your selected language, these are the only type of variations that will match.
If you want to do efficient partial matches you'll need to take a different approach. For some helpful ideas see:
Efficient Techniques for Fuzzy and Partial matching in MongoDB by John Page
Efficient Partial Keyword Searches by James Tan
There is a relevant improvement request you can watch/upvote in the MongoDB issue tracker: SERVER-15090: Improve Text Indexes to support partial word match.
As Mongo currently does not support partial search by default, I created a simple static method:
import mongoose from 'mongoose'

const PostSchema = new mongoose.Schema({
  title: { type: String, default: '', trim: true },
  body: { type: String, default: '', trim: true },
});

PostSchema.index(
  { title: "text", body: "text" },
  { weights: { title: 5, body: 3 } }
);

PostSchema.statics = {
  searchPartial: function(q, callback) {
    return this.find({
      $or: [
        { "title": new RegExp(q, "gi") },
        { "body": new RegExp(q, "gi") },
      ]
    }, callback);
  },
  searchFull: function (q, callback) {
    return this.find({
      $text: { $search: q, $caseSensitive: false }
    }, callback);
  },
  search: function(q, callback) {
    this.searchFull(q, (err, data) => {
      if (err) return callback(err, data);
      if (!err && data.length) return callback(err, data);
      if (!err && data.length === 0) return this.searchPartial(q, callback);
    });
  },
}

export default mongoose.models.Post || mongoose.model('Post', PostSchema)
How to use:
import Post from '../models/post'
Post.search('Firs', function(err, data) {
console.log(data);
})
Without creating an index, we could simply use:
db.users.find({ name: /<full_or_partial_text>/i}) (case insensitive)
If you want to use all the benefits of MongoDB's full-text search AND want partial matches (maybe for auto-complete), the n-gram based approach mentioned by Shrikant Prabhu was the right solution for me. Obviously your mileage may vary, and this might not be practical when indexing huge documents.
In my case I mainly needed the partial matches to work for just the title field (and a few other short fields) of my documents.
I used an edge n-gram approach. What does that mean? In short, you turn a string like "Mississippi River" into a string like "Mis Miss Missi Missis Mississ Mississi Mississip Mississipp Mississippi Riv Rive River".
Inspired by this code by Liu Gen, I came up with this method:
function createEdgeNGrams(str) {
  if (str && str.length > 3) {
    const minGram = 3
    const maxGram = str.length
    return str.split(" ").reduce((ngrams, token) => {
      if (token.length > minGram) {
        for (let i = minGram; i <= maxGram && i <= token.length; ++i) {
          ngrams = [...ngrams, token.substr(0, i)]
        }
      } else {
        ngrams = [...ngrams, token]
      }
      return ngrams
    }, []).join(" ")
  }
  return str
}

let res = createEdgeNGrams("Mississippi River")
console.log(res)
// "Mis Miss Missi Missis Mississ Mississi Mississip Mississipp Mississippi Riv Rive River"
Now to make use of this in Mongo, I add a searchTitle field to my documents and set its value by converting the actual title field into edge n-grams with the above function. I also create a "text" index for the searchTitle field.
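A rough sketch of that step (the collection name follows the earlier example, and the backfill loop is only illustrative):

// backfill searchTitle from title, then index it for $text
const docs = await db.collection('my-collection').find({}).toArray()
for (const doc of docs) {
  await db.collection('my-collection').updateOne(
    { _id: doc._id },
    { $set: { searchTitle: createEdgeNGrams(doc.title) } }
  )
}
await db.collection('my-collection').createIndex({ searchTitle: 'text' })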
I then exclude the searchTitle field from my search results by using a projection:
db.collection('my-collection')
.find({ $text: { $search: mySearchTerm } }, { projection: { searchTitle: 0 } })
I wrapped @Ricardo Canelas' answer in a mongoose plugin, here on npm.
Two changes made:
- Uses promises
- Searches on any field of type String
Here's the important source code:
// mongoose-partial-full-search
module.exports = exports = function addPartialFullSearch(schema, options) {
  schema.statics = {
    ...schema.statics,
    makePartialSearchQueries: function (q) {
      if (!q) return {};
      const $or = Object.entries(this.schema.paths).reduce((queries, [path, val]) => {
        val.instance == "String" &&
          queries.push({
            [path]: new RegExp(q, "gi")
          });
        return queries;
      }, []);
      return { $or }
    },
    searchPartial: function (q, opts) {
      return this.find(this.makePartialSearchQueries(q), opts);
    },
    searchFull: function (q, opts) {
      return this.find({
        $text: {
          $search: q
        }
      }, opts);
    },
    search: function (q, opts) {
      return this.searchFull(q, opts).then(data => {
        return data.length ? data : this.searchPartial(q, opts);
      });
    }
  }
}

exports.version = require('../package').version;
Usage
// PostSchema.js
import addPartialFullSearch from 'mongoose-partial-full-search';
PostSchema.plugin(addPartialFullSearch);
// some other file.js
import Post from '../wherever/models/post'
Post.search('Firs').then(data => console.log(data))
If you are using a variable to store the string or value to be searched, a regex will work:
collection.find({ <mongodb_field_name>: new RegExp(variable_name, 'i') })
Here, 'i' is the ignore-case option.
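For example (the collection, field, and variable names here are only placeholders):

const q = 'leo'; // value to search for
const users = await collection.find({ name: new RegExp(q, 'i') }).toArray();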
The quick and dirty solution that worked for me: use text search first and, if nothing is found, make another query with a regexp. If you don't want to make two queries, $or works too, but it requires all fields in the query to be indexed.
Also, you'd better not use a case-insensitive regex, because it can't use indexes. In my case I made lowercase copies of the fields involved.
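A minimal sketch of that fallback, assuming a users collection with a text index on name and a pre-lowercased nameLower copy of the field (those names are assumptions, not anything prescribed above):

async function search(db, q) {
  // try the text index first
  const byText = await db.collection('users')
    .find({ $text: { $search: q } })
    .toArray();
  if (byText.length) return byText;
  // otherwise fall back to an anchored, case-sensitive prefix regex on the
  // lowercased copy, which can still make use of an ordinary index on nameLower
  return db.collection('users')
    .find({ nameLower: new RegExp('^' + q.toLowerCase()) })
    .toArray();
}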
A good n-gram based approach for fuzzy matching is explained here
(it also explains how to score results higher when they match on a prefix):
https://medium.com/xeneta/fuzzy-search-with-mongodb-and-python-57103928ee5d
Note: n-gram based approaches can be storage intensive, and the MongoDB collection size will increase.
I create an additional field which combines all the fields within a document that I want to search. Then I just use regex:
user = {
  firstName: 'Bob',
  lastName: 'Smith',
  address: {
    street: 'First Ave',
    city: 'New York City',
  },
  notes: 'Bob knows Mary'
}

// add combined search field with '+' separator to preserve spaces
user.searchString = `${user.firstName}+${user.lastName}+${user.address.street}+${user.address.city}+${user.notes}`

db.users.find({searchString: {$regex: 'mar', $options: 'i'}})
// returns Bob because 'mar' matches his notes field
// TODO write a client-side function to highlight the matching fragments
Full/partial search in MongoDB for a "pure" Meteor project
I adapted flash's code to use it with Meteor collections and simpleSchema, but without mongoose (meaning: remove the use of the .plugin() method and schema.path (although that looks to be a simpleSchema attribute in flash's code, it did not resolve for me)), and returning the result array instead of a cursor.
Thought this might help someone, so I'm sharing it.
export function partialFullTextSearch(meteorCollection, searchString) {
  // builds an "or" MongoDB query for all fields with type "String", using a regex as the search parameter
  const makePartialSearchQueries = () => {
    if (!searchString) return {};
    const $or = Object.entries(meteorCollection.simpleSchema().schema())
      .reduce((queries, [name, def]) => {
        def.type.definitions.some(t => t.type === String) &&
          queries.push({[name]: new RegExp(searchString, "gi")});
        return queries
      }, []);
    return {$or}
  };

  // returns a promise with the result as an array
  const searchPartial = () => meteorCollection.rawCollection()
    .find(makePartialSearchQueries(searchString)).toArray();

  // returns a promise with the result as an array
  const searchFull = () => meteorCollection.rawCollection()
    .find({$text: {$search: searchString}}).toArray();

  return searchFull().then(result => {
    if (result.length === 0) throw null
    else return result
  }).catch(() => searchPartial());
}
This returns a Promise, so call it like this (e.g. as the return value of an async Meteor method searchContact on the server side).
It implies that you have attached a simpleSchema to your collection before calling this method.
return partialFullTextSearch(Contacts, searchString).then(result => result);
import re

db.collection.find({"$or": [
    {"your first field": re.compile(text, re.IGNORECASE)},
    {"your second field": re.compile(text, re.IGNORECASE)}
]})
I am sending a query to MongoDB using Mongoose. The collection is named Step. I want the result of this query to be an array of _id values, one per step. Currently I am getting all of the step objects in their entirety, because req.query isn't defined in this case.
service:
this.getSteps = function() {
  return $http({
    method: 'GET',
    url: '/api/step'
  })
  .then(function(response) {
    return response.data;
  });
};
controller:
readStep: function (req, res) {
  Step.find(req.query, function(err, result) {
    if (err) {
      return res.status(500).send(err);
    }
    res.status(200).send(result);
  });
}
Set the second parameter of the find query to '_id' to retrieve only the _id of the objects.
Step.find(req.query, '_id', function(err, result) {
This will return data like this:
[{_id: 123}, {_id: 234}]
If you want to get an array of the Step ids on their own, use the javascript map function like so
result = result.map(function(doc) {
  return doc._id;
});
which will give you an array like this:
[123, 234]
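Putting the two pieces together in the question's controller might look like this (just a sketch):

readStep: function (req, res) {
  Step.find(req.query, '_id', function (err, result) {
    if (err) { return res.status(500).send(err); }
    // [{_id: 123}, {_id: 234}] -> [123, 234]
    res.status(200).send(result.map(function (doc) { return doc._id; }));
  });
}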
You'll need to use query.select, something like what is shown below:
Step.find(query).select({ "_id": 1}).then(....);
I'm not able to type much because I'm responding from my handheld.
Hope this helps!
I'm using a MEAN stack with Mongoose. Is there a way to query MongoDB with multiple ids, to only return those specific IDs in one query, e.g. /api/products/5001,5002,5003?
Is this possible, or would I need to query each product individually, or add an additional attribute to the products and query by that?
Update: To clarify, as suggested below I've managed to get it partially working using {'_id': { $in: [5001,5002,5003]}}, however I'm having problems figuring out how to pass the list from the API URL to the find function.
Using Express.js for router
router.get('/list/:ids', controller.showByIDs);
exports.showByIDs = function(req, res) {
  Product.find({'_id': { $in: [req.params.ids]}}, function (err, product) {
    if (err) { return handleError(res, err); }
    if (!product) { return res.send(404); }
    return res.json(product);
  });
};
Then trying /api/products/list/5001 works, however /api/products/list/5001,5002 doesn't. I'm not sure if it's a syntax problem in the URL, or whether my router code or the controller needs to change.
You can use the $in operator to query for multiple values at once:
Products.find({_id: {$in: [5001, 5002, 5003]}}, function (err, products) { ... });
On the Express side, you need to use a format for the ids parameter that lets you split it into an array of id values, like you had in your first example:
/api/products/5001,5002,5003
Then in your route handler, you can call the split function on the req.params.ids string to turn it into an array of id values that you can use with $in:
exports.showByIDs = function(req, res) {
  var ids = req.params.ids.split(',');
  Product.find({'_id': { $in: ids}}, function (err, product) {
    if (err) { return handleError(res, err); }
    if (!product) { return res.send(404); }
    return res.json(product);
  });
};
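For example, a request to the route above would be handled like this:

// GET /api/products/list/5001,5002,5003
var ids = '5001,5002,5003'.split(','); // ['5001', '5002', '5003']
// Product.find({'_id': { $in: ids }}, ...) then matches all three products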
I am in the process of changing the schema for one of my MongoDB collections. (I had been storing dates as strings, and now my application stores them as ISODates; I need to go back and change all of the old records to use ISODates as well.) I think I know how to do this using an update, but since this operation will affect tens of thousands of records I'm hesitant to issue an operation that I'm not 100% sure will work. Is there any way to do a "dry run" of an update that will show me, for a small number of records, the original record and how it would be changed?
Edit: I ended up using the approach of adding a new field to each record, and then (after verifying that the data was right) renaming that field to match the original. It looked like this:
db.events.find({timestamp: {$type: 2}})
  .forEach(function (e) {
    e.newTimestamp = new ISODate(e.timestamp);
    db.events.save(e);
  })

db.events.update({},
  {$rename: {'newTimestamp': 'timestamp'}},
  {multi: true})
By the way, that method for converting the string times to ISODates was what ended up working. (I got the idea from this SO answer.)
My advice would be to add the ISODate as a new field. Once you've confirmed that everything looks good, you can then unset the string date.
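A rough sketch of that approach, reusing the collection and the string-type check from the question's edit (isoTimestamp is just an illustrative name for the new field):

// 1. add the new field alongside the old string value
db.events.find({ timestamp: { $type: 2 } }).forEach(function (e) {
  e.isoTimestamp = new ISODate(e.timestamp);
  db.events.save(e);
});

// 2. once isoTimestamp has been spot-checked, drop the old string field
db.events.update(
  { isoTimestamp: { $exists: true } },
  { $unset: { timestamp: "" } },
  { multi: true }
);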
Create a test environment with your database structure. Copy a handful of records to it. Problem solved. Not the solution you were looking for, I'm sure. But, I believe, these are exactly the circumstances a 'test environment' should be used for.
Select the IDs of the particular records that you would like to monitor and place them in the update: {_id: {$in: [<your monitored ids>]}}
Another option, depending on how much overhead it will cause you: you can consider writing a script that performs the find operation and adds printouts (or runs in a debugger) while the save operation is commented out. Once you've gained confidence, you can enable the save operation.
var changesLog = [];
var errorsLog = [];
events.find({timestamp: {$type: 2}}, function (err, events) {
  if (err) {
    debugger;
    throw err;
  } else {
    for (var i = 0; i < events.length; i++) {
      console.log('events ' + i + "/" + (events.length - 1));
      var currentEvent = events[i];
      currentEvent.timestamp = new ISODate(currentEvent.timestamp);
      var change = currentEvent._id;
      changesLog.push(change);
      // // ** Dry Run **
      // currentEvent.save(function (err) {
      //   if (err) {
      //     debugger;
      //     errorsLog.push(currentEvent._id + ", " + currentEvent.timestamp + ', ' + err);
      //     throw err;
      //   }
      // });
    }
    console.log('Done');
    console.log('Changes:');
    console.log(changesLog);
    console.log('Errors:');
    console.log(errorsLog);
    return;
  }
});
db.collection.find({"_manager": { $exists: true, $ne: null }}).forEach(
  function(doc){
    doc['_managers'] = [doc._manager]; // String --> List
    delete doc['_manager'];            // Remove the old "_manager" key-value pair
    printjson(doc);                    // Debug by printing the resulting doc
    //db.collection.save(doc);         // Uncomment to save the changes to the doc
  }
)
In my case the collection contains _manager and I would like to change it to a _managers list. I have tested it locally and it works as expected.
In recent versions of MongoDB (at least starting with 4.2), you can do this using a transaction.
const { MongoClient } = require('mongodb')

async function main({ dryRun }) {
  const client = new MongoClient('mongodb://127.0.0.1:27017', {
    maxPoolSize: 1
  })
  const pool = await client.connect()
  const db = pool.db('someDB')

  const session = pool.startSession()
  session.startTransaction()
  try {
    const filter = { id: 'some-id' }
    const update = { $rename: { 'newTimestamp': 'timestamp' } }
    // This is the important bit
    const options = { session: session }

    await db.collection('someCollection').updateMany(
      filter,
      update,
      options // using session
    )

    const afterUpdate = await db.collection('someCollection')
      .find(
        filter,
        options // using session
      )
      .toArray()
    console.debug('updated documents', afterUpdate)

    if (dryRun) {
      // This will roll back any changes made within the session
      await session.abortTransaction()
    } else {
      await session.commitTransaction()
    }
  } finally {
    await session.endSession()
    await pool.close()
  }
}

const _ = main({ dryRun: true })