Related
We're trying to optimise our read performance on our MongoDB cluster.
We serve a social-media-like application where users are members of one or multiple groups.
We were storing who is in which group, and whether he/she is an admin of that group, in a separate collection. However, we noticed it was quite slow to retrieve the group information for the groups the user is a member of (find(+filter) groupMember documents, populate the groups).
Therefore we recently migrated all the group members to an array on the group collection documents themselves.
The schema now looks as following:
The query we execute is simply:
this.model.find({
members: {
$elemMatch: {
userId: new ObjectId(userId),
},
},
})
We expected this to be much more performant because you don't need to populate/lookup anything. The opposite is true, however: after deploying this change we noticed a performance decrease.
We have around 40k group documents where the largest groups have around 3k members, most groups are much smaller however.
The groups are indexed and the index is also used. This is an explain plan:
{
"explainVersion": "1",
"queryPlanner": {
"namespace": "***.groups",
"indexFilterSet": false,
"parsedQuery": {
"members": {
"$elemMatch": {
"userId": {
"$eq": "61b091ee9b50220e75208eb6"
}
}
}
},
"queryHash": "DCF50157",
"planCacheKey": "DCF50157",
"maxIndexedOrSolutionsReached": false,
"maxIndexedAndSolutionsReached": false,
"maxScansToExplodeReached": false,
"winningPlan": {
"stage": "FETCH",
"filter": {
"members": {
"$elemMatch": {
"userId": {
"$eq": "61b091ee9b50220e75208eb6"
}
}
}
},
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"members.userId": 1
},
"indexName": "members.userId_1",
"isMultiKey": true,
"multiKeyPaths": {
"members.userId": [
"members"
]
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"members.userId": [
"[ObjectId('61b091ee9b50220e75208eb6'), ObjectId('61b091ee9b50220e75208eb6')]"
]
}
}
},
"rejectedPlans": []
},
"executionStats": {
"executionSuccess": true,
"nReturned": 17,
"executionTimeMillis": 0,
"totalKeysExamined": 17,
"totalDocsExamined": 17,
"executionStages": {
"stage": "FETCH",
"filter": {
"members": {
"$elemMatch": {
"userId": {
"$eq": "61b091ee9b50220e75208eb6"
}
}
}
},
"nReturned": 17,
"executionTimeMillisEstimate": 0,
"works": 18,
"advanced": 17,
"needTime": 0,
"needYield": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"docsExamined": 17,
"alreadyHasObj": 0,
"inputStage": {
"stage": "IXSCAN",
"nReturned": 17,
"executionTimeMillisEstimate": 0,
"works": 18,
"advanced": 17,
"needTime": 0,
"needYield": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"keyPattern": {
"members.userId": 1
},
"indexName": "members.userId_1",
"isMultiKey": true,
"multiKeyPaths": {
"members.userId": [
"members"
]
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"members.userId": [
"[ObjectId('61b091ee9b50220e75208eb6'), ObjectId('61b091ee9b50220e75208eb6')]"
]
},
"keysExamined": 17,
"seeks": 1,
"dupsTested": 17,
"dupsDropped": 0
}
},
"allPlansExecution": []
},
"command": {
"find": "groups",
"filter": {
"members": {
"$elemMatch": {
"userId": "61b091ee9b50220e75208eb6"
}
}
},
"projection": {},
"readConcern": {
"level": "majority"
},
"$db": "***"
},
"serverInfo": {
"host": "***",
"port": 27017,
"version": "6.0.3",
"gitVersion": "f803681c3ae19817d31958965850193de067c516"
},
"serverParameters": {
"internalQueryFacetBufferSizeBytes": 104857600,
"internalQueryFacetMaxOutputDocSizeBytes": 104857600,
"internalLookupStageIntermediateDocumentMaxSizeBytes": 104857600,
"internalDocumentSourceGroupMaxMemoryBytes": 104857600,
"internalQueryMaxBlockingSortMemoryUsageBytes": 104857600,
"internalQueryProhibitBlockingMergeOnMongoS": 0,
"internalQueryMaxAddToSetBytes": 104857600,
"internalDocumentSourceSetWindowFieldsMaxMemoryBytes": 104857600
},
"ok": 1,
"operationTime": {
"$timestamp": "7168789227251957761"
}
}
Under load the query takes 300-400ms, which is not acceptable for us.
However right now we don't really know anymore what would be the best next step in improving the solution. Mongo does not advise any additional indexes or schema improvements at this moment.
What can we best do to make this query really performant?
I have the below dbData wherein in the contracts collection, there is an attribute called via which is an array that holds the mongoose ids of depot entries.
db={
"contracts": [
{
"_id": ObjectId("630c9e23fb146c4b3b801b9e"),
"detention_id": [],
"route_exclude": false,
"trip_count": null,
"fill_first": false,
"is_dedicated": false,
"truck_count": 0,
"running_trip_count": 0,
"is_round_trip_eligible": false,
"is_synced": false,
"bdr_label": "",
"bdr_charge": 0,
"is_migrated": false,
"current_trip_count": 0,
"client_id": ObjectId("5e47c1f4a697c60195050059"),
"updated_by": ObjectId("60a39adad079f9004d144cb8"),
"user_id": ObjectId("60a39adad079f9004d144cb8"),
"transporter_id": ObjectId("61d6d2227465c501d0915794"),
"vehicle_type_id": ObjectId("61e1b3ad00516e004c893145"),
"detention_group": "",
"delivery_type": 2,
"source": "amaravathi",
"destination": "Kozhikode",
"isDirect": false,
"via": [
"6304804a9f90b8018844fc20",
"62fb93a711ea3f019ffe19e6"
],
"distance": 100,
"status": 1,
"contract_type": 1,
"current": {
"minimum_rate": 0,
"multiplying_factor": null,
"min_slab": 0,
"additional_charge_percentage": 0,
"hilly_region_addition_percentage": 0,
"return_freight_rate": 0,
"surge_threshold": 0,
"fixed_rate": null,
"oda_rate": null,
"oda_unit": 3,
"minimum_oda": 0,
"point_charges": 0,
"pickup_point_charges": 0,
"start": ISODate("2022-08-26T00:00:00.000+05:30"),
"end": ISODate("2023-08-24T23:59:00.000+05:30"),
"freight_unit": 1,
"freight_rate": 5000,
"transit_time": 172800000
},
"next": [],
"previous": [],
"lookup_key": "f93187d8364fca313ab92c83ac39eafa",
"created_at": ISODate("2022-08-26T15:26:15.865+05:30"),
"updated_at": ISODate("2022-08-29T13:20:39.058+05:30")
}
],
"depots": [
{
"_id": ObjectId("6304804a9f90b8018844fc20"),
"consignee_ref_ids": [],
"status": 1,
"coordinates": [
12.9039441,
80.14567129999999
],
"unloading_capacity": 0,
"has_pit_stop": false,
"type": 1,
"plant_type": 1,
"ports_associated": [],
"overseas_customer": false,
"sez": false,
"timezone": "Asia/Kolkata",
"fax_number": "",
"is_optima_group_by_cluster": "",
"country": "India",
"name": "CTB",
"reference_id": "CTB",
"client_id": ObjectId("5e47c1f4a697c60195050059"),
"address": "CTB",
"city": "Chennai",
"state": "Tamil Nadu",
"region": "South",
"pincode": "600126",
"phone": "",
"delivery_type": 1,
"short_code": "CTB",
"gstn_no": "HSKK309393k",
"gates": [
{
"coordinates": [
12.9039441,
80.14567129999999
],
"exclude_trucks": [],
"_id": ObjectId("630480789f90b8018844fc21"),
"country": "India",
"name": "CTG1",
"reference_id": "CTG1",
"status": 1,
"address": "CTB",
"city": "Chennai",
"state": "Tamil Nadu",
"region": "South",
"pincode": "600126",
"short_code": "CTG1",
"special_tax_rebate": false,
"gstin": null,
"is_virtual_gate": false,
"category": "",
"delivery_do_not_combine": false,
"combine": false,
"loading_time": {
"start": null,
"end": null
},
"unloading_time": {
"start": null,
"end": null
},
"legal_entity": null,
"display_reference_name": "",
"updated_at": ISODate("2022-08-23T12:53:46.233+05:30"),
"created_at": ISODate("2022-08-23T12:53:36.046+05:30")
}
],
"docks": [],
"updated_by": ObjectId("5eaba6438a4895004916adbe"),
"display_reference_name": "",
"receivers": [],
"created_at": ISODate("2022-08-23T12:52:50.182+05:30"),
"updated_at": ISODate("2022-08-23T13:09:18.468+05:30")
},
{
"_id": ObjectId("62fb93a711ea3f019ffe19e6"),
"consignee_ref_ids": [],
"status": 1,
"coordinates": [
20.593684,
78.96288
],
"unloading_capacity": 0,
"has_pit_stop": false,
"type": 1,
"plant_type": 1,
"ports_associated": [],
"overseas_customer": false,
"sez": false,
"timezone": "Asia/Kolkata",
"fax_number": "",
"is_optima_group_by_cluster": "",
"country": "India",
"name": "Test",
"reference_id": "Test",
"client_id": ObjectId("5e47c1f4a697c60195050059"),
"address": "test",
"city": "Test",
"state": "Test",
"region": "North",
"pincode": "0100301",
"phone": "",
"delivery_type": 1,
"short_code": "test",
"gstn_no": "1111",
"gates": [],
"docks": [],
"updated_by": ObjectId("62e7b04d91a9b50189b4319d"),
"display_reference_name": "",
"receivers": [],
"created_at": ISODate("2022-08-16T18:25:03.453+05:30"),
"updated_at": ISODate("2022-08-16T18:25:03.913+05:30")
},
{
"_id": ObjectId("630cc2ba38f87401899c35f1"),
"consignee_ref_ids": [],
"status": 1,
"coordinates": [
20.593684,
78.96288
],
"unloading_capacity": 0,
"has_pit_stop": false,
"type": 1,
"plant_type": 1,
"ports_associated": [],
"overseas_customer": false,
"sez": false,
"timezone": "Asia/Kolkata",
"fax_number": "",
"is_optima_group_by_cluster": "",
"country": "India",
"name": "B20220829191440",
"reference_id": "B20220829191440",
"client_id": ObjectId("62d8f30ce763689b942d5e12"),
"address": "No 15,GST Street",
"city": "Blr20220829191440",
"state": "Karnataka",
"region": "South",
"pincode": "123456",
"phone": "5588",
"delivery_type": 1,
"short_code": "B214945",
"gstn_no": "123",
"gates": [
{
"coordinates": [
10.9290666,
78.7438114
],
"exclude_trucks": [],
"_id": ObjectId("630cc2bb38f87401899c35f2"),
"country": "India",
"name": "Gate 2",
"reference_id": "BG20220829191440",
"status": 1,
"address": "No 15 GST Road",
"city": "Blr20220829191440",
"state": "Karnataka",
"region": "South",
"pincode": "123456",
"short_code": "G1214945",
"special_tax_rebate": false,
"is_virtual_gate": false,
"category": "b",
"combine": true,
"updated_at": ISODate("2022-08-29T19:14:27.912+05:30"),
"created_at": ISODate("2022-08-29T19:14:27.551+05:30")
}
],
"docks": [],
"updated_by": ObjectId("62d8f30ce763689b942d5e15"),
"display_reference_name": "",
"receivers": [],
"created_at": ISODate("2022-08-29T19:14:26.892+05:30"),
"updated_at": ISODate("2022-08-29T19:14:27.912+05:30")
}
]
}
I am trying to aggregate using $lookup so that I can fetch the entries from the depots collection based on the ids that are present in via.
Below is the query I have tried
db.contracts.aggregate([
{
$match: {
_id: ObjectId("630c9e23fb146c4b3b801b9e")
}
},
{
$lookup: {
from: "depots",
localField: "via",
as: "depots",
foreignField: "_id"
}
},
{
$sort: {
count: -1
}
}
])
for which I expect to get the contracts entry with a newly added attribute in it called depots that contains the documents for the corresponding two ids held by via.
But the actual response that I got has depots as [].
[
{
"_id": ObjectId("630c9e23fb146c4b3b801b9e"),
"bdr_charge": 0,
"bdr_label": "",
"client_id": ObjectId("5e47c1f4a697c60195050059"),
"contract_type": 1,
"created_at": ISODate("2022-08-26T09:56:15.865Z"),
"current": {
"additional_charge_percentage": 0,
"end": ISODate("2023-08-24T18:29:00Z"),
"fixed_rate": null,
"freight_rate": 5000,
"freight_unit": 1,
"hilly_region_addition_percentage": 0,
"min_slab": 0,
"minimum_oda": 0,
"minimum_rate": 0,
"multiplying_factor": null,
"oda_rate": null,
"oda_unit": 3,
"pickup_point_charges": 0,
"point_charges": 0,
"return_freight_rate": 0,
"start": ISODate("2022-08-25T18:30:00Z"),
"surge_threshold": 0,
"transit_time": 1.728e+08
},
"current_trip_count": 0,
"delivery_type": 2,
"depots": [],
"destination": "Kozhikode",
"detention_group": "",
"detention_id": [],
"distance": 100,
"fill_first": false,
"isDirect": false,
"is_dedicated": false,
"is_migrated": false,
"is_round_trip_eligible": false,
"is_synced": false,
"lookup_key": "f93187d8364fca313ab92c83ac39eafa",
"next": [],
"previous": [],
"route_exclude": false,
"running_trip_count": 0,
"source": "amaravathi",
"status": 1,
"transporter_id": ObjectId("61d6d2227465c501d0915794"),
"trip_count": null,
"truck_count": 0,
"updated_at": ISODate("2022-08-29T07:50:39.058Z"),
"updated_by": ObjectId("60a39adad079f9004d144cb8"),
"user_id": ObjectId("60a39adad079f9004d144cb8"),
"vehicle_type_id": ObjectId("61e1b3ad00516e004c893145"),
"via": [
"6304804a9f90b8018844fc20",
"62fb93a711ea3f019ffe19e6"
]
}
]
Question 1: Could someone help me with where I am going wrong in the $lookup implementation?
Question 2: Also, could the same query be achievable using $graphLookup? At least with a possible schema change in contracts?
Edit:
When I kept them as ObjectIds in via instead of strings, it worked as expected
"via": [
ObjectId("6304804a9f90b8018844fc20"),
ObjectId("62fb93a711ea3f019ffe19e6")
]
When I kept via as strings, I added an additional stage to the pipeline before the $lookup stage to make it work
db.contracts.aggregate([
{
$match: {
_id: ObjectId("630c9e23fb146c4b3b801b9e")
}
},
{
$addFields: {
via: {
$map: {
input: "$via",
in: {
"$toObjectId": "$$this"
}
}
}
}
},
{
$lookup: {
from: "depots",
localField: "via",
as: "depots",
foreignField: "_id"
}
},
{
$sort: {
count: -1
}
}
])
One option is to use a $map stage before the $lookup:
db.contracts.aggregate([
{$match: {_id: ObjectId("630c9e23fb146c4b3b801b9e")}},
{$set: {via: {
$map: {
input: "$via",
in: {"$toObjectId": "$$this"}
}
}
}
},
{$lookup: {
from: "depots",
localField: "via",
as: "depots",
foreignField: "_id"
}
},
{$sort: {count: -1}}
])
See how it works on the playground example
No need to use $graphLookup, as you only have one item referencing the other and not a recursive relationship. A relevant alternative to $lookup here would be populate. You can read about this option a bit here
Was able to implement $lookup as well $graphLookup with the below query
db.contracts.aggregate([
{
$match: {
_id: ObjectId("630c9e23fb146c4b3b801b9e")
}
},
{
$addFields: {
via: {
$map: {
input: "$via",
in: {
"$toObjectId": "$$this"
}
}
}
}
},
{
$lookup: {
from: "depots",
localField: "via",
as: "depots",
foreignField: "_id"
}
},
// GraphLookup stage
/* {
"$graphLookup": {
"from": "depots",
"startWith": "$via",
"connectFromField": "via",
"connectToField": "_id",
"as": "depots",
"maxDepth": 2
}
}, */
{
$sort: {
count: -1
}
}
])
Currently I have a collection with the following documents:
[
{
"_id": ObjectId("628e6bd640643f97d6517c75"),
"company": "bau",
"current_version": 0,
"form_name": "don't know",
"history": [],
"id": "23421123-24a9-4a45-a12f-27a330152ax3",
"is_active": True,
"user_id": "999",
},
{
"_id": ObjectId("628eaffe4b8ae2ccdeb9305c"),
"company": "vrau",
"current_version": 0,
"form_name": "exemplo",
"history": [
{
"content": [
{
"field_id": 0,
"label": "insira um texto",
"placeholder": "qualquer texto",
"type": "text",
}
],
"layout": [
{"field_id": 0, "h": 10, "type": "text", "w": 100, "x": 0, "y": 0}
],
"responses": [
{
"client_id": 100,
"response_date": "2020-01-02",
"values": [{"field_id": 0, "value": "um texto"}],
},
{
"client_id": 2,
"response_date": "2020-01-01",
"values": [{"field_id": 0, "value": "roi"}],
},
],
"version": 0,
}
],
"id": "33b66684-24a9-4a45-a12f-27a330152ac8",
"is_active": True,
"user_id": "1",
},
]
I want to change the response from the client_id = '2', but I'm receiving the following error:
pymongo.errors.WriteError: The field 'history.0.responses.1' must be an array but is of type object in document {_id: ObjectId('628eaffe4b8ae2ccdeb9305c')}, full error: {'index': 0, 'code': 2, 'errmsg': "The field 'history.0.responses.1' must be an array but is of type object in document {_id: ObjectId('628eaffe4b8ae2ccdeb9305c')}"}
I don't know what I'm doing wrong, and this error doesn't make sense to me because responses is an array.
my current query:
collection.update_many(
{"id": "33b66684-24a9-4a45-a12f-27a330152ac8", "history.version": 0},
{
"$push": {
"history.$[h].responses.$[r]": {
"client_id": 2,
"response_date": "2020-01-01",
"values": [{"field_id": 0, "value": "roi"}],
}
}
},
array_filters=[{"h.version": 0}, {"r.client_id": "2"}],
)
Is there another way to do it?
It is because you are also performing filter on r, which already resolves to object level in responses array.
You can simply abandon the r arrayFilter if you just want to push to the responses array.
collection.update_many(
{"id": "33b66684-24a9-4a45-a12f-27a330152ac8", "history.version": 0},
{
"$push": {
"history.$[h].responses": {
"client_id": 2,
"response_date": "2020-01-01",
"values": [{"field_id": 0, "value": "roi"}],
}
}
},
array_filters=[{"h.version": 0}],
)
Here is the Mongo playground for your reference. (in native js syntax)
You should use $set instead of $push if you want to update an existing entry rather than add a new one. In your given example, the client_id is an int while your arrayFilter uses a string. That could cause problems if it is not intended.
collection.update_many(
{"id": "33b66684-24a9-4a45-a12f-27a330152ac8", "history.version": 0},
{
"$set": {
"history.$[h].responses.$[r]": {
"client_id": 2,
"response_date": "2020-01-01",
"values": [{"field_id": 0, "value": "roi"}],
}
}
},
array_filters=[{"h.version": 0}, {"r.client_id": 2}],
)
Here is the Mongo playground for your reference. (in native js syntax)
Hi can somebody explain me please how to make compound index for this simple query?
find({
"userId":{"$in":["5c778c4f52732c06d3384269","5b274b672a35d168960b1bd5","5bc43d6552732c527345c35d","58ec836e899753a22b95fb19"]},
"end":{"$gte": ISODate('2019-05-03 11:05:00+02:00')}
})
I have done two compound indexes:
userId: 1 + end: 1
userId: 1 + end: -1
but the query does not use any of them. What is wrong with these indexes?
Here is the result of explain()
{
"queryPlanner": {
"plannerVersion": 1,
"namespace": "WebApp.SessionLog",
"indexFilterSet": false,
"parsedQuery": {
"$and": [
{
"end": {
"$gte": ISODate("2019-05-03T07:05:00Z")
}
},
{
"userId": {
"$in": [
"58ec836e899753a22b95fb19",
"5b274b672a35d168960b1bd5",
"5bc43d6552732c527345c35d",
"5c778c4f52732c06d3384269"
]
}
}
]
},
"winningPlan": {
"stage": "FETCH",
"filter": {
"userId": {
"$in": [
"58ec836e899753a22b95fb19",
"5b274b672a35d168960b1bd5",
"5bc43d6552732c527345c35d",
"5c778c4f52732c06d3384269"
]
}
},
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"end": -1
},
"indexName": "end",
"isMultiKey": false,
"multiKeyPaths": {
"end": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"end": [
"[new Date(9223372036854775807), new Date(1556867100000)]"
]
}
}
},
"rejectedPlans": [
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"userId": 1,
"end": -1
},
"indexName": "userId_1_end_-1",
"isMultiKey": false,
"multiKeyPaths": {
"userId": [],
"end": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"userId": [
"[\"58ec836e899753a22b95fb19\", \"58ec836e899753a22b95fb19\"]",
"[\"5b274b672a35d168960b1bd5\", \"5b274b672a35d168960b1bd5\"]",
"[\"5bc43d6552732c527345c35d\", \"5bc43d6552732c527345c35d\"]",
"[\"5c778c4f52732c06d3384269\", \"5c778c4f52732c06d3384269\"]"
],
"end": [
"[new Date(9223372036854775807), new Date(1556867100000)]"
]
}
}
},
{
"stage": "FETCH",
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"userId": 1,
"end": 1
},
"indexName": "userId_1_end_1",
"isMultiKey": false,
"multiKeyPaths": {
"userId": [],
"end": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"userId": [
"[\"58ec836e899753a22b95fb19\", \"58ec836e899753a22b95fb19\"]",
"[\"5b274b672a35d168960b1bd5\", \"5b274b672a35d168960b1bd5\"]",
"[\"5bc43d6552732c527345c35d\", \"5bc43d6552732c527345c35d\"]",
"[\"5c778c4f52732c06d3384269\", \"5c778c4f52732c06d3384269\"]"
],
"end": [
"[new Date(1556867100000), new Date(9223372036854775807)]"
]
}
}
},
{
"stage": "FETCH",
"filter": {
"end": {
"$gte": ISODate("2019-05-03T07:05:00Z")
}
},
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"userId": 1
},
"indexName": "userId",
"isMultiKey": false,
"multiKeyPaths": {
"userId": []
},
"isUnique": false,
"isSparse": false,
"isPartial": false,
"indexVersion": 2,
"direction": "forward",
"indexBounds": {
"userId": [
"[\"58ec836e899753a22b95fb19\", \"58ec836e899753a22b95fb19\"]",
"[\"5b274b672a35d168960b1bd5\", \"5b274b672a35d168960b1bd5\"]",
"[\"5bc43d6552732c527345c35d\", \"5bc43d6552732c527345c35d\"]",
"[\"5c778c4f52732c06d3384269\", \"5c778c4f52732c06d3384269\"]"
]
}
}
}
]
},
"serverInfo": {
"host": "mongo1",
"port": 9000,
"version": "3.4.2",
"gitVersion": "3f76e40c105fc223b3e5aac3e20dcd026b83b38b"
},
"ok": 1
}
here is the documents example:
{
"_id": ObjectId("56c49c147569943e7d4e92f4"),
"ip": "213.81.143.50",
"count": 17,
"end": ISODate("2014-12-02T08:39:56Z"),
"userId": "546dda33899753840584752b",
"date": ISODate("2014-12-02T08:36:47Z"),
"logs": [
{
"parameters": {
"action": "dashboard",
"id": null,
"package": ""
},
"ip": "213.81.143.50",
"date": ISODate("2014-12-02T08:36:47Z"),
"presenter": "Dashboard",
"action": "dashboard"
},
{
"parameters": {
"action": "dashboard",
"id": null,
"backlink": ""
},
"ip": "213.81.143.50",
"date": ISODate("2014-12-02T08:36:48Z"),
"presenter": "Dashboard",
"action": "dashboard"
}
]
}
Thanks.
PS: This PS text is only for Stackoverflow editor error.
In the query explain result you can see "indexName": "end" under winningPlan.
This means your query used the index named end.
I need to make an indexed query in MongoDB; I will show below a minimal example of my real case.
I have the following collection with the following data:
devsrv(mongod-3.0.4) test> db.teste.find()
{
"_id": ObjectId("57b324c341aaa4b930ef3b92"),
"a": 1,
"b": 1
}
{
"_id": ObjectId("57b324c941aaa4b930ef3b93"),
"a": 1,
"b": 2
}
{
"_id": ObjectId("57b324cd41aaa4b930ef3b94"),
"a": 1,
"b": 3
}
{
"_id": ObjectId("57b324d141aaa4b930ef3b95"),
"a": 1,
"b": 4
}
{
"_id": ObjectId("57b324d541aaa4b930ef3b96"),
"a": 1,
"b": 5
}
{
"_id": ObjectId("57b324da41aaa4b930ef3b97"),
"a": 1,
"b": 6
}
{
"_id": ObjectId("57b324df41aaa4b930ef3b98"),
"a": 1,
"b": 7
}
{
"_id": ObjectId("57b324e441aaa4b930ef3b99"),
"a": 1,
"b": 8
}
{
"_id": ObjectId("57b324f341aaa4b930ef3b9a"),
"a": 1,
"b": ""
}
{
"_id": ObjectId("57b324f641aaa4b930ef3b9b"),
"a": 1,
"b": " "
}
{
"_id": ObjectId("57b324fc41aaa4b930ef3b9c"),
"a": 1,
"b": null
}
{
"_id": ObjectId("57b3250341aaa4b930ef3b9d"),
"a": 1
}
{
"_id": ObjectId("57b46ace41aaa4b930ef3b9e"),
"a": 2
}
And I have the following indexes:
devsrv(mongod-3.0.4) test> db.teste.getIndexes()
[
{
"v": 1,
"key": {
"_id": 1
},
"name": "_id_",
"ns": "test.teste"
},
{
"v": 1,
"key": {
"a": 1,
"b": 1
},
"name": "a_1_b_1",
"ns": "test.teste"
},
{
"v": 1,
"key": {
"b": 1
},
"name": "b_1",
"ns": "test.teste"
}
]
And I need to make a query like this:
devsrv(mongod-3.0.4) test> db.teste.find({$or:[{"b":null},{"b":""},{"b":" "},{"b":{$lt:3}}],"a":1}).explain("executionStats")
{
"queryPlanner": {
"plannerVersion": 1,
"namespace": "test.teste",
"indexFilterSet": false,
"parsedQuery": {
"$and": [
{
"$or": [
{
"b": {
"$eq": null
}
},
{
"b": {
"$eq": ""
}
},
{
"b": {
"$eq": " "
}
},
{
"b": {
"$lt": 3
}
}
]
},
{
"a": {
"$eq": 1
}
}
]
},
"winningPlan": {
"stage": "FETCH",
"filter": {
"a": {
"$eq": 1
}
},
"inputStage": {
"stage": "FETCH",
"filter": {
"$or": [
{
"b": {
"$eq": null
}
},
{
"b": {
"$eq": ""
}
},
{
"b": {
"$eq": " "
}
},
{
"b": {
"$lt": 3
}
}
]
},
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"b": 1
},
"indexName": "b_1",
"isMultiKey": false,
"direction": "forward",
"indexBounds": {
"b": [
"[null, null]",
"[-inf.0, 3.0)",
"[\"\", \"\"]",
"[\" \", \" \"]"
]
}
}
}
},
"rejectedPlans": [
{
"stage": "FETCH",
"filter": {
"$or": [
{
"b": {
"$eq": null
}
},
{
"b": {
"$eq": ""
}
},
{
"b": {
"$eq": " "
}
},
{
"b": {
"$lt": 3
}
}
]
},
"inputStage": {
"stage": "IXSCAN",
"keyPattern": {
"a": 1,
"b": 1
},
"indexName": "a_1_b_1",
"isMultiKey": false,
"direction": "forward",
"indexBounds": {
"a": [
"[1.0, 1.0]"
],
"b": [
"[MinKey, MaxKey]"
]
}
}
}
]
},
"executionStats": {
"executionSuccess": true,
"nReturned": 6,
"executionTimeMillis": 0,
"totalKeysExamined": 8,
"totalDocsExamined": 14,
"executionStages": {
"stage": "FETCH",
"filter": {
"a": {
"$eq": 1
}
},
"nReturned": 6,
"executionTimeMillisEstimate": 0,
"works": 10,
"advanced": 6,
"needTime": 2,
"needFetch": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"invalidates": 0,
"docsExamined": 7,
"alreadyHasObj": 7,
"inputStage": {
"stage": "FETCH",
"filter": {
"$or": [
{
"b": {
"$eq": null
}
},
{
"b": {
"$eq": ""
}
},
{
"b": {
"$eq": " "
}
},
{
"b": {
"$lt": 3
}
}
]
},
"nReturned": 7,
"executionTimeMillisEstimate": 0,
"works": 8,
"advanced": 7,
"needTime": 1,
"needFetch": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"invalidates": 0,
"docsExamined": 7,
"alreadyHasObj": 0,
"inputStage": {
"stage": "IXSCAN",
"nReturned": 7,
"executionTimeMillisEstimate": 0,
"works": 8,
"advanced": 7,
"needTime": 1,
"needFetch": 0,
"saveState": 0,
"restoreState": 0,
"isEOF": 1,
"invalidates": 0,
"keyPattern": {
"b": 1
},
"indexName": "b_1",
"isMultiKey": false,
"direction": "forward",
"indexBounds": {
"b": [
"[null, null]",
"[-inf.0, 3.0)",
"[\"\", \"\"]",
"[\" \", \" \"]"
]
},
"keysExamined": 8,
"dupsTested": 0,
"dupsDropped": 0,
"seenInvalidated": 0,
"matchTested": 0
}
}
}
},
"serverInfo": {
"host": "devsrv",
"port": 27017,
"version": "3.0.4",
"gitVersion": "0481c958daeb2969800511e7475dc66986fa9ed5"
},
"ok": 1
}
But MongoDB isn't using the two indexes together.
Each $or term is effectively a separate query, so it helps to structure your query so that each term aligns with the index you're hoping to use. In this case that means moving the a: 1 part inside of each $or term:
db.teste.find({
$or:[
{a: 1, b: null},
{a: 1, b: ""},
{a: 1, b: " "},
{a: 1, b: {$lt: 3}}
]}).explain('executionStats')
The explain output shows that the a_1_b_1 is used for this query.
But you can simplify this a bit more by using $in to combine the first three terms into one:
db.teste.find({
$or:[
{a: 1, b: {$in: [null, "", " "]}},
{a: 1, b: {$lt: 3}}
]}).explain('executionStats')
This is also able to use the a_1_b_1 index.
The code is
db.teste.explain("executionStats").find({a: 1,
$or:[{b: null},
{b: ""},
{b: " "},
{b: {$lt:3}}]
}).hint({a: 1, b: 1})
Be careful with the hint command as the query optimizer chooses the most efficient query by measuring actual performance of the query with every suitable index.