In the official MongoDB docs, there is a pipeline optimization for multiple $match operators.
When a $match immediately follows another $match, the two stages can coalesce into a single $match combining the conditions with an $and.
For your case, I think there will be a trivial performance difference, if not none. The explain plans for them are nearly identical.
explain output for single $set:
{
"$clusterTime": {
"clusterTime": Timestamp(1734276869, 3),
"signature": {
"hash": BinData(0, "tRv1PQVgmouDxOtttOdSk2z0O1U="),
"keyId": NumberLong(7394893876424605697)
}
},
"command": {
"$db": "75f38391413d12711811639fd9b97e4f",
"filter": {},
"find": "collection",
"maxTimeMS": NumberLong(20000)
},
"executionStats": {
"allPlansExecution": [],
"executionStages": {
"advanced": 1,
"direction": "forward",
"docsExamined": 1,
"executionTimeMillisEstimate": 0,
"isEOF": 1,
"nReturned": 1,
"needTime": 0,
"needYield": 0,
"restoreState": 0,
"saveState": 0,
"stage": "COLLSCAN",
"works": 2
},
"executionSuccess": true,
"executionTimeMillis": 0,
"nReturned": 1,
"totalDocsExamined": 1,
"totalKeysExamined": 0
},
"explainVersion": "1",
"operationTime": Timestamp(1734276869, 3),
"queryPlanner": {
"indexFilterSet": false,
"maxIndexedAndSolutionsReached": false,
"maxIndexedOrSolutionsReached": false,
"maxScansToExplodeReached": false,
"namespace": "75f38391413d12711811639fd9b97e4f.collection",
"parsedQuery": {},
"planCacheKey": "5F5FC979",
"queryHash": "5F5FC979",
"rejectedPlans": [],
"winningPlan": {
"direction": "forward",
"stage": "COLLSCAN"
}
},
"serverParameters": {
"internalDocumentSourceGroupMaxMemoryBytes": 104857600,
"internalDocumentSourceSetWindowFieldsMaxMemoryBytes": 104857600,
"internalLookupStageIntermediateDocumentMaxSizeBytes": 104857600,
"internalQueryFacetBufferSizeBytes": 104857600,
"internalQueryFacetMaxOutputDocSizeBytes": 104857600,
"internalQueryMaxAddToSetBytes": 104857600,
"internalQueryMaxBlockingSortMemoryUsageBytes": 104857600,
"internalQueryProhibitBlockingMergeOnMongoS": 0
}
}
Mongo Playground
explain output for multiple $set:
{
"$clusterTime": {
"clusterTime": Timestamp(1734276224, 3),
"signature": {
"hash": BinData(0, "QUsSlU1zBAvtbFAgbv3/fA7RENU="),
"keyId": NumberLong(7394893876424605697)
}
},
"command": {
"$db": "3d58f286ab6b7455181163098d794665",
"filter": {},
"find": "collection",
"maxTimeMS": NumberLong(20000)
},
"executionStats": {
"allPlansExecution": [],
"executionStages": {
"advanced": 1,
"direction": "forward",
"docsExamined": 1,
"executionTimeMillisEstimate": 0,
"isEOF": 1,
"nReturned": 1,
"needTime": 0,
"needYield": 0,
"restoreState": 0,
"saveState": 0,
"stage": "COLLSCAN",
"works": 2
},
"executionSuccess": true,
"executionTimeMillis": 0,
"nReturned": 1,
"totalDocsExamined": 1,
"totalKeysExamined": 0
},
"explainVersion": "1",
"operationTime": Timestamp(1734276224, 3),
"queryPlanner": {
"indexFilterSet": false,
"maxIndexedAndSolutionsReached": false,
"maxIndexedOrSolutionsReached": false,
"maxScansToExplodeReached": false,
"namespace": "3d58f286ab6b7455181163098d794665.collection",
"parsedQuery": {},
"planCacheKey": "5F5FC979",
"queryHash": "5F5FC979",
"rejectedPlans": [],
"winningPlan": {
"direction": "forward",
"stage": "COLLSCAN"
}
},
"serverParameters": {
"internalDocumentSourceGroupMaxMemoryBytes": 104857600,
"internalDocumentSourceSetWindowFieldsMaxMemoryBytes": 104857600,
"internalLookupStageIntermediateDocumentMaxSizeBytes": 104857600,
"internalQueryFacetBufferSizeBytes": 104857600,
"internalQueryFacetMaxOutputDocSizeBytes": 104857600,
"internalQueryMaxAddToSetBytes": 104857600,
"internalQueryMaxBlockingSortMemoryUsageBytes": 104857600,
"internalQueryProhibitBlockingMergeOnMongoS": 0
}
}
Mongo Playground
Given that your query pattern in the example updates only 1 document and you are fetching by _id, I don't think there will be any observable performance difference.
There are 2 caveats outside of performance though:
- If, in your real use case, there is a dependency between the
$set operations, you need to break them into different stages.
e.g. you $set a to be c * d = 12, and b to be a + 1 = 12 + 1 = 13. You will need to do:
db.collection.update({
"_id": "1"
},
[
{
"$set": {
"a": {
"$multiply": [
"$c",
"$d"
]
}
}
},
{
"$set": {
"b": {
"$add": [
"$a",
1
]
}
}
}
])
Mongo Playground
A single $set won't give you the expected result of b = 13, but b = 2 instead, because a has not yet been evaluated as 12 when b is computed (a's old value of 1 is used, so b = 1 + 1 = 2):
db.collection.update({
"_id": "1"
},
[
{
"$set": {
"a": {
"$multiply": [
"$c",
"$d"
]
},
"b": {
"$add": [
"$a",
1
]
}
}
}
])
Mongo Playground
- readability: In real-life and more complex pipelines, you may find breaking up the
$set statement more readable or manageable. But I will admit this is quite subjective and you may consider it the other way round.
There is also an alternative way of writing it using the aggregation pipeline in a single stage:
db.collection.update({ "_id": "1" }, [ { "$set": { "a": 100, "b": 200, "c": 300, "d": 400 } } ])