I would like to loop through all documents in a specific collection of my MongoDB database. However, every attempt I've made has failed because the cursor times out. Here is my code:
let MongoClient = require('mongodb').MongoClient;

const url = "my connection URI"
let options = { socketTimeoutMS: 120000, connectTimeoutMS: 120000, keepAlive: 100, poolSize: 5 }

MongoClient.connect(url, options, function (err, db) {
  if (err) throw err

  let dbo = db.db("notes")
  let collection = dbo.collection("stats-network-consumption")

  // Stream every document in the collection
  let stream = collection.find({}, { timeout: false }).stream()

  stream.on("data", function (item) {
    printTask(item)
  })
  stream.on("error", function (err) {
    console.error(err)
  })
  stream.on("end", function () {
    console.log("DONE!")
    db.close()
  })
})
The code above runs for about 15 seconds, retrieves between 6,000 and 8,000 documents, and then throws the following error:
{ MongoError: cursor does not exist, was killed or timed out
at queryCallback (/Volumes/safezone/development/workspace-router/migration/node_modules/mongodb-core/lib/wireprotocol/2_6_support.js:136:23)
at /Volumes/safezone/development/workspace-router/migration/node_modules/mongodb-core/lib/connection/pool.js:541:18
at process._tickCallback (internal/process/next_tick.js:150:11)
name: 'MongoError',
message: 'cursor does not exist, was killed or timed out' }
I need to retrieve around 50,000 documents, so I need a way to avoid the cursor timeout.
As shown in the code above, I've tried increasing socketTimeoutMS and connectTimeoutMS, which had no effect on the cursor timeout.
I've also tried replacing the stream with a forEach and adding .addCursorFlag('noCursorTimeout', true), which did not help either.
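That attempt looked roughly like this (reconstructed from memory, using the same collection and printTask as above):

  // Roughly the forEach / noCursorTimeout attempt, inside the same connect callback
  collection.find({})
    .addCursorFlag('noCursorTimeout', true)
    .forEach(function (item) {
      printTask(item)
    }, function (err) {
      if (err) console.error(err)
      console.log("DONE!")
      db.close()
    })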
I've tried everything I could find about the native mongodb driver. I haven't tried Mongoose or other alternatives because they use schemas, and I'll later have to change the type of an attribute (which can be tricky with Mongoose schemas).
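For context, the follow-up update I have in mind would look something like the sketch below (the field name value is only a placeholder, and the string-to-number conversion is just an example of the kind of type change I mean):

  // Illustrative sketch only: change the stored type of a placeholder
  // attribute "value", e.g. from string to number, document by document
  collection.find({ value: { $type: "string" } }).forEach(function (doc) {
    collection.updateOne(
      { _id: doc._id },
      { $set: { value: parseFloat(doc.value) } }
    )
  }, function (err) {
    if (err) console.error(err)
  })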