1. Use ToCursorAsync and Process Documents Incrementally
The MongoDB C# driver supports iterating over results with a cursor, which retrieves documents from the server in batches instead of loading the entire result set into memory. By default the server returns up to 101 documents in the first batch; subsequent batches are limited only by the maximum message size (16 MB).
using MongoDB.Driver;
using MongoDB.Bson;

var client = new MongoClient("mongodb://localhost:27017");
var database = client.GetDatabase("YourDatabaseName");
var collection = database.GetCollection<BsonDocument>("YourCollectionName");

// Get a cursor to iterate over the collection
using (var cursor = await collection.Find(new BsonDocument()).ToCursorAsync())
{
    while (await cursor.MoveNextAsync())
    {
        foreach (var document in cursor.Current)
        {
            // Process one document at a time
            Console.WriteLine(document);
        }
    }
}
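The same pattern works with a typed collection, which is usually nicer to work with than raw BsonDocument. A minimal sketch, assuming a hypothetical Order class and collection name (adjust both to your own schema):

// Hypothetical POCO; property names are placeholders
public class Order
{
    public ObjectId Id { get; set; } // maps to _id by convention
    public string Customer { get; set; }
}

// Reuses the database variable from the snippet above
var orders = database.GetCollection<Order>("Orders"); // "Orders" is a placeholder name

using (var cursor = await orders.Find(Builders<Order>.Filter.Empty).ToCursorAsync())
{
    while (await cursor.MoveNextAsync())
    {
        foreach (var order in cursor.Current)
        {
            // Each document is deserialized into an Order instance
            Console.WriteLine(order.Customer);
        }
    }
}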
2. Process in Batches with BatchSize
Explicitly control the batch size to balance memory usage against the number of round trips to the server:
var filter = Builders<BsonDocument>.Filter.Empty;
var options = new FindOptions<BsonDocument>
{
    BatchSize = 1000 // Adjust batch size based on your needs
};

using (var cursor = await collection.FindAsync(filter, options))
{
    while (await cursor.MoveNextAsync())
    {
        var batch = cursor.Current; // Process a batch of documents
        foreach (var document in batch)
        {
            Console.WriteLine(document);
        }
    }
}
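Batch access is also convenient when you want to do work per chunk rather than per document, for example bulk-copying documents into another collection. A minimal sketch, assuming a hypothetical archive collection name and reusing the filter and options from above:

using System.Linq;

var archive = database.GetCollection<BsonDocument>("YourArchiveCollection"); // hypothetical target collection

using (var cursor = await collection.FindAsync(filter, options))
{
    while (await cursor.MoveNextAsync())
    {
        // cursor.Current exposes the whole batch, so each batch can be written in a single call
        var batch = cursor.Current.ToList();
        if (batch.Count > 0)
        {
            await archive.InsertManyAsync(batch);
        }
    }
}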
3. Async Stream (C# 8+ with IAsyncEnumerable)
If you are using C# 8 or later, you can leverage IAsyncEnumerable (exposed by the driver's ToAsyncEnumerable() extension) for cleaner iteration:
await foreach (var document in collection.Find(new BsonDocument()).ToAsyncEnumerable())
{
    // Process one document at a time
    Console.WriteLine(document);
}
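The streaming style combines naturally with a filter and a projection, so only the fields you actually need are pulled from the server. A minimal sketch, assuming hypothetical field names ("status" and "name"):

var activeFilter = Builders<BsonDocument>.Filter.Eq("status", "active"); // hypothetical field/value
var nameOnly = Builders<BsonDocument>.Projection.Include("name");        // hypothetical field

await foreach (var document in collection.Find(activeFilter).Project(nameOnly).ToAsyncEnumerable())
{
    // Only the projected fields (plus _id) come back for each document
    Console.WriteLine(document);
}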