When I try to fetch documents from my collections with aggregate joins, I get the error "aggregation result exceeds maximum document size (16MB)". I am already passing "{allowDiskUse: true}" in my code, but the error still occurs. Can anyone please tell me how to solve this?
1 Answer
As stated, you are blowing up the "response" size limit because you are not returning a "cursor", so .aggregate() instead tries to return the entire result in a single BSON document. Use the "cursor" option instead, which produces a stream interface whose events you can act on:
db.collection("bags", function (err, bags) {
var result = [];
var cursor = bags.aggregate(
[
{ "$sort": { "todayDate": -1 }},
//{ "$match": {}},
{ "$lookup": {
"from": "donor",
"localField": "regNo",
"foreignField": "regNo",
"as": "donordata"
}},
{ "$unwind": { "path": "$donordata", "preserveNullAndEmptyArrays": true}}
],
{
"allowDiskUse": true,
"cursor": { "batchSize": 20 }
}
);
cursor.on("data",function(data) {
result.push(data);
});
cursor.on("end",function() {
res.json(result);
})
});
Ideally you should be using the stream when writing output lines as well, such as with a stream writer for JSON. But for example purposes we are just appending each document the cursor emits into an array.
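For instance, here is a minimal sketch of streaming the documents straight out to the response instead of buffering them in memory, using the same cursor events as above ("res" is assumed to be an Express-style response object):

res.setHeader("Content-Type", "application/json");
res.write("[");
var first = true;
cursor.on("data", function (doc) {
    // Separate documents with commas, but not before the first one
    res.write((first ? "" : ",") + JSON.stringify(doc));
    first = false;
});
cursor.on("end", function () {
    res.write("]");
    res.end();
});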
Using $push to put every single document in your collection into an array is no doubt what you were trying to do when you got this error. That is not how you go about counting total results for paging. – Blakes Seven
$unwind will duplicate the _id, so another alternative is a last $project stage where you remove the _id field: "_id": 0 – Blakes Seven
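For reference, the suggestion in that last comment amounts to appending one more stage to the pipeline shown above (a sketch, with the rest of the pipeline unchanged):

{ "$unwind": { "path": "$donordata", "preserveNullAndEmptyArrays": true }},
// Suggested final stage: drop the _id field duplicated by $unwind
{ "$project": { "_id": 0 } }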