I want to fetch 1,000 documents, which I specify by passing 1,000 unique _id values to the $in operator of a single find() query.
How much faster is this than running 1,000 separate find({_id: ...}) queries?
Something like this should help you:
// create some data (100,000 documents with _id 0..99999)
for (var i = 0; i < 100000; i++) {
    db.foo.insert({ "_id": i });
}
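// (optional) if your shell is MongoDB 3.2+ (an assumption), seeding in batches with
// insertMany() is much faster than 100,000 individual insert() calls, e.g.:
//   var batch = [];
//   for (var j = 0; j < 100000; j++) {
//       batch.push({ "_id": j });
//       if (batch.length === 10000) { db.foo.insertMany(batch); batch = []; }
//   }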
// generate the 1,000 ids to search for
var ids = [];
for (var i = 0; i < 1000; i++) {
    ids.push(i);
}
// now let's try the two approaches
// #1 - 1,000 separate single-document queries
var timer1Start = new Date().getTime(); // start the stopwatch
ids.forEach(function (i) {
    // findOne() actually fetches the document; a bare find() only builds a
    // cursor and never hits the server, so the timing would be meaningless
    var result = db.foo.findOne({ "_id": i });
});
var timer1End = new Date().getTime(); // stop the stopwatch
// #2 - one $in query for all 1,000 ids
var timer2Start = new Date().getTime(); // start the stopwatch
// toArray() forces the cursor to run the query and pull back every result
var result = db.foo.find({ "_id": { $in: ids } }).toArray();
var timer2End = new Date().getTime(); // stop the stopwatch
print("Function #1 = " + (timer1Start - timer1End));
print("Function #2 = " + (timer2Start - timer2End));