I'm requesting a big file with 150K records, but it throws a 'toString failed' error. nodejs/node#3175 says this is because of maxBufferSize. The request works fine for 200 records, but it's an external API and the requirement is to get all records at once (no pagination there :( ). Is there any way to set the buffer size for this request?
I already asked this question here.
EDIT:
request("http://www.site-containing-big-data/api",
function (error, response, body) {
console.log('got something to show');
if(!error && response.statusCode == 200) {
resolve(body);
}else if(error){
reject(error);
}
});
but nothing shows in the console other than a 'toString failed' message.
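From what I can tell, request buffers the whole response and then calls body.toString(), and it is that call that throws once the payload exceeds V8's maximum string length. A minimal sketch of one possible workaround, assuming the payload at least still fits in memory as a Buffer: passing encoding: null makes request return the body as a raw Buffer, so the failing toString() is never called.

var request = require('request');

// encoding: null tells request to skip decoding and hand back a raw Buffer,
// so body.toString() is never attempted on the 150K-record payload
request({ url: "http://www.site-containing-big-data/api", encoding: null },
    function (error, response, body) {
        if (!error && response.statusCode === 200) {
            console.log('received a Buffer of', body.length, 'bytes');
        } else if (error) {
            console.log('request failed:', error);
        }
    });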
Solved it. It was an XML file and I was trying to process it directly. Now I first save it to a file and then use xml-stream to process each object one by one.
var fs = require('fs');
var request = require('request');
var XmlStream = require('xml-stream');

request.get("http://www.site-containing-big-data/api")
    .on('error', function (errReq) {
        console.log('error while reading from big site : ', errReq);
    })
    .pipe(fs.createWriteStream('bigfile.xml'))
    .on('finish', function () { // wait until the file is fully flushed to disk
        console.log('got from big site, now processing');
        var stream = fs.createReadStream('bigfile.xml');
        var xml = new XmlStream(stream);
        // structure is <items><item></item><item></item></items>;
        // this fires once per fully parsed <item> node
        xml.on('endElement: item', function (item) {
            // do something with the item here
        });
        xml.on('end', function () {
            // processing finished for all items in the file
        });
    });
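Since xml-stream only needs a readable stream and the request call itself is one, it may even be possible to skip the intermediate file. A minimal sketch, assuming the response can be parsed as it downloads (I have not load-tested this; the file-based version above is what I actually use):

var request = require('request');
var XmlStream = require('xml-stream');

// feed the HTTP response straight into xml-stream, no temp file on disk
var xml = new XmlStream(request.get("http://www.site-containing-big-data/api"));

xml.on('endElement: item', function (item) {
    // handle one fully parsed <item> at a time
});

xml.on('end', function () {
    // all items processed
});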