I'm working on a node/express (Sails, technically) based service which will be used to retrieve a large number of items. Several calls will need to return many thousands of items as a JSON serialized array.
Inside node will be a basic control loop to retrieve the items in pages. Each page will be retrieved, have some minor processing performed, and then have its items returned to the client.
At present I'm using a "store and forward" approach, in which each page's items are concat()-ed onto a results array, and then once all items have been retrieved the results are returned in one response.
What I would like to do is more of a yield or streaming approach, in which items are added to the response as soon as they're ready--avoiding the need to build a large in-memory collection and beginning to send usable data as soon as possible.
You can write directly to the res object in Express with something like this:
// Placeholder for the large result set; in the real service this would be
// the items retrieved from the data store.
const data = [/* a large array of JSON objects */];
// Divide this large array into pages (smaller arrays) so each page can be
// written to the response as soon as it is ready.
/**
 * Split an array into consecutive sub-arrays of at most `chunkSize` elements.
 * The input array is not mutated.
 *
 * @param {Array} arr - source array
 * @param {number} chunkSize - maximum length of each chunk; must be > 0
 * @returns {Array[]} array of chunks ([] for an empty input)
 * @throws {RangeError} if chunkSize is not a positive number
 */
function chunk(arr, chunkSize) {
  // Guard: a non-positive step would make the loop below spin forever.
  if (!(chunkSize > 0)) {
    throw new RangeError('chunkSize must be a positive number');
  }
  const pages = [];
  for (let i = 0; i < arr.length; i += chunkSize) {
    pages.push(arr.slice(i, i + chunkSize));
  }
  return pages;
}
// Stream the result to the client as one JSON array, one page per write,
// instead of serializing the whole collection in a single string.
const pages = chunk(data, 10); // [/* array of arrays */], page size is 10
res.writeHead(200, {
  'Content-Type': 'application/json',
  'Transfer-Encoding': 'chunked',
});
res.write('['); // array opening bracket
// BUG FIX: the original wrote `new_data[new_data.length - i]` after the loop,
// which (with i === length - 1) re-sent element 1 instead of the last page,
// and emitted the string "undefined" for empty or single-page inputs.
// Writing the separator before every page except the first handles the
// empty, single-page, and many-page cases uniformly.
for (let i = 0; i < pages.length; i++) {
  if (i > 0) {
    res.write(',');
  }
  res.write(JSON.stringify(pages[i]));
}
res.write(']'); // array closing bracket
res.end();
And on the client, consume the stream with something like this:
// Using axios: request the endpoint as a Node stream so pages can be
// processed as they arrive rather than after the full body downloads.
const url = 'http://localhost:3000';
axios
  .get(url, { responseType: 'stream' })
  .then(handleRes)
  // Never leave a Promise chain without a rejection handler.
  .catch((err) => console.error('request failed:', err));
/**
 * Consume the streamed JSON array page by page.
 *
 * NOTE: 'data' event boundaries are NOT guaranteed to line up with the
 * server's individual res.write() calls — TCP may split or coalesce them.
 * The original per-event JSON.parse therefore failed nondeterministically.
 * This version buffers incoming bytes and parses each page as soon as a
 * complete JSON array is available.
 *
 * @param {object} res - axios response with responseType 'stream'
 */
function handleRes(res) {
  // res.headers available here
  let buffer = '';
  let openerConsumed = false; // outer '[' dropped exactly once
  res.data.on('data', (bytes) => {
    buffer += bytes.toString(); // utf8 by default, change if needed
    if (!openerConsumed && buffer.startsWith('[')) {
      buffer = buffer.slice(1); // drop the outer array's opening bracket
      openerConsumed = true;
    }
    let searchFrom = 0;
    for (;;) {
      const end = buffer.indexOf(']', searchFrom);
      if (end === -1) break; // no complete page buffered yet
      const candidate = buffer.slice(0, end + 1);
      let page;
      try {
        page = JSON.parse(candidate);
      } catch {
        // ']' was inside a string value, or the page is still incomplete;
        // widen the search and wait for more bytes if needed.
        searchFrom = end + 1;
        continue;
      }
      console.log(page);
      buffer = buffer.slice(end + 1);
      if (buffer.startsWith(',')) buffer = buffer.slice(1); // page separator
      searchFrom = 0;
    }
    // Any leftover is the outer array's closing ']' (or a partial page).
  });
}