I'm building a small Node server that generates PDF files (using Nightmare.js). Each request calls createPage to generate one PDF.
The incoming requests tend to all arrive around the same time, overloading the PC this is running on.
I need to buffer the incoming requests, delaying some of them until some of the currently running requests have completed. How do I do this?
var http = require('http');

// generate one PDF for the parsed request body, then call final(true)
// on success or final(errorMessage) on failure
function createPage(o, final) {
    // generate pdf files
}

http.createServer(function (request, response) {
    var body = [];
    request.on('data', function (chunk) {
        body.push(chunk);
    }).on('end', function () {
        body = Buffer.concat(body).toString();
        var json = JSON.parse(body);
        createPage(json, function (status) {
            if (status === true) {
                response.writeHead(200, { 'Content-Type': 'text/plain' });
                console.log('status good');
            } else {
                response.writeHead(500, { 'Content-Type': 'text/html' });
                response.write(' ' + status);
            }
            response.end('\nEnd of Request \n');
        });
    });
}).listen(8007);
If I understand correctly, you want to keep accepting HTTP requests but throttle the rate at which createPage is invoked. If so, you probably need to consider a slightly different design: with the current design, every new client has to wait longer than the previous one to find out whether their request succeeded or failed.
Approach 1: use a queue (RabbitMQ, AWS SQS, ZeroMQ, Kafka, etc.). Here's the basic workflow: the HTTP server only validates the request and pushes a job onto the queue, responding immediately (e.g. 202 Accepted); a separate worker process consumes jobs at whatever rate the machine can handle and calls createPage for each one. Clients learn about completion out of band (polling, a webhook, email, etc.). A sketch of this split follows below.
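For example, here is a minimal sketch of that producer/worker split. It assumes RabbitMQ accessed through the amqplib package, a queue named 'pdf-jobs', and a broker at amqp://localhost; those names are my placeholders, not something from your code.

// producer.js -- the HTTP server only enqueues jobs and answers right away
// (sketch; assumes "npm install amqplib" and a RabbitMQ broker on localhost)
var http = require('http');
var amqp = require('amqplib');

amqp.connect('amqp://localhost').then(function (conn) {
    return conn.createChannel();
}).then(function (ch) {
    return ch.assertQueue('pdf-jobs', { durable: true }).then(function () {
        http.createServer(function (request, response) {
            var body = [];
            request.on('data', function (chunk) {
                body.push(chunk);
            }).on('end', function () {
                // hand the raw JSON to the queue; the client is not kept waiting
                ch.sendToQueue('pdf-jobs', Buffer.concat(body), { persistent: true });
                response.writeHead(202, { 'Content-Type': 'text/plain' });
                response.end('Queued\n');
            });
        }).listen(8007);
    });
});

// worker.js -- a separate process that generates PDFs one at a time
// (createPage is the same function from the question, moved into this process)
var amqp = require('amqplib');

amqp.connect('amqp://localhost').then(function (conn) {
    return conn.createChannel();
}).then(function (ch) {
    return ch.assertQueue('pdf-jobs', { durable: true }).then(function () {
        ch.prefetch(1); // never hold more than one unacked job, i.e. one PDF at a time
        return ch.consume('pdf-jobs', function (msg) {
            var json = JSON.parse(msg.content.toString());
            createPage(json, function (status) {
                if (status !== true) console.error('pdf failed:', status);
                ch.ack(msg); // a real worker would retry or dead-letter failures
            });
        });
    });
});

If the machine can later handle more load, you scale by running more worker processes (or raising prefetch) without touching the HTTP server.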
Approach 2: use a queue with message duplexing (request/reply). Same producer/worker split as above, but the HTTP server keeps each response open, tags the job with a correlation id and a reply-to queue, and the worker publishes the actual success/failure status back on that queue, so the original client still gets a real answer instead of just an acknowledgement.
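A rough sketch of the producer side of that request/reply pattern, again assuming amqplib, the 'pdf-jobs' queue from above, and Node's crypto.randomUUID (Node 14.17+) for correlation ids; all of these are my assumptions:

// duplex-producer.js -- holds each HTTP response open until the worker replies
var http = require('http');
var amqp = require('amqplib');
var randomUUID = require('crypto').randomUUID;

amqp.connect('amqp://localhost').then(function (conn) {
    return conn.createChannel();
}).then(function (ch) {
    var pending = {}; // correlationId -> http response still waiting for a result

    return ch.assertQueue('pdf-jobs', { durable: true }).then(function () {
        // exclusive, broker-named queue on which workers publish their results
        return ch.assertQueue('', { exclusive: true });
    }).then(function (replyQueue) {
        ch.consume(replyQueue.queue, function (msg) {
            var response = pending[msg.properties.correlationId];
            if (response) {
                delete pending[msg.properties.correlationId];
                var ok = msg.content.toString() === 'true';
                response.writeHead(ok ? 200 : 500, { 'Content-Type': 'text/plain' });
                response.end(msg.content.toString() + '\nEnd of Request\n');
            }
        }, { noAck: true });

        http.createServer(function (request, response) {
            var body = [];
            request.on('data', function (chunk) { body.push(chunk); });
            request.on('end', function () {
                var id = randomUUID();
                pending[id] = response; // reply consumer above finishes this response
                ch.sendToQueue('pdf-jobs', Buffer.concat(body), {
                    correlationId: id,
                    replyTo: replyQueue.queue
                });
            });
        }).listen(8007);
    });
});

The worker stays the same as in approach 1, except that after createPage finishes it sends the status back:

    ch.sendToQueue(msg.properties.replyTo, Buffer.from(String(status)),
        { correlationId: msg.properties.correlationId });

A production version would also time out entries in pending, so a crashed worker can't leave clients hanging forever.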