I am trying to send binary content from a node.js server. To do this, I allocate a buffer, fill it with my content, and call response.write() on it. Once it returns, I reuse the buffer with new content. However, this doesn't seem to work correctly for some reason.
Here is the server code:
const http = require('http');

async function sendChunk( response, outbuf )
{
    console.log( "Sending buffer: %s", outbuf );
    // Send the buffer out. If it returns false,
    // this means the kernel buffers are full,
    // and we have to wait until they are available.
    if ( await response.write( outbuf ) === false )
    {
        await new Promise(resolve => response.once('drain', () => {
            resolve();
        }));
    }
}

async function sendData( response )
{
    let outbuf = Buffer.alloc( 20 );
    for ( let count = 0x45; count < 0x50; count++ )
    {
        for ( let i = 0; i < outbuf.length; i++ )
        {
            outbuf[i] = count;
        }
        await sendChunk( response, outbuf );
    }
}

function webRequestHandler( request, response )
{
    let body = [];
    request.on('error', (err) => {
        console.error(err);
        return;
    });
    request.on('data', (chunk) => {
        body.push(chunk);
    });
    response.on('error', (err) => {
        console.error( "Error sending response: %s", err);
        return;
    });
    // A whole body collected - process it
    request.on('end', async () => {
        // Handle the update; can return an error message
        response.setHeader('Content-Type', 'text/plain');
        await sendData( response );
        response.end();
    });
}

const webserver = http.createServer( webRequestHandler );

// Create the web service
webserver.on('error', function (err) {
    console.log("[" + process.pid + "] " + JSON.stringify(err));
    process.exit();
});

webserver.listen( { "host" : "127.0.0.1", "port" : 5252 }, () => {
    console.log( "Server running" );
});
When tested via curl http://localhost:5252/, the server prints the following:
Sending buffer: EEEEEEEEEEEEEEEEEEEE
Sending buffer: FFFFFFFFFFFFFFFFFFFF
Sending buffer: GGGGGGGGGGGGGGGGGGGG
Sending buffer: HHHHHHHHHHHHHHHHHHHH
Sending buffer: IIIIIIIIIIIIIIIIIIII
Sending buffer: JJJJJJJJJJJJJJJJJJJJ
Sending buffer: KKKKKKKKKKKKKKKKKKKK
Sending buffer: LLLLLLLLLLLLLLLLLLLL
Sending buffer: MMMMMMMMMMMMMMMMMMMM
Sending buffer: NNNNNNNNNNNNNNNNNNNN
Sending buffer: OOOOOOOOOOOOOOOOOOOO
However, the client receives something totally different:
> curl http://localhost:5252/
EEEEEEEEEEEEEEEEEEEEOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO
What's going on here? It does work if I create a new buffer in sendChunk that is a copy of outbuf. However, this seems like a waste of RAM, and it doesn't really make sense to someone coming from a C background, where once you call send() on a socket the data has been copied and you can reuse the source buffer as you wish.
Does node.js work differently? Do I need to create a dedicated buffer for response.write() which can no longer be touched once write is called, even if write has returned and I waited for the drain event?
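The copy-based version I mean looks roughly like this (just a sketch; Buffer.from copies the contents of outbuf before it is handed to write):

async function sendChunk( response, outbuf )
{
    // Copying works, but allocates a fresh Buffer for every write.
    const copy = Buffer.from( outbuf );
    if ( await response.write( copy ) === false )
    {
        await new Promise( resolve => response.once('drain', resolve) );
    }
}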
I posted a bug report, and it got closed with an important comment:
You should be passing a callback to .write() to know when node is finished with that chunk of memory instead of relying on the 'drain' event.
Once you make that change, you will see the output on the client that you're expecting.
And indeed, once the sendChunk function is changed as follows:
async function sendChunk( response, outbuf )
{
    return new Promise( function( resolve, reject ) {
        if ( response.write( outbuf, () => { resolve(); } ) === false )
        {
            console.log( "Draining buffer" );
            response.once('drain', () => {
                resolve();
            });
        }
    });
}
so that we resolve the promise in the write callback, the issue goes away. The core issue here is that response.write is not awaitable: it returns before its callback is called.
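For what it's worth, the same thing can be written more compactly with util.promisify, which turns write's error-first callback into a promise that resolves once node is done with the chunk. A minimal sketch (not from the bug report, just an equivalent way to write it):

const { promisify } = require('util');

async function sendChunk( response, outbuf )
{
    // The promisified write resolves when node has flushed this chunk,
    // i.e. when it is safe to reuse outbuf.
    const write = promisify( response.write.bind( response ) );
    await write( outbuf );
}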
Should have read the documentation more carefully.