I am a newbie to Node.js concepts. I am studying the use of Unix sockets as a means of communication between C and Node.js.
In C: I am sending 1000 iterations of a 100-byte message.
In Node.js: I am receiving only 4 'data' callbacks, with sizes:
I understand that I am receiving the complete data. However, how do I get this data in 1000 callbacks of 100 bytes each (the same way I am sending it)?
Server in C (Partial):
char buf[100];
int loop_count = 0;
ssize_t rc;

memset(buf, 0xA5, sizeof(buf));

/* send 1000 messages of 100 bytes each */
while (loop_count < 1000) {
    rc = write(cl, buf, sizeof(buf));
    if (rc != sizeof(buf)) {
        if (rc > 0) {
            fprintf(stderr, "partial write");
        } else {
            perror("write error");
            exit(-1);
        }
    }
    ++loop_count;
}
Client in Node.js:
var net = require('net');

// "temp" is the Unix domain socket file name
var client = net.createConnection("temp");

client.on("connect", function() {
    console.log("Connection established");
});

client.on("data", function(data) {
    console.log("Found data " + data);
});
A Unix stream socket carries a byte stream, not a sequence of messages: the data is buffered, so even though you write 100-byte blocks into the socket, the reader won't necessarily receive it in neat 100-byte blocks.
You can use a module like block-stream to split the incoming data into 100-byte blocks for you:
const BlockStream = require('block-stream');
const net = require('net');

const client = net.createConnection('temp');
const bs = new BlockStream(100);

let count = 0;
client.pipe(bs).on('data', function(data) {
    count++;
    console.log('Found data: ', data);
}).on('end', function() {
    console.log('Read %s blocks', count);
});
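If you would rather not pull in an extra module, you can do the re-chunking yourself: accumulate incoming data in a Buffer and slice off fixed-size messages as they become complete. This is a minimal sketch, assuming the same 'temp' socket path, a fixed 100-byte message size, and a Node version that has Buffer.alloc/Buffer.concat:

const net = require('net');

const MSG_SIZE = 100;           // fixed message length agreed with the C sender
let pending = Buffer.alloc(0);  // bytes received but not yet emitted as messages
let count = 0;

const client = net.createConnection('temp');

client.on('data', function(chunk) {
    // append whatever the socket handed us to the unprocessed bytes
    pending = Buffer.concat([pending, chunk]);

    // peel off complete 100-byte messages; keep any remainder for later
    while (pending.length >= MSG_SIZE) {
        const msg = pending.slice(0, MSG_SIZE);
        pending = pending.slice(MSG_SIZE);
        count++;
        console.log('Message %d (%d bytes)', count, msg.length);
    }
});

client.on('end', function() {
    console.log('Read %d messages in total', count);
});

The while loop is what preserves the message boundaries: a single 'data' event may carry several complete messages plus a partial one, and the remainder stays in pending until the rest arrives.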