I have the following code:
const fs = require("fs");
const {createInterface} = require("readline");
const {join} = require("path");
const {once} = require("events");

async function readMeta (path) {
    const meta = {};
    const rs = fs.createReadStream(path);
    const lineReader = createInterface({input: rs});
    let linesRead = 0;
    lineReader.on("line", line => {
        switch (linesRead) {
            case 0:
                meta.name = line;
                break;
            case 1:
                meta.tags = line.split(" ");
                break;
            case 2:
                meta.type = line;
                break;
            case 3:
                meta.id = +line;
        }
        if (++linesRead === 4) {
            lineReader.close();
        }
    });
    await once(lineReader, "close");
    rs.close();
    return meta;
}
It appears to be working, but logging the lines when the line event fires reveals otherwise. The event keeps firing after lineReader.close() has been called, resulting in the whole file being read. I have no idea what is causing this. I found some modules that apparently do the job, but I want to keep the dependencies down if possible.
Instead of keeping a counter and switching on it, I recommend pushing the lines into an array and preventing further pushes once the array holds 4 entries. readline reads the input stream in chunks, so line events for lines it has already buffered can still fire after close() is called; guarding the handler makes those stray events harmless. Then call close(), and once the close event has fired, destructure the array into the required attributes and return an object containing them.
const fs = require("fs");
const {createInterface} = require("readline");
const {once} = require("events");

async function readMeta (path) {
    const rs = fs.createReadStream(path);
    const lineReader = createInterface({input: rs});
    const linesRead = [];
    lineReader.on("line", line => {
        // Ignore any lines that readline emits after we already have four.
        if (linesRead.length === 4) {
            return;
        }
        linesRead.push(line.trim());
        if (linesRead.length === 4) {
            // Stop reading as soon as the fourth line has been collected.
            lineReader.close();
            rs.close();
        }
    });
    await once(lineReader, "close");
    const [name, tags, type, id] = linesRead;
    return {
        id: +id, // coerce to a number, as in the original code
        name,
        type,
        tags: tags.split(" "),
    };
}
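
For completeness, a minimal usage sketch. The file name meta.txt is hypothetical; the function only assumes the first four lines are the name, the space-separated tags, the type, and a numeric id:

const {join} = require("path");

readMeta(join(__dirname, "meta.txt"))
    .then(meta => console.log(meta))
    .catch(console.error);

On Node 11.4 or later you could also avoid the event handler entirely by iterating the interface with for await...of, since breaking out of the loop closes the interface. A sketch of that alternative version of readMeta:

async function readMeta (path) {
    const rs = fs.createReadStream(path);
    const lines = [];
    for await (const line of createInterface({input: rs})) {
        lines.push(line.trim());
        if (lines.length === 4) break; // break closes the interface
    }
    rs.close();
    const [name, tags, type, id] = lines;
    return {id: +id, name, type, tags: tags.split(" ")};
}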