I am trying to read a csv synchronously by using a stream and the async, await keywords, using the fast-csv lib.
However, my function does not return the desired output. My guess is that the stream callbacks inside my function have not yet run by the time Node reaches the console.log call that prints the array's length.
How do I return the output array with all the values read from the CSV stream?
// CSV Processing
// NOTE(review): this version cannot work — the stream handlers below run on
// later event-loop ticks, while readCsv returns synchronously right away.
async function readCsv(csvfilepath) {
var output = []
var stream = fs.ReadStream(csvfilepath)
// These .on(...) calls only REGISTER callbacks; none of them execute
// before readCsv reaches its return statement.
var parser = csv.fromStream(stream, {
headers: true
}).on("data", function (data) {
// Pause the parser so each row is handled before the next one arrives.
parser.pause()
t(data, output, function (err) {
// TODO: handle error
parser.resume()
});
}).on("end", function () {
// By the time 'end' fires, the function has long since returned.
console.log(output) // outputs desired array with objects
console.log(output.length) // length is 100
// return output // does not work. output == [].
});
// Per-row helper: appends the parsed row to `out`, then signals completion.
var t = (data, out, callback) => {
out.push(data) // push the objects to that array
callback()
}
// Runs immediately, before any 'data'/'end' event has fired,
// which is why the array is still empty here.
console.log(output.length) // says length = 0
return output // output is empty, should be filled with values exactly like in the 'end' event from the stream
}
I solved it by using a Promise. The resolve method returns the data collected by the time the stream's 'end' event fires, and the function remains usable with the async/await keywords.
// CSV Processing
/**
 * Read a CSV file into an array of row objects.
 *
 * @param {string} csvfilepath - Path to the CSV file; the first row is used as headers.
 * @returns {Promise<Object[]>} Resolves with all parsed rows once the stream ends;
 *   rejects on file-read errors, CSV parse errors, or a per-row handler error.
 */
function readCsv(csvfilepath) {
  const stream = fs.createReadStream(csvfilepath)
  return new Promise((resolve, reject) => {
    const output = []
    // Per-row handler: collect the row, then signal completion.
    const c = (data, out, callback) => {
      out.push(data)
      callback()
    }
    // Without this, a missing/unreadable file would leave the Promise
    // pending forever — fs errors are emitted on the stream, not thrown.
    stream.on('error', reject)
    const parser = csv.fromStream(stream, { headers: true })
      .on('error', reject) // malformed CSV → reject instead of hanging
      .on('data', (data) => {
        // Pause so each row is fully handled before the next arrives.
        parser.pause()
        c(data, output, (err) => {
          if (err) {
            reject(err)
            return // do not resume — stop pumping rows after a failure
          }
          parser.resume()
        })
      })
      .on('end', () => {
        resolve(output) // extraction/return point of the desired data
      })
  })
}