I have a Node.js script that generates a set of objects in a more secure environment and then pipes the result to a long-running application. I was hoping this could also provide a way to pass iterable objects, with the stringification/parsing handled by the JS, so the structure stays transparent in the first script.
Right now I've been trying to either create a readable stream from my iterable or write each section directly with process.stdout.write(), but in both cases receiving it with process.stdin.on('data', (chunk) => { /* do something with chunk */ }) always results in all the strings arriving combined into a single chunk.
let env = {
    variableString: "somekey",
    someIterable: {
        someOtherValue: ['more values']
    }
}
let streamArr = [];
for (let x in env) {
    // Only strings have .replace(); pass other values through untouched
    let y = typeof env[x] === 'string' ? env[x].replace(/ {4}/g, '') : env[x];
    streamArr.push(JSON.stringify({ [x]: y }));
}
// const readable = new Stream.Readable()
// readable.pipe(process.stdout)
streamArr.forEach(item => process.stdout.write(item))
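(For context, this is roughly how the two scripts end up connected; the file name app.js is only a placeholder for the long-running application, and a plain shell pipe between the two scripts works just as well.)
// Sketch: spawn the long-running app from the secure script and pipe into its stdin
const { spawn } = require('child_process');
const child = spawn('node', ['app.js'], { stdio: ['pipe', 'inherit', 'inherit'] });
streamArr.forEach(item => child.stdin.write(item));
child.stdin.end();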
// Receiving script
process.stdin.setEncoding('utf8');
// chunk arrives with all the written strings combined, not one object per write
process.stdin.on('data', (chunk) => {
    console.log(chunk)
    let i = JSON.parse(chunk);
    for (let x in i) {
        if (typeof i[x] == 'string') process.env[x] = i[x];
        console.log(x)
    }
});
process.stdin.on('end', () => {
    console.log('end')
});
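The combining happens because each 'data' event just delivers whatever bytes happen to be buffered on the pipe, not one event per write() on the sending side. A minimal sketch of keeping the objects separate, assuming a newline after each stringified object is acceptable as a delimiter (the delimiter is my addition, not part of the scripts above):
// Sending side: terminate each stringified object with a newline
streamArr.forEach(item => process.stdout.write(item + '\n'))

// Receiving side: accumulate chunks and only parse complete lines
let buffered = '';
process.stdin.setEncoding('utf8');
process.stdin.on('data', (chunk) => {
    buffered += chunk;
    const lines = buffered.split('\n');
    buffered = lines.pop(); // keep any trailing partial line for the next chunk
    for (const line of lines) {
        if (!line.trim()) continue;
        const obj = JSON.parse(line);
        for (let x in obj) {
            if (typeof obj[x] === 'string') process.env[x] = obj[x];
        }
    }
});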
I ended up sort of sidestepping this issue. Since my data was small enough, I just wrote the whole object to STDOUT using
process.stdout.write(JSON.stringify(env))
and then used the fs module to receive STDIN, since that's the only way I found to consume STDIN synchronously and initialize environment variables before anything else runs.
const fs = require('fs')
const env = (function () {
    // fd 0 is STDIN; readFileSync blocks until the writing side closes the pipe
    let envTmp = fs.readFileSync(0).toString();
    envTmp = JSON.parse(envTmp);
    return envTmp;
})();
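From there the values can be copied onto process.env before the rest of the application loads; nested objects like someIterable have to be re-stringified, since environment variables can only hold strings (this loop is a sketch, not part of the original script):
// Copy the received values into the real environment;
// non-string values are stringified again because env vars are strings
for (const key of Object.keys(env)) {
    process.env[key] = typeof env[key] === 'string'
        ? env[key]
        : JSON.stringify(env[key]);
}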