Steps I need to perform:
1. Fetch a CSV file from an SFTP server.
2. Format/parse the received file. I tried the fast-csv parser but could not get it to work.
3. Insert the entire data set into a Postgres database.
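For step 2, the shape I am after is roughly this (a minimal fast-csv sketch, assuming the file has a header row; csvText stands for the string returned from the SFTP server, and the example keys are taken from my table columns):

const csv = require('fast-csv');

// Parse the raw CSV text into row objects keyed by the header names.
csv.parseString(csvText, { headers: true, trim: true })
  .on('error', (err) => console.error(err.message))
  .on('data', (row) => console.log(row)) // e.g. { Trans_ID: '1', Store_Name: '...', ... }
  .on('end', (rowCount) => console.log(`parsed ${rowCount} rows`));

My current code is below; the commented FORMAT THE DATA FOR THE DATABASE step is where I am stuck: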
const Pool = require('pg').Pool
const Client = require('ssh2-sftp-client');
let sftp = new Client();
const config = {
  host: '0.0.0.0',
  port: '22',
  username: 'tester',
  password: 'password',
}
function storeValues() {
  console.log('calling');
  sftp.connect(config).then(() => {
    sftp.get('/Retail Transaction Data.csv').then((data) => {
      console.log("" + data) // data is a Buffer; concatenating with "" coerces it to a string

      // FORMAT THE DATA FOR THE DATABASE

      const pool = new Pool({
        user: 'postgres',
        host: 'localhost',
        database: 'retailtransaction',
        password: 'password',
        port: 5433,
      })

      // open the connection
      let query = "INSERT INTO rt_transactions (Trans_ID, Store_Name, Store_Type, Category, Brand, Product, Cust_ID, Quantity, Rate, Amount, Trand_Dt, Months) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)";
      // This passes the whole raw file as one parameter; splitting it into the twelve values per row is the part I cannot get right.
      pool.query(query, [data], (error, response) => {
        if (error) {
          return console.log(error.message)
        }
        console.log(response)
      });
    });
  });
}
storeValues();
I used pg-copy-streams to do this task. The documentation at this link explains everything you need: https://www.npmjs.com/package/pg-copy-streams. For example, streaming a table out of Postgres as CSV looks like this:
const { to: copyTo } = require('pg-copy-streams');

pool.connect(function (err, client, done) {
  if (err) { return console.error(err.message); }
  // Stream the table out of Postgres as CSV and write it to stdout.
  var stream = client.query(copyTo('COPY my_table TO STDOUT CSV'))
  stream.pipe(process.stdout)
  stream.on('end', done)
  stream.on('error', done)
})
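For your case the copy goes the other way: COPY ... FROM STDIN. Below is a minimal sketch of the import, not a drop-in solution. My assumptions: the CSV columns already line up with rt_transactions, the file has a header row, csvText holds the string returned by sftp.get, importCsv is just a name I made up, and the table/column names are copied from your INSERT.

const { from: copyFrom } = require('pg-copy-streams');
const { Readable } = require('stream');

function importCsv(pool, csvText) {
  pool.connect(function (err, client, done) {
    if (err) { return console.error(err.message); }
    // COPY ingests CSV rows straight from the stream we pipe in below.
    const dbStream = client.query(copyFrom(
      'COPY rt_transactions (Trans_ID, Store_Name, Store_Type, Category, Brand, Product, Cust_ID, Quantity, Rate, Amount, Trand_Dt, Months) FROM STDIN CSV HEADER'
    ));
    dbStream.on('error', (e) => { done(); console.error(e.message); });
    // Note: older pg-copy-streams releases signalled completion with 'end' instead of 'finish'.
    dbStream.on('finish', () => { done(); console.log('import complete'); });
    // Wrap the CSV text in a readable stream and pipe it into COPY.
    Readable.from([csvText]).pipe(dbStream);
  });
}

Call it once sftp.get resolves, e.g. importCsv(pool, '' + data). If the file's column order does not match the table, parse and reshape the rows with fast-csv first and pipe the csv.format output into dbStream instead of the raw text.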