I'm trying to export SendGrid event data to BigQuery on GCP using a Cloud Function (Node.js 16) and SendGrid's Event Webhook functionality.
I'm currently running the following (with example naming conventions):
const {BigQuery} = require('@google-cloud/bigquery');

const projectId = 'my_project';
const dataset = 'my_dataset';
const tableName = 'sendgrid_events';

// Initialize the BigQuery client
const bigQuery = new BigQuery({
  projectId: projectId
});

exports.helloWorld = (req, res) => {
  const message = req.query.message || req.body.message || 'Hello World!';
  const timestamp = new Date().getTime() + '';
  const data = req.body; // the Event Webhook POSTs an array of event objects
  const rows = [];
  for (let i = 0; i < data.length; i++) {
    rows.push({
      'hit_timestamp': timestamp,
      'email': data[i]['email'],
      'event': data[i]['event'],
      'category': data[i]['category'] + '', // category may be an array; coerce to string
      'event_id': data[i]['sg_event_id'],
      'event_payload': JSON.stringify(data[i]),
    });
  }
  // Respond only after the insert settles, so the function instance
  // isn't torn down while the streaming insert is still in flight
  send_to_bq(rows)
    .then(() => res.status(200).send(message))
    .catch(() => res.status(500).send('insert failed'));
};

function send_to_bq(rows) {
  return bigQuery
    .dataset(dataset)
    .table(tableName)
    .insert(rows)
    .then(() => {
      console.log('Inserted rows:', rows.length);
    })
    .catch(err => {
      console.error('BQ ERROR:', err);
      // insertErrors is only present on row-level failures; guard before indexing
      if (err.response && err.response.insertErrors) {
        console.error('BQ ERROR:', err.response.insertErrors[0]);
      }
      throw err; // propagate so the handler can return a 500
    });
}
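For reference, SendGrid's Event Webhook POSTs a JSON array of event objects. A representative payload might look like the sketch below; the exact field set varies by event type, and the values here are made up:

[
  {
    "email": "user@example.com",
    "timestamp": 1600000000,
    "event": "delivered",
    "category": ["example_category"],
    "sg_event_id": "example-event-id",
    "sg_message_id": "example-message-id"
  }
]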
However, the data never seems to make it into the table.
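The destination table is assumed here to have an all-STRING schema matching the row keys above; a hypothetical bq command to create such a table would be:

bq mk --table my_project:my_dataset.sendgrid_events \
  hit_timestamp:STRING,email:STRING,event:STRING,category:STRING,event_id:STRING,event_payload:STRING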
Any help is appreciated.
The solution turned out to be authentication: deploying the Cloud Function with unauthenticated invocations allowed means this boilerplate code works as-is. If you do require authenticated invocations, the caller must be granted the appropriate IAM permissions so Google Cloud can verify the requests are coming from the correct place.
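As a sketch of both options (the function name and service account below are placeholders, not anything from the original setup), allowing unauthenticated calls at deploy time looks like:

gcloud functions deploy helloWorld \
  --runtime=nodejs16 \
  --trigger-http \
  --allow-unauthenticated

whereas keeping the function authenticated means granting the invoker role to whichever identity will call it:

gcloud functions add-iam-policy-binding helloWorld \
  --member="serviceAccount:caller@my_project.iam.gserviceaccount.com" \
  --role="roles/cloudfunctions.invoker"

Note that SendGrid's webhook can't attach a Google identity token to its requests, so in practice the unauthenticated route (or a proxy in front of the function) is what lets the webhook deliveries through.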