I'm programming a method that uses web3.js to transfer a token through a smart contract.
When you launch the transfer, you get the txHash back right away, but if you want the other values associated with the transfer, you have to subscribe to an event and wait for it to fire.
I have to return those values to the customer, so I subscribe to the Transfer event and wait for it to be broadcast before returning the data.
The problem is that this can take a long time (anywhere from 10 seconds to hours) and it sometimes gives me a timeout, so the frontend team suggested that they pass me a webhook endpoint and that I forward the event information to it when it happens.
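To make the timing issue concrete, the underlying behaviour is roughly this (a minimal sketch assuming web3.js 1.x and an ERC-721-style contract; contractInstance and transferFrom are placeholders, my real code goes through a contractRouter wrapper):

// sketch only -- contractInstance and transferFrom are placeholder names
contractInstance.methods.transferFrom(from, to, tokenId)
  .send({ from })
  .on('transactionHash', (txHash) => {
    // available almost immediately: this is what I can return right away
  })
  .on('receipt', (receipt) => {
    // only arrives once the transaction is mined, which can take a long time;
    // receipt.events.Transfer.returnValues holds from/to/tokenId
  })
  .on('error', (err) => {
    // dropped or failed transaction
  });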
So I have to split the process into two:
1. Do the transfer, return the txHash, and start a separate process (2) that listens for the event.
2. Subscribe to the event and, when it happens, forward it to the webhook provided.
My code right now looks something like this:
function transfer(req, res, next) {
  try {
    contractRouter.transfer(from, to, tokenId).then(function(result){
      transferToWebhook(whHostname, whPath, result);
      next();
    }).fail(function(err){
      return res.status(500).json({status: 'error', name: err.name, message: err.message});
    });
  } catch (ex) {
    return res.status(500).json({status: 'error', name: ex.name, message: ex.message});
  }
}
The function that forwards to the webhook looks like this:
function transferToWebhook(whHostname, whPath, txHash){
  contractRouter.getTransferEvent(txHash).then(function(result){
    var postData = JSON.stringify(result);
    var options = {
      hostname: whHostname,
      port: 80,
      path: whPath,
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
      }
    };
    var req = https.request(options, (res) => {
      console.log(`STATUS: ${res.statusCode}`);
      console.log(`HEADERS: ${JSON.stringify(res.headers)}`);
      res.setEncoding('utf8');
      res.on('data', (chunk) => {
        console.log(`BODY: ${chunk}`);
      });
      res.on('end', () => {
        console.log('No more data in response.');
      });
    });
    req.on('error', (e) => {
      console.log(`problem with request: ${e.message}`);
    });
    req.write(postData);
    req.end();
  });
}
The function that subscribes to the transfer event looks like this:
function getTransferEvent(txHash){
  var deferred = q.defer();
  ethereumHandler.setContract(config.get(cABIAddress), cAddress).then(function(abiContract){
    Promise.resolve(txHash).then(function resolver(txHash){
      abiContract.getPastEvents('Transfer', {filter: {transactionHash: txHash}}, function(error, events){})
        .then(function(event){
          var returnValues = {
            from: event.returnValues.from,
            to: event.returnValues.to,
            tokenId: event.returnValues.tokenId
          };
          deferred.resolve(returnValues);
        });
    });
    return deferred.promise;
  });
}
The code for this last function works if I put it directly in the transfer function, but it isn't called when I go through the transferToWebhook function.
How can I launch the transferToWebhook function after having answered the first request?
You can spawn a child process using the spawn()
method from the child_process
module, listen for its output (sp.stdout.on('data')), and use a promise to consume the returned data. I'm not sure it will solve your case as-is, since your function is a method of contractRouter (contractRouter.getTransferEvent(txHash)),
but you should be able to adapt it. Here is an example of what I mean.
In your file.js:
const { spawn } = require('child_process')

function transfertToWebHook(data) {
  getTransfertEvent(data)
    .then((result) => {
      const dts = JSON.parse(result)
      console.log('the res: ', dts)
      // send the res back
    })
    .catch(e => console.log('handle the err: ', e))
  console.log('carry on with the parent process')
}

function getTransfertEvent(data) {
  return new Promise((resolve, reject) => {
    const sp = spawn(process.execPath, ['childProcess.js'])
    // pass the arguments, here it will be the txHash
    sp.stdin.write(data)
    sp.stdin.end()
    sp.stdout.on('data', (d) => {
      // resolve with the data once the child has processed it
      resolve(d.toString())
    })
    sp.stdout.on('error', (e) => {
      reject(e)
    })
    console.log('whatever runs here keeps running on the parent process')
  })
}

transfertToWebHook('test')
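This way the parent process isn't blocked: it can answer the HTTP request right after spawning the child, while the child keeps waiting for the event and writes the result to stdout whenever it arrives.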
Then create another file named childProcess.js.
Use a Transform stream to process the data coming in on process.stdin, then return it through process.stdout:
const { Transform, pipeline } = require('stream')

const createT = () => {
  return new Transform({
    transform(chunk, enc, next) {
      // here run the code of your getTransfertEvent() function
      // simulate some async processing
      setTimeout(() => {
        // chunk is the argument you passed in
        // !! chunk is a Buffer, so decode it as utf8 using toString()
        // upper-case it to simulate some change
        const dt = chunk.toString().toUpperCase()
        // return an object, as that's what your function returns
        const processedData = {
          name: dt,
          from: "from datas",
          to: "to datas",
          tokenId: "the token"
        }
        const dataString = JSON.stringify(processedData)
        next(null, dataString)
      }, 1000)
    }
  })
}

pipeline(process.stdin, createT(), process.stdout, e => console.log(e))
Run the code: node file.js
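For your case, the "// send the res back" step in transfertToWebHook could reuse the webhook POST from your question. A rough sketch (not tested; postToWebhook is a placeholder helper, and I'm using Node's built-in http module here since you target port 80):

const http = require('http')

function postToWebhook(whHostname, whPath, dts) {
  const postData = JSON.stringify(dts)
  const req = http.request({
    hostname: whHostname,
    port: 80,
    path: whPath,
    method: 'POST',
    headers: { 'Content-Type': 'application/json' }
  }, (res) => {
    console.log(`webhook answered with status ${res.statusCode}`)
  })
  req.on('error', (e) => console.log(`problem posting to webhook: ${e.message}`))
  req.write(postData)
  req.end()
}

Then, inside transfertToWebHook, replace the "// send the res back" comment with postToWebhook(whHostname, whPath, dts).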