Tags: javascript, express, asynchronous, recursion, cloudant

Is there a way to make async calls recursively with only one callback invocation?


I was wondering if anyone knows of a way to make an async call recursively in JavaScript. I am trying to upload bulk data to a Cloudant database 500 documents at a time. I do not know how many documents the JSON will contain (somewhere between 2000 and 4000), so I created a recursive function to split it into chunks of 500 and upload them.

insertDBBulkData: function(trgtDB, dbDocData, callback) {

    // if length > 500, recursively upload 500 at a time
    if (dbDocData.length > 500) {
        console.log("File too big. Length " + dbDocData.length);
        module.exports.insertDBBulkData(trgtDB, dbDocData.slice(500, dbDocData.length), function(err, body) {
            if (err) {
                console.log(err);
                callback(err, body);
            } else {
                // only callback on last one
                callback(err, body);
            }
        });

        dbDocData = dbDocData.slice(0, 500);
    }

    trgtDB.bulk({"docs": dbDocData}, function(err, body) {
        if (err) {
            callback(err);
        } else {
            console.log("Successfully uploaded " + dbDocData.length + " users.");
            callback(null, "Success!");
        }
    });
},

The issue is that since I don't know which call will finish last, I don't know when to send a response back to the server (which I can only do once). I have tried using Promises, but as far as I know I can't use them with this recursive method because the method isn't called a fixed number of times. Is this something that could be achieved with deferred promises?

Any help is appreciated. Thanks!
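
For reference, recursion and promises do compose: if each recursive call returns a promise, the chain settles only after the last chunk finishes, even though the number of calls isn't fixed. A rough sketch of that idea, assuming the same trgtDB.bulk API used above:

// Rough sketch: a promise-returning recursive upload.
// Assumes the same trgtDB.bulk callback API as in the function above.
function insertBulkRecursive(trgtDB, docs) {
    // base case: nothing left to upload, resolve exactly once
    if (docs.length === 0) {
        return Promise.resolve("Success!");
    }

    return new Promise(function(resolve, reject) {
        trgtDB.bulk({"docs": docs.slice(0, 500)}, function(err, body) {
            if (err) {
                reject(err);
            } else {
                resolve(body);
            }
        });
    }).then(function() {
        // recurse on the remainder; the outer promise settles only
        // after the final chunk has been uploaded
        return insertBulkRecursive(trgtDB, docs.slice(500));
    });
}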

Solution:

Thanks to Jonas W. I was able to do this with multiple promises in a for loop. It doesn't use recursion, but it works a lot better. Here is my solution:

insertDBBulkData: function(trgtDB, dbDocData, callback) {
    const promises = [];

    // queue one bulk insert per 500-document chunk
    for (let start = 0; start < dbDocData.length; start += 500) {
        console.log("Uploading chunk starting at index " + start);

        const dbBulkDataPromise = new Promise(function(resolve, reject) {
            trgtDB.bulk({"docs": dbDocData.slice(start, start + 500)}, function(err, body) {
                if (err) {
                    reject(err);
                } else {
                    resolve("Success");
                }
            });
        });

        promises.push(dbBulkDataPromise);
    }

    Promise.all(promises).then(function(values) {
        console.log(values);
        let completed = true;

        // values is an array of resolution messages, one per chunk
        for (const message of values) {
            if (message !== "Success") {
                completed = false;
            }
        }

        if (completed) {
            callback(null, values);
        } else {
            console.log("Partial upload");
            callback("Partial upload only", null);
        }
    }).catch(function(err) {
        console.log("Error uploading data: " + err);
        callback(err, null);
    });
},
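
For completeness, a sketch of how this might be wired to an Express route so the response is sent exactly once; the connection URL, database name, module path, and route are placeholders, not part of the original code:

// Sketch of an Express route that sends exactly one response.
// The Cloudant URL, database name, and "./dbUtils" module are placeholders.
const express = require("express");
const nano = require("nano")("https://user:pass@account.cloudant.com");
const trgtDB = nano.db.use("users");
const dbUtils = require("./dbUtils"); // module exporting insertDBBulkData

const app = express();
app.use(express.json());

app.post("/users/bulk", function(req, res) {
    dbUtils.insertDBBulkData(trgtDB, req.body.docs, function(err, result) {
        if (err) {
            return res.status(500).send({ error: String(err) });
        }
        // one response, sent only after every chunk has settled
        res.status(201).send({ result: result });
    });
});

app.listen(3000);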

Solution

  • If you wrap the insertion of one bulk into a promise:

    function insert(docs) {
        return new Promise((resolve, reject) => {
            trgtDB.bulk({ docs }, function(err, body) {
                if (err) {
                    reject(err);
                } else {
                    console.log("Successfully uploaded " + docs.length + " users.");
                    resolve("Success!");
                }
            });
        });
    }
    

    Now you can go over the big bulk data, slice it into chunks, start an insertion for each and then await all the promises:

    const promises = [], size = 500;
    
    for(let start = 0; start < dbDocData.length; start += size)
      promises.push(insert( dbDocData.slice(start, start + size) ));
    
    Promise.all(promises).then(() => {
      console.log("all done");
    }).catch(/* handle errors */);
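
    One design note: Promise.all starts all of the chunk uploads in parallel. If the target database throttles concurrent bulk requests, the chunks can instead be chained so each one starts only after the previous one resolves; a minimal sketch of that variant, reusing the insert helper above:

    // Sequential variant: chain the chunks instead of running them in parallel.
    const size = 500;
    let chain = Promise.resolve();

    for (let start = 0; start < dbDocData.length; start += size) {
        const chunk = dbDocData.slice(start, start + size);
        // each .then() waits for the previous chunk before starting the next
        chain = chain.then(() => insert(chunk));
    }

    chain.then(() => {
        console.log("all done");
    }).catch(err => console.log("Error uploading data: " + err));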