Search code examples
javascriptnode.jsfsbulk

How to read multiple json file using fs and bulk request


I'm using the Elasticsearch search engine with my React app. I was reading one file at the backend, as you can see in the code below, and it worked perfectly. Now I want to read three different JSON files into three different indexes using the "fs" package and a bulk request. Can you please help me?

the code:

// Read the JSON file (one JSON document per line) and bulk-insert it
// into Elasticsearch via the already-configured `client`.
fs.readFile("DocRes.json", { encoding: "utf-8" }, function (err, data) {
  if (err) {
    throw err;
  }

  // Build up a giant bulk request for elasticsearch.
  // NOTE: declared with `let` — the original assigned to an undeclared
  // identifier, creating an implicit global (a ReferenceError in strict mode).
  let bulk_request = data.split("\n").reduce(function (acc, line) {
    let obj;

    try {
      obj = JSON.parse(line);
    } catch (e) {
      // Blank/malformed lines (typically the trailing newline at EOF) are
      // skipped. Only log once we assume it is the end of the data.
      console.log("Done reading 1");
      return acc;
    }

    // Rework the data slightly: keep only the fields we index.
    const ncar = {
      id: obj.id,
      name: obj.name,
      summary: obj.summary,
      image: obj.image,
      approvetool: obj.approvetool,
      num: obj.num,
      date: obj.date,
    };

    // Bulk API format: an action/metadata line followed by the document.
    acc.push({
      index: { _index: "ncar_index", _type: "ncar", _id: ncar.id },
    });
    acc.push(ncar);
    return acc;
  }, []);

  // A little voodoo to simulate synchronous insert: `busy` gates the next
  // batch until the previous bulk call has invoked its callback.
  let busy = false;
  const callback = function (err, resp) {
    if (err) {
      // NOTE(review): the failed batch has already been sliced off
      // `bulk_request`, so its documents are lost — consider retrying.
      console.log(err);
    }

    busy = false;
  };

  // Recursively whittle away at bulk_request, 1000 entries at a time.
  const perhaps_insert = function () {
    if (!busy) {
      busy = true;
      client.bulk(
        {
          body: bulk_request.slice(0, 1000),
        },
        callback
      );
      bulk_request = bulk_request.slice(1000);
      console.log(bulk_request.length);
    }

    if (bulk_request.length > 0) {
      // Poll again shortly; if the previous bulk is still in flight we skip
      // the insert above and just reschedule.
      setTimeout(perhaps_insert, 100);
    } else {
      console.log("Inserted all records.");
    }
  };

  perhaps_insert();
});

Solution

  • You can create one promise per file read and, once they have all resolved, feed the combined data to the Elasticsearch bulk request.

    const fsPromises = require('fs').promises, 
          files = ['filename1', 'filename1'],
          response = [];
    
    const fetchFile = async (filename) => {
          return new Promise((resolve, reject) => {
           const path = path.join(__dirname, filename);
           try {
           const data = await fsPromises.readFile(path)); // make sure path is correct
           resolve(data);
           } catch (e) {
             reject(e)
           }
    
        });
    
    files.forEach((fileName) => results.push(fetchFile()));
    
    Promise.all(results).then(data => console.log(data)).catch(e => console.log(e));
    
    }
    

    Once you get the data from all the promises, pass it to the Elasticsearch bulk request.