Search code examples
google-cloud-functionsgoogle-cloud-dataflowgoogle-api-nodejs-client

Google cloud functions - cannot read property 'getApplicationDefault'


I have deployed a cloud function to invoke a dataflow pipeline template and trying to trigger the function by placing the file in cloud storage bucket.

As node.js prerequisite I have done,

npm init
npm install --save googleapis

index.js (note: the entry file must be named index.js, lowercase)

const google = require('googleapis');

exports.goWithTheDataFlow = function(event, callback) {
 const file = event.data;



   google.auth.getApplicationDefault(function (err, authClient, projectId) {
     if (err) {
       throw err;
     }

 console.log(projectId);
     if (authClient.createScopedRequired && authClient.createScopedRequired()) {
       authClient = authClient.createScoped([
         'https://www.googleapis.com/auth/cloud-platform',
         'https://www.googleapis.com/auth/userinfo.email'
       ]);
     }

     const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
            console.log(`gs://${file.bucket}/${file.name}`);
     dataflow.projects.templates.create({
       projectId: projectId,
       resource: {
         parameters: {
           inputFile: `gs://${file.bucket}/${file.name}`

         },
         jobName: 'cloud-fn-dataflow-test',
         gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
       }
     }, function(err, response) {
       if (err) {
         console.error("problem running dataflow template, error was: ", err);
       }
       console.log("Dataflow template response: ", response);
       callback();
     });

   });

 callback();
};

Command used to deploy cloud function:

gcloud beta functions deploy goWithTheDataFlow --stage-bucket cf100stage --trigger-bucket cf100

Dataflow(Apache beam): I was able to execute the dataflow template from console and below is the path of the template,

'gs://jaison/templates/ApacheBeamTemplate'

Function crashes with below error:

TypeError: Cannot read property 'getApplicationDefault' of undefined at exports.goWithTheDataFlow (/user_code/index.js:11:17) at /var/tmp/worker/worker.js:695:16 at /var/tmp/worker/worker.js:660:9 at _combinedTickCallback (internal/process/next_tick.js:73:7) at process._tickDomainCallback (internal/process/next_tick.js:128:9)

Looks like I am missing libraries. Not sure how to fix this. Please help.


Solution

  • My cloud function works after making the changes below:

    1. Set GOOGLE_APPLICATION_CREDENTIALS to point at the service account JSON key file:

    export GOOGLE_APPLICATION_CREDENTIALS="/path/of/svc/json/file.json"
    

    2.index.js

    var {google} = require('googleapis');
    
    exports.TriggerBeam = (event, callback) => {
    
    
    const file = event.data;
      const context = event.context;
    
      console.log(`Event ${context.eventId}`);
      console.log(`  Event Type: ${context.eventType}`);
      console.log(`  Bucket: ${file.bucket}`);
      console.log(`  File: ${file.name}`);
      console.log(`  Metageneration: ${file.metageneration}`);
      console.log(`  Created: ${file.timeCreated}`);
      console.log(`  Updated: ${file.updated}`);
    
      google.auth.getApplicationDefault(function (err, authClient, projectId) {
         if (err) {
           throw err;
         }
    
     console.log(projectId);
    
     const dataflow = google.dataflow({ version: 'v1b3', auth: authClient });
            console.log(`gs://${file.bucket}/${file.name}`);
     dataflow.projects.templates.create({
       projectId: projectId,
       resource: {
         parameters: {
           inputFile: `gs://${file.bucket}/${file.name}`
    
         },
         jobName: 'cloud-fn-beam-test',
         gcsPath: 'gs://jaison/templates/ApacheBeamTemplate'
       }
     }, function(err, response) {
       if (err) {
         console.error("problem running dataflow template, error was: ", err);
       }
       console.log("Dataflow template response: ", response);
       callback();
     });
    
       });
    
     callback();
    };