Search code examples
Tags: jenkins · groovy · jenkins-pipeline · jenkins-groovy

How to run jenkins pipeline jobs in parallel which call the same downstream job?


I am a beginner with Jenkins and with writing Groovy scripts for pipelines. I want to trigger a downstream pipeline in parallel for every file inside a folder given by the user. Below is a sample of the code I wrote:

// Globals filled in by the "setting environment" stage; read inside the sed
// command below to stamp the software version into each YAML file.
def GLOBAL_RELEASE_NUMBER
def GLOBAL_BUILD_NUMBER
pipeline {

  agent { label 'centos7-itest' }
  options {
    timestamps()
    buildDiscarder(
      logRotator(
        daysToKeepStr: '100'
      )
    )
    ansiColor('xterm')
  }

  parameters {
    //some parameters
}




  environment {
    // For python3
 
  }
  stages{
        stage("setting environment") {
        environment {
            //setting up environment
        }
        steps {
            script{
               // deciding build number and release number
            }
        }
        }
        stage("Clone repo & replace variables & call my pipeline") {
                    steps {
                        withCredentials([
                            //credentials 
                        ]){
                            cleanWs()
                            deleteDir()
                            git branch: "${params.branch}", credentialsId: 'jenkins-user-key-vcs', url: '[email protected]:some_repo/devops.git '
                            script {
                                // Collect either the single YAML passed in, or every
                                // *.yaml found under the given directory.
                                sizingFiles = []
                                def branches = [:]
                                def counter=0

                                if (params.sizing_directory.endsWith(".yaml")) {
                                    sizingFiles.add(params.sizing_directory)
                                } else {

                                    sh(
                                        returnStdout: true,
                                        script: "find ${params.sizing_directory} -type f -name '*.yaml'"
                                    ).trim().split('\n').each { sizingFile ->
                                        sizingFiles.add(sizingFile)
                                    }

                                }
                                    // NOTE(review): closures created inside a Groovy for-loop
                                    // capture the loop variable itself, not a per-iteration copy —
                                    // presumably every branch ends up seeing the LAST sizingFile.
                                    // Copy it to a local (def f = sizingFile) or use .each instead.
                                    for (def sizingFile in sizingFiles) {
                                        echo "Processing ${sizingFile}"

                                        // Template substitution runs sequentially here, BEFORE
                                        // 'parallel' is ever reached — only the build triggers
                                        // below were meant to run in parallel.
                                        sh """
                                            sed -i 's/{{[[:space:]]*user[[:space:]]*}}/${params.test_user}/g;
                                                    s/{{[[:space:]]*owner[[:space:]]*}}/my_team/g;
                                                    s/{{[[:space:]]*dept[[:space:]]*}}/team/g;
                                                    s/{{[[:space:]]*task[[:space:]]*}}/sizing/g;
                                                    s/{{[[:space:]]*SoftwareVersion[[:space:]]*}}/$GLOBAL_RELEASE_NUMBER-b$GLOBAL_BUILD_NUMBER/g' ${sizingFile}
                                            cat ${sizingFile}
                                            
                                        """

                                        // BUG(review): 'counter+=1' is INSIDE the closure, which
                                        // does not run until 'parallel branches' executes — so
                                        // counter is still 0 on every loop iteration and each
                                        // assignment overwrites branches[0]. Only the last file's
                                        // closure survives; there is never more than one branch.
                                        branches[counter] = { 
                                            stage('myPipeline'){
                                              // wait:false makes 'build' return immediately after
                                              // scheduling the downstream job.
                                              build job: "Myteam/myPipeline",
                                              wait: false,
                                              parameters: [ 
                                                    text(name: 'sample_yaml', value: readFile(file: sizingFile)),
                                                    string(name: 'branch', value: "${params.branch}")
                                                ]
                                            }
                                            // Increment only happens at branch run time — see BUG above.
                                            counter+=1

                                        }

                                    }
                                parallel branches 

                            }
                    }
             }
        }
    }
}

The issue is that when I trigger this pipeline with a folder containing 2 YAML files, the job is triggered for the first file and runs to completion before the job for the second file starts. I want all the jobs to run in parallel, which is why I set "wait: false" on the individual builds. Can someone point out what I am doing wrong?


Solution

  • To schedule multiple jobs in parallel you can use collectEntries. An example pipeline will look like this:

    // Globals filled in by the "setting environment" stage; stamped into each
    // YAML file by the sed command below.
    def GLOBAL_RELEASE_NUMBER
    def GLOBAL_BUILD_NUMBER

    // Returns a closure (one parallel branch) for the given sizing file:
    // it substitutes the template placeholders in the YAML and schedules the
    // downstream job without waiting for it (wait: false).
    //
    // Fix vs. the question's code: call 'build' directly here. The leftover
    // 'branches[counter] = { ... }' wrapper referenced variables (branches,
    // counter) that do not exist in this scope, and the inner closure was
    // never invoked, so the downstream job would never have been triggered.
    def generateStage(job) {
      return {
        stage("Terraform configure ${job}") {
          println "Processing ${job}"

          sh """
              sed -i 's/{{[[:space:]]*user[[:space:]]*}}/${params.test_user}/g;
                      s/{{[[:space:]]*owner[[:space:]]*}}/my_team/g;
                      s/{{[[:space:]]*dept[[:space:]]*}}/team/g;
                      s/{{[[:space:]]*task[[:space:]]*}}/sizing/g;
                      s/{{[[:space:]]*SoftwareVersion[[:space:]]*}}/$GLOBAL_RELEASE_NUMBER-b$GLOBAL_BUILD_NUMBER/g' ${job}
              cat ${job}
          """

          // wait:false -> 'build' returns as soon as the downstream job is
          // scheduled, so all branches fire their jobs immediately.
          build job: "Myteam/myPipeline",
                wait: false,
                parameters: [
                    text(name: 'sample_yaml', value: readFile(file: job)),
                    string(name: 'branch', value: "${params.branch}")
                ]
        }
      }
    }
    
    pipeline {

      agent { label 'centos7-itest' }
      options {
        timestamps()
        buildDiscarder(
          logRotator(
            daysToKeepStr: '100'
          )
        )
        ansiColor('xterm')
      }

      parameters {
        //some parameters
      }

      environment {
        // For python3
      }
      stages {
        stage("setting environment") {
          environment {
            //setting up environment
          }
          steps {
            script {
              // deciding build number and release number
            }
          }
        }
        stage("Clone repo & replace variables & call my pipeline") {
          steps {
            withCredentials([
                //credentials 
            ]) {
              cleanWs()
              deleteDir()
              git branch: "${params.branch}", credentialsId: 'jenkins-user-key-vcs', url: '[email protected]:some_repo/devops.git '
              // Declarative 'steps' cannot contain raw Groovy (def/if/each);
              // everything below must live inside a script block.
              script {
                // Must be a List ([]), not a Map ([:]): Map has no add(), and
                // collectEntries over a Map would iterate entries, not files.
                def sizingFiles = []

                if (params.sizing_directory.endsWith(".yaml")) {
                  sizingFiles.add(params.sizing_directory)
                } else {
                  sh(
                    returnStdout: true,
                    script: "find ${params.sizing_directory} -type f -name '*.yaml'"
                  ).trim().split('\n').each { sizingFile ->
                    sizingFiles.add(sizingFile)
                  }
                }

                // e.g. sizingFiles == ['first.yaml', 'second.yaml']
                // One map entry per file -> one parallel branch per file.
                def parallelStagesMap = sizingFiles.collectEntries {
                  ["${it}": generateStage(it)]
                }

                // Let the remaining branches keep running if one fails.
                parallelStagesMap.failFast = false
                parallel parallelStagesMap
              }
            }
          }
        }
      }
    }