groovy - How can I run Jenkins pipeline jobs in parallel when they all call the same downstream job?

kr98yfug · asked 2023-03-22 · in Jenkins

I am new to Jenkins and to writing pipeline Groovy scripts. I want to trigger a downstream pipeline in parallel, once for each file inside a folder supplied by the user. Below is the sample code I wrote:

def GLOBAL_RELEASE_NUMBER
def GLOBAL_BUILD_NUMBER
pipeline {

  agent { label 'centos7-itest' }
  options {
    timestamps()
    buildDiscarder(
      logRotator(
        daysToKeepStr: '100'
      )
    )
    ansiColor('xterm')
  }

  parameters {
    //some parameters
  }

  environment {
    // For python3
  }
  stages {
    stage("setting environment") {
      environment {
        //setting up environment
      }
      steps {
        script {
          // deciding build number and release number
        }
      }
    }
        stage("Clone repo & replace variables & call my pipeline") {
                    steps {
                        withCredentials([
                            //credentials 
                        ]){
                            cleanWs()
                            deleteDir()
                            git branch: "${params.branch}", credentialsId: 'jenkins-user-key-vcs', url: 'git@github.com:some_repo/devops.git '
                            script {
                                sizingFiles = []
                                def branches = [:]
                                def counter=0

                                if (params.sizing_directory.endsWith(".yaml")) {
                                    sizingFiles.add(params.sizing_directory)
                                } else {

                                    sh(
                                        returnStdout: true,
                                        script: "find ${params.sizing_directory} -type f -name '*.yaml'"
                                    ).trim().split('\n').each { sizingFile ->
                                        sizingFiles.add(sizingFile)
                                    }

                                }
                                    for (def sizingFile in sizingFiles) {
                                        echo "Processing ${sizingFile}"

                                        sh """
                                            sed -i 's/{{[[:space:]]*user[[:space:]]*}}/${params.test_user}/g;
                                                    s/{{[[:space:]]*owner[[:space:]]*}}/my_team/g;
                                                    s/{{[[:space:]]*dept[[:space:]]*}}/team/g;
                                                    s/{{[[:space:]]*task[[:space:]]*}}/sizing/g;
                                                    s/{{[[:space:]]*SoftwareVersion[[:space:]]*}}/$GLOBAL_RELEASE_NUMBER-b$GLOBAL_BUILD_NUMBER/g' ${sizingFile}
                                            cat ${sizingFile}
                                            
                                        """

                                        branches[counter] = { 
                                            stage('myPipeline'){
                                              build job: "Myteam/myPipeline",
                                              wait: false,
                                              parameters: [ 
                                                    text(name: 'sample_yaml', value: readFile(file: sizingFile)),
                                                    string(name: 'branch', value: "${params.branch}")
                                                ]
                                            }
                                            counter+=1

                                        }

                                    }
                                parallel branches 

                            }
                    }
             }
        }
    }
}

The problem is that when I trigger this pipeline on a folder containing two YAML files, the downstream job is triggered for the first file, runs to completion, and only then does the pipeline move on to the next file and trigger its job. I want all the jobs to run in parallel, which is why I set wait: false on the individual builds. Can someone point out what I am doing wrong?

pu82cl6c (answer #1)

To schedule multiple jobs in parallel, you can use collectEntries.
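
For illustration, here is a minimal sketch of the idea (the file names and the echo body are made up, and this would run from a script block or a scripted pipeline): collectEntries turns the list of files into a map whose keys are the file names and whose values are closures, which is exactly the shape the parallel step expects.

def files = ['first.yaml', 'second.yaml']   // hypothetical list of sizing files

// Build a map: each file name points to a closure that becomes one parallel branch
def stageMap = files.collectEntries { f ->
    ["${f}": { echo "would trigger the downstream job for ${f}" }]
}

stageMap.failFast = false
parallel stageMap   // both closures run concurrently

Applied to your pipeline, the full solution looks like this: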

// No 'def' here: the variables go into the script binding, so generateStage() can also see them
GLOBAL_RELEASE_NUMBER = null
GLOBAL_BUILD_NUMBER = null
def generateStage(job) {
  return {
    stage("Process ${job}") {
      echo "Processing ${job}"

      sh """
          sed -i 's/{{[[:space:]]*user[[:space:]]*}}/${params.test_user}/g;
                  s/{{[[:space:]]*owner[[:space:]]*}}/my_team/g;
                  s/{{[[:space:]]*dept[[:space:]]*}}/team/g;
                  s/{{[[:space:]]*task[[:space:]]*}}/sizing/g;
                  s/{{[[:space:]]*SoftwareVersion[[:space:]]*}}/$GLOBAL_RELEASE_NUMBER-b$GLOBAL_BUILD_NUMBER/g' ${job}
          cat ${job}
      """

      // Fire the downstream job for this file; with wait: false the step returns immediately
      build job: "Myteam/myPipeline",
            wait: false,
            parameters: [
                text(name: 'sample_yaml', value: readFile(file: job)),
                string(name: 'branch', value: "${params.branch}")
            ]
    }
  }
}

pipeline {

  agent { label 'centos7-itest' }
  options {
    timestamps()
    buildDiscarder(
      logRotator(
        daysToKeepStr: '100'
      )
    )
    ansiColor('xterm')
  }

  parameters {
    //some parameters
  }

  environment {
    // For python3
  }
  stages {
    stage("setting environment") {
      environment {
        //setting up environment
      }
      steps {
        script {
          // deciding build number and release number
        }
      }
    }
        stage("Clone repo & replace variables & call my pipeline") {
                    steps {
                        withCredentials([
                            //credentials 
                        ]){
                            cleanWs()
                            deleteDir()
                            git branch: "${params.branch}", credentialsId: 'jenkins-user-key-vcs', url: 'git@github.com:some_repo/devops.git '
                            def sizingFiles = [:]
                            def parallelStagesMap = ''

                            if (params.sizing_directory.endsWith(".yaml")) {
                                sizingFiles.add(params.sizing_directory)
                            } else {

                                sh(
                                    returnStdout: true,
                                    script: "find ${params.sizing_directory} -type f -name '*.yaml'"
                                ).trim().split('\n').each { sizingFile ->
                                    sizingFiles.add(sizingFile)
                                }

                            }

                            // sizingFiles = ['first.yaml', 'second.yaml'] <- I assume that your list looks similar to this

                            parallelStagesMap = sizingFiles.collectEntries {["${it}" : generateStage(it)]} // This will create calls to function generateStage base on amount of elements in sizingFiles
                                        
                            parallelStagesMap.failFast = false
                            parallel parallelStagesMap
                    }
             }
        }
    }
}
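
A note on the design (my reading of the build step, not something stated in the original answer): with wait: false the build step only fires the downstream Myteam/myPipeline run and returns immediately, so each parallel branch finishes as soon as its trigger is sent. If you want every branch to wait for its downstream build and reflect its result in this pipeline, change wait to true inside generateStage.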
