diff --git a/dsl/prereviewciseed.groovy b/dsl/prereviewciseed.groovy
index d630812..ad56b91 100644
--- a/dsl/prereviewciseed.groovy
+++ b/dsl/prereviewciseed.groovy
@@ -1,56 +1,57 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def jobsToParse = readFileFromWorkspace('pre-review/enabled-products.json')
 def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
 
 // Iterate over all of the known jobs and create them!
 knownJobs.each {
     // Save our job name for later
     def jobName = "Pre Review CI ${it.product}"
 
     // Read in our security token, so we can let Phabricator trigger our seed jobs
     def securityToken = readFileFromWorkspace("phabricator-remote-token")
     // Read in the necessary Pipeline script
     def pipelineTemplate = readFileFromWorkspace("pre-review/seed-job.pipeline")
     // Prepend the information it will need to run
     def pipelineScript = """
         |def productName = "${it.product}"
         |${pipelineTemplate}""".stripMargin()
 
     // Actually create the job now
     pipelineJob( jobName ) {
         // Make sure Phabricator is allowed to trigger the build
         authenticationToken( securityToken )
 
         // In order for Phabricator to tell us what to build, we need to have parameters...
         parameters {
             stringParam('DIFF_ID')
+            stringParam('REVISION')
             stringParam('PHID')
             stringParam('STAGING_URI')
             stringParam('STAGING_REF')
         }
 
         // Set some necessary properties around build logs and multiple builds...
         properties {
             // We don't want to keep build results for this for very long as they're not helpful
             // We'll set it to keep the last 10 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("10")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
         }
 
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
diff --git a/pre-review/create_review_job.groovy b/pre-review/create_review_job.groovy
index 246bd5f..d6af84f 100644
--- a/pre-review/create_review_job.groovy
+++ b/pre-review/create_review_job.groovy
@@ -1,70 +1,70 @@
 // Overall configuration
 // As developers might be posting reviews for master or stable branches, we'll build against master
 // While not perfect, this is the best solution we have for now
 def branchGroup = "kf5-qt5"
 def ciEnvironment = "production"
 
 // Read in our variables, as we'll need to know what job we are creating...
 def productName = "${PRODUCT_NAME}"
-def reviewID = "${DIFF_ID}"
+def reviewID = "${REVISION}"
 def reviewPHID = "${PHID}"
 def reviewStagingURL = "${STAGING_URI}"
 def reviewStagingRef = "${STAGING_REF}"
 
 // Determine which repository we have here
 def repositoryName = reviewStagingURL.tokenize(':').last()
 def projectName = repositoryName.tokenize('.').first()
 
 // Create a name for this job
 def jobName = "Reviews/${reviewID}"
 
 // Read in the necessary Pipeline template
 def pipelineTemplate = readFileFromWorkspace("pre-review/review-build.template")
 // Now we can construct our Pipeline script
 // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
 // These appended variables are what makes one build different to the next, aside from the template which was used
 def pipelineScript = """
     |def repositoryUrl = "${reviewStagingURL}"
     |def branchToBuild = "${reviewStagingRef}"
     |def productName = "${productName}"
     |def projectName = "${projectName}"
     |def branchGroup = "${branchGroup}"
     |def ciEnvironment = "${ciEnvironment}"
     |${pipelineTemplate}""".stripMargin()
 
 // Actually create the job now
 pipelineJob( jobName ) {
     // In order to be able to report back to Phabricator, we'll need to know a few things
     // Like the Differential ID number (we technically already do, but the Phabricator plugin expects to see it as a parameter)
     parameters {
         stringParam('DIFF_ID')
         stringParam('PHID')
     }
     properties {
         // We don't want to keep build results forever
         // We'll set it to keep the last 5 builds and discard everything else
         buildDiscarder {
             strategy {
                 logRotator {
                     numToKeepStr("5")
                     daysToKeepStr('')
                     artifactDaysToKeepStr('')
                     artifactNumToKeepStr('')
                 }
             }
         }
 
         // We don't want to be building the same project more than once
         // This is to prevent one project hogging resources
         // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
         disableConcurrentBuilds()
     }
 
     // This is where the Pipeline script actually happens :)
     definition {
         cps {
             script( pipelineScript )
             sandbox()
         }
     }
 }
diff --git a/pre-review/seed-job.pipeline b/pre-review/seed-job.pipeline
index 69e07eb..cb41059 100644
--- a/pre-review/seed-job.pipeline
+++ b/pre-review/seed-job.pipeline
@@ -1,14 +1,14 @@
 // First things first: make sure our job exists!
 node('master') {
     // Grab the CI Tooling...
     checkout changelog: false, poll: false, scm: [
         $class: 'GitSCM',
         branches: [[name: 'master']],
         userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']]
     ]
     // Then process the necessary DSL to create the job
     jobDsl targets: 'pre-review/create_review_job.groovy', additionalParameters: [PRODUCT_NAME: productName]
 }
 
 // Now trigger the individual job we want
-build job: "Reviews/${params.DIFF_ID}", parameters: [string(name: 'DIFF_ID', value: params.DIFF_ID), string(name: 'PHID', value: params.PHID)], wait: false
+build job: "Reviews/${params.REVISION}", parameters: [string(name: 'DIFF_ID', value: params.DIFF_ID), string(name: 'PHID', value: params.PHID)], wait: false
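
Note, for context rather than as part of the patch: with this change the per-review jobs are keyed on the Differential revision (Reviews/<revision>) instead of the diff ID, so whatever triggers the seed job must now supply a REVISION parameter alongside DIFF_ID. As a rough sketch only, assuming the trigger is a Phabricator Harbormaster "Make HTTP Request" build step calling Jenkins' buildWithParameters endpoint (the host and product name below are placeholders, not taken from this patch), the request would look something like:

    https://ci.example.org/job/Pre%20Review%20CI%20Frameworks/buildWithParameters?token=<phabricator-remote-token>&DIFF_ID=${buildable.diff}&REVISION=${buildable.revision}&PHID=${target.phid}&STAGING_URI=${repository.staging.uri}&STAGING_REF=${repository.staging.ref}

Here the token query parameter corresponds to the value the DSL wires in via authenticationToken( securityToken ), and the ${...} values are Harbormaster build-step variables filled in by Phabricator at trigger time.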