diff --git a/dsl/craft_cache_jobs.groovy b/dsl/craft_cache_jobs.groovy
index 43daaa7..599a5eb 100644
--- a/dsl/craft_cache_jobs.groovy
+++ b/dsl/craft_cache_jobs.groovy
@@ -1,162 +1,162 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
-def jobsToParse = readFileFromWorkspace('craft-cache/devel-pipelines/gathered-jobs.json')
-def knownMasterJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
+def masterJobsToParse = readFileFromWorkspace('craft-cache/devel-pipelines/gathered-jobs.json')
+def knownMasterJobs = new groovy.json.JsonSlurper().parseText( masterJobsToParse )
 
 // First do the unstable/master branch...
 knownMasterJobs.each {
     // Create our job name
     def jobName = "Craft_Build_Cache_${it.craftPlatform}"
 
     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("craft-cache/devel-pipelines/${it.buildPipeline}.pipeline")
 
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def craftPlatform = "${it.craftPlatform}"
         |${pipelineTemplate}""".stripMargin()
 
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 5 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
 
 // We also want to create similar jobs for the stable branch
 // Read the contents of the gathered-jobs.json file a step created for us previously
-def jobsToParse = readFileFromWorkspace('craft-cache/stable-pipelines/gathered-jobs.json')
-def knownStableJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
+def stableJobsToParse = readFileFromWorkspace('craft-cache/stable-pipelines/gathered-jobs.json')
+def knownStableJobs = new groovy.json.JsonSlurper().parseText( stableJobsToParse )
 
 knownStableJobs.each {
     // Create our job name
     def jobName = "Craft_Build_Stable_Cache_${it.craftPlatform}"
 
     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("craft-cache/stable-pipelines/${it.buildPipeline}.pipeline")
 
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def craftPlatform = "${it.craftPlatform}"
         |${pipelineTemplate}""".stripMargin()
 
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 5 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
 
 // We also want to ensure a cleanup job for the nodes is created
 // Read in the necessary Pipeline template
 def pipelineScript = readFileFromWorkspace("craft-cache/cleanup-nodes.pipeline")
 
 // Actually create the job now
 pipelineJob( "Craft_Builder_Cleanup" ) {
     properties {
         // We don't want to keep build results forever
         // We'll set it to keep the last 5 builds and discard everything else
         buildDiscarder {
             strategy {
                 logRotator {
                     numToKeepStr("5")
                     daysToKeepStr('')
                     artifactDaysToKeepStr('')
                     artifactNumToKeepStr('')
                 }
             }
         }
         // We don't want to be building the same project more than once
         // This is to prevent one project hogging resources
         // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
         disableConcurrentBuilds()
     }
     // This is where the Pipeline script actually happens :)
     definition {
         cps {
             script( pipelineScript )
             sandbox()
         }
     }
 }
 
 // We also want to ensure a MinGW for MSVC build exists
 pipelineScript = readFileFromWorkspace("craft-cache/mingw-for-msvc.pipeline")
 
 // Actually create the job now
 pipelineJob( "MinGW-w64_binaries_for_MSVC" ) {
     properties {
         // We don't want to keep build results forever
         // We'll set it to keep the last 5 builds and discard everything else
         buildDiscarder {
             strategy {
                 logRotator {
                     numToKeepStr("5")
                     daysToKeepStr('')
                     artifactDaysToKeepStr('')
                     artifactNumToKeepStr('')
                 }
             }
         }
         // We don't want to be building the same project more than once
         // This is to prevent one project hogging resources
         // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
         disableConcurrentBuilds()
     }
     // This is where the Pipeline script actually happens :)
     definition {
         cps {
             script( pipelineScript )
             sandbox()
         }
     }
 }
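
For context, the DSL above expects each gathered-jobs.json file to contain a JSON array of objects carrying the craftPlatform and buildPipeline keys it reads. A minimal standalone Groovy sketch of that contract, with purely illustrative values (the real file is generated by an earlier step and is not shown in this patch):

// Minimal sketch of how the DSL consumes gathered-jobs.json.
// The keys craftPlatform and buildPipeline come from the script above;
// the sample values are assumptions for illustration only.
def sample = '''[
    { "craftPlatform": "LinuxExample",   "buildPipeline": "example-pipeline" },
    { "craftPlatform": "WindowsExample", "buildPipeline": "example-pipeline" }
]'''
new groovy.json.JsonSlurper().parseText( sample ).each {
    // Mirrors the job-name and template lookups performed per entry
    println "Craft_Build_Cache_${it.craftPlatform} uses ${it.buildPipeline}.pipeline"
}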