diff --git a/dsl/craft_cache_jobs.groovy b/dsl/craft_cache_jobs.groovy
index e1d46ee..f2e1942 100644
--- a/dsl/craft_cache_jobs.groovy
+++ b/dsl/craft_cache_jobs.groovy
@@ -1,179 +1,227 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def masterJobsToParse = readFileFromWorkspace('craft-cache/master-pipelines/gathered-jobs.json')
 def knownMasterJobs = new groovy.json.JsonSlurper().parseText( masterJobsToParse )

 // First do the unstable/master branch...
 knownMasterJobs.each {
     // Create our job name
     def jobName = "Craft_Build_Master_Cache_${it.craftPlatform}"

     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("craft-cache/master-pipelines/${it.buildPipeline}.pipeline")
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def craftPlatform = "${it.craftPlatform}"
         |${pipelineTemplate}""".stripMargin()

     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 10 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }

 // We also want to create similar jobs for the experimental branch
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def experimentalJobsToParse = readFileFromWorkspace('craft-cache/experimental-pipelines/gathered-jobs.json')
 def knownExperimentalJobs = new groovy.json.JsonSlurper().parseText( experimentalJobsToParse )

 knownExperimentalJobs.each {
     // Create our job name
     def jobName = "Craft_Build_Experimental_Cache_${it.craftPlatform}"

     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("craft-cache/experimental-pipelines/${it.buildPipeline}.pipeline")
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def craftPlatform = "${it.craftPlatform}"
         |${pipelineTemplate}""".stripMargin()

     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 10 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }

 // Finally, do the Prebuilt Jobs
 def prebuiltJobsToParse = readFileFromWorkspace('craft-cache/prebuilt-pipelines/gathered-jobs.json')
 def knownPrebuiltJobs = new groovy.json.JsonSlurper().parseText( prebuiltJobsToParse )

 knownPrebuiltJobs.each {
     // Create our job name
     def jobName = "Craft_Prebuilt_Binary_Cache_${it.craftPlatform}"

     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("craft-cache/prebuilt-pipelines/${it.buildPipeline}.pipeline")
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def craftPlatform = "${it.craftPlatform}"
         |${pipelineTemplate}""".stripMargin()

     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 10 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }

 // We also want to ensure a cleanup job for the nodes is created
 // Read in the necessary Pipeline template
 def pipelineScript = readFileFromWorkspace("craft-cache/cleanup-nodes.pipeline")
 // Actually create the job now
 pipelineJob( "Craft_Builder_Cleanup" ) {
     properties {
         // We don't want to keep build results forever
         // We'll set it to keep the last 10 builds and discard everything else
         buildDiscarder {
             strategy {
                 logRotator {
                     numToKeepStr("5")
                     daysToKeepStr('')
                     artifactDaysToKeepStr('')
                     artifactNumToKeepStr('')
                 }
             }
         }
         // We don't want to be building the same project more than once
         // This is to prevent one project hogging resources
         // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
         disableConcurrentBuilds()
     }
     // This is where the Pipeline script actually happens :)
     definition {
         cps {
             script( pipelineScript )
             sandbox()
         }
     }
 }
+// We also want to create similar jobs for the src cache
+def srcJobsToParse = readFileFromWorkspace('craft-cache/src-pipelines/gathered-jobs.json')
+def knownSrcJobs = new groovy.json.JsonSlurper().parseText( srcJobsToParse )
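+// Each entry in gathered-jobs.json is expected to provide the craftPlatform and buildPipeline fields used below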
+
+knownSrcJobs.each {
+    // Create our job name
+    def jobName = "Craft_Build_Src_Cache_${it.craftPlatform}"
+
+    // Read in the necessary Pipeline template
+    def pipelineTemplate = readFileFromWorkspace("craft-cache/src-pipelines/${it.buildPipeline}.pipeline")
+    // Now we can construct our Pipeline script
+    // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
+    // These appended variables are what makes one build different to the next, aside from the template which was used
+    def pipelineScript = """
+        |def craftPlatform = "${it.craftPlatform}"
+        |${pipelineTemplate}""".stripMargin()
+
+    // Actually create the job now
+    pipelineJob( jobName ) {
+        properties {
+            // We don't want to keep build results forever
+            // We'll set it to keep the last 5 builds and discard everything else
+            buildDiscarder {
+                strategy {
+                    logRotator {
+                        numToKeepStr("5")
+                        daysToKeepStr('')
+                        artifactDaysToKeepStr('')
+                        artifactNumToKeepStr('')
+                    }
+                }
+            }
+            // We don't want to be building the same project more than once
+            // This is to prevent one project hogging resources
+            // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
+            disableConcurrentBuilds()
+        }
+        // This is where the Pipeline script actually happens :)
+        definition {
+            cps {
+                script( pipelineScript )
+                sandbox()
+            }
+        }
+    }
+}
+