diff --git a/dsl/rebuildseed.groovy b/dsl/rebuildseed.groovy
new file mode 100644
index 0000000..72e02d5
--- /dev/null
+++ b/dsl/rebuildseed.groovy
@@ -0,0 +1,72 @@
+// Read the contents of the gathered-jobs.json file a step created for us previously
+def jobsToParse = readFileFromWorkspace('gathered-jobs.json')
+def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
+
+// Read the contents of the gathered-structure.json file a step created for us previously
+def structureToParse = readFileFromWorkspace('gathered-structure.json')
+def knownStructure = new groovy.json.JsonSlurper().parseText( structureToParse )
+
+// Iterate over all of the known structures and create the necessary jobs to trigger global rebuilds
+knownStructure.combinations.each {
+    // Determine what our product/branchGroup/platform key is
+    def structureKey = "${it.product} ${it.branchGroup} ${it.platform}"
+
+    // Create our job name
+    def jobName = "Global Rebuild ${structureKey}"
+
+    // Start assembling our Pipeline script
+    def pipelineScript = """
+        // First things first: rebuild all the dependencies
+        build job: 'Dependency Build ${structureKey}', quietPeriod: 10, wait: true
+
+        // Now trigger a rebuild of all the individual jobs
+    """
+
+    // Now go forth and check all the jobs we know about...
+    knownJobs.each {
+        // Determine the structure key for this job
+        def localJobStructure = "${it.product} ${it.branchGroup} ${it.platform}"
+
+        // Does it have the same key?
+        if( structureKey != localJobStructure ) {
+            // There is no need for us to rebuild it then, so skip over it (return skips to the next iteration of the closure)
+            return
+        }
+
+        // Determine the name this job will have
+        def localJobName = "${it.product} ${it.name} ${it.branchGroup} ${it.platform}"
+
+        // Add it to the pipeline script
+        pipelineScript = """
+            ${pipelineScript}
+            build job: '${localJobName}', quietPeriod: 10, wait: false
+        """
+    }
+
+    // Actually create the job now
+    pipelineJob( jobName ) {
+        properties {
+            // We don't want to keep build results forever, as otherwise they fill up the Jenkins host disk and clog things up
+            // As a compromise to keep useful information around we keep the last 25 builds
+            buildDiscarder {
+                strategy {
+                    logRotator {
+                        numToKeepStr('25')
+                        daysToKeepStr('')
+                        artifactDaysToKeepStr('')
+                        artifactNumToKeepStr('')
+                    }
+                }
+            }
+            // Make sure we don't build more than one at the same time
+            disableConcurrentBuilds()
+        }
+        // Magic happens here - put the Pipeline in!
+        definition {
+            cps {
+                script( pipelineScript.stripMargin() )
+                sandbox()
+            }
+        }
+    }
+}
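
For reference, the seed script only reads a handful of fields from the two JSON files it is handed: gathered-structure.json is expected to expose a combinations list whose entries carry product, branchGroup and platform, while gathered-jobs.json is expected to be a flat list of job entries with an additional name field. Below is a minimal sketch of that assumed shape (the product, branch group and platform values are purely illustrative), parsed the same way the seed script does:

import groovy.json.JsonSlurper

// Hypothetical gathered-structure.json: one entry per product/branchGroup/platform combination
def sampleStructure = '''
{
    "combinations": [
        { "product": "Frameworks", "branchGroup": "kf5-qt5", "platform": "SUSEQt5.12" }
    ]
}
'''

// Hypothetical gathered-jobs.json: one entry per individual job, with an extra "name" field
def sampleJobs = '''
[
    { "product": "Frameworks", "name": "baloo",   "branchGroup": "kf5-qt5", "platform": "SUSEQt5.12" },
    { "product": "Frameworks", "name": "kconfig", "branchGroup": "kf5-qt5", "platform": "SUSEQt5.12" }
]
'''

def knownStructure = new JsonSlurper().parseText( sampleStructure )
def knownJobs = new JsonSlurper().parseText( sampleJobs )

// These are the only lookups rebuildseed.groovy performs on the parsed data
assert knownStructure.combinations.first().platform == 'SUSEQt5.12'
assert knownJobs.collect { it.name } == ['baloo', 'kconfig']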
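
To make the string assembly concrete: for the illustrative "Frameworks kf5-qt5 SUSEQt5.12" combination above, the generated "Global Rebuild Frameworks kf5-qt5 SUSEQt5.12" job would end up holding a Pipeline script roughly like the following (whitespace aside). The dependency build is triggered first and waited on, then each matching per-project job is queued without waiting.

// First things first: rebuild all the dependencies
build job: 'Dependency Build Frameworks kf5-qt5 SUSEQt5.12', quietPeriod: 10, wait: true

// Now trigger a rebuild of all the individual jobs
build job: 'Frameworks baloo kf5-qt5 SUSEQt5.12', quietPeriod: 10, wait: false
build job: 'Frameworks kconfig kf5-qt5 SUSEQt5.12', quietPeriod: 10, wait: false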