diff --git a/dsl/globalrebuilds.groovy b/dsl/globalrebuilds.groovy
index 392f8ea..b576bca 100644
--- a/dsl/globalrebuilds.groovy
+++ b/dsl/globalrebuilds.groovy
@@ -1,125 +1,125 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def jobsToParse = readFileFromWorkspace('gathered-jobs.json')
 def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
 
 // Read the contents of the gathered-structure.json file a step created for us previously
 def structureToParse = readFileFromWorkspace('gathered-structure.json')
 def knownStructure = new groovy.json.JsonSlurper().parseText( structureToParse )
 
 // Make sure the Administration folder exists
 folder('Administration')
 
 // Iterate over all of the known structures and create the necessary jobs to build the dependencies
 knownStructure.combinations.each {
     // Create our job name
     def jobName = "Administration/Dependency Build ${it.product} ${it.branchGroup} ${it.platform}"
 
     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("pipeline-templates/dependency-build/${it.platform}.template")
 
     // Now we can construct our Pipeline script
     def pipelineScript = """
         |def productName = "${it.product}"
         |def branchGroup = "${it.branchGroup}"
         |def currentPlatform = "${it.platform}"
         |def ciEnvironment = "${knownStructure.environment}"
         |${pipelineTemplate}""".stripMargin()
 
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever, as otherwise they fill up the Jenkins host disk and clog things up
-            // As a compromise to keep useful information around we keep the last 25 builds
+            // As a compromise to keep useful information around we keep the last 5 builds
             buildDiscarder {
                 strategy {
                     logRotator {
-                        numToKeepStr("25")
+                        numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // Make sure we don't build more than one at the same time
             // For the dependency builds this is quite important as they're resource expensive and can take quite a while to do their thing
             disableConcurrentBuilds()
         }
         // We want to rebuild dependency jobs once a week to catch up on changes in our dependencies
         // Of course someone will be able to initiate one manually if needed, but weekly should be enough to catch up on fixes and the like
         triggers {
             cron('@weekly')
         }
         // Magic happens here - put the Pipeline in!
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
 
 // Iterate over all of the known structures and create the necessary jobs to trigger global rebuilds
 knownStructure.combinations.each {
     // Determine what our product/branchGroup/platform key is
     def structureKey = "${it.product} ${it.branchGroup} ${it.platform}"
 
     // Create our job name
     def jobName = "Administration/Global Rebuild ${structureKey}"
 
     // Start assembling our Pipeline script
     def pipelineScript = """
         |// First things first: rebuild all the dependencies
         |build job: 'Administration/Dependency Build ${structureKey}', quietPeriod: 10, wait: true
         |// Now trigger a rebuild of all the individual jobs
     """.stripMargin().trim()
 
     // Now go forth and check all the jobs we know about...
     knownJobs.each {
         // Determine the structure key for this job
         localJobStructure = "${it.product} ${it.branchGroup} ${it.platform}"
 
         // Does it have the same key?
         if( structureKey != localJobStructure ) {
             // There is no need for us to rebuild it then so skip over it
             return
         }
 
         // Determine the name this job will have
         def localJobName = "${it.product}/${it.name}/${it.branchGroup} ${it.platform}"
 
         // Add it to the pipeline script
         pipelineScript = """
             |${pipelineScript}
             |build job: '${localJobName}', quietPeriod: 10, wait: false
         """.stripMargin().trim()
     }
 
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever, as otherwise they fill up the Jenkins host disk and clog things up
             // As a compromise to keep useful information around we keep the last 25 builds
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("25")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // Make sure we don't build more than one at the same time
             disableConcurrentBuilds()
         }
         // Magic happens here - put the Pipeline in!
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
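Note for reviewers: the seed script only relies on a handful of fields in the two gathered files. A minimal sketch of the shapes it assumes follows; the field names (environment, combinations, product, branchGroup, platform, name) are taken from the accesses in the script, while the concrete values are made up for illustration.

// Illustrative only: minimal plausible shapes for gathered-structure.json and
// gathered-jobs.json, matching the fields the seed script reads. The values
// ("production", "Frameworks", "kcoreaddons", etc.) are hypothetical.
import groovy.json.JsonSlurper

def structureJson = '''
{
    "environment": "production",
    "combinations": [
        { "product": "Frameworks", "branchGroup": "kf5-qt5", "platform": "LinuxQt5" }
    ]
}
'''
def jobsJson = '''
[
    { "product": "Frameworks", "name": "kcoreaddons", "branchGroup": "kf5-qt5", "platform": "LinuxQt5" }
]
'''

def knownStructure = new JsonSlurper().parseText( structureJson )
def knownJobs = new JsonSlurper().parseText( jobsJson )

// The same lookups the seed script performs against the parsed data
assert knownStructure.combinations.first().platform == 'LinuxQt5'
assert knownJobs.first().name == 'kcoreaddons'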
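For the hypothetical combination above with a single matching job, the string assembly in the second loop would produce a Global Rebuild Pipeline script along these lines (job names are illustrative, not actual repository output):

// Sketch of the generated Pipeline script for "Frameworks kf5-qt5 LinuxQt5"
// First things first: rebuild all the dependencies
build job: 'Administration/Dependency Build Frameworks kf5-qt5 LinuxQt5', quietPeriod: 10, wait: true
// Now trigger a rebuild of all the individual jobs
build job: 'Frameworks/kcoreaddons/kf5-qt5 LinuxQt5', quietPeriod: 10, wait: false

Note the deliberate asymmetry: the dependency build uses wait: true so every downstream job sees freshly rebuilt dependencies, while the individual jobs use wait: false so they simply queue up and run in parallel.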