diff --git a/dsl/flatpak_jobs.groovy b/dsl/flatpak_jobs.groovy
--- a/dsl/flatpak_jobs.groovy
+++ b/dsl/flatpak_jobs.groovy
@@ -56,6 +56,59 @@
 }
 }
+knownJobs.each {
+	// Save our job name for later
+	def jobName = "${it.name}_flatpak_arm"
+
+	// Read in the necessary Pipeline template
+	def pipelineTemplate = readFileFromWorkspace("flatpak/build-arm.pipeline")
+	// Now we can construct our Pipeline script
+	// We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
+	// These appended variables are what makes one build different to the next, aside from the template which was used
+	def pipelineScript = """
+		|def manifest = "${it.manifest}"
+		|def repositoryUrl = "${it.repositoryUrl}"
+		|def branchToBuild = "${it.branch}"
+
+		|${pipelineTemplate}""".stripMargin()
+
+	// Actually create the job now
+	pipelineJob( jobName ) {
+		properties {
+			// We don't want to keep build results forever
+			// We'll set it to keep the last 5 builds and discard everything else
+			buildDiscarder {
+				strategy {
+					logRotator {
+						numToKeepStr('5')
+						daysToKeepStr('')
+						artifactDaysToKeepStr('')
+						artifactNumToKeepStr('')
+					}
+				}
+			}
+			// We don't want to be building the same project more than once
+			// This is to prevent one project hogging resources
+			// And also has a practical component as otherwise an older build could finish afterwards and upload old build results
+			disableConcurrentBuilds()
+		}
+		triggers {
+			// We want to automatically check for changes and trigger a build once a day
+			pollSCM {
+				scmpoll_spec('@daily')
+				ignorePostCommitHooks(true)
+			}
+		}
+		// This is where the Pipeline script actually happens :)
+		definition {
+			cps {
+				script( pipelineScript )
+				sandbox()
+			}
+		}
+	}
+}
+
 // Additionally, the ARM builds are done separately, but need periodic publishing, so create a job to do just that
 pipelineScript = readFileFromWorkspace("flatpak/publish-arm.pipeline")