diff --git a/dsl/flatpak_jobs.groovy b/dsl/flatpak_jobs.groovy
index 1257e5c..798f7ae 100644
--- a/dsl/flatpak_jobs.groovy
+++ b/dsl/flatpak_jobs.groovy
@@ -1,93 +1,58 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def jobsToParse = readFileFromWorkspace('flatpak/gathered-jobs.json')
 def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
 
 // Iterate over all of the known jobs and create them!
 knownJobs.each {
     // Save our job name for later
     def jobName = "${it.name}_flatpak"
 
     // Read in the necessary Pipeline template
     def pipelineTemplate = readFileFromWorkspace("${it.script}")
 
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def manifest = "${it.manifest}"
         |def repositoryUrl = "${it.repositoryUrl}"
         |def branchToBuild = "${it.branch}"
         |${pipelineTemplate}""".stripMargin()
 
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
             // We'll set it to keep the last 5 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         triggers {
             // We want to automatically check for changes and trigger a build once a day
             pollSCM {
                 scmpoll_spec('@daily')
                 ignorePostCommitHooks(true)
             }
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
 
-// Additionally, the ARM builds are done separately, but need periodic publishing, so create a job to do just that
-pipelineScript = readFileFromWorkspace("flatpak/publish-arm.pipeline")
-
-// Actually create the job now
-pipelineJob( "Flatpak_arm_repo_publish" ) {
-    properties {
-        // We don't want to keep build results forever
-        // We'll set it to keep the last 5 builds and discard everything else
-        buildDiscarder {
-            strategy {
-                logRotator {
-                    numToKeepStr("5")
-                    daysToKeepStr('')
-                    artifactDaysToKeepStr('')
-                    artifactNumToKeepStr('')
-                }
-            }
-        }
-        // We don't want to be building the same project more than once
-        // This is to prevent one project hogging resources
-        // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
-        disableConcurrentBuilds()
-    }
-    triggers {
-        // We want to automatically do the publishing once a day
-        cron("@daily")
-    }
-    // This is where the Pipeline script actually happens :)
-    definition {
-        cps {
-            script( pipelineScript )
-            sandbox()
-        }
-    }
-}
diff --git a/flatpak/publish-arm.pipeline b/flatpak/publish-arm.pipeline
deleted file mode 100644
index f8aaa71..0000000
--- a/flatpak/publish-arm.pipeline
+++ /dev/null
@@ -1,18 +0,0 @@
-// Request a node to be allocated to us
-node( "Flatpak" ) {
-// We want Timestamps on everything
-timestamps {
-    // We want to catch any errors that occur to allow us to send out notifications (ie. emails) if needed
-    catchError {
-        // Finally we can sign the repository
-        stage('Publishing Repository') {
-            // Sign and then upload the repository to the final server
-            sh """
-                flatpak build-commit-from --src-repo=\$HOME/repo-arm --gpg-sign=61C45BED \$HOME/public-repo --update-appstream
-                flatpak build-update-repo --prune --prune-depth=20 --generate-static-deltas --gpg-sign=61C45BED \$HOME/public-repo
-                rsync -Ha --delete \$HOME/public-repo/ flatpak@milonia.kde.org:/srv/www/distribute.kde.org/flatpak-apps-testing/
-            """
-        }
-    }
-}
-}
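
Note: the DSL loop above reads five fields from each entry in gathered-jobs.json: name, script, manifest, repositoryUrl and branch. The exact contents of that file are produced by an earlier step and are not shown in this diff; a minimal sketch of what one entry could look like, with purely hypothetical values, would be:

    [
        {
            "name": "okular",
            "script": "flatpak/template.pipeline",
            "manifest": "org.kde.okular.json",
            "repositoryUrl": "https://invent.kde.org/graphics/okular.git",
            "branch": "master"
        }
    ]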
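
Given such an entry, the stripMargin() template in the DSL would expand into a Pipeline script whose first three lines pin the per-job values, followed by the shared template body. Using the same hypothetical values as above:

    def manifest = "org.kde.okular.json"
    def repositoryUrl = "https://invent.kde.org/graphics/okular.git"
    def branchToBuild = "master"
    // ...contents of the per-job Pipeline template follow here...

This prefixing is what differentiates one generated job from the next, while the template itself stays identical across all of them.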