diff --git a/dsl/flatpak_jobs.groovy b/dsl/flatpak_jobs.groovy
index 1b7fb23..1257e5c 100644
--- a/dsl/flatpak_jobs.groovy
+++ b/dsl/flatpak_jobs.groovy
@@ -1,93 +1,93 @@
 // Read the contents of the gathered-jobs.json file a step created for us previously
 def jobsToParse = readFileFromWorkspace('flatpak/gathered-jobs.json')
 def knownJobs = new groovy.json.JsonSlurper().parseText( jobsToParse )
 
 // Iterate over all of the known jobs and create them!
 knownJobs.each {
     // Save our job name for later
     def jobName = "${it.name}_flatpak"
     // Read in the necessary Pipeline template
-    def pipelineTemplate = readFileFromWorkspace("flatpak/generic-build.pipeline")
+    def pipelineTemplate = readFileFromWorkspace("flatpak/${it.script}")
     // Now we can construct our Pipeline script
     // We append a series of variables to the top of it to provide a variety of useful information to the otherwise templated script
     // These appended variables are what makes one build different to the next, aside from the template which was used
     def pipelineScript = """
         |def manifest = "${it.manifest}"
         |def repositoryUrl = "${it.repositoryUrl}"
         |def branchToBuild = "${it.branch}"
         |${pipelineTemplate}""".stripMargin()
     // Actually create the job now
     pipelineJob( jobName ) {
         properties {
             // We don't want to keep build results forever
-            // We'll set it to keep the last 10 builds and discard everything else
+            // We'll set it to keep the last 5 builds and discard everything else
             buildDiscarder {
                 strategy {
                     logRotator {
                         numToKeepStr("5")
                         daysToKeepStr('')
                         artifactDaysToKeepStr('')
                         artifactNumToKeepStr('')
                     }
                 }
             }
             // We don't want to be building the same project more than once
             // This is to prevent one project hogging resources
             // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
             disableConcurrentBuilds()
         }
         triggers {
             // We want to automatically check for changes and trigger a build once a day
             pollSCM {
                 scmpoll_spec('@daily')
                 ignorePostCommitHooks(true)
             }
         }
         // This is where the Pipeline script actually happens :)
         definition {
             cps {
                 script( pipelineScript )
                 sandbox()
             }
         }
     }
 }
 
 // Additionally, the ARM builds are done separately, but need periodic publishing, so create a job to do just that
 pipelineScript = readFileFromWorkspace("flatpak/publish-arm.pipeline")
 // Actually create the job now
 pipelineJob( "Flatpak_arm_repo_publish" ) {
     properties {
         // We don't want to keep build results forever
         // We'll set it to keep the last 5 builds and discard everything else
         buildDiscarder {
             strategy {
                 logRotator {
                     numToKeepStr("5")
                     daysToKeepStr('')
                     artifactDaysToKeepStr('')
                     artifactNumToKeepStr('')
                 }
             }
         }
         // We don't want to be building the same project more than once
         // This is to prevent one project hogging resources
         // And also has a practical component as otherwise an older build could finish afterwards and upload old build results
         disableConcurrentBuilds()
     }
     triggers {
         // We want to automatically do the publishing once a day
         cron("@daily")
     }
     // This is where the Pipeline script actually happens :)
     definition {
         cps {
             script( pipelineScript )
             sandbox()
         }
     }
 }
diff --git a/flatpak/gather-jobs.py b/flatpak/gather-jobs.py
index 45d6081..4977202 100644
--- a/flatpak/gather-jobs.py
+++ b/flatpak/gather-jobs.py
@@ -1,127 +1,135 @@
 #!/usr/bin/python3
 import os
 import sys
 import json
 import yaml
 import argparse
 
 # Parse the command line arguments we've been given
 parser = argparse.ArgumentParser(description='Utility to determine which jobs need to be registered in Jenkins.')
 parser.add_argument('--flatpak-manifests', type=str, required=True)
 arguments = parser.parse_args()
 
 # Our output will be a list of Dictionaries, containing several keys:
 # 1) The name of the job
 # 2) The Craft Blueprints to be built as part of the job
 # 3) The Craft Blueprints the job should package
 # 4) The description for the resulting job
 jobsGathered = []
 
 # Let's get started!
 for entry in os.scandir(path=arguments.flatpak_manifests):
     # Make sure this isn't a directory (as that definitely isn't a Flatpak definition)
     if entry.is_dir():
         continue
 
     # Could this be a local Flatpak manifest?
     if entry.name.endswith('.json'):
         # We have a winner!
         # Load the definition in so we can read the information we need from it
         manifestFile = open( entry.path )
         manifest = json.load( manifestFile )
 
         # Make sure we are dealing with an actual application
         # These have an 'id' specified in them
         if 'id' not in manifest:
             print("Skipping non-application " + entry.name)
             continue
 
         # We also have to make sure we have a Manifest that specifies a Git repository
         # Otherwise it's impossible to have a nightly build
         if 'modules' not in manifest or 'url' not in manifest['modules'][-1]['sources'][-1]:
             print("Skipping " + entry.name)
             continue
 
         # Make sure we have a branch for Git
         # This is optional normally but the rest of our code requires it
         if 'branch' not in manifest:
             # Make it master then
             manifest['branch'] = 'master'
 
         # Grab the repository URL
         # We assume the last repository in the definition is the one we want as this seems to be the case 99% of the time
         # It isn't guaranteed to be correct but it's the best we have due to how Flatpak specifications work
         repositoryUrl = manifest['modules'][-1]['sources'][-1]['url']
 
         # Determine the path to where the Manifest (.json) file will be when running the build
         manifestPath = "flatpak-kde-applications/{0}".format( entry.name )
 
         # Grab the ID and grab the last component (assuming that is the application name)
         # Transform any dashes into underscores, as Jenkins does not support dashes in job names
         jobName = manifest['id'].split('.')[-1]
         jobName = jobName.replace('-', '_').capitalize()
 
         # Generate a description...
         jobDescription = "{0} nightly build for Flatpak".format( manifest['id'] )
 
         # Add it to the list of jobs to be built
-        jobEntry = {
+        jobsGathered.append( {
             'name': jobName,
             'description': jobDescription,
             'manifest': manifestPath,
             'repositoryUrl': repositoryUrl,
-            'branch': manifest['branch']
-        }
-
-        jobsGathered.append( jobEntry )
+            'branch': manifest['branch'],
+            'script': 'generic-build.pipeline'
+        })
+        jobsGathered.append( {
+            'name': jobName + "_arm",
+            'description': jobDescription,
+            'manifest': manifestPath,
+            'repositoryUrl': repositoryUrl,
+            'branch': manifest['branch'],
+            'script': 'build-arm.pipeline'
+        })
 
     # Otherwise maybe it could be a remote Flatpak manifest?
     if entry.name.endswith('remoteapp'):
         # Another winner!
         # Because this file is shell format we'll need to do some work to make it usable
         manifestFile = open( entry.path )
         manifest = {}
 
         # Go over each line in turn
         for line in manifestFile.readlines():
             # Lines are formatted following the KEY=VALUE syntax
             key, value = line.strip().split('=', 1)
             # Add it to the Manifest we're creating
             manifest[key] = value
 
         # Make sure we have a branch for Git
         # This is optional normally but the rest of our code requires it
         if 'GITBRANCH' not in manifest:
             # Make it master then
             manifest['GITBRANCH'] = 'master'
 
         # Grab the ID and grab the last component (assuming that is the application name)
         # Transform any dashes into underscores, as Jenkins does not support dashes in job names
         jobName = manifest['ID'].split('.')[-1]
         jobName = jobName.replace('-', '_').capitalize()
 
         # Generate a description...
         jobDescription = "{0} nightly build for Flatpak".format( manifest['ID'] )
 
         # Add it to the list of jobs to be built
         jobEntry = {
             'name': jobName,
             'description': jobDescription,
             'manifest': manifest['JSON'],
             'repositoryUrl': manifest['GITURL'],
-            'branch': manifest['GITBRANCH']
+            'branch': manifest['GITBRANCH'],
+            'script': 'generic-build.pipeline'
         }
 
         jobsGathered.append( jobEntry )
 
     # If it isn't any of those two then we don't care about it - continue onward to the next one!
     continue
 
 # Now output the jobs we've gathered in JSON to disk
 # This will subsequently be read in by a Jenkins DSL script and turned into Jenkins Jobs
 filePath = os.path.join( os.getcwd(), 'gathered-jobs.json')
 with open(filePath, 'w') as jobsFile:
     json.dump( jobsGathered, jobsFile, sort_keys=True, indent=2 )
 
 # All done!
 sys.exit(0)