diff --git a/archive-configs/production.yaml b/archive-configs/production.yaml index 2b41df0..70e2938 100644 --- a/archive-configs/production.yaml +++ b/archive-configs/production.yaml @@ -1,18 +1,17 @@ client: downloadBaseUrl: "https://build-artifacts.kde.org/production/" uploadHostname: "build-artifacts.kde.org" uploadUsername: "productionclient" uploadDirectory: "/home/productionclient/uploads/" uploadTools: "/home/productionclient/ci-tooling/" server: archiveDirectory: "/srv/production/" cacheLocation: WindowsMSVCQt5.11: "C:\\CI\\archives\\WindowsMSVCQt5.11\\" - SUSEQt5.10: "/srv/archives/production/SUSEQt5.10/" - SUSEQt5.11: "/srv/archives/production/SUSEQt5.11/" SUSEQt5.12: "/srv/archives/production/SUSEQt5.12/" + SUSEQt5.13: "/srv/archives/production/SUSEQt5.13/" FreeBSDQt5.12: "/usr/home/jenkins/archives/production/" AndroidQt5.12: "/srv/archives/production/AndroidQt5.12/" ABIReference: "/srv/archives/production/ABIReference" diff --git a/archive-configs/sandbox.yaml b/archive-configs/sandbox.yaml index c73376e..4eaaf44 100644 --- a/archive-configs/sandbox.yaml +++ b/archive-configs/sandbox.yaml @@ -1,18 +1,17 @@ client: downloadBaseUrl: "https://build-artifacts.kde.org/sandbox/" uploadHostname: "build-artifacts.kde.org" uploadUsername: "sandboxclient" uploadDirectory: "/home/sandboxclient/uploads/" uploadTools: "/home/sandboxclient/ci-tooling/" server: archiveDirectory: "/srv/sandbox/" cacheLocation: WindowsMSVCQt5.11: "C:\\CI\\sandbox-archives\\WindowsMSVCQt5.11\\" - SUSEQt5.10: "/srv/archives/sandbox/SUSEQt5.10/" - SUSEQt5.11: "/srv/archives/sandbox/SUSEQt5.11/" SUSEQt5.12: "/srv/archives/sandbox/SUSEQt5.12/" + SUSEQt5.13: "/srv/archives/sandbox/SUSEQt5.13/" FreeBSDQt5.12: "/usr/home/jenkins/archives/sandbox/" AndroidQt5.12: "/srv/archives/sandbox/AndroidQt5.12/" ABIReference: "/srv/archives/sandbox/ABIReference/" diff --git a/build-specs/Applications/akonadi-SUSEQt5.10.yaml b/build-specs/Applications/akonadi-SUSEQt5.12.yaml similarity index 100% rename from build-specs/Applications/akonadi-SUSEQt5.10.yaml rename to build-specs/Applications/akonadi-SUSEQt5.12.yaml diff --git a/custom-jobs/Extragear craft master SUSEQt5.10.pipeline b/custom-jobs/Extragear craft master SUSEQt5.12.pipeline similarity index 98% rename from custom-jobs/Extragear craft master SUSEQt5.10.pipeline rename to custom-jobs/Extragear craft master SUSEQt5.12.pipeline index 2b126d2..f3d09c2 100644 --- a/custom-jobs/Extragear craft master SUSEQt5.10.pipeline +++ b/custom-jobs/Extragear craft master SUSEQt5.12.pipeline @@ -1,54 +1,54 @@ // Request a node to be allocated to us -node( "SUSEQt5.10" ) { +node( "SUSEQt5.12" ) { // We want Timestamps on everything timestamps { // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed catchError { // First Thing: Checkout Sources stage('Checkout Sources') { // Craft itself checkout changelog: true, poll: true, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craft/']], userRemoteConfigs: [[url: "https://anongit.kde.org/craft"]] ] // Craftmaster checkout changelog: true, poll: true, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craftmaster/']], userRemoteConfigs: [[url: "https://anongit.kde.org/craftmaster"]] ] // Craft Blueprints for KDE checkout changelog: true, poll: true, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'craft-blueprints-kde/']], userRemoteConfigs: [[url: "https://anongit.kde.org/craft-blueprints-kde"]] ] } // Use Craftmaster to get Craft ready to go stage('Setting Up Craft') { // Install craft and it's core dependencies... sh """ python3 craftmaster/CraftMaster.py --config craftmaster/config/CraftBinaryCache.ini -c -i craft """ } // Now run the Craft tests stage('Running Tests') { // Ask Craftmaster to do this for us too sh """ python3 craftmaster/CraftMaster.py --config craftmaster/config/CraftBinaryCache.ini -c --test craft-core """ } } } } diff --git a/custom-jobs/known-jobs.json b/custom-jobs/known-jobs.json index 3fd190c..2fe7b03 100644 --- a/custom-jobs/known-jobs.json +++ b/custom-jobs/known-jobs.json @@ -1,4 +1,4 @@ [ {"name": "Craft Setup Deploy"}, - {"name": "Extragear craft master SUSEQt5.10"} + {"name": "Extragear craft master SUSEQt5.12"} ] diff --git a/helpers/check-platform.py b/helpers/check-platform.py index e045e22..ba922ee 100644 --- a/helpers/check-platform.py +++ b/helpers/check-platform.py @@ -1,42 +1,42 @@ #!/usr/bin/python3 import os import sys import argparse import subprocess import yaml from helperslib import BuildSpecs, Buildable, CommonUtils, EnvironmentHandler parser = argparse.ArgumentParser(description='Utility to check if metainfo.yaml is lying.', formatter_class=argparse.ArgumentDefaultsHelpFormatter) parser.add_argument('metainfo', nargs='+', help='metainfo.yaml files', type=str) arguments = parser.parse_args() allPlatforms = { - 'SUSEQt5.10': 'Linux', 'SUSEQt5.11': 'Linux', 'SUSEQt5.12': 'Linux', + 'SUSEQt5.13': 'Linux', 'FreeBSDQt5.12': 'FreeBSD', 'WindowsMSVCQt5.11': 'Windows', None: 'MacOSX', 'AndroidQt5.12': 'Android' } resolver = Buildable.DependencyResolver() resolver.loadProjectsFromTree(os.path.join( CommonUtils.scriptsBaseDirectory(), 'repo-metadata', 'projects' )) resolver.loadProjectsIgnoreRules(os.path.join( CommonUtils.scriptsBaseDirectory(), 'local-metadata', 'project-ignore-rules.yaml' )) for metainfo in arguments.metainfo: metainfoFile = open(metainfo, 'r', encoding='utf-8') read = yaml.safe_load(metainfoFile) platforms = [p['name'] for p in read['platforms']] if 'All' in platforms: platforms = allPlatforms.values() projectname=os.path.split(metainfo)[-2] p = resolver.retrieveProject( projectname ) if not p: # could not find the project for some reason continue ignoredPlatforms = [allPlatforms[ignoredPlatform] for ignoredPlatform in p.ignoredOnPlatforms] for ip in ignoredPlatforms: if ip in platforms: print('%s: %s is disabled in build.kde.org' % (projectname, ip)) diff --git a/helpers/extract-cmake-dependency-metadata.py b/helpers/extract-cmake-dependency-metadata.py index 10c5fb9..a03aeac 100755 --- a/helpers/extract-cmake-dependency-metadata.py +++ 
b/helpers/extract-cmake-dependency-metadata.py @@ -1,63 +1,63 @@ #!/usr/bin/python3 import os import sys import argparse import subprocess from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages # Parse the command line arguments we've been given parser = argparse.ArgumentParser(description='Utility to extract CMake Dependency Metadata from a build system. Provided to assist packagers with their packages.') parser.add_argument('--product', type=str, required=True) parser.add_argument('--project', type=str, required=True) parser.add_argument('--branchGroup', type=str, required=True) parser.add_argument('--platform', type=str, required=True) parser.add_argument('--usingInstall', type=str, required=True) arguments = parser.parse_args() # Load our build specification, which governs how we handle this build buildSpecification = BuildSpecs.Loader( product=arguments.product, project=arguments.project, branchGroup=arguments.branchGroup, platform=arguments.platform ) # Determine the environment we need to provide for the compilation process buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall ) # Determine where our source code is checked out to and where we will be building it # We'll assume that the directory we're running from is where the sources are located sourcesLocation = os.getcwd() buildLocation = CommonUtils.buildDirectoryForSources( sources=sourcesLocation, inSourceBuild=buildSpecification['in-source-build'] ) # Are we allowed to run? -# We only gather this metadata from the principal Linux platform, which at the moment is SUSEQt5.10 for Frameworks and everyone else -if arguments.platform != 'SUSEQt5.10': +# We only gather this metadata from the principal Linux platform, which at the moment is SUSEQt5.12 for Frameworks and everyone else +if arguments.platform != 'SUSEQt5.12': # Then there is nothing for us to do sys.exit(0) # Determine the name we'll use to store the results, as well as the local and remote paths it will be stored at # We use the package name as it's pretty much guaranteed to be unique among all the builds we will be running # As the CMake dependency metadata is only used by Linux packagers, we don't need to take platform into account here dependencyFilename = Packages.nameForProject(arguments.product, arguments.project, arguments.branchGroup) + '.json' localDependencyFilename = os.path.join( sourcesLocation, dependencyFilename ) remoteDependencyFilename = os.path.join( '/srv/dependency-metadata/', dependencyFilename ) # Build the command to run commandToRun = "python3 '{0}/kde-dev-scripts/cmake-dependencies.py'" commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory() ) # Time to run the command - open our dependency metadata file... 
with open(localDependencyFilename, 'w') as localDependencyFile: # Now run the command # We redirect stdout and stderr into the file because this program prints it's results to the console (stdout/stderr) process = subprocess.Popen( commandToRun, stdout=localDependencyFile, stderr=localDependencyFile, shell=True, env=buildEnvironment, cwd=buildLocation ) process.wait() # Now we transfer it to it's final home - establish the ssh connection privateKeyFile = os.path.join( os.path.expanduser('~'), 'Keys', 'cmake-dependencies.key') client = CommonUtils.establishSSHConnection( 'charlotte.kde.org', 'dependencymetadata', privateKeyFile ) # Bring up a SFTP session sftp = client.open_sftp() # Transfer it there sftp.put( localDependencyFilename, remoteDependencyFilename ) # All done, cleanup sftp.close() client.close() diff --git a/helpers/generate-dependency-diagram-data.py b/helpers/generate-dependency-diagram-data.py index 8a0a2e0..fc441eb 100755 --- a/helpers/generate-dependency-diagram-data.py +++ b/helpers/generate-dependency-diagram-data.py @@ -1,79 +1,79 @@ #!/usr/bin/python3 import os import sys import argparse import subprocess from helperslib import BuildSpecs, BuildSystem, CommonUtils, EnvironmentHandler, Packages # Parse the command line arguments we've been given parser = argparse.ArgumentParser(description='Utility to generate dependency diagram information for use by api.kde.org.') parser.add_argument('--product', type=str, required=True) parser.add_argument('--project', type=str, required=True) parser.add_argument('--branchGroup', type=str, required=True) parser.add_argument('--platform', type=str, required=True) parser.add_argument('--usingInstall', type=str, required=True) arguments = parser.parse_args() # Determine the environment we need to provide for the compilation process buildEnvironment = EnvironmentHandler.generateFor( installPrefix=arguments.usingInstall ) # Determine where our source code is checked out to and where we will be building it # We'll assume that the directory we're running from is where the sources are located sourcesLocation = os.getcwd() # Are we allowed to run? -# We only gather this metadata from the principal Linux platform, which at the moment is SUSEQt5.10 for everything -if arguments.platform != 'SUSEQt5.10': +# We only gather this metadata from the principal Linux platform, which at the moment is SUSEQt5.12 for everything +if arguments.platform != 'SUSEQt5.12': # Then there is nothing for us to do sys.exit(0) # First determine where we the data will be stored, both temporarily and on the server # As the API documentation can only be generated once, and we have the greatest capacity available for Linux we will use Linux dependency diagrams on api.kde.org. outputDirectory = os.path.join( sourcesLocation, 'dotdata' ) remoteStoragePath = os.path.join('/home/api/depdiagram-output/', Packages.nameForProject(arguments.product, arguments.project, arguments.branchGroup)) # Build up the command to run commandToRun = 'python "{0}/kapidox/src/depdiagram-prepare" -s "{1}" "{2}"' commandToRun = commandToRun.format( CommonUtils.scriptsBaseDirectory(), sourcesLocation, outputDirectory ) # Run the command, which will generate a pile of *.dot files for us process = subprocess.Popen( commandToRun, stdout=sys.stdout, stderr=sys.stderr, shell=True, env=buildEnvironment ) process.wait() # Do we have something to upload? if not os.path.exists(outputDirectory): # Then exit gracefully, nothing for us to do! 
sys.exit(0) # Connect to the server to upload the files privateKeyFile = os.path.join( os.path.expanduser('~'), 'Keys', 'api-access.key') client = CommonUtils.establishSSHConnection( 'zivo.kde.org', 'api', privateKeyFile ) # Bring up a SFTP session sftp = client.open_sftp() # Does our storage path exist? if not CommonUtils.sftpFileExists(sftp, remoteStoragePath): # Create it then! sftp.mkdir(remoteStoragePath) # Make sure it has been cleaned out of anything which is in there # This is necessary to ensure any dependency or component which has been dropped doesn't hang around unnecessarily fileListing = sftp.listdir(remoteStoragePath) for fileToRemove in fileListing: pathToRemove = os.path.join(remoteStoragePath, fileToRemove) sftp.remove(pathToRemove) # Upload the files we've just generated for fileToUpload in os.listdir(outputDirectory): # Determine the full local and remote paths fullLocalPath = os.path.join(outputDirectory, fileToUpload) fullRemotePath = os.path.join(remoteStoragePath, fileToUpload) # Upload it! sftp.put(fullLocalPath, fullRemotePath) # All done now, close the remote server connection sftp.close() client.close() # And bow out gracefully sys.exit(0) diff --git a/local-metadata/abi-compliance-checker.yaml b/local-metadata/abi-compliance-checker.yaml index 7ace6cd..5573d9b 100644 --- a/local-metadata/abi-compliance-checker.yaml +++ b/local-metadata/abi-compliance-checker.yaml @@ -1,44 +1,44 @@ # This file is used to specify the builds of abi-compliance-checker. # So far following settings are used: # # - gcc_options: # specify the gcc options to build of headers # # - skip_include_paths: # list pf paths to exclude from searching for headers # # - add_include_paths: # list of paths to search for headers # # for more information about the settings: abi-compliance-checker --info "*": general: gcc_options: - "-std=c++11" - "-fPIC" skip_include_paths: - /usr/lib/python3.6/site-packages/utils/fake_libc_include - /usr/include/clang/AST - /usr/lib64/clang/*/include add_include_paths: - /usr/lib64/qt5/mkspecs/linux-g++ createABIDumpFailHard: False checkABIDumpFailHard: False NoLibrariesFoundFail: False -# "SUSEQt5.10": # special settings for one platform +# "SUSEQt5.12": # special settings for one platform # gcc_options: # - something # - nextsetting # "kde/*": # special settings for all repos in kde (fnmatch) # general: # add_include_paths: # - special1 "kde/pim/akonadi-search": general: gcc_options: - "-std=c++11" - "-fPIC" - "-DQT_NO_KEYWORDS" diff --git a/local-metadata/product-definitions.yaml b/local-metadata/product-definitions.yaml index 05c6c94..9b89ee8 100644 --- a/local-metadata/product-definitions.yaml +++ b/local-metadata/product-definitions.yaml @@ -1,283 +1,283 @@ "Frameworks": includes: - repositories: - "frameworks/*" platforms: - - "SUSEQt5.10" - "SUSEQt5.12" + - "SUSEQt5.13" - "WindowsMSVCQt5.11" - "FreeBSDQt5.12" - repositories: - "frameworks/extra-cmake-modules" - "frameworks/karchive" - "frameworks/attica" - "frameworks/breeze-icons" - "frameworks/kcodecs" - "frameworks/kconfig" - "frameworks/kcoreaddons" - "frameworks/kguiaddons" - "frameworks/kconfig" - "frameworks/ki18n" - "frameworks/kirigami" - "frameworks/kwidgetsaddons" - "frameworks/threadweaver" - "frameworks/kpackage" - "frameworks/kunitconversion" - "frameworks/kimageformats" - "frameworks/kjobwidgets" - "frameworks/kcompletion" - "frameworks/kwindowsystem" - "frameworks/prison" - "frameworks/knotifications" - "frameworks/kholidays" - "frameworks/kitemmodels" - "frameworks/kitemviews" - 
"frameworks/kplotting" - "frameworks/syndication" platforms: - "AndroidQt5.12" branchGroups: - "kf5-qt5" notifications: - match: "frameworks/*" to: "kde-frameworks-devel@kde.org" failuresOnly: false "Plasma": includes: - repositories: - "kde/workspace/*" - "frameworks/kwindowsystem" - "frameworks/kwayland" - "frameworks/plasma-framework" platforms: - "SUSEQt5.12" - "FreeBSDQt5.12" - repositories: - "kde/workspace/drkonqi" platforms: - "WindowsMSVCQt5.11" branchGroups: - "kf5-qt5" - "stable-kf5-qt5" notifications: - match: "frameworks/*" to: "plasma-devel@kde.org" failuresOnly: true - match: "kde/workspace/*" to: "plasma-devel@kde.org" failuresOnly: true "Applications": includes: - repositories: - "kde/applications/*" - "kde/kdeaccessibility/*" - "kde/kdeadmin/*" - "kde/kdeedu/*" - "kde/kdegames/*" - "kde/kdegraphics/*" - "kde/kdemultimedia/*" - "kde/kdenetwork/*" - "kde/kdesdk/*" - "kde/kdeutils/*" - "kde/kdewebdev/*" - "kde/pim/*" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - "FreeBSDQt5.12" - repositories: - "kde/applications/baloo-widgets" - "kde/applications/dolphin" - "kde/applications/kate" - "kde/kdeedu/kalgebra" - "kde/kdeedu/kbruch" - "kde/kdeedu/kig" - "kde/kdeedu/kmplot" - "kde/kdeedu/kturtle" - "kde/kdeedu/kwordquiz" - "kde/kdeedu/marble" - "kde/kdeedu/minuet" - "kde/kdeedu/parley" - "kde/kdegraphics/libs/libkexiv2" - "kde/kdegraphics/kdegraphics-mobipocket" - "kde/kdegraphics/kolourpaint" - "kde/kdegraphics/okular" - "kde/kdenetwork/kio-extras" - "kde/kdesdk/umbrello" - "kde/kdeutils/filelight" - "kde/pim/akonadi" - "kde/pim/kcalcore" - "kde/pim/kcalutils" - 'kde/pim/kidentitymanagement' - 'kde/pim/kcontacts' - "kde/pim/kpimtextedit" platforms: - "WindowsMSVCQt5.11" branchGroups: - "kf5-qt5" - "stable-kf5-qt5" notifications: - match: "kde/pim/*" to: "kde-pim@kde.org" failuresOnly: true - match: "kde/applications/dolphin" to: "kfm-devel@kde.org" failuresOnly: false - match: "kde/applications/konsole" to: "konsole-devel@kde.org" failuresOnly: true - match: "kde/kdeutils/*" to: "kde-utils-devel@kde.org" failuresOnly: false - match: "kde/kdegraphics/okular" to: "okular-devel@kde.org" failuresOnly: false - match: "kde/kdeedu/cantor" to: "alexander.semke@web.de" failuresOnly: false - match: "kde/kdeedu/cantor" to: "warquark@gmail.com" failuresOnly: false - match: "kde/kdemultimedia/kdenlive" to: "kdenlive@kde.org" failuresOnly: false "Extragear": includes: - repositories: - "extragear/graphics/digikam" - "extragear/network/choqok" - "extragear/network/kio-gdrive" - "extragear/office/kbibtex" - "extragear/pim/zanshin" - "extragear/sdk/clazy" - "extragear/utils/kronometer" - "kdereview/rust-qt-binding-generator" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - repositories: - "extragear/base/kwebkitpart" - "extragear/base/wacomtablet" - "extragear/edu/gcompris" - "extragear/games/knights" - "extragear/graphics/kgraphviewer" - "extragear/graphics/kphotoalbum" - "extragear/graphics/krita" - "extragear/libs/kuserfeedback" - "extragear/libs/libqaccessibilityclient" - "extragear/libs/pulseaudio-qt" - "extragear/multimedia/amarok" - "extragear/multimedia/kaffeine" - "extragear/network/falkon" - "extragear/network/libktorrent" - "extragear/network/ktorrent" - "extragear/office/tellico" - "extragear/pim/ring-kde" - "extragear/utils/keurocalc" - "extragear/utils/krusader" - "extragear/utils/kio-stash" - "extragear/utils/kmarkdownwebview" - "extragear/utils/plasma-mycroft" - "kdereview/plasma-active-window-control" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - "FreeBSDQt5.12" - repositories: - 
"extragear/base/atcore" - "extragear/network/kdeconnect-kde" - "extragear/network/konversation" - "extragear/edu/labplot" - "extragear/edu/kstars" - "extragear/multimedia/elisa" - "extragear/office/alkimia" - "extragear/office/kile" - "extragear/office/kmymoney" - "extragear/office/skrooge" - "extragear/utils/krename" - "extragear/utils/okteta" - "extragear/utils/kdiff3" - "kdereview/rkward" - "playground/base/peruse" - "playground/graphics/washipad" - "playground/network/ruqola" - "playground/pim/ktimetracker" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - "WindowsMSVCQt5.11" - "FreeBSDQt5.12" branchGroups: - "kf5-qt5" - "stable-kf5-qt5" notifications: - match: "extragear/utils/kronometer" to: "kde-utils-devel@kde.org" failuresOnly: false - match: "extragear/graphics/digikam" to: "digikam-devel@kde.org" failuresOnly: false - match: "extragear/network/kio-gdrive" to: "kfm-devel@kde.org" failuresOnly: false - match: "extragear/sdk/clazy" to: "smartins@kde.org" failuresOnly: false - match: "extragear/pim/zanshin" to: "zanshin-devel@kde.org" failuresOnly: false - match: "extragear/edu/labplot" to: "stefan.gerlach@uni-konstanz.de" failuresOnly: false - match: "extragear/edu/labplot" to: "alexander.semke@web.de" failuresOnly: false - match: "extragear/edu/kstars" to: "csaba.kertesz@gmail.com" failuresOnly: false - match: "extragear/edu/kstars" to: "mutlaqja@ikarustech.com" failuresOnly: false - match: "extragear/network/kdeconnect-kde" to: "kdeconnect@kde.org" failuresOnly: false - match: "extragear/utils/kdiff3" to: "reeves87@gmail.com" failuresOnly: true - match: "extragear/multimedia/elisa" to: "elisa@kde.org" failuresOnly: false - match: "playground/pim/ktimetracker" to: "aspotashev@gmail.com" failuresOnly: true "KDevelop": includes: - repositories: - "extragear/kdevelop/*" - "playground/devtools/kdev-*" - "playground/devtools/plugins/kdev-*" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - "FreeBSDQt5.12" - "WindowsMSVCQt5.11" branchGroups: - "kf5-qt5" - "stable-kf5-qt5" "Calligra": includes: - repositories: - "calligra/calligra" - "calligra/kexi" - "calligra/calligraplan" - "extragear/libs/kdb" - "extragear/libs/kproperty" - "extragear/libs/kreport" - "extragear/graphics/kdiagram" platforms: - - "SUSEQt5.10" + - "SUSEQt5.12" - "FreeBSDQt5.12" - "WindowsMSVCQt5.11" branchGroups: - "kf5-qt5" - "stable-kf5-qt5" diff --git a/local-metadata/project-ignore-rules.yaml b/local-metadata/project-ignore-rules.yaml index b6aeef5..a5dc566 100644 --- a/local-metadata/project-ignore-rules.yaml +++ b/local-metadata/project-ignore-rules.yaml @@ -1,64 +1,61 @@ -'SUSEQt5.10': -- 'kdesupport/qca' - -'SUSEQt5.11': +'SUSEQt5.12': - 'kdesupport/qca' -'SUSEQt5.12': +'SUSEQt5.13': - 'kdesupport/qca' 'WindowsMSVCQt5.11': - 'kdesupport/polkit-qt-1' - 'kdesupport/qca' - 'frameworks/networkmanager-qt' - 'frameworks/modemmanager-qt' - 'frameworks/bluez-qt' - 'frameworks/prison' - 'frameworks/kdesu' - 'frameworks/kpty' - 'frameworks/kwayland' - 'frameworks/kactivities-stats' - 'kde/workspace/libksysguard' - 'kde/kdenetwork/kaccounts-integration' - 'extragear/libs/pulseaudio-qt' 'FreeBSDQt5.12': - 'kdesupport/polkit-qt-1' - 'frameworks/networkmanager-qt' - 'frameworks/modemmanager-qt' - 'kde/workspace/plymouth-kcm' - 'kde/workspace/plasma-nm' - 'kde/workspace/powerdevil' - 'kde/workspace/plasma-vault' - 'kde/kdeedu/marble' - 'kde/kdegraphics/libs/libkgeomap' - 'kde/kdenetwork/kio-extras' 'AndroidQt5.12': - 'frameworks/polkit-qt-1' # requires glib2 - 'frameworks/kwallet' # requires gpgme - 'frameworks/kactivities' # 
requires boost - 'frameworks/kactivities-stats' # requires boost - 'frameworks/khtml' # requires kio - 'frameworks/kdelibs4support' # requires kio - 'frameworks/baloo' #kfilemetadata - 'frameworks/modemmanager-qt' - 'frameworks/networkmanager-qt' - 'frameworks/purpose' # requires kio - 'frameworks/breeze-icons' # shouldn't be installed like we do for xdg, we have an ad-hoc solution in kirigami - 'frameworks/kjs' # looks like we need to switch to clang for this one >> https://github.com/android-ndk/ndk/issues/442 - 'frameworks/kpty' # Doesn't build, didn't investigate: kpty.cpp:587:35: error: 'getsid' was not declared in this scope - 'frameworks/kdesu' # requires kpty - 'frameworks/plasma-framework' # requires kactivities - 'frameworks/krunner' # requires plasma - 'frameworks/kwayland' # requires wayland - 'frameworks/kdoctools' # not very useful for Android - 'frameworks/kjsembed' # requires kjs - 'frameworks/kdesignerplugin' # requires kdoctools - 'frameworks/kdewebkit' # we don't have QtWebKitWidgets on the image # to look into: - 'kde/kdegames/libkdegames' # needs openal - 'kde/kdegraphics/libs/libkexiv2' #has exiv2 external dependency - 'frameworks/kfilemetadata' # FIXME needs #include which can't be found ATM - 'kde/pim/kcalcore' # libical master doesn't build diff --git a/pipeline-templates/Frameworks/SUSEQt5.10.template b/pipeline-templates/Frameworks/SUSEQt5.10.template deleted file mode 120000 index d52ac81..0000000 --- a/pipeline-templates/Frameworks/SUSEQt5.10.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.11.template \ No newline at end of file diff --git a/pipeline-templates/Frameworks/SUSEQt5.11.template b/pipeline-templates/Frameworks/SUSEQt5.11.template deleted file mode 100644 index 60364a0..0000000 --- a/pipeline-templates/Frameworks/SUSEQt5.11.template +++ /dev/null @@ -1,225 +0,0 @@ -// Provisionally mark the build as successful -currentBuild.result = 'SUCCESS' - -// Request a node to be allocated to us -node( currentPlatform ) { -// We want Timestamps on everything -timestamps { - // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed - catchError { - // First Thing: Checkout Sources - stage('Checkout Sources') { - - // Actual Application Sources - checkout changelog: true, poll: true, scm: [ - $class: 'GitSCM', - branches: [[name: branchToBuild]], - browser: [$class: 'CGit', repoUrl: browserUrl], - extensions: [[$class: 'CloneOption', timeout: 120]], - userRemoteConfigs: [[url: repositoryUrl]] - ] - - // Our CI scripts - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] - ] - - // Projects metadata and next generation dependency metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] - ] - - // Dependency Metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] - ] - - // KApiDox: For api.kde.org metadata extraction - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] - ] - - // kde-dev-scripts: For packager metadata extraction - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] - ] - - } - - // Now Prepare to Build: Get the dependencies ready - stage('Setup Dependencies') { - // Now we can determine what our dependencies are - // Then update to the latest version of the dependencies available from the master server - // Finally extract all of those dependencies in turn into the given 'installTo' directory - sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" - } - - // Now we can configure our build - stage('Configuring Build') { - // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc - sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" - } - - // Finally we can build it! 
(Once again, through a helper) - stage('Compiling') { - // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with - sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - } - - // Now we can run our tests - stage('Running Tests') { - // Run the unit tests for this project - // Tests are run in a basic environment (X, DBus) - sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - - // Collect our results - junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml' - } - - // Now ensure that it installs.... - stage('Installing') { - // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo' - // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step - sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'" - } - - // Looks like it built okay - let's capture this for later use - // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org - stage('Capturing Installation') { - // First we create a tar archive of the installation which was diverted - // Then we upload a copy of that to the master server and have it publish the new archive - // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node - sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'" - - // Now we extract the CMake metadata and upload that to the appropriate hosts - sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - } - - // Final thing to do: some code quality checks - stage('Checking Code Quality') { - - // Perform Appstream Compliance Checks - sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'" - - // Gather ABI Reference information for later checking - sh """ - curl '$BUILD_URL/consoleText' -o currentBuildLog.txt - python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true - """ - - // Save the ABI build logs to review if necessary - archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true - // Save the input for ACC for building abi 
dumps locally - archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true - - // Now perform the ABI Compatibility checks - // This tool will produce reports stored at compat_reports/ which we will also need to capture - sh """ - python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true - """ - // Save the ABI Compatibility reports for developers to review if necessary - archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true - - // Save the ABI Compatibility results yaml file - archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true - - // Platform Enablement Checks - // Frameworks have a metadata file which specifies the platforms it supports and should be built on - // This check compares that metadata file against the records held by the CI system - sh """ - touch PlatformCheckOutput.txt - if [[ -e metainfo.yaml ]]; then python3 ci-tooling/helpers/check-platform.py '$WORKSPACE/metainfo.yaml' &> PlatformCheckOutput.txt; fi - """ - - // If the platform check indicates there are missing platforms then we should flag the build as unstable - // We start this process by reading the output of the check command - def platformCheckResult = readFile "${env.WORKSPACE}/PlatformCheckOutput.txt" - // Then we check to see if it had anything in it - it will be empty if everything is okay - if( platformCheckResult != '' ) { - // If it does, then mark the build as unstable - currentBuild.result = 'UNSTABLE' - // We also print the check results so it can be examined easily - echo platformCheckResult - } - - // cppcheck is not supported by Pipeline at the moment, so we don't run that for now - // See https://issues.jenkins-ci.org/browse/JENKINS-35096 - - // Perform Cobertura Processing - // First, run the LCov extraction - sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}" - - // Collect the results from the LCov extraction - step([ - $class: 'CoberturaPublisher', - autoUpdateHealth: false, autoUpdateStability: false, - coberturaReportFile: 'CoberturaLcovResults.xml', - failNoReports: false, failUnhealthy: false, failUnstable: false, - maxNumberOfBuilds: 0, - onlyStable: false, - zoomCoverageChart: false - ]) - - // Scan the logs and publish a warnings report - warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**" - - } - - } - - // Let's determine if we need to send out notifications - // What happened in our previous build? 
- def previousResult = currentBuild.previousBuild?.result - // If our condition has changed, is FAILURE or UNSTABLE then we want to send an email - if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { - // Start constructing the list of our recipients - // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications - def mailTo = [ unstableBuildEmails ] - - // If the build was a solid failure (either now or previously) then notify those who want to know about failures only - if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) { - // Add them to the list - mailTo << buildFailureEmails - } - - // If someone kicked this job off, they're presumably interested as well - mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) - // We always want to notify our dashboard as well - mailTo << "kde-dashboard@kde.org" - - // Finalise the list of recipients - mailTo = mailTo.join(',') - - // Send the email now - emailext( - to: mailTo, - body: '${JELLY_SCRIPT,template="html_gmail"}', - mimeType: 'text/html', - subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', - attachLog: false - ) - } - - // IRC Notifications are currently not supported by Pipeline - // See https://issues.jenkins-ci.org/browse/JENKINS-33922 - // We can probably workaround this using Pursuivant and the emails Jenkins sends out - // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes - -} -} diff --git a/pipeline-templates/Frameworks/SUSEQt5.12.template b/pipeline-templates/Frameworks/SUSEQt5.12.template deleted file mode 120000 index eb67470..0000000 --- a/pipeline-templates/Frameworks/SUSEQt5.12.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.10.template \ No newline at end of file diff --git a/pipeline-templates/Frameworks/SUSEQt5.12.template b/pipeline-templates/Frameworks/SUSEQt5.12.template new file mode 100644 index 0000000..60364a0 --- /dev/null +++ b/pipeline-templates/Frameworks/SUSEQt5.12.template @@ -0,0 +1,225 @@ +// Provisionally mark the build as successful +currentBuild.result = 'SUCCESS' + +// Request a node to be allocated to us +node( currentPlatform ) { +// We want Timestamps on everything +timestamps { + // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed + catchError { + // First Thing: Checkout Sources + stage('Checkout Sources') { + + // Actual Application Sources + checkout changelog: true, poll: true, scm: [ + $class: 'GitSCM', + branches: [[name: branchToBuild]], + browser: [$class: 'CGit', repoUrl: browserUrl], + extensions: [[$class: 'CloneOption', timeout: 120]], + userRemoteConfigs: [[url: repositoryUrl]] + ] + + // Our CI scripts + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] + ] + + // Projects metadata and next generation dependency metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] + ] + + // Dependency Metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] + ] + + // KApiDox: For api.kde.org metadata extraction + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] + ] + + // kde-dev-scripts: For packager metadata extraction + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] + ] + + } + + // Now Prepare to Build: Get the dependencies ready + stage('Setup Dependencies') { + // Now we can determine what our dependencies are + // Then update to the latest version of the dependencies available from the master server + // Finally extract all of those dependencies in turn into the given 'installTo' directory + sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" + } + + // Now we can configure our build + stage('Configuring Build') { + // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc + sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" + } + + // Finally we can build it! 
(Once again, through a helper) + stage('Compiling') { + // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with + sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + } + + // Now we can run our tests + stage('Running Tests') { + // Run the unit tests for this project + // Tests are run in a basic environment (X, DBus) + sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + + // Collect our results + junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml' + } + + // Now ensure that it installs.... + stage('Installing') { + // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo' + // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step + sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'" + } + + // Looks like it built okay - let's capture this for later use + // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org + stage('Capturing Installation') { + // First we create a tar archive of the installation which was diverted + // Then we upload a copy of that to the master server and have it publish the new archive + // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node + sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'" + + // Now we extract the CMake metadata and upload that to the appropriate hosts + sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + } + + // Final thing to do: some code quality checks + stage('Checking Code Quality') { + + // Perform Appstream Compliance Checks + sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'" + + // Gather ABI Reference information for later checking + sh """ + curl '$BUILD_URL/consoleText' -o currentBuildLog.txt + python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true + """ + + // Save the ABI build logs to review if necessary + archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true + // Save the input for ACC for building abi 
dumps locally + archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true + + // Now perform the ABI Compatibility checks + // This tool will produce reports stored at compat_reports/ which we will also need to capture + sh """ + python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true + """ + // Save the ABI Compatibility reports for developers to review if necessary + archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true + + // Save the ABI Compatibility results yaml file + archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true + + // Platform Enablement Checks + // Frameworks have a metadata file which specifies the platforms it supports and should be built on + // This check compares that metadata file against the records held by the CI system + sh """ + touch PlatformCheckOutput.txt + if [[ -e metainfo.yaml ]]; then python3 ci-tooling/helpers/check-platform.py '$WORKSPACE/metainfo.yaml' &> PlatformCheckOutput.txt; fi + """ + + // If the platform check indicates there are missing platforms then we should flag the build as unstable + // We start this process by reading the output of the check command + def platformCheckResult = readFile "${env.WORKSPACE}/PlatformCheckOutput.txt" + // Then we check to see if it had anything in it - it will be empty if everything is okay + if( platformCheckResult != '' ) { + // If it does, then mark the build as unstable + currentBuild.result = 'UNSTABLE' + // We also print the check results so it can be examined easily + echo platformCheckResult + } + + // cppcheck is not supported by Pipeline at the moment, so we don't run that for now + // See https://issues.jenkins-ci.org/browse/JENKINS-35096 + + // Perform Cobertura Processing + // First, run the LCov extraction + sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}" + + // Collect the results from the LCov extraction + step([ + $class: 'CoberturaPublisher', + autoUpdateHealth: false, autoUpdateStability: false, + coberturaReportFile: 'CoberturaLcovResults.xml', + failNoReports: false, failUnhealthy: false, failUnstable: false, + maxNumberOfBuilds: 0, + onlyStable: false, + zoomCoverageChart: false + ]) + + // Scan the logs and publish a warnings report + warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**" + + } + + } + + // Let's determine if we need to send out notifications + // What happened in our previous build? 
+ def previousResult = currentBuild.previousBuild?.result + // If our condition has changed, is FAILURE or UNSTABLE then we want to send an email + if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { + // Start constructing the list of our recipients + // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications + def mailTo = [ unstableBuildEmails ] + + // If the build was a solid failure (either now or previously) then notify those who want to know about failures only + if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) { + // Add them to the list + mailTo << buildFailureEmails + } + + // If someone kicked this job off, they're presumably interested as well + mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) + // We always want to notify our dashboard as well + mailTo << "kde-dashboard@kde.org" + + // Finalise the list of recipients + mailTo = mailTo.join(',') + + // Send the email now + emailext( + to: mailTo, + body: '${JELLY_SCRIPT,template="html_gmail"}', + mimeType: 'text/html', + subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', + attachLog: false + ) + } + + // IRC Notifications are currently not supported by Pipeline + // See https://issues.jenkins-ci.org/browse/JENKINS-33922 + // We can probably workaround this using Pursuivant and the emails Jenkins sends out + // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes + +} +} diff --git a/pipeline-templates/Frameworks/SUSEQt5.13.template b/pipeline-templates/Frameworks/SUSEQt5.13.template new file mode 120000 index 0000000..d9efd28 --- /dev/null +++ b/pipeline-templates/Frameworks/SUSEQt5.13.template @@ -0,0 +1 @@ +SUSEQt5.12.template \ No newline at end of file diff --git a/pipeline-templates/SUSEQt5.10.template b/pipeline-templates/SUSEQt5.10.template deleted file mode 100644 index cc4a754..0000000 --- a/pipeline-templates/SUSEQt5.10.template +++ /dev/null @@ -1,205 +0,0 @@ -// Provisionally mark the build as successful -currentBuild.result = 'SUCCESS' - -// Request a node to be allocated to us -node( currentPlatform ) { -// We want Timestamps on everything -timestamps { - // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed - catchError { - // First Thing: Checkout Sources - stage('Checkout Sources') { - - // Actual Application Sources - checkout changelog: true, poll: true, scm: [ - $class: 'GitSCM', - branches: [[name: branchToBuild]], - browser: [$class: 'CGit', repoUrl: browserUrl], - extensions: [[$class: 'CloneOption', timeout: 120]], - userRemoteConfigs: [[url: repositoryUrl]] - ] - - // Our CI scripts - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] - ] - - // Projects metadata and next generation dependency metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] - ] - - // Dependency Metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] - ] - - // KApiDox: For api.kde.org metadata extraction - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] - ] - - // kde-dev-scripts: For packager metadata extraction - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] - ] - - } - - // Now Prepare to Build: Get the dependencies ready - stage('Setup Dependencies') { - // Now we can determine what our dependencies are - // Then update to the latest version of the dependencies available from the master server - // Finally extract all of those dependencies in turn into the given 'installTo' directory - sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" - } - - // Now we can configure our build - stage('Configuring Build') { - // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc - sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" - } - - // Finally we can build it! (Once again, through a helper) - stage('Compiling') { - // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with - sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - } - - // Now ensure that it installs.... 
- stage('Installing') { - // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo' - // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step - sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'" - } - - // Looks like it built okay - let's capture this for later use - // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org - stage('Capturing Installation') { - // First we create a tar archive of the installation which was diverted - // Then we upload a copy of that to the master server and have it publish the new archive - // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node - sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'" - - // Now we extract the CMake metadata and upload that to the appropriate hosts - sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - } - - // Now we can run our tests - stage('Running Tests') { - // Run the unit tests for this project - // Tests are run in a basic environment (X, DBus) - sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" - - // Collect our results - junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml' - } - - // Final thing to do: some code quality checks - stage('Checking Code Quality') { - - // Perform Appstream Compliance Checks - sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'" - - // Gather ABI Reference information for later checking - sh """ - curl '$BUILD_URL/consoleText' -o currentBuildLog.txt - python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true - """ - // Save the ABI build logs to review if necessary - archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true - // Save the input for ACC for building abi dumps locally - archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true - - // Now perform the ABI Compatibility checks - // This tool will produce reports stored at compat_reports/ which we will also need to capture - sh """ - python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} 
--environment production || true - """ - // Save the ABI Compatibility reports for developers to review if necessary - archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true - - // Save the ABI Compatibility results yaml file - archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true - - // cppcheck is not supported by Pipeline at the moment, so we don't run that for now - // See https://issues.jenkins-ci.org/browse/JENKINS-35096 - - // Perform Cobertura Processing - // First, run the LCov extraction - sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}" - - // Collect the results from the LCov extraction - step([ - $class: 'CoberturaPublisher', - autoUpdateHealth: false, autoUpdateStability: false, - coberturaReportFile: 'CoberturaLcovResults.xml', - failNoReports: false, failUnhealthy: false, failUnstable: false, - maxNumberOfBuilds: 0, - onlyStable: false, - zoomCoverageChart: false - ]) - - // Scan the logs and publish a warnings report - warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**" - - } - - } - - // Let's determine if we need to send out notifications - // What happened in our previous build? - def previousResult = currentBuild.previousBuild?.result - // If our condition has changed, is FAILURE or UNSTABLE then we want to send an email - if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { - // Start constructing the list of our recipients - // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications - def mailTo = [ unstableBuildEmails ] - - // If the build was a solid failure (either now or previously) then notify those who want to know about failures only - if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) { - // Add them to the list - mailTo << buildFailureEmails - } - - // If someone kicked this job off, they're presumably interested as well - mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) - // We always want to notify our dashboard as well - mailTo << "kde-dashboard@kde.org" - - // Finalise the list of recipients - mailTo = mailTo.join(',') - - // Send the email now - emailext( - to: mailTo, - body: '${JELLY_SCRIPT,template="html_gmail"}', - mimeType: 'text/html', - subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', - attachLog: false - ) - } - - // IRC Notifications are currently not supported by Pipeline - // See https://issues.jenkins-ci.org/browse/JENKINS-33922 - // We can probably workaround this using Pursuivant and the emails Jenkins sends out - // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes - -} -} diff --git a/pipeline-templates/SUSEQt5.11.template b/pipeline-templates/SUSEQt5.11.template deleted file mode 120000 index eb67470..0000000 --- a/pipeline-templates/SUSEQt5.11.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.10.template \ No newline at end of file diff --git a/pipeline-templates/SUSEQt5.12.template b/pipeline-templates/SUSEQt5.12.template deleted file mode 120000 index eb67470..0000000 --- 
a/pipeline-templates/SUSEQt5.12.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.10.template \ No newline at end of file diff --git a/pipeline-templates/SUSEQt5.12.template b/pipeline-templates/SUSEQt5.12.template new file mode 100644 index 0000000..cc4a754 --- /dev/null +++ b/pipeline-templates/SUSEQt5.12.template @@ -0,0 +1,205 @@ +// Provisionally mark the build as successful +currentBuild.result = 'SUCCESS' + +// Request a node to be allocated to us +node( currentPlatform ) { +// We want Timestamps on everything +timestamps { + // We want to catch any errors that occur to allow us to send out notifications (ie. emails) if needed + catchError { + // First Thing: Checkout Sources + stage('Checkout Sources') { + + // Actual Application Sources + checkout changelog: true, poll: true, scm: [ + $class: 'GitSCM', + branches: [[name: branchToBuild]], + browser: [$class: 'CGit', repoUrl: browserUrl], + extensions: [[$class: 'CloneOption', timeout: 120]], + userRemoteConfigs: [[url: repositoryUrl]] + ] + + // Our CI scripts + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] + ] + + // Projects metadata and next generation dependency metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] + ] + + // Dependency Metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] + ] + + // KApiDox: For api.kde.org metadata extraction + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] + ] + + // kde-dev-scripts: For packager metadata extraction + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] + ] + + } + + // Now Prepare to Build: Get the dependencies ready + stage('Setup Dependencies') { + // Now we can determine what our dependencies are + // Then update to the latest version of the dependencies available from the master server + // Finally extract all of those dependencies in turn into the given 'installTo' directory + sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" + } + + // Now we can configure our build + stage('Configuring Build') { + // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc + sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} 
--installTo '$HOME/install-prefix/'" + } + + // Finally we can build it! (Once again, through a helper) + stage('Compiling') { + // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with + sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + } + + // Now ensure that it installs.... + stage('Installing') { + // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo' + // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step + sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'" + } + + // Looks like it built okay - let's capture this for later use + // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org + stage('Capturing Installation') { + // First we create a tar archive of the installation which was diverted + // Then we upload a copy of that to the master server and have it publish the new archive + // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node + sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'" + + // Now we extract the CMake metadata and upload that to the appropriate hosts + sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + } + + // Now we can run our tests + stage('Running Tests') { + // Run the unit tests for this project + // Tests are run in a basic environment (X, DBus) + sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'" + + // Collect our results + junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml' + } + + // Final thing to do: some code quality checks + stage('Checking Code Quality') { + + // Perform Appstream Compliance Checks + sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'" + + // Gather ABI Reference information for later checking + sh """ + curl '$BUILD_URL/consoleText' -o currentBuildLog.txt + python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true + """ + // Save the ABI build logs to review if necessary + archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, 
allowEmptyArchive: true + // Save the input for ACC for building abi dumps locally + archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true + + // Now perform the ABI Compatibility checks + // This tool will produce reports stored at compat_reports/ which we will also need to capture + sh """ + python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true + """ + // Save the ABI Compatibility reports for developers to review if necessary + archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true + + // Save the ABI Compatibility results yaml file + archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true + + // cppcheck is not supported by Pipeline at the moment, so we don't run that for now + // See https://issues.jenkins-ci.org/browse/JENKINS-35096 + + // Perform Cobertura Processing + // First, run the LCov extraction + sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}" + + // Collect the results from the LCov extraction + step([ + $class: 'CoberturaPublisher', + autoUpdateHealth: false, autoUpdateStability: false, + coberturaReportFile: 'CoberturaLcovResults.xml', + failNoReports: false, failUnhealthy: false, failUnstable: false, + maxNumberOfBuilds: 0, + onlyStable: false, + zoomCoverageChart: false + ]) + + // Scan the logs and publish a warnings report + warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**" + + } + + } + + // Let's determine if we need to send out notifications + // What happened in our previous build? 
+ def previousResult = currentBuild.previousBuild?.result + // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email + if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { + // Start constructing the list of our recipients + // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications + def mailTo = [ unstableBuildEmails ] + + // If the build was a solid failure (either now or previously) then notify those who want to know about failures only + if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) { + // Add them to the list + mailTo << buildFailureEmails + } + + // If someone kicked this job off, they're presumably interested as well + mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) + // We always want to notify our dashboard as well + mailTo << "kde-dashboard@kde.org" + + // Finalise the list of recipients + mailTo = mailTo.join(',') + + // Send the email now + emailext( + to: mailTo, + body: '${JELLY_SCRIPT,template="html_gmail"}', + mimeType: 'text/html', + subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', + attachLog: false + ) + } + + // IRC Notifications are currently not supported by Pipeline + // See https://issues.jenkins-ci.org/browse/JENKINS-33922 + // We can probably work around this using Pursuivant and the emails Jenkins sends out + // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes + +} +} diff --git a/pipeline-templates/SUSEQt5.13.template b/pipeline-templates/SUSEQt5.13.template new file mode 120000 index 0000000..d9efd28 --- /dev/null +++ b/pipeline-templates/SUSEQt5.13.template @@ -0,0 +1 @@ +SUSEQt5.12.template \ No newline at end of file diff --git a/pipeline-templates/dependency-build/SUSEQt5.10.template b/pipeline-templates/dependency-build/SUSEQt5.10.template deleted file mode 100644 index 7868fb6..0000000 --- a/pipeline-templates/dependency-build/SUSEQt5.10.template +++ /dev/null @@ -1,73 +0,0 @@ -// Request a node to be allocated to us -node( currentPlatform ) { -// We want Timestamps on everything -timestamps { - // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed - catchError { - // First Thing: Checkout Sources - stage('Checkout Sources') { - - // Our CI scripts - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] - ] - - // Projects metadata and next generation dependency metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] - ] - - // Dependency Metadata - checkout changelog: false, poll: false, scm: [ - $class: 'GitSCM', - branches: [[name: 'master']], - extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], - userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] - ] - - } - - // Now we run the Product Dependency Build Process - stage('Build Product Dependencies') { - // This script will do the following: - // 1) Determine what is in this Product - // 2) Determine what those repositories depend on - // 3) Determine what dependencies are outside of this Product - // 4) Sort those dependencies into an appropriate order to build them - // 5) Checkout, Configure, Compile, Install and Capture the Installation each of those dependencies in turn - // We can't do this as Pipeline steps unfortunately (at least not easily) - // Tests and Other Quality Tests won't be run during this process - // The results of this process are only intended to be used as part of the base of this Product, so don't need testing - sh "python3 -u ci-tooling/helpers/build-product-dependencies.py --product ${productName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" - } - } - - // Let's determine if we need to send out notifications - // What happened in our previous build? 
- def previousResult = currentBuild.previousBuild?.result - // If our condition has changed, is FAILURE or UNSTABLE then we want to send an email - if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { - // Construct the list of our recipients - these people always want to be notified about Dependency Build jobs - def mailTo = [] - // If someone kicked this job off, they're presumably interested as well - mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) - // Finalise the list of recipients - mailTo = mailTo.join(',') - - // Send the email now - emailext( - to: mailTo, - body: '${JELLY_SCRIPT,template="html_gmail"}', - mimeType: 'text/html', - subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', - attachLog: false - ) - } -} -} diff --git a/pipeline-templates/dependency-build/SUSEQt5.11.template b/pipeline-templates/dependency-build/SUSEQt5.11.template deleted file mode 120000 index eb67470..0000000 --- a/pipeline-templates/dependency-build/SUSEQt5.11.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.10.template \ No newline at end of file diff --git a/pipeline-templates/dependency-build/SUSEQt5.12.template b/pipeline-templates/dependency-build/SUSEQt5.12.template deleted file mode 120000 index eb67470..0000000 --- a/pipeline-templates/dependency-build/SUSEQt5.12.template +++ /dev/null @@ -1 +0,0 @@ -SUSEQt5.10.template \ No newline at end of file diff --git a/pipeline-templates/dependency-build/SUSEQt5.12.template b/pipeline-templates/dependency-build/SUSEQt5.12.template new file mode 100644 index 0000000..7868fb6 --- /dev/null +++ b/pipeline-templates/dependency-build/SUSEQt5.12.template @@ -0,0 +1,73 @@ +// Request a node to be allocated to us +node( currentPlatform ) { +// We want Timestamps on everything +timestamps { + // We want to catch any errors that occur to allow us to send out notifications (ie. 
emails) if needed + catchError { + // First Thing: Checkout Sources + stage('Checkout Sources') { + + // Our CI scripts + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] + ] + + // Projects metadata and next generation dependency metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] + ] + + // Dependency Metadata + checkout changelog: false, poll: false, scm: [ + $class: 'GitSCM', + branches: [[name: 'master']], + extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], + userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] + ] + + } + + // Now we run the Product Dependency Build Process + stage('Build Product Dependencies') { + // This script will do the following: + // 1) Determine what is in this Product + // 2) Determine what those repositories depend on + // 3) Determine what dependencies are outside of this Product + // 4) Sort those dependencies into an appropriate order to build them + // 5) Checkout, Configure, Compile, Install and Capture the Installation of each of those dependencies in turn + // We can't do this as Pipeline steps unfortunately (at least not easily) + // Tests and other quality checks won't be run during this process + // The results of this process are only intended to be used as part of the base of this Product, so they don't need testing + sh "python3 -u ci-tooling/helpers/build-product-dependencies.py --product ${productName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'" + } + } + + // Let's determine if we need to send out notifications + // What happened in our previous build? + def previousResult = currentBuild.previousBuild?.result + // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email + if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) { + // Construct the list of our recipients - these people always want to be notified about Dependency Build jobs + def mailTo = [] + // If someone kicked this job off, they're presumably interested as well + mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] ) + // Finalise the list of recipients + mailTo = mailTo.join(',') + + // Send the email now + emailext( + to: mailTo, + body: '${JELLY_SCRIPT,template="html_gmail"}', + mimeType: 'text/html', + subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!', + attachLog: false + ) + } +} +} diff --git a/pipeline-templates/dependency-build/SUSEQt5.13.template b/pipeline-templates/dependency-build/SUSEQt5.13.template new file mode 120000 index 0000000..d9efd28 --- /dev/null +++ b/pipeline-templates/dependency-build/SUSEQt5.13.template @@ -0,0 +1 @@ +SUSEQt5.12.template \ No newline at end of file
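Editor's note: the two SUSEQt5.13.template entries above are git symlinks (mode 120000) whose blob content is just the string "SUSEQt5.12.template", so the new platform reuses the SUSEQt5.12 pipeline logic verbatim; that shared content is also why both symlinks show the same d9efd28 index. A minimal sketch of how such an alias could be created locally, assuming only the paths that appear in this change (everything else is illustrative, not part of the ci-tooling scripts):

#!/usr/bin/env python3
# Illustrative sketch only: create platform template aliases as relative symlinks,
# mirroring the SUSEQt5.13.template -> SUSEQt5.12.template links added in this change.
import os

for directory in ('pipeline-templates', 'pipeline-templates/dependency-build'):
    link = os.path.join(directory, 'SUSEQt5.13.template')
    if not os.path.lexists(link):
        # A relative target is resolved against the directory containing the link,
        # so each SUSEQt5.13.template points at its sibling SUSEQt5.12.template.
        os.symlink('SUSEQt5.12.template', link)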
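Editor's note: the "Build Product Dependencies" stage delegates steps 1-5 to ci-tooling/helpers/build-product-dependencies.py, which is not part of this diff. As a hedged illustration of step 4 only (sorting dependencies into a buildable order), here is a standalone sketch with a made-up dependency map; it does not reflect the helper's actual implementation or data sources:

#!/usr/bin/env python3
# Illustration only: topologically sort dependencies so that each one is built
# after everything it depends on. The mapping below is hypothetical example data.
import graphlib  # standard library since Python 3.9

# node -> set of nodes it depends on (its predecessors in build order)
dependsOn = {
    'extra-cmake-modules': set(),
    'kcoreaddons': {'extra-cmake-modules'},
    'kconfig': {'extra-cmake-modules', 'kcoreaddons'},
    'kwidgetsaddons': {'extra-cmake-modules'},
}

buildOrder = list(graphlib.TopologicalSorter(dependsOn).static_order())
print(buildOrder)
# One valid order: ['extra-cmake-modules', 'kcoreaddons', 'kwidgetsaddons', 'kconfig']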