diff --git a/helpers/create-abi-dump.py b/helpers/create-abi-dump.py
index 5bd6b7d..937940f 100755
--- a/helpers/create-abi-dump.py
+++ b/helpers/create-abi-dump.py
@@ -1,308 +1,310 @@
 #!/usr/bin/python3
 import os
 import re
 import logging
 import pathlib
 import argparse
 import tempfile
 import subprocess
 from collections import defaultdict
 from typing import Dict, List, Union
 from helperslib import Packages, EnvironmentHandler
 
 # Make sure logging is ready to go
 #logging.basicConfig(level=logging.DEBUG)
 
 def cmake_parser(lines: List) -> Dict:
     """A small cmake parser. If you search for a better solution, think about using a proper one based on ply, see
     https://salsa.debian.org/qt-kde-team/pkg-kde-jenkins/blob/master/hooks/prepare/cmake_update_deps
     But in our case we are only interested in two keywords and do not need many features.
 
     We return a dictionary with variables and targets:
     set(VAR "123")
         -> variables["VAR"] = "123"
     set_target_properties(TARGET PROPERTIES PROP1 A B PROP2 C D)
         -> targets = {
                "PROP1": ["A", "B"],
                "PROP2": ["C", "D"],
            }
     """
     variables = {}  # type: Dict[str,str]
     targets = defaultdict(lambda: defaultdict(list))  # type: Dict[str, Dict[str, List[str]]]
 
     ret = {
         "variables": variables,
         "targets": targets,
     }
 
     def parse_set(args: str) -> None:
         """Process set lines and update the variables dictionary:
         set(VAR 1.2.3) -> args = 'VAR 1.2.3'
         and we set variables["VAR"] = "1.2.3"
         """
         _args = args.split()
         if len(_args) == 2:
             name, value = _args
             variables[name] = value
     def parse_set_target_properties(args: str) -> None:
         """Process set_target_properties cmake lines and update the targets dictionary.
         All arguments of set_target_properties are given in the args parameter.
         As cmake uses "KEYWORD val1 val2" sequences, we need to remember the
         current keyword until we detect the next keyword.
         args[0] is the target we want to update
         args[1] must be PROPERTIES
         """
         name, properties, *values = args.split()
         target = targets[name]
 
         if properties != "PROPERTIES":
             logging.warning("unknown line: %s", args)
 
         # Known set_target_properties keywords
         keywords = [
             "IMPORTED_LINK_DEPENDENT_LIBRARIES_DEBUG",
             "IMPORTED_LOCATION_DEBUG",
             "IMPORTED_SONAME_DEBUG",
             "INTERFACE_INCLUDE_DIRECTORIES",
             "INTERFACE_LINK_LIBRARIES",
             "INTERFACE_COMPILE_OPTIONS",
             "INTERFACE_COMPILE_DEFINITIONS",
         ]
 
         tmpKeyword = None
         for arg in values:
             if arg in keywords:
                 tmpKeyword = target[arg]
                 continue
             # Ignore any value that appears before the first known keyword
             if tmpKeyword is not None:
                 tmpKeyword.append(arg)
 
     # Keywords we want to react on
     keywords = {
         "set": parse_set,
         "set_target_properties": parse_set_target_properties,
     }
 
     RELINE = re.compile(r"^\s*(?P<keyword>[^(]+)\s*\(\s*(?P<args>.*)\s*\)\s*$")
 
     for line in lines:
         m = RELINE.match(line)
         if m and m.group('keyword') in keywords:
             keywords[m.group('keyword')](m.group('args'))
 
     return ret
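 # A minimal illustration of the parser's output (hypothetical input lines):
 #   out = cmake_parser(['set(PACKAGE_VERSION 5.49.0)',
 #                       'set_target_properties(KF5::Archive PROPERTIES IMPORTED_SONAME_DEBUG libKF5Archive.so.5)'])
 #   out["variables"]["PACKAGE_VERSION"]                     == "5.49.0"
 #   out["targets"]["KF5::Archive"]["IMPORTED_SONAME_DEBUG"] == ["libKF5Archive.so.5"]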
 # Wrapper class to represent a library we have found
 # This class stores information on the library in question and assists in extracting information concerning it
 class Library:
     # Make sure we initialize everything we are going to need
     def __init__(self, name: str) -> None:
         # name of the library
         self.name = name  # type: str
         # The raw cmake parser output, available for debugging purposes
         # see the cmake_parser function for the return value
         self.__parser_output = None  # type: Union[Dict, None]
 
     # Provide a helpful description of the object (to ease debugging)
     def __repr__(self) -> str:
         return "<Library {self.name}>".format(self=self)  # replace with f-String in python 3.6
 
     # Execute CMake to gather the information we need
     def runCMake(self, runtimeEnvironment) -> None:
         """Create a CMakeLists.txt to detect the headers, version and library path"""
         # Prepare to gather the information we need
         self.__mlines = []  # type: List[str]
 
         # To avoid contaminating the directory we are being run in, make sure we are in a temporary directory
         # This will also allow us to succeed if we are run from the build directory
         with tempfile.TemporaryDirectory() as d:
             # Create an appropriate CMakeLists.txt which searches for the library in question
             cmakeFile = (pathlib.Path(d) / "CMakeLists.txt")
             cmakeFile.write_text("find_package({self.name} CONFIG REQUIRED)\n".format(self=self))  # replace with f-String in python 3.6
 
             # Now run CMake and ask it to process the CMakeLists.txt file we just generated
             # We want it to run in trace mode so we can examine the log to extract the information we need
             proc = subprocess.Popen(['cmake', '.', '--trace-expand'], cwd=d,
                                     stdout=subprocess.DEVNULL, stderr=subprocess.PIPE,
                                     env=runtimeEnvironment)
 
             # cmake prefixes output with the name of the file, filter only lines with interesting files
             retarget = re.compile(r'.*/{self.name}(Targets[^/]*|Config[^/]*)\.cmake\(\d+\):\s*(.*)$'.format(self=self))  # replace with f-String in python 3.6
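             # An illustrative trace line this regex accepts (path and line number
             # are hypothetical):
             #   /home/jenkins/install-prefix/lib64/cmake/KF5Archive/KF5ArchiveTargets.cmake(61):  set_target_properties(...)
             # group(2) then yields just the cmake command, ready for cmake_parser above.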
             # Start processing the output of CMake, one line at a time
             for line in proc.stderr:
                 # Make sure it is UTF-8 formatted
                 theLine = line.decode("utf-8")
 
                 # Did we find a CMake line we were interested in?
                 m = retarget.match(theLine)
                 if m:
                     # Extract the information from that and store it for further processing
                     mline = m.group(2)
                     self.__mlines.append(mline)
 
         # Process the information we've now gathered
         self.__parser_output = cmake_parser(self.__mlines)
         self.parser_output = self.__parser_output
         self.mlines = self.__mlines
 
         # Extract the version number of the library for easier use
         self.version = self.__parser_output["variables"]["PACKAGE_VERSION"]  # type: str
 
         # targets: the targets of the library (the existing .so files)
         # a dict with keys:
         #   SONAME = the SONAME of the lib
         #   path = path of the library
         #   include_dirs = the header files for the library
         self.targets = {}  # type: Dict
 
         # Helper function to parse CMake formatted include directory lists
         def parseIncludeDirs(args: List[str]) -> List[str]:
             """cmake uses ";" to separate different paths.
             Split the paths and make a unique list of all paths
             (do not add paths multiple times).
             """
             d = []  # type: List[str]
             for arg in args:
                 d += arg.split(";")
             return d
 
         # Process the various targets our parser found
         for t, value in self.__parser_output["targets"].items():
             # Particularly, we want to extract:
             #   Library names (sonames)
             #   The path to the CMake library package
             #   Any include directories specified by the CMake library package
             target = {
                 "SONAME": re.search(r"\.([\d]*)$", value["IMPORTED_SONAME_DEBUG"][0]).group(1),
                 "path": value["IMPORTED_LOCATION_DEBUG"][0],
                 "include_dirs": parseIncludeDirs(value["INTERFACE_INCLUDE_DIRECTORIES"]),
             }
             self.targets[t] = target
 
     def createABIDump(self, runtimeEnvironment=None) -> None:
         """Run abi-compliance-checker (acc) to create an ABI dump tarball.
         First we need to construct an input file for acc, see the xml variable.
         After that we can run acc with the constructed file.
         """
         # Make sure we have a valid runtime environment for CMake and abi-compliance-checker
         if runtimeEnvironment is None:
             runtimeEnvironment = os.environ
 
         # If we haven't yet run CMake, do so
         # Otherwise we won't have anything to give to abi-compliance-checker
         if not self.__parser_output:
             self.runCMake(runtimeEnvironment)
 
         # Start preparations to run abi-compliance-checker
         # Gather the information we'll need to write the XML configuration file it uses
         version = self.version
         headers = []  # type: List[str]
         libs = []  # type: List[str]
         additionalIncludes = []  # type: List[str]
 
         # From the target information we previously collected...
         # Grab the list of libraries and include headers for abi-compliance-checker
         for target in self.targets.values():
             # Check each include directory to see if we need to add it....
             for i in target['include_dirs']:
                 # Ignore general folders, as no library-specific headers are placed there
                 if i == '/usr/include' or i.endswith("/KF5"):
                     if i not in additionalIncludes:
                         additionalIncludes.append(i)
                     continue
                 # Otherwise, if we don't already have it - add it to the list!
                 if i not in headers:
                     headers.append(i)
 
             # If the library path isn't in the list, then we should add it to the list
             if target['path'] not in libs:
                 libs.append(target['path'])
 
         # Now we can go ahead and generate the XML file for abi-compliance-checker
         # (section tags follow abi-compliance-checker's XML descriptor format)
         xml = """
             <version>
                 {version}
             </version>
 
             <headers>
                 {headers}
             </headers>
 
             <libs>
                 {libs}
             </libs>
 
             <skip_including>
                 /usr/lib/python3.6/site-packages/utils/fake_libc_include
                 /usr/include/clang/AST
                 /usr/lib64/clang/6.0.1/include
             </skip_including>
 
             <add_include_paths>
                 {additionalIncludes}
             </add_include_paths>
         """.format(version=version, headers="\n".join(headers), libs="\n".join(libs),
                    additionalIncludes="\n".join(additionalIncludes))  # replace with f-String in Python 3.6
 
         # Write the generated XML out to a file to pass to abi-compliance-checker
         # We will give this to abi-compliance-checker using its --dump parameter
         with open("{version}.xml".format(version=version), "w") as f:  # replace with f-String in python 3.6
             f.write(xml)
 
         # acc supports C/C++; as Qt uses C++11 and -fPIC we need to set the gcc options explicitly
         subprocess.check_call(["abi-compliance-checker",
                                "-gcc-options", "-std=c++11 -fPIC",
                                "-l", self.name,
                                "--dump", f.name],
                               env=runtimeEnvironment)
 
 # Parse the command line arguments we've been given
 parser = argparse.ArgumentParser(description='Utility to create abi checker tarballs.')
 parser.add_argument('--project', type=str, required=True)
 parser.add_argument('--branchGroup', type=str, required=True)
 parser.add_argument('--buildLog', type=str, required=True)
 parser.add_argument('--environment', type=str, required=True)
+parser.add_argument('--platform', type=str, required=True)
 parser.add_argument('--usingInstall', type=str, required=True)
 arguments = parser.parse_args()
 
 # Make sure we have an environment ready for executing commands
 buildEnvironment = EnvironmentHandler.generateFor(installPrefix=arguments.usingInstall)
 
 # Get ready to start searching for libraries
 foundLibraries = []
 
 # Search the build log for the Installing/Up-to-date lines where we install the Config.cmake files.
 # From this we get a complete list of installed libraries.
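 # An illustrative matching log line (the library name is hypothetical):
 #   -- Installing: /home/jenkins/install-prefix/lib64/cmake/KF5Archive/KF5ArchiveConfig.cmake
 # from which group(2) extracts the library name "KF5Archive".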
 cmakeConfig = re.compile(r"^-- (Installing|Up-to-date): .*/([^/]*)Config\.cmake$")
 with open(arguments.buildLog, encoding='utf-8') as log:
     for line in log.readlines():
         match = cmakeConfig.match(line)
         if match:
             foundLibrary = Library(match.group(2))
             foundLibraries.append(foundLibrary)
 
 # Initialize the archive manager
 ourArchive = Packages.Archive(arguments.environment, 'ABIReference', usingCache=False, contentsSuffix=".abidump")
 
 # Determine which SCM revision we are storing
 # This will be embedded into the package metadata which might help someone doing some debugging
 # GIT_COMMIT is set by the Jenkins Git plugin, so we can rely on that for most of our builds
 scmRevision = ''
 if os.getenv('GIT_COMMIT') != '':
     scmRevision = os.getenv('GIT_COMMIT')
 if not scmRevision:
     scmRevision = subprocess.check_output(["git", "log", "--format=%H", "-n 1", "HEAD"]).strip().decode()
 
 # Now we generate the ABI dumps for every library we have found
 for library in foundLibraries:
     # Create the ABI Dump for this library
     library.createABIDump(runtimeEnvironment=buildEnvironment)
 
     # Determine where the ABI Dump archive is located
     # This location is controlled by abi-compliance-checker, but follows a predictable pattern
     fileName = "abi_dumps/{name}/{version}/ABI.dump".format(name=library.name, version=library.version)
 
     extraMetadata = {
         # use max because there may be more than one lib inside
         "SONAME": max([t['SONAME'] for t in library.targets.values()]),
         "version": library.version,
         "libname": library.name,
         "targets": list(library.targets),
         "project": arguments.project,
         "branchGroup": arguments.branchGroup,
+        "platform": arguments.platform,
     }
-    packageName = "{name}_{scmRevision}".format(name=library.name, scmRevision=scmRevision)
+    packageName = "{name}_{scmRevision}_{platform}".format(name=library.name, scmRevision=scmRevision, platform=arguments.platform)
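     # The stored package is thus named e.g. "KF5Archive_<sha1>_SUSEQt5.9"
     # (library name and platform value here are illustrative only)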
     ourArchive.storePackage(packageName, fileName, scmRevision, extraMetadata)
diff --git a/pipeline-templates/SUSEQt5.7.template b/pipeline-templates/SUSEQt5.7.template
index ddc55a9..362ee77 100644
--- a/pipeline-templates/SUSEQt5.7.template
+++ b/pipeline-templates/SUSEQt5.7.template
@@ -1,209 +1,209 @@
 // Provisionally mark the build as successful
 currentBuild.result = 'SUCCESS'
 
 // Request a node to be allocated to us
 node( currentPlatform ) {
     // We want Timestamps on everything
     timestamps {
         // We want to catch any errors that occur to allow us to send out notifications (i.e. emails) if needed
         catchError {
             // First Thing: Checkout Sources
             stage('Checkout Sources') {
                 // Actual Application Sources
                 checkout changelog: true, poll: true, scm: [
                     $class: 'GitSCM',
                     branches: [[name: branchToBuild]],
                     browser: [$class: 'CGit', repoUrl: browserUrl],
                     extensions: [[$class: 'CloneOption', timeout: 120]],
                     userRemoteConfigs: [[url: repositoryUrl]]
                 ]
                 // Our CI scripts
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']]
                 ]
                 // Projects metadata and next generation dependency metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
                 ]
                 // Dependency Metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
                 ]
                 // KApiDox: For api.kde.org metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']]
                 ]
                 // kde-dev-scripts: For packager metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']]
                 ]
             }
             // Now Prepare to Build: Get the dependencies ready
             stage('Setup Dependencies') {
                 // Now we can determine what our dependencies are
                 // Then update to the latest version of the dependencies available from the master server
                 // Finally extract all of those dependencies in turn into the given 'installTo' directory
                 sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Now we can configure our build
             stage('Configuring Build') {
                 // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc
                 sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Finally we can build it! (Once again, through a helper)
             stage('Compiling') {
                 // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with
                 sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now we can run our tests
             stage('Running Tests') {
                 // Run the unit tests for this project
                 // Tests are run in a basic environment (X, DBus)
                 sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 // Collect our results
                 junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml'
             }
             // Now ensure that it installs....
             stage('Installing') {
                 // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo'
                 // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step
                 sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'"
             }
             // Looks like it built okay - let's capture this for later use
             // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org
             stage('Capturing Installation') {
                 // First we create a tar archive of the installation which was diverted
                 // Then we upload a copy of that to the master server and have it publish the new archive
                 // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node
                 sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'"
                 // Now we extract the CMake metadata and upload that to the appropriate hosts
                 sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Final thing to do: some code quality checks
             stage('Checking Code Quality') {
                 // Perform Appstream Compliance Checks
                 sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'"
                 // Gather ABI Reference information for later checking
                 sh """
                     curl '$BUILD_URL/consoleText' -o currentBuildLog.txt
-                    python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true
+                    python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true
                 """
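                 // The trailing '|| true' above is deliberate: gathering the ABI reference
                 // data is advisory, so a failing dump must not mark the build as failed.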
                 // Platform Enablement Checks
                 // Frameworks have a metadata file which specifies the platforms they support and should be built on
                 // This check compares that metadata file against the records held by the CI system
                 sh """
                     touch PlatformCheckOutput.txt
                     if [[ -e metainfo.yaml ]]; then
                         python3 ci-tooling/helpers/check-platform.py '$WORKSPACE/metainfo.yaml' &> PlatformCheckOutput.txt;
                     fi
                 """
                 // If the platform check indicates there are missing platforms then we should flag the build as unstable
                 // We start this process by reading the output of the check command
                 def platformCheckResult = readFile "${env.WORKSPACE}/PlatformCheckOutput.txt"
                 // Then we check to see if it had anything in it - it will be empty if everything is okay
                 if( platformCheckResult != '' ) {
                     // If it does, then mark the build as unstable
                     currentBuild.result = 'UNSTABLE'
                     // We also print the check results so they can be examined easily
                     echo platformCheckResult
                 }
                 // cppcheck is not supported by Pipeline at the moment, so we don't run that for now
                 // See https://issues.jenkins-ci.org/browse/JENKINS-35096
                 // Perform Cobertura Processing
                 // First, run the LCov extraction
                 sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}"
                 // Collect the results from the LCov extraction
                 step([
                     $class: 'CoberturaPublisher',
                     autoUpdateHealth: false,
                     autoUpdateStability: false,
                     coberturaReportFile: 'CoberturaLcovResults.xml',
                     failNoReports: false,
                     failUnhealthy: false,
                     failUnstable: false,
                     maxNumberOfBuilds: 0,
                     onlyStable: false,
                     zoomCoverageChart: false
                 ])
                 // Scan the logs and publish a warnings report
                 warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']],
                          excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**"
             }
         }
         // Let's determine if we need to send out notifications
         // What happened in our previous build?
         def previousResult = currentBuild.previousBuild?.result
         // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email
         if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) {
             // Start constructing the list of our recipients
             // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications
             def mailTo = [ unstableBuildEmails ]
             // If the build was a solid failure (either now or previously) then notify those who want to know about failures only
             if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) {
                 // Add them to the list
                 mailTo << buildFailureEmails
             }
             // If someone kicked this job off, they're presumably interested as well
             mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] )
             // We always want to notify our dashboard as well
             mailTo << "kde-dashboard@kde.org"
             // Finalise the list of recipients
             mailTo = mailTo.join(',')
             // Send the email now
             emailext(
                 to: mailTo,
                 body: '${JELLY_SCRIPT,template="html_gmail"}',
                 mimeType: 'text/html',
                 subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!',
                 attachLog: false
             )
         }
         // IRC Notifications are currently not supported by Pipeline
         // See https://issues.jenkins-ci.org/browse/JENKINS-33922
         // We can probably work around this using Pursuivant and the emails Jenkins sends out
         // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes
     }
 }
diff --git a/pipeline-templates/SUSEQt5.9.template b/pipeline-templates/SUSEQt5.9.template
index c02a5cb..313aaff 100644
--- a/pipeline-templates/SUSEQt5.9.template
+++ b/pipeline-templates/SUSEQt5.9.template
@@ -1,190 +1,190 @@
 // Provisionally mark the build as successful
 currentBuild.result = 'SUCCESS'
 
 // Request a node to be allocated to us
 node( currentPlatform ) {
     // We want Timestamps on everything
     timestamps {
         // We want to catch any errors that occur to allow us to send out notifications (i.e. emails) if needed
         catchError {
             // First Thing: Checkout Sources
             stage('Checkout Sources') {
                 // Actual Application Sources
                 checkout changelog: true, poll: true, scm: [
                     $class: 'GitSCM',
                     branches: [[name: branchToBuild]],
                     browser: [$class: 'CGit', repoUrl: browserUrl],
                     extensions: [[$class: 'CloneOption', timeout: 120]],
                     userRemoteConfigs: [[url: repositoryUrl]]
                 ]
                 // Our CI scripts
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']]
                 ]
                 // Projects metadata and next generation dependency metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
                 ]
                 // Dependency Metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
                 ]
                 // KApiDox: For api.kde.org metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']]
                 ]
                 // kde-dev-scripts: For packager metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']]
                 ]
             }
             // Now Prepare to Build: Get the dependencies ready
             stage('Setup Dependencies') {
                 // Now we can determine what our dependencies are
                 // Then update to the latest version of the dependencies available from the master server
                 // Finally extract all of those dependencies in turn into the given 'installTo' directory
                 sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Now we can configure our build
             stage('Configuring Build') {
                 // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc
                 sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Finally we can build it! (Once again, through a helper)
             stage('Compiling') {
                 // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with
                 sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now ensure that it installs....
             stage('Installing') {
                 // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo'
                 // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step
                 sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'"
             }
             // Looks like it built okay - let's capture this for later use
             // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org
             stage('Capturing Installation') {
                 // First we create a tar archive of the installation which was diverted
                 // Then we upload a copy of that to the master server and have it publish the new archive
                 // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node
                 sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'"
                 // Now we extract the CMake metadata and upload that to the appropriate hosts
                 sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now we can run our tests
             stage('Running Tests') {
                 // Run the unit tests for this project
                 // Tests are run in a basic environment (X, DBus)
                 sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 // Collect our results
                 junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml'
             }
             // Final thing to do: some code quality checks
             stage('Checking Code Quality') {
                 // Perform Appstream Compliance Checks
                 sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'"
                 // Gather ABI Reference information for later checking
                 sh """
                     curl '$BUILD_URL/consoleText' -o currentBuildLog.txt
-                    python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true
+                    python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true
                 """
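                 // As in the SUSEQt5.7 template, the '|| true' keeps an ABI dump
                 // failure advisory rather than failing the build.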
                 // cppcheck is not supported by Pipeline at the moment, so we don't run that for now
                 // See https://issues.jenkins-ci.org/browse/JENKINS-35096
                 // Perform Cobertura Processing
                 // First, run the LCov extraction
                 sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}"
                 // Collect the results from the LCov extraction
                 step([
                     $class: 'CoberturaPublisher',
                     autoUpdateHealth: false,
                     autoUpdateStability: false,
                     coberturaReportFile: 'CoberturaLcovResults.xml',
                     failNoReports: false,
                     failUnhealthy: false,
                     failUnstable: false,
                     maxNumberOfBuilds: 0,
                     onlyStable: false,
                     zoomCoverageChart: false
                 ])
                 // Scan the logs and publish a warnings report
                 warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']],
                          excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**"
             }
         }
         // Let's determine if we need to send out notifications
         // What happened in our previous build?
         def previousResult = currentBuild.previousBuild?.result
         // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email
         if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) {
             // Start constructing the list of our recipients
             // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications
             def mailTo = [ unstableBuildEmails ]
             // If the build was a solid failure (either now or previously) then notify those who want to know about failures only
             if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) {
                 // Add them to the list
                 mailTo << buildFailureEmails
             }
             // If someone kicked this job off, they're presumably interested as well
             mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] )
             // We always want to notify our dashboard as well
             mailTo << "kde-dashboard@kde.org"
             // Finalise the list of recipients
             mailTo = mailTo.join(',')
             // Send the email now
             emailext(
                 to: mailTo,
                 body: '${JELLY_SCRIPT,template="html_gmail"}',
                 mimeType: 'text/html',
                 subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!',
                 attachLog: false
             )
         }
         // IRC Notifications are currently not supported by Pipeline
         // See https://issues.jenkins-ci.org/browse/JENKINS-33922
         // We can probably work around this using Pursuivant and the emails Jenkins sends out
         // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes
     }
 }