diff --git a/helpers/check-abi.py b/helpers/check-abi.py
index ce33ea2..e4c9bf9 100755
--- a/helpers/check-abi.py
+++ b/helpers/check-abi.py
@@ -1,169 +1,222 @@
 #!/usr/bin/python3
 # check the ABIs against an earlier state.
 # It is designed to run after create-abi-dump has created the abidump already.
 # abi-compliance-checker creates an HTML file with the report.
 # There can be multiple libraries in one repository (e.g. messagelib),
 # so we have multiple HTML files for one repository.
 # In order to store them as artifacts in Jenkins add:
 #
 # archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false
 
 import os
 import logging
 import argparse
+import decimal
+import re
 import subprocess
 import sys
+import yaml
 
 from helperslib import Packages
 from helperslib.Version import Version
 
 class Library:
     def __init__(self, packageName, library):
         self.packageName = packageName
         self.library = library
         self.candidates = []
 
     def addCandidate(self, key, entry):
         entry['packageName'] = key
         self.candidates.append(entry)
 
     def candidate(self):
         """Find the best candidate to check the ABI against."""
         candidate = None
         timestamp = self.library["timestamp"]
 
         if not self.candidates:
             return None
 
         # get a list of tagged candidates
         released = list(filter(lambda i: i['scmRevision'] in HASH2TAG, self.candidates))
 
         if released:
             # get the oldest released version that is available
             candidate = min(released, key=lambda i: HASH2TAG[i['scmRevision']])
             logging.info("Found tag %s(%s) to check against.", HASH2TAG[candidate['scmRevision']].version, candidate['scmRevision'])
         else:
             # TODO: we may want to return None, as the library has never been released so far.
             # get the oldest candidate.
             candidate = min(self.candidates, key=lambda e: e['timestamp'])
             logging.warning("No released version was found, just using the oldest commit.")
 
         # the candidate needs to be older than the current build.
         if timestamp < candidate['timestamp']:
             return None
 
         return candidate
 
+def parseACCOutputToDict(stdout):
+    """Parse the output of abi-compliance-checker and return a dict.
+
+    Extracts the binary/source compatibility percentages from acc
+    and calculates a simple bool for overall compatibility.
+    """
+    checkBlock = re.compile(br"""^Binary compatibility: (?P<binary>[0-9.]+)%\s*
+Source compatibility: (?P<source>[0-9.]+)%\s*$""", re.M)
+    m = checkBlock.search(stdout).groupdict()
+
+    m['binary'] = decimal.Decimal(m['binary'].decode())
+    m['source'] = decimal.Decimal(m['source'].decode())
+    compatibility = m['binary'] == 100 and m['source'] == 100
+
+    return {
+        'binaryCompatibility': float(m['binary']),
+        'sourceCompatibility': float(m['source']),
+        'compatibility': compatibility,
+    }
+
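The new parser is easy to sanity-check by feeding it a captured summary. A minimal sketch, using hypothetical sample output in the two-line format the regex anchors on:

    # Hypothetical abi-compliance-checker summary; only the two matched lines matter.
    sample = b"Binary compatibility: 99.8%\nSource compatibility: 100%\n"

    result = parseACCOutputToDict(sample)
    assert result == {
        'binaryCompatibility': 99.8,
        'sourceCompatibility': 100.0,
        'compatibility': False,  # both values must be exactly 100 to count as compatible
    }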
+ """ + checkBlock = re.compile(br"""^Binary compatibility: (?P[0-9.]+)%\s* +Source compatibility: (?P[0-9.]+)%\s*$""", re.M) + m = checkBlock.search(stdout).groupdict() + + m['binary'] = decimal.Decimal(m['binary'].decode()) + m['source'] = decimal.Decimal(m['source'].decode()) + compatibility = m['binary'] == 100 and m['source'] == 100 + + return { + 'binaryCompatibility': float(m['binary']), + 'sourceCompatibility': float(m['source']), + 'compatibility': compatibility, + } + # Make sure logging is ready to go logging.basicConfig(level=logging.DEBUG) # Parse the command line arguments we've been given parser = argparse.ArgumentParser(description='Utility to check ABI.') parser.add_argument('--project', type=str, required=True) parser.add_argument('--branchGroup', type=str, required=True) parser.add_argument('--platform', type=str, required=True) parser.add_argument('--environment', type=str, required=True) arguments = parser.parse_args() # Initialize the archive manager ourArchive = Packages.Archive(arguments.environment, 'ABIReference', usingCache = True, contentsSuffix = ".abidump") # Determine which SCM revision we are storing # This will be embedded into the package metadata which might help someone doing some debugging # GIT_COMMIT is set by Jenkins Git plugin, so we can rely on that for most of our builds scmRevision = '' if os.getenv('GIT_COMMIT') != '': scmRevision = os.getenv('GIT_COMMIT') if not scmRevision: scmRevision = subprocess.check_output(["git", "log", "--format=%H", "-n 1", "HEAD"]).strip().decode() # get all tags that are in the current commit tags = subprocess.check_output(["git", "tag", "--merged", scmRevision]).strip().decode().splitlines() # we are not interessed in the commit for annotatated tags itself, we want to know what commit was tagged. commitedTags = [i+"^{}" for i in tags] # resolve tags -> git hashes tagHashes = subprocess.check_output(["git", "rev-parse", *commitedTags]).strip().decode().splitlines() HASH2TAG = {tagHashes[pos]:Version(tag) for pos, tag in enumerate(tags)} # Do we want to check for newer SONAMEs on other buildGroups keepBuildGroup = False if arguments.branchGroup != "kf5-qt5": keepBuildGroup = True # Find all libraries, that are build with the same git commit libraries = [] for key, entry in ourArchive.serverManifest.items(): try: if entry['platform'] != arguments.platform: continue if entry["branchGroup"] != arguments.branchGroup: continue if entry["project"] == arguments.project and entry["scmRevision"] == scmRevision: libraries.append(Library(key,entry)) except KeyError: continue # Find all availabe reference dumps # * same libname # * same SONAME otherwise we have a ABI bump and than it is safe to break ABI for l in libraries: libname = l.library["libname"] soname = l.library["SONAME"] for key, entry in ourArchive.serverManifest.items(): if key == l.packageName: continue if entry['platform'] != arguments.platform: continue # We want to search for the library if entry["libname"] == libname: # only interested, for builds with the same SONAME if entry['SONAME'] == soname: l.addCandidate(key, entry) elif entry['SONAME'] > soname: # Ignore new SONAMEs on other branchGroups. if keepBuildGroup and entry["branchGroup"] != arguments.branchGroup: continue logging.warning("We searched for SONAME = %s, but found a newer SONAME = %s in the builds, that should not happen, as SONAMEs should only rise and never go lower!", soname, entry['SONAME']) # Check every libraries ABI and do not fail, if one is not fine. 
diff --git a/pipeline-templates/SUSEQt5.9.template b/pipeline-templates/SUSEQt5.9.template
index 58fb895..2c289ad 100644
--- a/pipeline-templates/SUSEQt5.9.template
+++ b/pipeline-templates/SUSEQt5.9.template
@@ -1,200 +1,203 @@
 // Provisionally mark the build as successful
 currentBuild.result = 'SUCCESS'
 
 // Request a node to be allocated to us
 node( currentPlatform ) {
     // We want Timestamps on everything
     timestamps {
         // We want to catch any errors that occur to allow us to send out notifications (ie. emails) if needed
         catchError {
             // First Thing: Checkout Sources
             stage('Checkout Sources') {
                 // Actual Application Sources
                 checkout changelog: true, poll: true, scm: [
                     $class: 'GitSCM',
                     branches: [[name: branchToBuild]],
                     browser: [$class: 'CGit', repoUrl: browserUrl],
                     extensions: [[$class: 'CloneOption', timeout: 120]],
                     userRemoteConfigs: [[url: repositoryUrl]]
                 ]
 
                 // Our CI scripts
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']]
                 ]
 
                 // Projects metadata and next generation dependency metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']]
                 ]
 
                 // Dependency Metadata
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']]
                 ]
 
                 // KApiDox: For api.kde.org metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']]
                 ]
 
                 // kde-dev-scripts: For packager metadata extraction
                 checkout changelog: false, poll: false, scm: [
                     $class: 'GitSCM',
                     branches: [[name: 'master']],
                     extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']],
                     userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']]
                 ]
             }
 
             // Now Prepare to Build: Get the dependencies ready
             stage('Setup Dependencies') {
                 // Now we can determine what our dependencies are
                 // Then update to the latest version of the dependencies available from the master server
                 // Finally extract all of those dependencies in turn into the given 'installTo' directory
                 sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
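prepare-dependencies.py encapsulates the logic described in the comments; conceptually, the last step amounts to unpacking each dependency's previously captured install archive into the shared prefix. A rough sketch of that idea only, with hypothetical archive names:

    import os
    import tarfile

    installTo = os.path.expanduser('~/install-prefix/')
    for archive in ('kcoreaddons.tar', 'ki18n.tar'):  # hypothetical downloads
        with tarfile.open(archive) as tar:
            tar.extractall(installTo)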
             // Now we can configure our build
             stage('Configuring Build') {
                 // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc
                 sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
 
             // Finally we can build it! (Once again, through a helper)
             stage('Compiling') {
                 // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with
                 sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
 
             // Now ensure that it installs....
             stage('Installing') {
                 // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo'
                 // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step
                 sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'"
             }
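The diversion mentioned in the comments is plain DESTDIR semantics (with INSTALL_ROOT as the qmake counterpart); a minimal sketch of the mechanism install-build.py relies on, not the helper itself:

    import os
    import subprocess

    divertTo = os.path.expandvars('$WORKSPACE/install-divert/')
    env = dict(os.environ, DESTDIR=divertTo, INSTALL_ROOT=divertTo)
    # "make install" now writes into the divert tree instead of the real prefix.
    subprocess.run(['make', 'install'], env=env, check=True)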
             // Looks like it built okay - let's capture this for later use
             // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org
             stage('Capturing Installation') {
                 // First we create a tar archive of the installation which was diverted
                 // Then we upload a copy of that to the master server and have it publish the new archive
                 // Finally, to save bandwidth, our copy of the tar archive is moved to our local cache for reuse on later builds on this node
                 sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'"
 
                 // Now we extract the CMake metadata and upload that to the appropriate hosts
                 sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
 
             // Now we can run our tests
             stage('Running Tests') {
                 // Run the unit tests for this project
                 // Tests are run in a basic environment (X, DBus)
                 sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
 
                 // Collect our results
                 junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml'
             }
 
             // Final thing to do: some code quality checks
             stage('Checking Code Quality') {
                 // Perform Appstream Compliance Checks
                 sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'"
 
                 // Gather ABI Reference information for later checking
                 sh """
                     curl '$BUILD_URL/consoleText' -o currentBuildLog.txt
                     python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/' || true
                 """
 
                 // Save the ABI build logs to review if necessary
                 archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true
 
                 // Now perform the ABI Compatibility checks
                 // This tool will produce reports stored at compat_reports/ which we will also need to capture
                 sh """
                     python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true
                 """
 
                 // Save the ABI Compatibility reports for developers to review if necessary
                 archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true
 
+                // Save the ABI Compatibility results yaml file
+                archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true
+
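Because the results file is archived per build, it can also be pulled straight off Jenkins by other jobs or developers; a sketch, assuming a hypothetical job URL on build.kde.org:

    import urllib.request
    import yaml

    url = ('https://build.kde.org/job/SomeJob/lastCompletedBuild/'
           'artifact/abi-compatibility-results.yaml')  # hypothetical job name
    with urllib.request.urlopen(url) as response:
        results = yaml.safe_load(response.read())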
                 // cppcheck is not supported by Pipeline at the moment, so we don't run that for now
                 // See https://issues.jenkins-ci.org/browse/JENKINS-35096
 
                 // Perform Cobertura Processing
                 // First, run the LCov extraction
                 sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}"
 
                 // Collect the results from the LCov extraction
                 step([
                     $class: 'CoberturaPublisher',
                     autoUpdateHealth: false,
                     autoUpdateStability: false,
                     coberturaReportFile: 'CoberturaLcovResults.xml',
                     failNoReports: false,
                     failUnhealthy: false,
                     failUnstable: false,
                     maxNumberOfBuilds: 0,
                     onlyStable: false,
                     zoomCoverageChart: false
                 ])
 
                 // Scan the logs and publish a warnings report
                 warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**"
             }
         }
 
         // Let's determine if we need to send out notifications
         // What happened in our previous build?
         def previousResult = currentBuild.previousBuild?.result
         // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email
         if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) {
             // Start constructing the list of our recipients
             // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications
             def mailTo = [ unstableBuildEmails ]
             // If the build was a solid failure (either now or previously) then notify those who want to know about failures only
             if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) {
                 // Add them to the list
                 mailTo << buildFailureEmails
             }
             // If someone kicked this job off, they're presumably interested as well
             mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] )
             // We always want to notify our dashboard as well
             mailTo << "kde-dashboard@kde.org"
             // Finalise the list of recipients
             mailTo = mailTo.join(',')
             // Send the email now
             emailext(
                 to: mailTo,
                 body: '${JELLY_SCRIPT,template="html_gmail"}',
                 mimeType: 'text/html',
                 subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!',
                 attachLog: false
             )
         }
 
         // IRC Notifications are currently not supported by Pipeline
         // See https://issues.jenkins-ci.org/browse/JENKINS-33922
         // We can probably work around this using Pursuivant and the emails Jenkins sends out
         // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes
     }
 }