diff --git a/pipeline-templates/Frameworks/SUSEQt5.11.template b/pipeline-templates/Frameworks/SUSEQt5.11.template
index 6b22e48..bb99e74 100644
--- a/pipeline-templates/Frameworks/SUSEQt5.11.template
+++ b/pipeline-templates/Frameworks/SUSEQt5.11.template
@@ -1,225 +1,225 @@
 // Provisionally mark the build as successful
 currentBuild.result = 'SUCCESS'
 // Request a node to be allocated to us
 node( currentPlatform ) {
     // We want Timestamps on everything
     timestamps {
         // We want to catch any errors that occur to allow us to send out notifications (i.e. emails) if needed
         catchError {
             // First Thing: Checkout Sources
             stage('Checkout Sources') {
                 // Actual Application Sources
                 checkout changelog: true, poll: true, scm: [ $class: 'GitSCM', branches: [[name: branchToBuild]], browser: [$class: 'CGit', repoUrl: browserUrl], extensions: [[$class: 'CloneOption', timeout: 120]], userRemoteConfigs: [[url: repositoryUrl]] ]
                 // Our CI scripts
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] ]
                 // Projects metadata and next generation dependency metadata
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] ]
                 // Dependency Metadata
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] ]
                 // KApiDox: For api.kde.org metadata extraction
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] ]
                 // kde-dev-scripts: For packager metadata extraction
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] ]
             }
             // Now Prepare to Build: Get the dependencies ready
             stage('Setup Dependencies') {
                 // Now we can determine what our dependencies are
                 // Then update to the latest version of the dependencies available from the master server
                 // Finally extract all of those dependencies in turn into the given 'installTo' directory
                 sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Now we can configure our build
             stage('Configuring Build') {
                 // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc.
                 sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Finally we can build it! (Once again, through a helper)
             stage('Compiling') {
                 // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with
                 sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now we can run our tests
             stage('Running Tests') {
                 // Run the unit tests for this project
                 // Tests are run in a basic environment (X, DBus)
                 sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 // Collect our results
                 junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml'
             }
             // Now ensure that it installs....
             stage('Installing') {
                 // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo'
                 // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step
                 sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'"
             }
             // Looks like it built okay - let's capture this for later use
             // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org
             stage('Capturing Installation') {
                 // First we create a tar archive of the installation which was diverted
                 // Then we upload a copy of that to the master server and have it publish the new archive
                 // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node
                 sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'"
                 // Now we extract the CMake metadata and upload that to the appropriate hosts
                 sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Final thing to do: some code quality checks
             stage('Checking Code Quality') {
                 // Perform Appstream Compliance Checks
                 sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'"
                 // Gather ABI Reference information for later checking
                 sh """
                     curl '$BUILD_URL/consoleText' -o currentBuildLog.txt
                     python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/'
                 """
                 // Save the ABI build logs to review if necessary
                 archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Save the input for ACC for building abi dumps locally
                 archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Now perform the ABI Compatibility checks
                 // This tool will produce reports stored at compat_reports/ which we will also need to capture
                 sh """
-                    python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production
+                    python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true
                 """
                 // Save the ABI Compatibility reports for developers to review if necessary
                 archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Save the ABI Compatibility results yaml file
                 archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Platform Enablement Checks
                 // Each Framework has a metadata file which specifies the platforms it supports and should be built on
                 // This check compares that metadata file against the records held by the CI system
                 sh """
                     touch PlatformCheckOutput.txt
                     if [[ -e metainfo.yaml ]]; then python3 ci-tooling/helpers/check-platform.py '$WORKSPACE/metainfo.yaml' &> PlatformCheckOutput.txt; fi
                 """
                 // If the platform check indicates there are missing platforms then we should flag the build as unstable
                 // We start this process by reading the output of the check command
                 def platformCheckResult = readFile "${env.WORKSPACE}/PlatformCheckOutput.txt"
                 // Then we check to see if it had anything in it - it will be empty if everything is okay
                 if( platformCheckResult != '' ) {
                     // If it does, then mark the build as unstable
                     currentBuild.result = 'UNSTABLE'
                     // We also print the check results so it can be examined easily
                     echo platformCheckResult
                 }
                 // cppcheck is not supported by Pipeline at the moment, so we don't run that for now
                 // See https://issues.jenkins-ci.org/browse/JENKINS-35096
                 // Perform Cobertura Processing
                 // First, run the LCov extraction
                 sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}"
                 // Collect the results from the LCov extraction
                 step([
                     $class: 'CoberturaPublisher',
                     autoUpdateHealth: false, autoUpdateStability: false,
                     coberturaReportFile: 'CoberturaLcovResults.xml',
                     failNoReports: false, failUnhealthy: false, failUnstable: false,
                     maxNumberOfBuilds: 0, onlyStable: false, zoomCoverageChart: false
                 ])
                 // Scan the logs and publish a warnings report
                 warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**"
             }
         }
         // Let's determine if we need to send out notifications
         // What happened in our previous build?
         def previousResult = currentBuild.previousBuild?.result
         // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email
         if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) {
             // Start constructing the list of our recipients
             // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications
             def mailTo = [ unstableBuildEmails ]
             // If the build was a solid failure (either now or previously) then notify those who want to know about failures only
             if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) {
                 // Add them to the list
                 mailTo << buildFailureEmails
             }
             // If someone kicked this job off, they're presumably interested as well
             mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] )
             // We always want to notify our dashboard as well
             mailTo << "kde-dashboard@kde.org"
             // Finalise the list of recipients
             mailTo = mailTo.join(',')
             // Send the email now
             emailext(
                 to: mailTo,
                 body: '${JELLY_SCRIPT,template="html_gmail"}',
                 mimeType: 'text/html',
                 subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!',
                 attachLog: false
             )
         }
         // IRC Notifications are currently not supported by Pipeline
         // See https://issues.jenkins-ci.org/browse/JENKINS-33922
         // We can probably work around this using Pursuivant and the emails Jenkins sends out
         // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes
     }
 }
diff --git a/pipeline-templates/SUSEQt5.10.template b/pipeline-templates/SUSEQt5.10.template
index 0549b06..93d09f3 100644
--- a/pipeline-templates/SUSEQt5.10.template
+++ b/pipeline-templates/SUSEQt5.10.template
@@ -1,205 +1,205 @@
 // Provisionally mark the build as successful
 currentBuild.result = 'SUCCESS'
 // Request a node to be allocated to us
 node( currentPlatform ) {
     // We want Timestamps on everything
     timestamps {
         // We want to catch any errors that occur to allow us to send out notifications (i.e. emails) if needed
         catchError {
             // First Thing: Checkout Sources
             stage('Checkout Sources') {
                 // Actual Application Sources
                 checkout changelog: true, poll: true, scm: [ $class: 'GitSCM', branches: [[name: branchToBuild]], browser: [$class: 'CGit', repoUrl: browserUrl], extensions: [[$class: 'CloneOption', timeout: 120]], userRemoteConfigs: [[url: repositoryUrl]] ]
                 // Our CI scripts
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/ci-tooling']] ]
                 // Projects metadata and next generation dependency metadata
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/repo-metadata/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/sysadmin/repo-metadata']] ]
                 // Dependency Metadata
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-build-metadata/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-build-metadata']] ]
                 // KApiDox: For api.kde.org metadata extraction
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kapidox/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kapidox']] ]
                 // kde-dev-scripts: For packager metadata extraction
                 checkout changelog: false, poll: false, scm: [ $class: 'GitSCM', branches: [[name: 'master']], extensions: [[$class: 'RelativeTargetDirectory', relativeTargetDir: 'ci-tooling/kde-dev-scripts/']], userRemoteConfigs: [[url: 'https://anongit.kde.org/kde-dev-scripts']] ]
             }
             // Now Prepare to Build: Get the dependencies ready
             stage('Setup Dependencies') {
                 // Now we can determine what our dependencies are
                 // Then update to the latest version of the dependencies available from the master server
                 // Finally extract all of those dependencies in turn into the given 'installTo' directory
                 sh "python3 -u ci-tooling/helpers/prepare-dependencies.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Now we can configure our build
             stage('Configuring Build') {
                 // This is delegated through a helper script to handle minor special cases like inSourceBuilds, non-CMake build systems, etc.
                 sh "python3 -u ci-tooling/helpers/configure-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/'"
             }
             // Finally we can build it! (Once again, through a helper)
             stage('Compiling') {
                 // We use a helper here so we can determine the appropriate number of CPUs (-j) to build with
                 sh "python3 -u ci-tooling/helpers/compile-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now ensure that it installs....
             stage('Installing') {
                 // The helper ensures that DESTDIR and INSTALL_ROOT are set to 'divertTo'
                 // This allows us to capture the install at the next stage for later reuse in the Setup Dependencies step
                 sh "python3 -u ci-tooling/helpers/install-build.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --installTo '$HOME/install-prefix/' --divertTo '$WORKSPACE/install-divert/'"
             }
             // Looks like it built okay - let's capture this for later use
             // We'll also take the opportunity to extract metadata from CMake used by packagers and api.kde.org
             stage('Capturing Installation') {
                 // First we create a tar archive of the installation which was diverted
                 // Then we upload a copy of that to the master server and have it publish the new archive
                 // Finally to save bandwidth our copy of the tar archive is moved to our local cache for reuse on later builds on this node
                 sh "python3 -u ci-tooling/helpers/capture-install.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --environment ${ciEnvironment} --platform ${currentPlatform} --divertedTo '$WORKSPACE/install-divert/' --installedTo '$HOME/install-prefix/'"
                 // Now we extract the CMake metadata and upload that to the appropriate hosts
                 sh "python3 -u ci-tooling/helpers/extract-cmake-dependency-metadata.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 sh "python3 -u ci-tooling/helpers/generate-dependency-diagram-data.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
             }
             // Now we can run our tests
             stage('Running Tests') {
                 // Run the unit tests for this project
                 // Tests are run in a basic environment (X, DBus)
                 sh "python3 -u ci-tooling/helpers/run-tests.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/'"
                 // Collect our results
                 junit allowEmptyResults: true, testResults: 'JUnitTestResults.xml'
             }
             // Final thing to do: some code quality checks
             stage('Checking Code Quality') {
                 // Perform Appstream Compliance Checks
                 sh "python3 -u ci-tooling/helpers/check-appstream-compliance.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --usingInstall '$HOME/install-prefix/' --withDiverted '$WORKSPACE/install-divert/'"
                 // Gather ABI Reference information for later checking
                 sh """
                     curl '$BUILD_URL/consoleText' -o currentBuildLog.txt
                     python3 -u ci-tooling/helpers/create-abi-dump.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --buildLog currentBuildLog.txt --environment production --usingInstall '$HOME/install-prefix/'
                 """
                 // Save the ABI build logs to review if necessary
                 archiveArtifacts artifacts: 'logs/*/*/log.txt', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Save the input for ACC for building abi dumps locally
                 archiveArtifacts artifacts: 'acc/*.xml', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Now perform the ABI Compatibility checks
                 // This tool will produce reports stored at compat_reports/ which we will also need to capture
                 sh """
-                    python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production
+                    python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production || true
                 """
                 // Save the ABI Compatibility reports for developers to review if necessary
                 archiveArtifacts artifacts: 'compat_reports/*_compat_report.html', onlyIfSuccessful: false, allowEmptyArchive: true
                 // Save the ABI Compatibility results yaml file
                 archiveArtifacts artifacts: 'abi-compatibility-results.yaml', onlyIfSuccessful: false, allowEmptyArchive: true
                 // cppcheck is not supported by Pipeline at the moment, so we don't run that for now
                 // See https://issues.jenkins-ci.org/browse/JENKINS-35096
                 // Perform Cobertura Processing
                 // First, run the LCov extraction
                 sh "python3 -u ci-tooling/helpers/extract-lcov-results.py --product ${productName} --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform}"
                 // Collect the results from the LCov extraction
                 step([
                     $class: 'CoberturaPublisher',
                     autoUpdateHealth: false, autoUpdateStability: false,
                     coberturaReportFile: 'CoberturaLcovResults.xml',
                     failNoReports: false, failUnhealthy: false, failUnstable: false,
                     maxNumberOfBuilds: 0, onlyStable: false, zoomCoverageChart: false
                 ])
                 // Scan the logs and publish a warnings report
                 warnings consoleParsers: [[parserName: 'GNU Make + GNU C Compiler (gcc)'], [parserName: 'Appstreamercli']], excludePattern: "/tmp/**|/home/jenkins/workspace/**/build/**|/usr/include/**"
             }
         }
         // Let's determine if we need to send out notifications
         // What happened in our previous build?
         def previousResult = currentBuild.previousBuild?.result
         // If our result has changed, or is FAILURE or UNSTABLE, then we want to send an email
         if( previousResult != currentBuild.result || currentBuild.result == 'FAILURE' || currentBuild.result == 'UNSTABLE' ) {
             // Start constructing the list of our recipients
             // At this point we can only be either a failure or an unstable build, so notify those who have requested unstable build notifications
             def mailTo = [ unstableBuildEmails ]
             // If the build was a solid failure (either now or previously) then notify those who want to know about failures only
             if( previousResult == 'FAILURE' || currentBuild.result == 'FAILURE' ) {
                 // Add them to the list
                 mailTo << buildFailureEmails
             }
             // If someone kicked this job off, they're presumably interested as well
             mailTo << emailextrecipients( [[$class: 'RequesterRecipientProvider']] )
             // We always want to notify our dashboard as well
             mailTo << "kde-dashboard@kde.org"
             // Finalise the list of recipients
             mailTo = mailTo.join(',')
             // Send the email now
             emailext(
                 to: mailTo,
                 body: '${JELLY_SCRIPT,template="html_gmail"}',
                 mimeType: 'text/html',
                 subject: 'KDE CI: ${PROJECT_NAME} - Build # ${BUILD_NUMBER} - ${BUILD_STATUS}!',
                 attachLog: false
             )
         }
         // IRC Notifications are currently not supported by Pipeline
         // See https://issues.jenkins-ci.org/browse/JENKINS-33922
         // We can probably work around this using Pursuivant and the emails Jenkins sends out
         // This would allow subscribing to build notifications for IRC channels in much the same way one subscribes for Commits and Bugzilla changes
     }
 }
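
Note on the change above (not part of the patch): appending '|| true' makes the sh step always exit 0, so a non-zero exit from check-abi.py no longer fails the build and regressions only surface through the archived compat_reports/ and abi-compatibility-results.yaml artifacts. A minimal sketch of an alternative, assuming one would rather keep the failure visible in the build status, is to capture the exit code with the sh step's returnStatus option and downgrade the build to UNSTABLE, mirroring how the platform check is handled in the Frameworks template:

// Sketch only: capture the check-abi.py exit status instead of discarding it with '|| true'
def abiCheckStatus = sh(
    returnStatus: true,
    script: "python3 -u ci-tooling/helpers/check-abi.py --project ${projectName} --branchGroup ${branchGroup} --platform ${currentPlatform} --environment production"
)
if( abiCheckStatus != 0 ) {
    // The build still completes, but the result records that the ABI check did not pass
    currentBuild.result = 'UNSTABLE'
    echo "check-abi.py exited with status ${abiCheckStatus}; see compat_reports/ for details"
}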