diff --git a/helpers/create-abi-dump.py b/helpers/create-abi-dump.py
--- a/helpers/create-abi-dump.py
+++ b/helpers/create-abi-dump.py
@@ -275,6 +275,16 @@
 # Initialize the archive manager
 ourArchive = Packages.Archive(arguments.environment, 'ABIReference', usingCache = False)
 
+# Determine which SCM revision we are storing
+# This will be embedded into the package metadata, which might help someone doing some debugging later
+# GIT_COMMIT is set by the Jenkins Git plugin, so we can rely on it for most of our builds
+scmRevision = ''
+if os.getenv('GIT_COMMIT'):
+    scmRevision = os.getenv('GIT_COMMIT')
+
+if not scmRevision:
+    scmRevision = subprocess.check_output(["git", "log", "--format=%H", "-n", "1", "HEAD"]).strip().decode()
+
 # Now we generate the ABI dumps for every library we have found
 for library in foundLibraries:
     # Create the ABI Dump for this library
@@ -284,10 +294,11 @@
     # This location is controlled by abi-compliance-checker, but follows a predictable pattern
     fileName = "abi_dumps/{name}/{version}/ABI.dump".format(name=library.name,version=library.version)
 
-    # Determine the internal version of the library we have found
-    # This is based off the CMake package metadata we read in above
-    scmRevision = max([t['SONAME'] for t in library.targets.values()]) # a more hackish way, to save the SONAME in the metadata
-
-    # Create a name for this entry in the Package archive and store it there
-    packageName = "{name}_{scmRevision}".format(name=library.name, scmRevision=scmRevision)
-    ourArchive.storePackage(packageName, fileName, scmRevision)
+    extraMetadata = {
+        "SONAME": max([t['SONAME'] for t in library.targets.values()]), # use max because there may be more than one library inside
+        "version": library.version,
+        "libname": library.name,
+        "targets": list(library.targets.keys()),
+    }
+    packageName = "{name}_{scmRevision}".format(name=library.name, scmRevision=scmRevision)
+    ourArchive.storePackage(packageName, fileName, scmRevision, extraMetadata)
diff --git a/helpers/helperslib/Packages.py b/helpers/helperslib/Packages.py
--- a/helpers/helperslib/Packages.py
+++ b/helpers/helperslib/Packages.py
@@ -1,3 +1,4 @@
+import copy
 import os
 import re
 import stat
@@ -134,17 +135,25 @@
         return ( localContentsPath, localMetadata )
 
     # Generates the metadata which is stored in the .yaml file that accompanies each package which is stored in the archive
-    def generateMetadataForFile( self, contentsNeedingMetadata, scmRevision ):
+    # extraMetadata is also written to the .yaml file and needs to be a dict-like object
+    def generateMetadataForFile( self, contentsNeedingMetadata, scmRevision, extraMetadata = None ):
         # First, determine the timestamp the file was last modified
         packageTimestamp = os.path.getmtime( contentsNeedingMetadata )
         # Now the checksum
         packageChecksum = CommonUtils.generateFileChecksum( contentsNeedingMetadata )
-        # Build the metadata which we'll be writing out
-        metadataForPackage = {
+
+        metadataForPackage = {}
+
+        # If we have extraMetadata for this package, pre-seed the metadata dictionary with it
+        if extraMetadata:
+            metadataForPackage = copy.copy(extraMetadata)
+
+        # Update/add the necessary keys that we want to exist
+        metadataForPackage.update({
             'timestamp': packageTimestamp,
             'checksum': packageChecksum,
             'scmRevision': scmRevision
-        }
+        })
 
         # Write the YAML out to a temporary file
         latestMetadata = tempfile.NamedTemporaryFile(delete=False, mode='w', dir=self.temporaryFileLocation())
@@ -157,13 +166,14 @@
     # Stores a package in the archive, either by creation of or updating of an existing package
     # As part of this process metadata will be generated for the package we are about to add to the archive to assist in caching later on
     # The package and it's metadata will then be uploaded to the remote archive and published, then transferred to our local cache
-    def storePackage( self, package, archiveFileToInclude, scmRevision = '' ):
+    # extraMetadata is also written to the package's .yaml file and needs to be a dict-like object
+    def storePackage( self, package, archiveFileToInclude, scmRevision = '', extraMetadata = None ):
         # Determine the names the metadata and archive files would have respectively
         metadataFilename = package + ".yaml"
         contentsFilename = package + ".tar"
 
         # Generate metadata for the package we are about to store
-        archiveMetadata = self.generateMetadataForFile( archiveFileToInclude, scmRevision )
+        archiveMetadata = self.generateMetadataForFile( archiveFileToInclude, scmRevision, extraMetadata )
 
         # Connect to the upload host
         privateKeyFile = os.path.join( os.path.expanduser('~'), 'Keys', self.name + '.key')
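
For context (not part of the patch): a minimal sketch of how a caller could use the new extraMetadata parameter, with hypothetical values. The keys mirror the ones assembled in create-abi-dump.py above; generateMetadataForFile() merges them with the timestamp, checksum and scmRevision keys before writing the package's .yaml file.

    # Hypothetical values for illustration only
    extraMetadata = {
        "SONAME": "5",
        "version": "5.62.0",
        "libname": "ExampleLibrary",
        "targets": ["Example::Target"],
    }
    # storePackage() forwards extraMetadata to generateMetadataForFile(), so these
    # keys end up in ExampleLibrary_<revision>.yaml alongside timestamp, checksum
    # and scmRevision
    ourArchive.storePackage("ExampleLibrary_" + scmRevision, "abi_dumps/ExampleLibrary/5.62.0/ABI.dump", scmRevision, extraMetadata)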