diff --git a/helpers/helperslib/Packages.py b/helpers/helperslib/Packages.py --- a/helpers/helperslib/Packages.py +++ b/helpers/helperslib/Packages.py @@ -78,9 +78,22 @@ # Where the remote archive has a newer version then the package will be retrieved from the remote archive # All lookups will be restricted to our current platform, as specified when creating this archive. def retrievePackage( self, package ): + # Does the archive contain what we are after? + # It will never contain it if the use of a local cache has been disabled + # In that case there is nothing we can do and we should bail + if package not in self.serverManifest: + # There is nothing for us to fetch - the server will just yield a 404 + # So let's bow out gracefully here + return ( None, None ) + + # Determine the suffix for the content in the archive + # Should the metadata not specify one, use the archive default suffix + serverMetadata = self.serverManifest[ package ] + contentsSuffix = serverMetadata.get('contentsSuffix', self.contentsSuffix) + # Determine the names the metadata and archive files would have respectively metadataFilename = package + ".yaml" - contentsFilename = package + self.contentsSuffix + contentsFilename = package + contentsSuffix # Begin determining if we need to download or not # We start from the assumption we will need to download an archive @@ -103,13 +116,6 @@ # If it doesn't, we always need to download needToDownload = True - # Does the archive contain what we are after? - # It will never contain it if the use of a local cache has been disabled - if package not in self.serverManifest: - # There is nothing for us to fetch - the server will just yield a 404 - # So let's bow out gracefully here - return ( None, None ) - # Let's retrieve the file if we need to now... if needToDownload: # Download the archive first... 
@@ -129,12 +135,7 @@ shutil.move( latestMetadata.name, localMetadataPath ) # All done, we can return a tuple of the archive and metadata now - # As we want to return the metadata already parsed (nobody outside this class needs to know it is stored as YAML) we'll load it now - # The archive is returned as a simple path to the file, which can be passed to tarfile.open() as appropriate - with open(localMetadataPath, 'r', encoding='utf-8') as localMetadataFile: - localMetadata = yaml.load( localMetadataFile ) - - return ( localContentsPath, localMetadata ) + return ( localContentsPath, serverMetadata ) # Generates the metadata which is stored in the .yaml file that accompanies each package which is stored in the archive # Extra metadata saved to metadata file, and will be written to yaml file, needs to be a dict like object @@ -144,7 +145,7 @@ # Now the checksum packageChecksum = CommonUtils.generateFileChecksum( contentsNeedingMetadata ) - # Start preparing the metadata we're going to save alongside the package + # Start preparing the metadata we're going to save alongside the package metadataForPackage = {} # If we have extraMetadata for this Package, then we need to pre-seed the metadata dictionary @@ -155,7 +156,8 @@ metadataForPackage.update({ 'timestamp': packageTimestamp, 'checksum': packageChecksum, - 'scmRevision': scmRevision + 'scmRevision': scmRevision, + 'contentsSuffix': self.contentsSuffix, }) # Write the YAML out to a temporary file @@ -219,22 +221,29 @@ # Performs the package publishing process # This function should only be called on the archive server and will not function correctly on clients. 
def publishPackage( self, package ): - # Determine the names the metadata and archive files would have respectively + # Determine where we can find the metadata file and what its final home will be metadataFilename = package + ".yaml" - contentsFilename = package + self.contentsSuffix + stagedMetadataPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, metadataFilename ) + finalMetadataPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, metadataFilename ) + + # Because we need to know the contentsSuffix of the file we're publishing, load the metadata for the file... + with open(stagedMetadataPath, 'r', encoding='utf-8') as metadataFile: + metadata = yaml.load( metadataFile ) + contentsSuffix = metadata.get('contentsSuffix', self.contentsSuffix) + + # Now that we know the contentsSuffix, we can go ahead and determine where our package's content can be found and where it needs to be moved to + contentsFilename = package + contentsSuffix + stagedContentsPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, contentsFilename ) + finalContentsPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, contentsFilename ) # Move the contents file first # Assuming we're on the same file system this should be an atomic operation and thus instant # We move the metadata second in case uploadDirectory and archiveDirectory are on different file systems # As the contents file could be several hundred megabytes, while the metadata file should be a matter of a few kilobytes and thus copy across instantly # Also, as the metadata file governs when files should be expired, it is better to over-expire than risk an outdated cached copy being used - stagedContentsPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, contentsFilename ) - finalContentsPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, contentsFilename ) shutil.move( 
stagedContentsPath, finalContentsPath ) # Now the metadata goes over as well - stagedMetadataPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, metadataFilename ) - finalMetadataPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, metadataFilename ) shutil.move( stagedMetadataPath, finalMetadataPath ) # Now we update the global manifest file for this platform