diff --git a/helpers/helperslib/Packages.py b/helpers/helperslib/Packages.py
--- a/helpers/helperslib/Packages.py
+++ b/helpers/helperslib/Packages.py
@@ -80,7 +80,23 @@
     def retrievePackage( self, package ):
         # Determine the names the metadata and archive files would have respectively
         metadataFilename = package + ".yaml"
-        contentsFilename = package + self.contentsSuffix
+
+        def contentsFilename():
+            try:
+                contentsSuffix = localMetadata['contentsSuffix']
+            except KeyError:
+                contentsSuffix = self.contentsSuffix
+            return package + contentsSuffix
+
+        def localContentsPath():
+            return os.path.join(self.cacheLocation(), contentsFilename())
+
+        # Does the archive contain what we are after?
+        # It will never contain it if the use of a local cache has been disabled
+        if package not in self.serverManifest:
+            # There is nothing for us to fetch - the server will just yield a 404
+            # So let's bow out gracefully here
+            return ( None, None )
 
         # Begin determining if we need to download or not
         # We start from the assumption we will need to download an archive
@@ -96,45 +112,40 @@
 
             serverMetadata = self.serverManifest[ package ]
             # If the server timestamp is lower or the same, no need to fetch
             needToDownload = ( serverMetadata['timestamp'] > localMetadata['timestamp'] )
-
-        # Does the local contents file exist?
-        localContentsPath = os.path.join(self.cacheLocation(), contentsFilename)
-        if not os.path.exists( localContentsPath ):
-            # If it doesn't, we always need to download
-            needToDownload = True
-
-        # Does the archive contain what we are after?
-        # It will never contain it if the use of a local cache has been disabled
-        if package not in self.serverManifest:
-            # There is nothing for us to fetch - the server will just yield a 404
-            # So let's bow out gracefully here
-            return ( None, None )
+        # Does the local contents file exist?
+        if not os.path.exists( localContentsPath() ):
+            # If it doesn't, we always need to download
+            needToDownload = True
 
         # Let's retrieve the file if we need to now...
         if needToDownload:
-            # Download the archive first...
-            response = urllib.request.urlopen( self.downloadBaseUrl() + '/' + contentsFilename )
-            latestContent = tempfile.NamedTemporaryFile(delete=False, mode='wb', dir=self.temporaryFileLocation())
-            latestContent.write( response.read() )
-            latestContent.close()
-
-            # Now the metadata file...
+            # Download the metadata file...
             response = urllib.request.urlopen( self.downloadBaseUrl() + '/' + metadataFilename )
+            content = response.read()
             latestMetadata = tempfile.NamedTemporaryFile(delete=False, mode='wb', dir=self.temporaryFileLocation())
-            latestMetadata.write( response.read() )
+            latestMetadata.write( content )
             latestMetadata.close()
 
+            # And parse the content so we get the correct contentsSuffix
+            localMetadata = yaml.load( content )
+
+            # And now the archive...
+            response = urllib.request.urlopen( self.downloadBaseUrl() + '/' + contentsFilename() )
+            latestContent = tempfile.NamedTemporaryFile(delete=False, mode='wb', dir=self.temporaryFileLocation())
+            latestContent.write( response.read() )
+            latestContent.close()
+
             # Move both to their final resting places
-            shutil.move( latestContent.name, localContentsPath )
+            shutil.move( latestContent.name, localContentsPath() )
             shutil.move( latestMetadata.name, localMetadataPath )
 
         # All done, we can return a tuple of the archive and metadata now
         # As we want to return the metadata already parsed (nobody outside this class needs to know it is stored as YAML) we'll load it now
         # The archive is returned as a simple path to the file, which can be passed to tarfile.open() as appropriate
         with open(localMetadataPath, 'r', encoding='utf-8') as localMetadataFile:
             localMetadata = yaml.load( localMetadataFile )
-        return ( localContentsPath, localMetadata )
+        return ( localContentsPath(), localMetadata )
 
     # Generates the metadata which is stored in the .yaml file that accompanies each package which is stored in the archive
     # Extra metadata saved to metadata file, and will be written to yaml file, needs to be a dict like object
@@ -144,7 +155,7 @@
         # Now the checksum
         packageChecksum = CommonUtils.generateFileChecksum( contentsNeedingMetadata )
 
-        # Start preparing the metadata we're going to save alongside the package 
+        # Start preparing the metadata we're going to save alongside the package
        metadataForPackage = {}
 
         # If we have extraMetadata for this Package, then we need to pre-seed the metadata dictionary
@@ -155,7 +166,8 @@
         metadataForPackage.update({
             'timestamp': packageTimestamp,
             'checksum': packageChecksum,
-            'scmRevision': scmRevision
+            'scmRevision': scmRevision,
+            'contentsSuffix': self.contentsSuffix,
         })
 
         # Write the YAML out to a temporary file
@@ -221,20 +233,31 @@
     def publishPackage( self, package ):
         # Determine the names the metadata and archive files would have respectively
         metadataFilename = package + ".yaml"
-        contentsFilename = package + self.contentsSuffix
+
+        stagedMetadataPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, metadataFilename )
+        finalMetadataPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, metadataFilename )
+
+        # Determine the contentsSuffix recorded in the metadata, using self.contentsSuffix as the fallback
+        contentsSuffix = self.contentsSuffix
+        with open(stagedMetadataPath, 'r', encoding='utf-8') as metadataFile:
+            metadata = yaml.load( metadataFile )
+        try:
+            contentsSuffix = metadata['contentsSuffix']
+        except KeyError:
+            pass
+
+        contentsFilename = package + contentsSuffix
+        stagedContentsPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, contentsFilename )
+        finalContentsPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, contentsFilename )
 
         # Move the contents file first
         # Assuming we're on the same file system this should be an atomic operation and thus instant
         # We move the metadata second in case uploadDirectory and archiveDirectory are on different file systems
         # As the contents file could be several hundred megabytes, while the metadata file should be a matter of a few kilobytes and thus copy across instantly
         # Also, as the metadata file governs when files should be expired, it is better to over-expire than risk an outdated cached copy being used
-        stagedContentsPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, contentsFilename )
-        finalContentsPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, contentsFilename )
         shutil.move( stagedContentsPath, finalContentsPath )
 
         # Now the metadata goes over as well
-        stagedMetadataPath = os.path.join( self.config['client']['uploadDirectory'], self.platform, metadataFilename )
-        finalMetadataPath = os.path.join( self.config['server']['archiveDirectory'], self.platform, metadataFilename )
         shutil.move( stagedMetadataPath, finalMetadataPath )
 
         # Now we update the global manifest file for this platform
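For reference, a minimal sketch (not taken from the patch) of the contentsSuffix fallback that retrievePackage() and publishPackage() now share: the package name, the metadata documents and the ".tar.xz" default below are hypothetical stand-ins for a real package's .yaml file and self.contentsSuffix.

    import yaml

    def resolveContentsFilename( package, metadata, defaultSuffix=".tar.xz" ):
        # Prefer the suffix recorded in the package metadata; fall back to the
        # configured default when the key is absent (metadata written before this change)
        try:
            contentsSuffix = metadata['contentsSuffix']
        except KeyError:
            contentsSuffix = defaultSuffix
        return package + contentsSuffix

    # Metadata as it would be parsed from the accompanying .yaml file
    newStyle = yaml.safe_load( "timestamp: 1590000000\nchecksum: abc123\ncontentsSuffix: .tar.gz\n" )
    oldStyle = yaml.safe_load( "timestamp: 1590000000\nchecksum: abc123\n" )

    print( resolveContentsFilename( "somePackage", newStyle ) )   # somePackage.tar.gz
    print( resolveContentsFilename( "somePackage", oldStyle ) )   # somePackage.tar.xz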