diff --git a/helpers/helperslib/Buildable.py b/helpers/helperslib/Buildable.py index 83a53e3..642f6d5 100644 --- a/helpers/helperslib/Buildable.py +++ b/helpers/helperslib/Buildable.py @@ -1,431 +1,431 @@ import os import re import json import yaml import fnmatch # Regex for the dependency rules ruleRegexp = re.compile(r""" (?P<project>[^\[]+) \s* (?: \[ (?P<project_branch>[^ ]+) \] )? \s* : \s* (?P<ignore_dependency>-)? (?P<dependency>[^\[]+) \s* (:? \[ (?P<dependency_branch>[^ ]+) \] )? """,re.X) def convertPathToName( path ): # We take everything after the last slash as the name splitPath = path.split('/') return splitPath[-1] # Skeleton class used to hold some variables and data class Project(object): # Setup our defaults def __init__(self, path, metadata = {}): # Store the path and metadata for later use... self.path = path # Store our metadata away as well self.metadata = metadata # By default we use the last component of the path as our identifier self.identifier = convertPathToName(path) # If we have an identifier available to us though, then use it! if 'identifier' in self.metadata: self.identifier = self.metadata['identifier'] # We are not an ignored project by default self.ignore = False # We are not ignored on any platforms by default self.ignoredOnPlatforms = [] # We are not a virtual dependency by default either self.virtualDependency = False # We have no dependencies or negated dependencies self.dependencies = [] self.negatedDeps = [] # Intercept any requests to retrieve attributes - the metadata may be able to help them def __getattr__(self, name): # Does the metadata know about it? 
if name in self.metadata: return self.metadata[name] # If not then we can't help raise AttributeError # Class which actually performs dependency resolution # It is also responsible for loading projects as requested, along with the dependency data files # It does not need to care about logical-module-structure, as that is out of scope for it and is the concern of Jenkins only class DependencyResolver(object): # Initialise all of our internals def __init__(self): # Our store of projects # This is keyed by project name (not path) with the value being a instance of the Project class self.knownProjects = {} # We need a place to store dynamic dependencies which can cover multiple projects # These can be either positive or negative (negate) rules self.dynamicDependencies = [] self.dynamicNegatedDeps = [] def retrieveProject( self, pathOrName ): # Ensure we are working with a name projectName = convertPathToName( pathOrName ) # Now try to lookup the Project instance. If it doesn't exist return None try: return self.knownProjects[ projectName ] except Exception: return None # Load the projects from the YAML file def loadProjectsFromTree( self, directoryPath ): # Get a listing of everything in this directory filesPresent = os.scandir(directoryPath) # We will recurse into directories beneath this one # If there is a metadata.yaml file present, we will parse it to determine if this is a repository we need to register for entry in filesPresent: # Is it a symlink? We always ignore these if entry.is_symlink(): continue # Is it a directory we need to recurse into? if entry.is_dir(): self.loadProjectsFromTree(entry.path) continue # If the filename isn't metadata.yaml, ignore it if entry.name != 'metadata.yaml': continue # Load the metadata.yaml file and create a project from it projectMetadataFile = open(entry.path, 'r', encoding='utf-8') # Parse the YAML file projectMetadata = yaml.safe_load(projectMetadataFile) # Is it a repository - ie. something we need to know about? 
- if not projectMetadata['hasrepo'] or not projectMetadata['repoactive']: + if not projectMetadata['repoactive']: continue # Looks like we have a repository - now we need to load it in newProject = Project( projectMetadata['projectpath'], projectMetadata ) # Register it self.knownProjects[ newProject.identifier ] = newProject # Load a file containing a list of projects which we are to ignore def loadProjectsIgnoreList( self, ignoreListPath ): # Open the file... ignoreListFile = open( ignoreListPath, 'r' ) # First, remove any empty lines as well as comments ignoreList = [ project.strip() for project in ignoreListFile if project.find("#") == -1 and project.strip() ] # Now mark any listed project as ignored for entry in ignoreList: # We rely on Python sharing objects aggressively here to ensure that the object in the store will be marked as a project to ignore project = self.retrieveProject(entry) if project: project.ignore = True # Load a file containing details on which Projects should be ignored on certain Platforms def loadProjectsIgnoreRules( self, ignoreRulesPath ): # Open the file ignoreRulesFile = open(ignoreRulesPath, 'r', encoding='utf-8') # Parse the YAML file ignoreRules = yaml.safe_load(ignoreRulesFile) # We'll go through each platform's section in turn for platform in ignoreRules.keys(): # Now go over each rule in turn (which is just the name of the project... for projectPath in ignoreRules[platform]: # Retrieve the project project = self.retrieveProject( projectPath ) # Was it valid? if project == None: # Apparently not continue # Add this platform to the list of ignored platforms for this project project.ignoredOnPlatforms.append( platform ) # Load a file which contains dependency rules for us to evaluate def loadDependenciesFromFile( self, dependencyDataFilename ): # Open the file... 
dependencyData = open( dependencyDataFilename, 'r' ) for entry in dependencyData: # Cleanup the dependency entry and remove any comments commentPos = entry.find("#") if commentPos >= 0: entry = entry[0:commentPos] # Prepare to extract the data and skip if the extraction fails match = ruleRegexp.search( entry.strip() ) if not match: continue # Determine which project is being assigned the dependency projectPath = match.group('project').lower() project = self.retrieveProject( projectPath ) # Validate it (if the project lookup failed and it is not dynamic, then it is a virtual dependency) if project == None and projectPath[-1] != '*': # Create the virtual dependency project = Project( projectPath ) project.virtualDependency = True # Now register it - we can continue normally after this self.knownProjects[ project.identifier ] = project # Ensure we know the dependency dependencyPath = match.group('dependency').lower() dependency = self.retrieveProject( dependencyPath ) if dependency == None: continue # Is this a dynamic project? if projectPath[-1] == '*': dependencyEntry = ( projectPath, dependency ) # Is it negated or not? if match.group('ignore_dependency'): self.dynamicNegatedDeps.append( dependencyEntry ) else: self.dynamicDependencies.append( dependencyEntry ) # Otherwise it must be a project specific rule else: # Is it negated or not? 
if match.group('ignore_dependency'): project.negatedDeps.append( dependency ) else: project.dependencies.append( dependency ) # Return a list of dependencies of ourselves def forProject(self, project, platform, includeSubDeps = True, checkDynamicDeps = True): self.dependenciesCache = {} return self.forProjectRec(project, platform, includeSubDeps, checkDynamicDeps) def forProjectRec(self, project, platform, includeSubDeps = True, checkDynamicDeps = True): if project in self.dependenciesCache: return self.dependenciesCache[project] # Initialise - let's get ready to parse the dependencies file ourDeps = finalDynamic = [] if checkDynamicDeps: # Prepare: Get the list of dynamic dependencies and negations which apply to us dynamicDeps = self._resolveDynamicDependencies( project, self.dynamicDependencies ) negatedDynamic = self._resolveDynamicDependencies( project, self.dynamicNegatedDeps ) # Start our list of dependencies # Run the list of dynamic dependencies against the dynamic negations to determine which ones we need to keep ourDeps = finalDynamic = self._processDependencyNegation( dynamicDeps, negatedDynamic + project.negatedDeps ) # Add the project level dependencies to the list of our dependencies # Then run the list of our dependencies against the project negations ourDeps = self._processDependencyNegation( project.dependencies, project.negatedDeps ) + ourDeps # Ensure the current project is not listed (due to a dynamic dependency for instance) ourDeps = [dep for dep in ourDeps if dep != project] # Ensure that no projects which have been ignored (either globally or for this platform) are listed as a dependency ourDeps = [dep for dep in ourDeps if dep.ignore is False and platform not in dep.ignoredOnPlatforms] # Add the dependencies of our dependencies if requested if includeSubDeps: # Determine which projects we need to resolve dependencies for # We process dynamic dependencies separately, as they can be recursive # We also skip any dependency which has been 
determined as being needed by a higher up iteration to avoid unnecessary resolving of dependencies toLookup = [x for x in ourDeps if x not in finalDynamic] for dependency in toLookup: ourDeps = self.forProjectRec(dependency, platform, includeSubDeps = True) + ourDeps # Process dynamic dependencies here # We won't look at other dynamic dependency rules when we do this # This is slightly limiting, but it is otherwise possible to end up in a dependency resolution loop dynamicLookup = ourDeps + finalDynamic for dependency in dynamicLookup: ourDeps = self.forProjectRec(dependency, platform, includeSubDeps = True, checkDynamicDeps = False) + ourDeps # Re-ensure the current project is not listed and that they are not virtual # Dynamic dependency resolution of sub-dependencies may have re-added it ourDeps = [dep for dep in ourDeps if dep != project and not dep.virtualDependency] # Ensure we don't have any duplicates deps = self.unique(ourDeps) self.dependenciesCache[project] = deps return deps def unique(self, sequence): seen = set() return [x for x in sequence if not (x in seen or seen.add(x))] def _resolveDynamicDependencies(self, project, dynamicDeps): # Go over the dynamic dependencies list we have and see if we match projectDeps = [] for dynamicName, dependency in dynamicDeps: # First we need to see if the dynamic name matches against our path if not fnmatch.fnmatch( project.path, dynamicName ): continue # We match this - add it projectDeps.append( dependency ) return projectDeps def _processDependencyNegation(self, dependentProjects, negatedProjects): # Remove any dependencies which have been negated return [x for x in dependentProjects if not x in negatedProjects] # Class which can parse the product-definitions.yaml file # This allows us to determine what build combinations we need to support class ProductHandler(object): # Set ourselves up def __init__(self, dependencyResolver): # Save the dependency resolver we were given # Projects will be retrieved from this 
self.dependencyResolver = dependencyResolver # We need a place to store data on the products we know about self.productsData = {} # Load a platform-builds.yaml file def loadProductInformation(self, filePath): # Open the file and load it's content # We expect a YAML format file with open(filePath, 'r') as dataFile: # Parse the YAML file self.productsData = yaml.safe_load( dataFile ) # Let the user know what Products we know about def knownProducts(self): # The keys of the mapping file we've loaded are our known Products return self.productsData.keys() # What Platforms do we know about? def platformsFor(self, product): # In this case a Product is considered to cover all Platforms which one or more rules cover knownPlatforms = [] # Grab the repository rulesets we will be examining rulesetsToCheck = self.productsData[ product ]['includes'] # Start checking each ruleset for currentRuleset in rulesetsToCheck: # Add it's list of Platforms to our list knownPlatforms += currentRuleset['platforms'] # Remove duplicates from the list and return it return list(set(knownPlatforms)) def branchGroupsFor(self, product): # Return the listing straight from the YAML data # No extra processing is needed here return self.productsData[ product ]['branchGroups'] def projectsFor(self, product, platform): # Create a list to store the projects which match matchingProjects = [] # Grab the rulesets we will need to parse rulesetsToCheck = self.productsData[ product ]['includes'] # Let's go over each ruleset now for currentRuleset in rulesetsToCheck: # First, does it cover the Platform we're interested in? if platform not in currentRuleset['platforms']: # Nothing for us to look at then... 
continue # Now we can check each rule in this ruleset to see if it is matched for rule in currentRuleset['repositories']: # Check to see which projects match this rule matches = [ project for project in self.dependencyResolver.knownProjects.values() if fnmatch.fnmatch( project.path, rule ) ] # Merge in existing matches matchingProjects += matches # Remove any matches which are ignored either globally or on the platform level matchingProjects = [ project for project in matchingProjects if project.ignore is False and platform not in project.ignoredOnPlatforms ] # Remove duplicates and return the list return list(set(matchingProjects)) def notificationsFor(self, product, project, failuresOnly = False): # Do we have any notification rules for this Product? if 'notifications' not in self.productsData[ product ]: # Nothing for us to do if we don't have any rules to look at return list() # Grab the rules for easier reference notificationRules = self.productsData[ product ]['notifications'] # And setup a place to store the addresses we find addressesToNotify = list() # Let's start going over the rules for rule in notificationRules: # Should we be considering this rule? if 'failuresOnly' in rule and rule['failuresOnly'] != failuresOnly: continue # Does the rule match? 
if fnmatch.fnmatch( project.path, rule['match'] ): # Add this rules address to the list then addressesToNotify.append( rule['to'] ) # All done, return the list of addresses we found return addressesToNotify # Class which handles the logical-module-structure file # It assists us in turning Project paths into the branch we need to be building class BranchResolver(object): # Set ourselves up def __init__(self, dependencyResolver): # Save the dependency resolver we were given # Projects will be retrieved from this self.dependencyResolver = dependencyResolver # We need a place to store dynamic dependencies which can cover multiple projects # These can be either positive or negative (negate) rules self.projectsToBranches = {} # Load a platform-builds.yaml file def loadProjectsToBranchesData(self, filePath): # Open the file and load it's content # We expect a JSON format file with open(filePath, 'r') as mappingFile: # Parse the YAML file self.logicalModuleStructure = json.load( mappingFile ) self.projectsToBranches = self.logicalModuleStructure['groups'] def branchFor(self, project, branchGroup): # For storing the branch we have found so far currentBranch = None # And for storing the rule we are currently matched against currentMatchedRule = '' # Check each mapping we have # If we don't find something specified in a given map, then it's assumed to not be specified # and we continue parsing. If Blank has been set then we accept that as a valid result - it means the build has been disabled for ruleToCheck, groupsToBranches in self.projectsToBranches.items(): # Does this one match? if not fnmatch.fnmatch( project.path, ruleToCheck ): continue # Does it have the branch group we are after? if branchGroup not in groupsToBranches: continue # Does it match the currently matched rule? # If it does, then it's less specific and should be ignored if fnmatch.fnmatch( currentMatchedRule, ruleToCheck ): continue # We have it! 
currentBranch = groupsToBranches[ branchGroup ] currentMatchedRule = ruleToCheck # Return the branch we found return currentBranch