diff --git a/bin/EmergeBase.py b/bin/EmergeBase.py index 81ded872e..20a86d9e7 100644 --- a/bin/EmergeBase.py +++ b/bin/EmergeBase.py @@ -1,305 +1,300 @@ # # copyright (c) 2009 Ralf Habacker # import os import sys import datetime from ctypes import * import utils import portage import compiler from EmergeConfig import * import utils ## @todo complete a release and binary merge dir below rootdir # 1. enable build type related otDmerge install settings # 2a. use different install databases for debug and release # 3. binary packages which are build type independent should be # marked in both databases or should have a separate install database # question: How to detect reliable this case ? class EmergeBase(object): """base class for emerge system - holds attributes and methods required by base classes""" def __init__( self): # TODO: some __init__ of subclasses need to already have been # called here. That is really the wrong way round. object.__init__(self) utils.debug( "EmergeBase.__init__ called", 2 ) self.filename, self.category, self.subpackage, self.package, mod = portage.PortageInstance._CURRENT_MODULE#ugly workaround we need to replace the constructor if not hasattr(self, 'subinfo'): self.subinfo = mod.subinfo(self, portage.PortageInstance.options) if not hasattr(self, 'buildSystemType'): self.buildSystemType = None # if implicit build time dependency is wanted, depend on internal packages # for this class and all of its ancestor classes if emergeSettings.getboolean("General", "EMERGE_ENABLE_IMPLICID_BUILDTIME_DEPENDENCIES", False): for cls in type(self).mro(): className = cls.__name__ packageName = 'internal/%s' % className if os.path.exists(os.path.join(EmergeStandardDirs.emergeRoot() , 'emerge', 'portage', 'internal', className, '%s-internal.py' % className)): if self.subinfo and not packageName in self.subinfo.buildDependencies: self.subinfo.buildDependencies[packageName] = 'default' if hasattr(self,'alreadyCalled'): return self.alreadyCalled = True 
self.versioned = False self.CustomDefines = "" self.createCombinedPackage = False ## specifies if a build type related root directory should be used self.useBuildTypeRelatedMergeRoot = False if emergeSettings.getboolean("General","EMERGE_MERGE_ROOT_WITH_BUILD_TYPE", False): self.useBuildTypeRelatedMergeRoot = True self.isoDateToday = str( datetime.date.today() ).replace('-', '') def __str__(self): if self.subpackage: return "%s/%s/%s" % (self.category,self.subpackage,self.package) else: return "%s/%s" % (self.category,self.package) @property def noFetch(self): return emergeSettings.getboolean("General", "WorkOffline", False) @property def noCopy(self): return emergeSettings.getboolean("General", "EMERGE_NOCOPY", False) @property def noFast(self): return emergeSettings.getboolean("General", "EMERGE_NOFAST", True ) @property def noClean(self): return emergeSettings.getboolean("General", "EMERGE_NOCLEAN", False ) @property def forced(self): return emergeSettings.getboolean("General", "EMERGE_FORCED", False ) @property def buildTests(self): return emergeSettings.getboolean("General", "EMERGE_BUILDTESTS", False ) def __adjustPath(self, directory): """return adjusted path""" if not self.subinfo.options.useShortPathes: return directory path = c_char_p(directory) length = windll.kernel32.GetShortPathNameA(path, 0, 0) if length == 0: return directory buf = create_string_buffer('\000' * (length + 1)) windll.kernel32.GetShortPathNameA(path, byref(buf), length+1) # ignore function result... 
- if utils.verbose() > 0: - print("converting " + directory + " to " + buf.value) + utils.debug("converting " + directory + " to " + buf.value) return buf.value def buildType(self): """return currently selected build type""" return emergeSettings.get("General","EMERGE_BUILDTYPE") def compiler(self): """deprecated""" """return currently selected compiler""" return compiler.getCompilerName() def buildArchitecture(self): """return the target CPU architecture""" compiler.architecture() def workDirPattern(self): """return base directory name for package related work directory""" directory = "" if self.subinfo.options.useCompilerType == True: directory += "%s-" % compiler.getCompilerName() if self.subinfo.options.cmake.useIDE or self.subinfo.options.cmake.openIDE: directory += "ide-" if self.subinfo.options.useBuildType == False: directory += "%s" % (self.buildTarget) elif( self.buildType() == None ): directory += "%s-%s" % ("default", self.buildTarget) else: directory += "%s-%s" % (self.buildType(), self.buildTarget) return directory def imageDirPattern(self): """return base directory name for package related image directory""" directory = "image" # we assume that binary packages are for all compiler and targets ## \todo add image directory support for using binary packages for a specific compiler and build type if self.buildSystemType == 'binary': return directory if self.subinfo.options.useCompilerType == True: directory += '-' + compiler.getCompilerName() if self.subinfo.options.useBuildType == True: directory += '-' + self.buildType() directory += '-' + self.buildTarget return directory def sourceDir(self, dummyIndex=0): utils.abstract() def packageDir(self): """ add documentation """ return self.__adjustPath( portage.getDirname( self.category, self.package ) ) def buildRoot(self): """return absolute path to the root directory of the currently active package""" buildroot = os.path.join( EmergeStandardDirs.emergeRoot(), "build", self.category, self.package ) return 
self.__adjustPath(buildroot) def workDir(self): """return absolute path to the 'work' subdirectory of the currently active package""" _workDir = os.path.join( self.buildRoot(), "work" ) return self.__adjustPath(_workDir) def buildDir(self): utils.debug("EmergeBase.buildDir() called", 2) builddir = os.path.join(self.workDir(), self.workDirPattern()) if self.subinfo.options.unpack.unpackIntoBuildDir and self.subinfo.hasTargetSourcePath(): builddir = os.path.join(builddir, self.subinfo.targetSourcePath()) utils.debug("package builddir is: %s" % builddir, 2) return self.__adjustPath(builddir) def imageDir(self): """return absolute path to the install root directory of the currently active package """ imageDir = os.path.join( self.buildRoot(), self.imageDirPattern() ) return self.__adjustPath(imageDir) def installDir(self): """return absolute path to the install directory of the currently active package. This path may point to a subdir of imageDir() in case @ref info.targetInstallPath is used """ if self.subinfo.hasInstallPath(): installDir = os.path.join( self.imageDir(), self.subinfo.installPath()) elif self.subinfo.options.install.installPath: installDir = os.path.join(self.imageDir(), self.subinfo.options.install.installPath) else: installDir = self.imageDir() return self.__adjustPath(installDir) def mergeSourceDir(self): """return absolute path to the merge source directory of the currently active package. 
This path may point to a subdir of imageDir() in case @ref info.targetInstallPath for a specific target or @ref self.subinfo.options.merge.sourcePath is used """ if self.subinfo.hasMergeSourcePath(): directory = os.path.join( self.imageDir(), self.subinfo.mergeSourcePath() ) elif not self.subinfo.options.merge.sourcePath == None: directory = os.path.join( self.imageDir(), self.subinfo.options.merge.sourcePath ) else: directory = self.imageDir() return self.__adjustPath(directory) def mergeDestinationDir(self): """return absolute path to the merge destination directory of the currently active package. This path may point to a subdir of rootdir in case @ref info.targetMergePath for a specific build target or @ref self.subinfo.options.merge.destinationPath is used """ if self.subinfo.hasMergePath(): directory = os.path.join( EmergeStandardDirs.emergeRoot(), self.subinfo.mergePath() ) elif not self.subinfo.options.merge.destinationPath == None: directory = os.path.join( EmergeStandardDirs.emergeRoot(), self.subinfo.options.merge.destinationPath ) elif not self.useBuildTypeRelatedMergeRoot or self.subinfo.options.merge.ignoreBuildType: directory = EmergeStandardDirs.emergeRoot() elif self.buildType() == 'Debug': directory = os.path.join(EmergeStandardDirs.emergeRoot(),'debug') elif self.buildType() == 'Release': directory = os.path.join(EmergeStandardDirs.emergeRoot(),'release') elif self.buildType() == 'RelWithDebInfo': directory = os.path.join(EmergeStandardDirs.emergeRoot(),'relwithdebinfo') else: directory = EmergeStandardDirs.emergeRoot() return self.__adjustPath(directory) def packageDestinationDir( self, withBuildType=True ): """return absolute path to the directory where binary packages are placed into. 
Default is to optionally append build type subdirectory""" utils.debug( "EmergeBase.packageDestinationDir called", 2 ) dstpath = emergeSettings.get("General","EMERGE_PKGDSTDIR", os.path.join( EmergeStandardDirs.emergeRoot(), "tmp" ) ) if withBuildType: if emergeSettings.getboolean("General", "EMERGE_MERGE_ROOT_WITH_BUILD_TYPE", False ): dstpath = os.path.join( dstpath, self.buildType()) if not os.path.exists(dstpath): utils.createDir(dstpath) return dstpath @property def buildTarget(self): return self.subinfo.buildTarget @property def version(self): return self.subinfo.defaultTarget @property def rootdir(self): return EmergeStandardDirs.emergeRoot() def enterBuildDir(self): - utils.debug( "EmergeBase.enterBuildDir called", 2 ) + utils.trace( "EmergeBase.enterBuildDir called") if ( not os.path.exists( self.buildDir() ) ): os.makedirs( self.buildDir() ) - if utils.verbose() > 0: - print("creating: %s" % self.buildDir()) + utils.debug("creating: %s" % self.buildDir()) os.chdir( self.buildDir() ) - if utils.verbose() > 0: - print("entering: %s" % self.buildDir()) + utils.debug("entering: %s" % self.buildDir()) def enterSourceDir(self): if ( not os.path.exists( self.sourceDir() ) ): return False utils.warning("entering the source directory!") os.chdir( self.sourceDir() ) - if utils.verbose() > 0: - print("entering: %s" % self.sourceDir()) + utils.debug("entering: %s" % self.sourceDir()) def system( self, command, errorMessage="", debuglevel=1, **kw): """convenience function for running system commands. This method prints a debug message and then runs a system command.
If the system command returns with errors the method prints an error message and exits if @ref self.subinfo.options.exitOnErrors is true""" - utils.debug( str(command), debuglevel ) if utils.system( command, **kw): return True if self.subinfo.options.exitOnErrors: utils.warning( "while running %s cmd: %s" % (errorMessage, str(command)) ) else: utils.warning( "while running %s cmd: %s" % (errorMessage, str(command)) ) return False def proxySettings(self): host = emergeSettings.get("General", 'EMERGE_PROXY_HOST', "") port = emergeSettings.get("General", 'EMERGE_PROXY_PORT', "") username = emergeSettings.get("General", 'EMERGE_PROXY_USERNAME', "") password = emergeSettings.get("General", 'EMERGE_PROXY_PASSWORD', "") return [host, port, username, password] diff --git a/bin/InstallDB.py b/bin/InstallDB.py index 51eec9af9..53abdf8de 100644 --- a/bin/InstallDB.py +++ b/bin/InstallDB.py @@ -1,362 +1,362 @@ import os import sqlite3 import threading from EmergeConfig import * import utils import portage class InstallPackage(object): """ InstallPackage finalizes an installation. If you call addInstalled or remInstalled an InstallPackage object is returned which you can use to handle file information with the InstallDB. 
For installation use code similar to this one: # get an InstallPackage object p p = InstallDB.installdb.addInstalled( "cat", "pac", "ver", "prefix" ) # add files ( including the hash ) p.addFiles( [ ( "file1", "hash1" ), ( "file2", "hash2" ), ( "file3", "hash3" ) ] ) if failed: # in case we somehow need to go back p.revert() else: # finalize the installation p.install() Deinstallation works similar: p = InstallDB.installdb.remInstalled( "cat", "pac", "ver", "prefix" ) # get the files ( including the hash ) f = p.getFiles() # f now contains [ ( "file1", "hash1" ), ( "file2", "hash2" ), ( "file3", "hash3" ) ] if failed: # in case we somehow need to go back p.revert() else: # finalize the uninstall p.uninstall() """ def __init__( self, cursor, packageId ): self.cursor = cursor self.packageId = packageId self.fileDict = dict() def addFiles( self, fileDict ): """ appends files to the list of files to be installed """ self.fileDict.update( fileDict ) def getFiles( self ): """ get a list of files that will be uninstalled """ cmd = '''SELECT filename, fileHash FROM fileList WHERE packageId=?;''' - utils.debug( "executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 2 ) self.cursor.execute(cmd, (self.packageId,)) return self.cursor.fetchall() def revert( self ): """ revert all changes made to the database, use with care """ self.cursor.connection.rollback() def uninstall( self ): """ really uninstall that package """ cmd = '''DELETE FROM fileList WHERE packageId=?;''' - utils.debug( "executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 2 ) self.cursor.execute(cmd, (self.packageId,)) cmd = '''DELETE FROM packageList WHERE packageId=?;''' - utils.debug( "executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 1 ) + utils.debug( 
"executing sqlcmd '%s' with parameter %s" % ( cmd, str( self.packageId ) ), 2 ) self.cursor.execute(cmd, (self.packageId,)) self.cursor.connection.commit() def install( self ): """ marking the package & package file list installed """ fileNumber = len( self.fileDict ) # keys() and values will stay in the same order if no changes are done in between calls # structure of each tuple: # fileId | packageId == package Id | filenames | file hashes dataList = list(zip( [ None ] * fileNumber, [ self.packageId ] * fileNumber, list(self.fileDict.keys()), list(self.fileDict.values()) )) cmd = '''INSERT INTO fileList VALUES (?, ?, ?, ?)''' - utils.debug( "executing sqlcmd '%s' %s times" % ( cmd, len( self.fileDict ) ), 1 ) + utils.debug( "executing sqlcmd '%s' %s times" % ( cmd, len( self.fileDict ) ), 2 ) self.cursor.executemany( cmd, dataList ) # at last, commit all the changes so that they are committed only after everything is written to the # database self.cursor.connection.commit() def getRevision(self): self.cursor.execute("SELECT revision FROM packageList WHERE packageId == ?", (self.packageId,) ) return self.cursor.fetchall()[0][0] def getVersion(self): self.cursor.execute("SELECT version FROM packageList WHERE packageId == ?", (self.packageId,) ) return self.cursor.fetchall()[0][0] class InstallDB(object): """ a database object which provides the methods for adding and removing a package and checking its installation status. 
In case the database doesn't exist if the constructor is called, a new database is constructed """ def __init__( self, filename = None ): if filename == None: EmergeStandardDirs.allowShortpaths(False) filename = os.path.join( EmergeStandardDirs.etcPortageDir(), 'install.db' ) EmergeStandardDirs.allowShortpaths(True) self.dbfilename = filename self._prepareDatabase() def getLastId( self ): """ returns the last id from a table, which is essentially the """ cmd = '''SELECT max(packageId) FROM packageList;''' cursor = self.connection.cursor() cursor.execute( cmd ) lastId = cursor.fetchall()[0] return lastId[0] def __constructWhereStmt( self, _dict ): params = [] parametersUsed = False stmt = "" # if not prefix == '' or not category == '' or not package == '': # cmd += ''' WHERE''' # for key in list(_dict.keys()): if not _dict[ key ] == None: if parametersUsed: stmt += ''' AND''' stmt += ''' %s=?''' % key params.append( _dict[ key ] ) parametersUsed = True if not stmt == "": stmt = ''' WHERE''' + stmt return stmt, params def isInstalled( self, category, package, version=None, prefix=None ): """ returns whether a package is installed. If version and prefix are empty, all versions and prefixes will be checked. """ cmd = '''SELECT * FROM packageList''' # TODO: what is the difference between prefix=None and prefix=''? Both happens. Document. 
stmt, params = self.__constructWhereStmt( { 'prefix': prefix, 'category': category, 'packageName': package, 'version': version } ) cmd += stmt cmd += ''';''' - utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 2 ) cursor = self.connection.cursor() cursor.execute( cmd, tuple( params ) ) isPackageInstalled = len( cursor.fetchall() ) > 0 if isPackageInstalled: utils.debug( """The package %s/%s has been installed in prefix '%s' with - version '%s'.""" % ( category, package, prefix, version ), 1 ) + version '%s'.""" % ( category, package, prefix, version ), 2 ) else: utils.debug( """Couldn't find a trace that the package %s/%s has been installed in prefix '%s' with version '%s'""" % ( category, package, prefix, version ), 1 ) cursor.close() return isPackageInstalled def getInstalled( self, category=None, package=None, prefix=None ): """ returns a list of the installed packages, which can be restricted by adding package, category and prefix. """ cmd = '''SELECT category, packageName, version, prefix FROM packageList''' stmt, params = self.__constructWhereStmt( { 'prefix': prefix, 'category': category, 'packageName': package } ) cmd += stmt cmd += ''';''' - utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 2 ) cursor = self.connection.cursor() cursor.execute( cmd, tuple( params ) ) values = cursor.fetchall() cursor.close() return values def getDistinctInstalled( self, category=None, package=None, prefix=None ): """ returns a list of the installed packages, which can be restricted by adding package, category and prefix. 
""" cmd = '''SELECT DISTINCT category, packageName, version FROM packageList''' stmt, params = self.__constructWhereStmt( { 'prefix': prefix, 'category': category, 'packageName': package } ) cmd += stmt cmd += ''';''' - utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 2 ) cursor = self.connection.cursor() cursor.execute( cmd, tuple( params ) ) values = cursor.fetchall() cursor.close() return values def getPackageIds( self, category = None, package = None, prefix = None ): """ returns a list of the ids of the packages, which can be restricted by adding package, category and prefix. """ cmd = '''SELECT packageId FROM packageList''' stmt, params = self.__constructWhereStmt( { 'prefix': prefix, 'category': category, 'packageName': package } ) cmd += stmt cmd += ''';''' - utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 2 ) cursor = self.connection.cursor() cursor.execute( cmd, tuple( params ) ) values = [] for row in cursor: values.append( row[0] ) return values def addInstalled( self, category, package, version, prefix=None, ignoreInstalled=False, revision = "" ): """ adds an installed package """ cursor = self.connection.cursor() if self.isInstalled( category, package, version, prefix ) and not ignoreInstalled: raise Exception( 'package %s/%s-%s already installed (prefix %s)' % ( category, package, version, prefix ) ) params = [ None, prefix, category, package, version, revision ] cmd = '''INSERT INTO packageList VALUES (?, ?, ?, ?, ?, ?)''' - utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 1 ) + utils.debug( "executing sqlcmd '%s' with parameters: %s" % ( cmd, tuple( params ) ), 2 ) cursor.execute( cmd, tuple( params ) ) return InstallPackage( cursor, self.getLastId() ) def 
getInstalledPackages( self, category, package, prefix = None ): """ return an installed package """ cursor = self.connection.cursor() return [ InstallPackage( cursor, pId ) for pId in self.getPackageIds( category, package, prefix ) ] def _prepareDatabase( self ): """ prepare a new database and add the required table layout """ if not os.path.exists( self.dbfilename ): EmergeStandardDirs.allowShortpaths(False) if not os.path.exists( EmergeStandardDirs.etcPortageDir( ) ): os.makedirs( EmergeStandardDirs.etcPortageDir( ) ) print( "database does not exist yet: creating database", file = sys.stderr ) self.connection = sqlite3.connect( self.dbfilename ) cursor = self.connection.cursor() # first, create the required tables cursor.execute( '''CREATE TABLE packageList (packageId INTEGER PRIMARY KEY AUTOINCREMENT, prefix TEXT, category TEXT, packageName TEXT, version TEXT, revision TEXT)''' ) cursor.execute( '''CREATE TABLE fileList (fileId INTEGER PRIMARY KEY AUTOINCREMENT, packageId INTEGER, filename TEXT, fileHash TEXT)''' ) self.connection.commit() else: self.connection = sqlite3.connect( self.dbfilename ) cursor = self.connection.cursor() cursor.execute( '''PRAGMA table_info('packageList')''') if not len(cursor.fetchall()) == 6: cursor.execute('''ALTER TABLE packageList ADD COLUMN revision TEXT''') self.connection.commit() EmergeStandardDirs.allowShortpaths(True) # get a global object installdb = InstallDB() # an additional function from portage.py def printInstalled(): """get all the packages that are already installed""" host = target = portage.alwaysTrue portage.printCategoriesPackagesAndVersions( installdb.getDistinctInstalled(), portage.alwaysTrue, host, target ) def main(): """ Testing the class""" # add two databases tempdbpath1 = os.path.join( EmergeStandardDirs.emergeRoot(), "tmp", "temp1.db" ) tempdbpath2 = os.path.join( EmergeStandardDirs.emergeRoot(), "tmp", "temp2.db" ) if not os.path.exists( os.path.join( EmergeStandardDirs.emergeRoot(), "tmp" ) ): 
os.makedirs( os.path.join( EmergeStandardDirs.emergeRoot(), "tmp" ) ) if os.path.exists( tempdbpath1 ): os.remove( tempdbpath1 ) if os.path.exists( tempdbpath2 ): os.remove( tempdbpath2 ) db_temp = InstallDB( tempdbpath1 ) db = InstallDB( tempdbpath2 ) utils.debug( 'testing installation database' ) # in case the package is still installed, remove it first silently if db.isInstalled( 'win32libs', 'dbus-src', '1.4.0' ): packageList = db.getInstalledPackages( 'win32libs', 'dbus-src' ) # really commit uninstall for package in packageList: package.uninstall() utils.debug_line() utils.new_line() # add a package utils.debug( 'installing package win32libs/dbus-src-1.4.0 (release)' ) package = db.addInstalled( 'win32libs', 'dbus-src', '1.4.0', 'release' ) package.addFiles( dict().fromkeys( [ 'test', 'test1', 'test2' ], 'empty hash' ) ) # now really commit the package package.install() # add another package in a different prefix utils.debug( 'installing package win32libs/dbus-src-1.4.0 (debug)' ) package = db.addInstalled( 'win32libs', 'dbus-src', '1.4.0', 'debug' ) package.addFiles( dict().fromkeys( [ 'test', 'test1', 'test2' ], 'empty hash' ) ) # now really commit the package package.install() utils.debug_line() utils.new_line() utils.debug( 'checking installed packages' ) utils.debug( 'get installed package (release): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'release' ) ) utils.debug( 'get installed package (debug): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'debug' ) ) utils.new_line() utils.debug( 'now trying to remove package & revert it again later' ) # remove the package again packageList = db.getInstalledPackages( 'win32libs', 'dbus-src' ) for pac in packageList: for line in pac.getFiles(): # pylint: disable=W0612 # we could remove the file here # print line pass utils.debug_line() utils.new_line() utils.debug( 'checking installed packages' ) utils.debug( 'get installed package (release): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'release' ) ) 
utils.debug( 'get installed package (debug): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'debug' ) ) utils.debug_line() utils.new_line() utils.debug( 'reverting removal' ) # now instead of completing the removal, revert it for pac in packageList: pac.revert() utils.debug( 'checking installed packages' ) utils.debug( 'get installed package (release): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'release' ) ) utils.debug( 'get installed package (debug): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'debug' ) ) utils.debug_line() db.getInstalled() db.getInstalled( category='win32libs', prefix='debug' ) db.getInstalled( package='dbus-src' ) utils.new_line() utils.debug( 'now really remove the package' ) packageList = db.getInstalledPackages( 'win32libs', 'dbus-src' ) for pac in packageList: utils.debug( 'removing %s files' % len( pac.getFiles() ) ) pac.uninstall() utils.debug( 'get installed package (release): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'release' ) ) utils.debug( 'get installed package (debug): %s' % db.getInstalled( 'win32libs', 'dbus-src', 'debug' ) ) utils.debug_line() # test the import from the old style (manifest based) databases utils.new_line() print("getInstalled:", db_temp.getInstalled()) if __name__ == '__main__': main() diff --git a/bin/Source/GitSource.py b/bin/Source/GitSource.py index 00717ecd6..d37101e3f 100644 --- a/bin/Source/GitSource.py +++ b/bin/Source/GitSource.py @@ -1,286 +1,285 @@ # # copyright (c) 2009 Ralf Habacker # copyright (c) 2009 Patrick Spendrin # # git support import tempfile from Source.VersionSystemSourceBase import * ## \todo requires installed git package -> add support for installing packages class GitSource ( VersionSystemSourceBase ): """git support""" def __init__(self, subinfo=None): utils.trace( 'GitSource __init__', 2 ) if subinfo: self.subinfo = subinfo VersionSystemSourceBase.__init__( self ) # detect git installation gitInstallDir = os.path.join( self.rootdir, 'dev-utils', 'git' ) if
os.path.exists( gitInstallDir ): self.gitPath = os.path.join(gitInstallDir, 'bin', 'git') - utils.debug( 'using git from %s' % gitInstallDir, 1 ) else: self.gitPath = 'git' def __getCurrentBranch( self ): branch = None if os.path.exists( self.checkoutDir() ): tmpFile = tempfile.TemporaryFile() self.__git("branch -a", stdout=tmpFile ) # TODO: check return value for success tmpFile.seek( 0 ) for line in tmpFile: line = str(line,"UTF-8") if line.startswith("*"): branch = line[2:].rstrip() break return branch def __isLocalBranch( self, branch ): if os.path.exists( self.checkoutDir() ): tmpFile = tempfile.TemporaryFile() self.__git("branch", stdout=tmpFile ) # TODO: check return value for success tmpFile.seek( 0 ) for line in tmpFile: if str(line[2:].rstrip(), "UTF-8") == branch.rstrip(): return True return False def __isTag( self, _tag ): if os.path.exists( self.checkoutDir() ): tmpFile = tempfile.TemporaryFile() self.__git("tag", stdout=tmpFile ) # TODO: check return value for success tmpFile.seek( 0 ) for line in tmpFile: if str(line.rstrip(), "UTF-8") == _tag: return True return False def __getCurrentRevision( self ): """return the revision returned by git show""" # run the command branch = self.__getCurrentBranch() if not self.__isTag( branch ): # open a temporary file - do not use generic tmpfile because this doesn't give a good file object with python with tempfile.TemporaryFile() as tmpFile: self.__git("show", "--abbrev-commit", stdout=tmpFile ) tmpFile.seek( os.SEEK_SET ) # read the temporary file and grab the first line # print the revision - everything else should be quiet now line = tmpFile.readline() return "%s-%s" % (branch, str(line, "UTF-8").replace("commit ", "").strip()) else: # in case this is a tag, print out the tag version return branch def __fetchSingleBranch( self, repopath = None ): utils.trace( 'GitSource __fetchSingleBranch', 2 ) # get the path where the repositories should be stored to if repopath == None: repopath = self.repositoryUrl() 
utils.debug( "fetching %s" % repopath) # in case you need to move from a read only Url to a writeable one, here it gets replaced repopath = repopath.replace( "[git]", "" ) repoString = utils.replaceVCSUrl( repopath ) [ repoUrl, repoBranch, repoTag ] = utils.splitVCSUrl( repoString ) if not repoBranch and not repoTag: repoBranch = "master" ret = True # only run if wanted (e.g. no --offline is given on the commandline) if ( not self.noFetch ): self.setProxy() safePath = os.environ[ "PATH" ] # add the git path to the PATH variable so that git can be called without path os.environ[ "PATH" ] = os.path.join( self.rootdir, "git", "bin" ) + ";" + safePath checkoutDir = self.checkoutDir() # if we only have the checkoutdir but no .git within, # clean this up first if os.path.exists(checkoutDir) \ and not os.path.exists(checkoutDir + "\.git"): os.rmdir(checkoutDir) if os.path.exists(checkoutDir): if not repoTag: self.__git("pull") ret = self.__git("checkout", repoBranch or "master") and \ self.__git("pull") if self.subinfo.options.fetch.checkoutSubmodules: self.__git("submodule update --init --recursive") else: # it doesn't exist so clone the repo os.makedirs( checkoutDir ) # first try to replace with a repo url from etc/portage/emergehosts.conf recursive = '--recursive' if self.subinfo.options.fetch.checkoutSubmodules else '' ret = self.__git('clone', recursive, repoUrl, '.') # if a branch is given, we should check first if the branch is already downloaded # locally, otherwise we can track the remote branch if ret and repoBranch and not repoTag: track = "" if not self.__isLocalBranch( repoBranch ): track = "--track origin/" ret = self.__git('checkout', "%s%s" % (track, repoBranch )) # we can have tags or revisions in repoTag if ret and repoTag: if self.__isTag( repoTag ): if not self.__isLocalBranch( "_" + repoTag ): ret = self.__git('checkout', '-b', '_%s' % repoTag, repoTag) else: ret = self.__git('checkout', '_%s' % repoTag) else: ret = self.__git('checkout', repoTag) 
else: utils.debug( "skipping git fetch (--offline)" ) return ret def __git(self, command, *args, **kwargs): """executes a git command in a shell. Default for cwd is self.checkoutDir()""" if command in ('clone', 'checkout', 'fetch', 'pull', 'submodule') and emergeSettings.get( "General", "EMERGE_LOG_DIR") != "": # if stdout/stderr is redirected, git clone qt hangs forever. # It does not with option -q (suppressing progress info) command += ' -q' parts = [self.gitPath, command] parts.extend(args) if not kwargs.get('cwd'): kwargs['cwd'] = self.checkoutDir() - return utils.system(' '.join(parts), **kwargs) + return self.system(' '.join(parts), **kwargs) def __fetchMultipleBranch(self, repopath=None): utils.trace( 'GitSource __fetchMultipleBranch', 2 ) # get the path where the repositories should be stored to if repopath == None: repopath = self.repositoryUrl() utils.debug( "fetching %s" % repopath) # in case you need to move from a read only Url to a writeable one, here it gets replaced repopath = repopath.replace("[git]", "") repoString = utils.replaceVCSUrl( repopath ) [repoUrl, repoBranch, repoTag ] = utils.splitVCSUrl( repoString ) ret = True # only run if wanted (e.g. 
no --offline is given on the commandline) if ( not self.noFetch ): self.setProxy() safePath = os.environ["PATH"] # add the git path to the PATH variable so that git can be called without path os.environ["PATH"] = os.path.join( self.rootdir, "git", "bin" ) + ";" + safePath rootCheckoutDir = os.path.join(self.checkoutDir(), '.git') if not os.path.exists( rootCheckoutDir ): # it doesn't exist so clone the repo os.makedirs( rootCheckoutDir ) ret = self.__git('clone', '--mirror', repoUrl, '.', cwd=rootCheckoutDir) else: ret = self.__git('fetch', cwd=rootCheckoutDir) if not ret: utils.die( "could not fetch remote data" ) if repoBranch == "": repoBranch = "master" if ret: branchDir = os.path.join(self.checkoutDir(), repoBranch) if not os.path.exists(branchDir): os.makedirs(branchDir) ret = self.__git('clone', '--local --shared -b', repoBranch, rootCheckoutDir, branchDir, cwd=branchDir) else: ret = self.__git('pull') if not ret: utils.die( "could not pull into branch %s" % repoBranch ) if ret: #ret = self.__git('checkout', '-f') ret = self.__git("checkout", "-f", repoTag or repoBranch, cwd=branchDir) else: utils.debug( "skipping git fetch (--offline)" ) return ret def fetch(self, repopath=None): utils.trace( 'GitSource fetch', 2 ) if emergeSettings.getboolean("General","EMERGE_GIT_MULTIBRANCH", False): return self.__fetchMultipleBranch(repopath) else: return self.__fetchSingleBranch(repopath) def applyPatch(self, fileName, patchdepth, unusedSrcDir=None): """apply single patch to git repository""" utils.trace( 'GitSource ', 2 ) if fileName: patchfile = os.path.join ( self.packageDir(), fileName ) if emergeSettings.getboolean("General","EMERGE_GIT_MULTIBRANCH", False): repopath = self.repositoryUrl() # in case you need to move from a read only Url to a writeable one, here it gets replaced repopath = repopath.replace("[git]", "") repoString = utils.replaceVCSUrl( repopath ) repoBranch = utils.splitVCSUrl( repoString )[1] or "master" sourceDir = os.path.join(self.checkoutDir(),
repoBranch) else: sourceDir = self.sourceDir() #FIXME this reverts previously applied patches ! #self.__git('checkout', '-f',cwd=sourceDir) sourceDir = self.checkoutDir() return self.__git('apply', '--whitespace=fix', '-p %d' % patchdepth, patchfile, cwd=sourceDir) return True def createPatch( self ): """create patch file from git source into the related package dir. The patch file is named autocreated.patch""" utils.trace( 'GitSource createPatch', 2 ) patchFileName = os.path.join( self.packageDir(), "%s-%s.patch" % \ ( self.package, str( datetime.date.today() ).replace('-', '') ) ) utils.debug("git diff %s" % patchFileName, 1) with open(patchFileName,'wt+') as patchFile: return self.__git('diff', stdout=patchFile) def sourceVersion( self ): """print the revision returned by git show""" utils.trace( 'GitSource sourceVersion', 2 ) return self.__getCurrentRevision() def checkoutDir(self, index=0 ): utils.trace( 'GitSource checkoutDir', 2 ) return VersionSystemSourceBase.checkoutDir( self, index ) def sourceDir(self, index=0 ): utils.trace( 'GitSource sourceDir', 2 ) repopath = self.repositoryUrl() # in case you need to move from a read only Url to a writeable one, here it gets replaced repopath = repopath.replace("[git]", "") repoString = utils.replaceVCSUrl( repopath ) _, repoBranch, _ = utils.splitVCSUrl( repoString ) if repoBranch == "": repoBranch = "master" if emergeSettings.getboolean("General","EMERGE_GIT_MULTIBRANCH", False): sourcedir = os.path.join(self.checkoutDir(index), repoBranch) else: sourcedir = self.checkoutDir(index) if self.subinfo.hasTargetSourcePath(): sourcedir = os.path.join(sourcedir, self.subinfo.targetSourcePath()) utils.debug("using sourcedir: %s" % sourcedir, 2) return sourcedir def getUrls( self ): """print the url where to clone from and the branch/tag/hash""" # in case you need to move from a read only Url to a writeable one, here it gets replaced repopath = self.repositoryUrl().replace( "[git]", "" ) repoString = utils.replaceVCSUrl( 
repopath ) [ repoUrl, repoBranch, repoTag ] = utils.splitVCSUrl( repoString ) if not repoBranch and not repoTag: repoBranch = "master" print('|'.join([repoUrl, repoBranch, repoTag])) return True diff --git a/bin/VersionInfo.py b/bin/VersionInfo.py index 9b29d0959..39757ba9c 100644 --- a/bin/VersionInfo.py +++ b/bin/VersionInfo.py @@ -1,138 +1,138 @@ # -*- coding: utf-8 -*- # this package contains functions to easily set versions for packages like qt5 or kde # copyright: # Hannah von Reth from EmergeConfig import * import utils class VersionInfo( object ): _VERSION_INFOS = dict( ) _VERSION_INFOS_HINTS = dict( ) def __init__( self, parent ): self.subinfo = parent self.__defaulVersions = None self._fileName = None @property def _defaulVersions( self ): if self.__defaulVersions is None: name = self.subinfo.parent.filename if name in VersionInfo._VERSION_INFOS_HINTS: if VersionInfo._VERSION_INFOS_HINTS[ name ] == None: return None else: #utils.debug("Using cached version info for %s in %s" % (name, _VERSION_INFOS_HINTS[ name ]),0) return VersionInfo._VERSION_INFOS[ VersionInfo._VERSION_INFOS_HINTS[ name ] ] root = os.path.dirname( name ) if self._fileName is None: possibleInis= [ os.path.join( root, "version.ini" ), os.path.join( root, "..", "version.ini" ), os.path.join( root, "..", "..", "version.ini" ) ] else: possibleInis = [self._fileName] for iniPath in possibleInis: iniPath = os.path.abspath( iniPath ) if iniPath in VersionInfo._VERSION_INFOS.keys( ): VersionInfo._VERSION_INFOS_HINTS[ name ] = iniPath - utils.debug( "Found a version info for %s in cache" % name, 1 ) + utils.debug( "Found a version info for %s in cache" % name, 2 ) return VersionInfo._VERSION_INFOS[ iniPath ] elif os.path.exists( iniPath ): config = configparser.ConfigParser( ) config.read( iniPath ) VersionInfo._VERSION_INFOS[ iniPath ] = config VersionInfo._VERSION_INFOS_HINTS[ name ] = iniPath - utils.debug( "Found a version info for %s in %s" % (name, iniPath), 1 ) + utils.debug( "Found a 
version info for %s in %s" % (name, iniPath), 2 ) return config VersionInfo._VERSION_INFOS_HINTS[ name ] = None return self.__defaulVersions def _getVersionInfo( self, key, default = "" ): if self._defaulVersions.has_section( "General" ) and key in self._defaulVersions[ "General" ]: return self._defaulVersions[ "General" ][ key ] return default def tags( self ): return self._getVersionInfo( "tags" ).split( ";" ) def branches( self ): return self._getVersionInfo( "branches" ).split( ";" ) def tarballs( self ): return self._getVersionInfo( "tarballs" ).split( ";" ) def defaultTarget( self ): name = self._getVersionInfo( "name" ) if ("PortageVersions", name) in emergeSettings: return emergeSettings.get( "PortageVersions", name ) return self._getVersionInfo( "defaulttarget" ) def _replaceVar( self, text, ver, name ): replaces = { "VERSION": ver, "PACKAGE_NAME": name} split_ver = ver.split(".") if len(split_ver) == 3: replaces[ "VERSION_MAJOR"] = split_ver[0] replaces[ "VERSION_MINOR"] = split_ver[1] replaces[ "VERSION_PATCH_LEVEL"] = split_ver[2] while EmergeConfig.variablePatern.search(text): for match in EmergeConfig.variablePatern.findall( text ): text = text.replace( match, replaces[ match[ 2:-1 ].upper() ] ) return text def setDefaultValuesFromFile(self, fileName, tarballUrl = None, tarballDigestUrl = None, tarballInstallSrc = None, gitUrl = None ): self._fileName = os.path.abspath(os.path.join(os.path.dirname(self.subinfo.parent.filename),fileName)) self.setDefaultValues(tarballUrl,tarballDigestUrl,tarballInstallSrc,gitUrl) def setDefaultValues( self, tarballUrl = None, tarballDigestUrl = None, tarballInstallSrc = None, gitUrl = None, packageName = None ): """ Set svn and tarball targets based on the settings in the next version.ini Parameters may contain ${} Variables which then will be replaces. 
Available variables: ${PACKAGE_NAME} : The name of the package ${VERSION} : The version of the package defined in version.ini If the version matches \d.\d.\d there is also avalible: ${VERSION_MAJOR} : The first part of ${VERSION} ${VERSION_MINOR} : The secon part of ${VERSION} ${VERSION_PATCH_LEVEL} : The the third part of ${VERSION} """ if packageName is None: packageName = self.subinfo.package if tarballUrl is None: tarballUrl = self._getVersionInfo("tarballUrl", None) if tarballDigestUrl is None: tarballDigestUrl = self._getVersionInfo("tarballDigestUrl", None) if tarballInstallSrc is None: tarballInstallSrc = self._getVersionInfo("tarballInstallSrc", None) if gitUrl is None: gitUrl = self._getVersionInfo("gitUrl", None) if not tarballUrl is None: for ver in self.tarballs( ): self.subinfo.targets[ ver ] = self._replaceVar( tarballUrl, ver, packageName ) if not tarballDigestUrl is None: self.subinfo.targetDigestUrls[ ver ] = self._replaceVar( tarballDigestUrl, ver, packageName ) if not tarballInstallSrc is None: self.subinfo.targetInstSrc[ ver ] = self._replaceVar( tarballInstallSrc, ver, packageName) if not gitUrl is None: for ver in self.branches( ): self.subinfo.svnTargets[ ver ] = "%s|%s|" % ( self._replaceVar( gitUrl, ver, packageName ), ver) for ver in self.tags( ): self.subinfo.svnTargets[ ver ] = "%s||%s" % ( self._replaceVar( gitUrl, ver, packageName ), ver) self.subinfo.defaultTarget = self.defaultTarget( ) def packageName( self ): return self.subinfo.package \ No newline at end of file diff --git a/bin/emerge.py b/bin/emerge.py index a4bc306f0..a7e34b6ce 100755 --- a/bin/emerge.py +++ b/bin/emerge.py @@ -1,453 +1,459 @@ #!/usr/bin/env python3 # -*- coding: utf-8 -*- # this will emerge some programs... 
# copyright: # Holger Schroeder # Patrick Spendrin # Hannah von Reth # The minimum python version for emerge please edit here # if you add code that changes this requirement import sys MIN_PY_VERSION = (3, 4, 0) if sys.version_info[ 0:3 ] < MIN_PY_VERSION: print( "Error: Python too old!", file = sys.stderr ) print( "Emerge needs at least Python Version %s.%s.%s" % MIN_PY_VERSION, file = sys.stderr ) print( "Please install it and adapt your kdesettings.bat", file = sys.stderr ) exit( 1 ) import time import datetime import traceback import argparse import compiler import portageSearch from InstallDB import * from EmergeConfig import * import jenkins def packageIsOutdated( category, package ): newest = portage.PortageInstance.getNewestVersion( category, package ) installed = installdb.getInstalledPackages( category, package ) for pack in installed: version = pack.getVersion( ) if newest != version: return True @utils.log def doExec( package, action, continueFlag = False ): utils.startTimer( "%s for %s" % ( action, package ), 1 ) - utils.debug( "emerge doExec called. 
action: %s" % action, 2 ) + utils.info("Action: %s for %s" % (action, package)) ret = package.execute( action ) utils.stopTimer( "%s for %s" % ( action, package ) ) return ret or continueFlag def handlePackage( category, packageName, buildAction, continueFlag, skipUpToDateVcs ): - utils.debug( "emerge handlePackage called: %s %s %s" % (category, packageName, buildAction), 2 ) + utils.debug_line( ) + utils.info("Handling package: %s, build action: %s" % (packageName, buildAction)) + success = True package = portage.getPackageInstance( category, packageName ) if package is None: raise portage.PortageException( "Package not found", category, packageName ) if buildAction in [ "all", "full-package", "update", "update-all" ]: success = success and doExec( package, "fetch", continueFlag ) if success and skipUpToDateVcs and package.subinfo.hasSvnTarget( ): revision = package.sourceVersion( ) for p in installdb.getInstalledPackages( category, packageName ): if p.getRevision( ) == revision: + utils.info("Skipping further actions, package is up-to-date") return True success = success and doExec( package, "unpack", continueFlag ) success = success and doExec( package, "compile" ) success = success and doExec( package, "cleanimage" ) success = success and doExec( package, "install" ) if buildAction in [ "all", "update", "update-all" ]: success = success and doExec( package, "qmerge" ) if buildAction == "full-package": success = success and doExec( package, "package" ) success = success or continueFlag elif buildAction in [ "fetch", "unpack", "preconfigure", "configure", "compile", "make", "checkdigest", "dumpdeps", "test", "package", "unmerge", "cleanimage", "cleanbuild", "createpatch", "geturls", "print-revision" ]: success = doExec( package, buildAction, continueFlag ) elif buildAction == "install": success = doExec( package, "cleanimage" ) success = success and doExec( package, "install", continueFlag ) elif buildAction == "qmerge": #success = doExec( package, "cleanimage" ) 
#success = success and doExec( package, "install") success = success and doExec( package, "qmerge" ) elif buildAction == "generate-jenkins-job": success = jenkins.generateJob(package) elif buildAction == "version-dir": print( "%s-%s" % ( packageName, package.sourceVersion( ) ) ) success = True elif buildAction == "version-package": print( "%s-%s-%s" % ( packageName, compiler.getCompilerName( ), package.sourceVersion( ) ) ) success = True elif buildAction == "print-targets": portage.printTargets( category, packageName ) success = True else: success = utils.error( "could not understand this buildAction: %s" % buildAction ) return success def handleSinglePackage( packageName, args ): deplist = [ ] packageList = [ ] originalPackageList = [ ] categoryList = [ ] targetDict = dict( ) if args.action == "update-all": installedPackages = portage.PortageInstance.getInstallables( ) if portage.PortageInstance.isCategory( packageName ): utils.debug( "Updating installed packages from category " + packageName, 1 ) else: utils.debug( "Updating all installed packages", 1 ) packageList = [ ] for mainCategory, mainPackage in installedPackages: if portage.PortageInstance.isCategory( packageName ) and ( mainCategory != packageName ): continue if installdb.isInstalled( mainCategory, mainPackage, args.buildType ) \ and portage.isPackageUpdateable( mainCategory, mainPackage ): categoryList.append( mainCategory ) packageList.append( mainPackage ) utils.debug( "Will update packages: " + str( packageList ), 1 ) elif args.list_file: listFileObject = open( args.list_file, 'r' ) for line in listFileObject: if line.strip( ).startswith( '#' ): continue try: cat, pac, tar, _ = line.split( ',' ) except: continue categoryList.append( cat ) packageList.append( pac ) originalPackageList.append( pac ) targetDict[ cat + "/" + pac ] = tar elif packageName: packageList, categoryList = portage.getPackagesCategories( packageName ) for entry in packageList: - utils.debug( "%s" % entry, 1 ) - utils.debug_line( 
1 ) + utils.debug("Checking dependencies for: %s" % entry, 1) for mainCategory, entry in zip( categoryList, packageList ): deplist = portage.solveDependencies( mainCategory, entry, deplist, args.dependencyType, maxDetpth = args.dependencydepth ) # no package found if len( deplist ) == 0: category = "" if not packageName.find( "/" ) == -1: (category, package) = packageName.split( "/" ) portageSearch.printSearch( category, packageName ) return False for item in deplist: item.enabled = args.ignoreAllInstalled if args.ignoreInstalled and item.category in categoryList and item.package in packageList or packageIsOutdated( item.category, item.package ): item.enabled = True if item.category + "/" + item.package in targetDict: item.target = targetDict[ item.category + "/" + item.package ] if args.target in list( portage.PortageInstance.getAllTargets( item.category, item.package ).keys( ) ): # if no target or a wrong one is defined, simply set the default target here item.target = args.target utils.debug( "dependency: %s" % item, 1 ) + if not deplist: + utils.debug("", 1) + + utils.debug_line( 1 ) #for item in deplist: # cat = item[ 0 ] # pac = item[ 1 ] # ver = item[ 2 ] # if portage.isInstalled( cat, pac, ver, buildType) and updateAll and not portage.isPackageUpdateable( cat, pac, ver ): # print "remove:", cat, pac, ver # deplist.remove( item ) if args.action == "install-deps": # the first dependency is the package itself - ignore it # TODO: why are we our own dependency? 
del deplist[ 0 ] elif args.action == "update-direct-deps": for item in deplist: item.enabled = True deplist.reverse( ) # package[0] -> category # package[1] -> package # package[2] -> version info = deplist[ -1 ] if not portage.PortageInstance.isVirtualPackage( info.category, info.package ) and \ not args.action in [ "all", "install-deps" ,"generate-jenkins-job"] and\ not args.list_file or\ args.action in ["print-targets"]:#not all commands should be executed on the deps if we are a virtual packages # if a buildAction is given, then do not try to build dependencies # and do the action although the package might already be installed. # This is still a bit problematic since packageName might not be a valid # package # for list files, we also want to handle fetching & packaging per package if not handlePackage( info.category, info.package, args.action, args.doContinue, args.update_fast ): utils.notify( "Emerge %s failed" % args.action, "%s of %s/%s failed" % ( args.action, info.category, info.package), args.action ) return False utils.notify( "Emerge %s finished" % args.action, "%s of %s/%s finished" % ( args.action, info.category, info.package), args.action ) else: if args.dumpDepsFile: dumpDepsFileObject = open( args.dumpDepsFile, 'w+' ) dumpDepsFileObject.write( "# dependency dump of package %s\n" % ( packageName ) ) for info in deplist: isVCSTarget = False if args.dumpDepsFile: dumpDepsFileObject.write( ",".join( [ info.category, info.package, info.target, "" ] ) + "\n" ) isLastPackage = info == deplist[ -1 ] if args.outDateVCS or (args.outDatePackage and isLastPackage): isVCSTarget = portage.PortageInstance.getUpdatableVCSTargets( info.category, info.package ) != [ ] isInstalled = installdb.isInstalled( info.category, info.package ) if args.list_file and args.action != "all": info.enabled = info.package in originalPackageList if ( isInstalled and not info.enabled ) and not ( isInstalled and (args.outDateVCS or ( args.outDatePackage and isLastPackage) ) and 
isVCSTarget ): if utils.verbose( ) > 1 and info.package == packageName: utils.warning( "already installed %s/%s" % ( info.category, info.package) ) elif utils.verbose( ) > 2 and not info.package == packageName: utils.warning( "already installed %s/%s" % ( info.category, info.package ) ) else: # in case we only want to see which packages are still to be build, simply return the package name if args.probe: if utils.verbose( ) > 0: utils.warning( "pretending %s" % info ) else: if args.action in [ "install-deps", "update-direct-deps" ]: args.action = "all" if not handlePackage( info.category, info.package, args.action, args.doContinue, args.update_fast ): utils.error( "fatal error: package %s/%s %s failed" % \ ( info.category, info.package, args.action ) ) utils.notify( "Emerge build failed", "Build of %s/%s failed" % ( info.category, info.package), args.action ) return False utils.notify( "Emerge build finished", "Build of %s/%s finished" % ( info.category, info.package), args.action ) utils.new_line( ) return True def main( ): parser = argparse.ArgumentParser( prog = "emerge", description = "Emerge is a tool for building KDE-related software under Windows. emerge automates it, looks for the dependencies and fetches them automatically.\ Some options should be used with extreme caution since they will make your kde installation unusable in 999 out of 1000 cases.", epilog = """More information see the README or http://windows.kde.org/. Send feedback to .""" ) def addBuildaAction( x, help = None ): parser.add_argument( "--%s" % x, action = "store_const", dest = "action", const = x, default = "all", help = help ) parser.add_argument( "-p", "--probe", action = "store_true", help = "probing: emerge will only look which files it has to build according to the list of installed files and according to the dependencies of the package." 
) parser.add_argument( "--list-file", action = "store", help = "Build all packages from the csv file provided" ) _def = emergeSettings.get( "General", "EMERGE_OPTIONS", "" ) if _def == "": _def = [] else: _def = _def.split( ";" ) parser.add_argument( "--options", action = "append", default = _def, help = "Set emerge property from string . An example for is \"cmake.openIDE=1\" see options.py for more informations." ) parser.add_argument( "-z", "--outDateVCS", action = "store_true", help = "if packages from version control system sources are installed, it marks them as out of date and rebuilds them (tags are not marked as out of date)." ) parser.add_argument( "-sz", "--outDatePackage", action = "store_true", help = "similar to -z, only that it acts only on the last package, and works as normal on the rest." ) parser.add_argument( "-q", "--stayquiet", action = "store_true", dest = "stayQuiet", help = "quiet: there should be no output - The verbose level should be 0" ) parser.add_argument( "-t", "--buildtests", action = "store_true", dest = "buildTests", default = emergeSettings.getboolean( "General", "EMERGE_BUILDTESTS", False ) ) parser.add_argument( "-c", "--continue", action = "store_true", dest = "doContinue" ) parser.add_argument( "--offline", action = "store_true", default = emergeSettings.getboolean( "General", "WorkOffline", False ), help = "do not try to connect to the internet: KDE packages will try to use an existing source tree and other packages would try to use existing packages in the download directory.\ If that doesn't work, the build will fail." 
) parser.add_argument( "-f", "--force", action = "store_true", dest = "forced", default = emergeSettings.getboolean( "General", "EMERGE_FORCED", False ) ) parser.add_argument( "--buildtype", choices = [ "Release", "RelWithDebInfo", "MinSizeRel", "Debug" ], dest = "buildType", default = emergeSettings.get( "General", "EMERGE_BUILDTYPE", "RelWithDebInfo" ), help = "This will override the build type set by the environment option EMERGE_BUILDTYPE ." ) parser.add_argument( "-v", "--verbose", action = "count", - default = int( emergeSettings.get( "EmergeDebug", "Verbose", "1" ) ), + default = int( emergeSettings.get( "EmergeDebug", "Verbose", "0" ) ), help = " verbose: increases the verbose level of emerge. Default is 1. verbose level 1 contains some notes from emerge, all output of cmake, make and other programs that are used.\ verbose level 2a dds an option VERBOSE=1 to make and emerge is more verbose highest level is verbose level 3." ) parser.add_argument( "--trace", action = "store", default = int( emergeSettings.get( "General", "EMERGE_TRACE", "0" ) ), type = int ) parser.add_argument( "-i", "--ignoreInstalled", action = "store_true", help = "ignore install: using this option will install a package over an existing install. This can be useful if you want to check some new code and your last build isn't that old." ) parser.add_argument( "-ia", "--ignoreAllInstalled", action = "store_true", help = "ignore all install: using this option will install all package over an existing install. This can be useful if you want to check some new code and your last build isn't that old." ) parser.add_argument( "--target", action = "store", help = "This will override the build of the default target. The default Target is marked with a star in the printout of --print-targets" ) parser.add_argument( "--search", action = "store_true", help = "This will search for a package or a description matching or similar to the search term." 
) parser.add_argument( "--nocopy", action = "store_true", default = emergeSettings.getboolean( "General", "EMERGE_NOCOPY", False ), help = "this option is deprecated. In older releases emerge would have copied everything from the SVN source tree to a source directory under KDEROOT\\tmp - currently nocopy is applied\ by default if EMERGE_NOCOPY is not set to \"False\". Be aware that setting EMERGE_NOCOPY to \"False\" might slow down the build process, irritate you and increase the disk space roughly\ by the size of SVN source tree." ) parser.add_argument( "--noclean", action = "store_true", default = emergeSettings.getboolean( "General", "EMERGE_NOCLEAN", False ), help = "this option will try to use an existing build directory. Please handle this option with care - it will possibly break if the directory isn't existing." ) parser.add_argument( "--clean", action = "store_false", dest = "noclean", help = "oposite of --noclean" ) parser.add_argument( "--patchlevel", action = "store", default = emergeSettings.get( "General", "EMERGE_PKGPATCHLVL", "" ), help = "This will add a patch level when used together with --package" ) parser.add_argument( "--log-dir", action = "store", default = emergeSettings.get( "General", "EMERGE_LOG_DIR", "" ), help = "This will log the build output to a logfile in LOG_DIR for each package. Logging information is appended to existing logs." ) parser.add_argument( "--dump-deps-file", action = "store", dest = "dumpDepsFile", help = "Output the dependencies of this package as a csv file suitable for emerge server." ) parser.add_argument( "--dt", action = "store", choices = [ "both", "runtime", "buildtime" ], default = "both", dest = "dependencyType" ) parser.add_argument( "--print-installed", action = "store_true", help = "This will show a list of all packages that are installed currently." ) parser.add_argument( "--print-installable", action = "store_true", help = "his will give you a list of packages that can be installed. 
Currently you don't need to enter the category and package: only the package will be enough." ) parser.add_argument( "--update-fast", action = "store_true", help = "If the package is installed from svn/git and the revision did not change all steps after fetch are skipped" ) parser.add_argument( "-d", "--dependencydepth", action = "store", type = int, default = -1, help = "By default emerge resolves the whole dependency graph, this option limits the depth of the graph, so a value of 1 would mean only dependencies defined in that package" ) for x in sorted( [ "fetch", "unpack", "preconfigure", "configure", "compile", "make", "install", "qmerge", "manifest", "package", "unmerge", "test", "checkdigest", "dumpdeps", "full-package", "cleanimage", "cleanbuild", "createpatch", "geturls", "version-dir", "version-package", "print-targets", "install-deps" ] ): addBuildaAction( x ) addBuildaAction( "print-revision", "Print the revision of the package and exit" ) addBuildaAction( "update", "Update a single package" ) addBuildaAction( "generate-jenkins-job") parser.add_argument( "packageNames", nargs = argparse.REMAINDER ) args = parser.parse_args( ) if args.stayQuiet == True or args.action in [ "version-dir", "version-package", "print-installable", "print-installed", "print-targets" ]: - utils.setVerbose( 0 ) + utils.setVerbose( -1 ) elif args.verbose: utils.setVerbose( args.verbose ) emergeSettings.set( "General", "WorkOffline", args.offline ) emergeSettings.set( "General", "EMERGE_NOCOPY", args.nocopy ) emergeSettings.set( "General", "EMERGE_NOCLEAN", args.noclean ) emergeSettings.set( "General", "EMERGE_FORCED", args.forced ) emergeSettings.set( "General", "EMERGE_BUILDTESTS", args.buildTests ) emergeSettings.set( "General", "EMERGE_BUILDTYPE", args.buildType ) emergeSettings.set( "PortageVersions", "DefaultTarget", args.target ) emergeSettings.set( "General", "EMERGE_OPTIONS", ";".join( args.options ) ) emergeSettings.set( "General", "EMERGE_LOG_DIR", args.log_dir ) 
emergeSettings.set( "General", "EMERGE_TRACE", args.trace ) emergeSettings.set( "General", "EMERGE_PKGPATCHLVL", args.patchlevel ) portage.PortageInstance.options = args.options if args.search: for package in args.packageNames: category = "" if not package.find( "/" ) == -1: (category, package) = package.split( "/" ) portageSearch.printSearch( category, package ) return True if args.action in [ "install-deps", "update", "update-all", "package" ] or args.update_fast: args.ignoreInstalled = True if args.action in [ "update", "update-all" ]: args.noclean = True utils.debug( "buildAction: %s" % args.action ) utils.debug( "doPretend: %s" % args.probe, 1 ) utils.debug( "packageName: %s" % args.packageNames ) utils.debug( "buildType: %s" % args.buildType ) utils.debug( "buildTests: %s" % args.buildTests ) utils.debug( "verbose: %d" % utils.verbose( ), 1 ) utils.debug( "trace: %s" % args.trace, 1 ) utils.debug( "KDEROOT: %s" % EmergeStandardDirs.emergeRoot( ), 1 ) utils.debug_line( ) if args.print_installed: printInstalled( ) elif args.print_installable: portage.printInstallables( ) elif args.list_file: handleSinglePackage( "", args ) else: for x in args.packageNames: if not handleSinglePackage( x, args ): return False return True if __name__ == '__main__': succes = True try: utils.startTimer( "Emerge" ) doUpdateTitle = True def updateTitle( startTime, title ): while ( doUpdateTitle ): delta = datetime.datetime.now( ) - startTime utils.setConsoleTitle( "emerge %s %s" % (title, delta) ) time.sleep( 1 ) tittleThread = threading.Thread( target = updateTitle, args = (datetime.datetime.now( ), " ".join( sys.argv[ 1: ] ),) ) tittleThread.setDaemon( True ) tittleThread.start( ) succes = main( ) except KeyboardInterrupt: pass except portage.PortageException as e: utils.debug(e.exception, 1) utils.error(e) except Exception as e: print( e ) traceback.print_tb( e.__traceback__ ) finally: utils.stopTimer( "Emerge" ) doUpdateTitle = False if emergeSettings.getboolean( "EmergeDebug", 
"DumpSettings", False ): emergeSettings.dump( ) if not succes: exit( 1 ) diff --git a/bin/portage.py b/bin/portage.py index 9b8f0f3e2..af0660df0 100644 --- a/bin/portage.py +++ b/bin/portage.py @@ -1,629 +1,629 @@ ## @package portage # @brief contains portage tree related functions # @note this file should replace all other related portage related files import builtins import importlib from collections import OrderedDict from EmergePackageObject import PackageObjectBase from EmergeConfig import * import InstallDB import utils class PortageCache(object): _rootDirCache = dict() class PortageException(Exception,PackageObjectBase): def __init__(self, message, category, package , exception = None): Exception.__init__(self, message) subpackage, package = getSubPackage(category,package) PackageObjectBase.__init__(self,category,subpackage,package) self.exception = exception def __str__(self): return "%s failed: %s" % (PackageObjectBase.__str__(self),Exception.__str__(self)) class DependencyPackage(PackageObjectBase): """ This class wraps each package and constructs the dependency tree original code is from dependencies.py, integration will come later... 
""" def __init__( self, category, name, autoExpand = True, parent = None ): subpackage, package = getSubPackage(category,name) PackageObjectBase.__init__(self,category,subpackage,package,version = PortageInstance.getDefaultTarget(category,package)) self.category = category self.runtimeChildren = [] self.buildChildren = [] if parent is None: self._dependencyList = dict() else: self._dependencyList = parent._dependencyList if autoExpand: self.__readChildren() @property def name(self): return self.package def __hash__(self): return self.__str__().__hash__() def __eq__( self, other ): return self.category == other.category and self.name == other.name def __ne__( self, other ): return self.category != other.category or self.name != other.name def __str__(self): return "%s: %s" % (PackageObjectBase.__str__(self), self.version) def __readChildren( self ): runtimeDependencies, buildDependencies = readChildren( self.category, self.name ) self.runtimeChildren = self.__readDependenciesForChildren( list(runtimeDependencies.keys()) ) self.buildChildren = self.__readDependenciesForChildren( list(buildDependencies.keys()) ) def __readDependenciesForChildren( self, deps): children = [] if deps: for line in deps: ( category, package ) = line.split( "/" ) - utils.debug( "category: %s, name: %s" % ( category, package ), 1 ) + utils.debug( "category: %s, name: %s" % ( category, package ), 2 ) try: version = PortageInstance.getNewestVersion( category, package ) except PortageException as e: utils.warning("%s for %s/%s as a dependency of %s/%s" %(e, e.category, e.package, self.category , self.name)) continue if not line in self._dependencyList.keys(): p = DependencyPackage( category, package, False, self ) - utils.debug( "adding package p %s/%s-%s" % ( category, package, version ), 1 ) + utils.debug( "adding package %s/%s-%s" % ( category, package, version ), 2 ) self._dependencyList[ line ] = p p.__readChildren() else: p = self._dependencyList[ line ] children.append( p ) return 
children def getDependencies( self, depList = [], dep_type="both", single = set(), maxDetpth = -1, depth = 0): """ returns all dependencies """ if dep_type == "runtime": children = self.runtimeChildren elif dep_type == "buildtime": children = self.buildChildren else: children = self.runtimeChildren + self.buildChildren single.add(self) for p in children: if not p in single and not p in depList\ and not p.fullName() in PortageInstance.ignores: if maxDetpth == -1: p.getDependencies( depList, dep_type, single ) elif depth < maxDetpth: p.getDependencies( depList, dep_type, single, maxDetpth = maxDetpth, depth = depth + 1 ) #if self.category != internalCategory: if not self in depList and not PackageObjectBase.__str__(self) in PortageInstance.ignores: depList.append( self ) return depList def buildType(): """return currently selected build type""" return emergeSettings.get("General","EMERGE_BUILDTYPE") def rootDirectories(): # this function should return all currently set portage directories if ("General", "EMERGE_PORTAGE_ROOT" ) in emergeSettings: rootDirs = emergeSettings.get("General", "EMERGE_PORTAGE_ROOT" ).split( ";" ) else: rootDirs = [] if len( rootDirs ) == 0: rootDirs = [ os.path.join( EmergeStandardDirs.emergeRoot(), "emerge", "portage" ) ] return rootDirs def rootDirForCategory( category ): """this function should return the portage directory where it finds the first occurance of the category throws exception if not found """ # this function should return the portage directory where it finds the # first occurance of a category or the default value for i in rootDirectories(): if category and os.path.exists( os.path.join( i, category ) ): return i utils.die( "can't find category %s" % category ) def rootDirForPackage( category, package ): """returns the portage directory where it finds the first occurance of this package """ name = "%s/%s" % ( category, package ) if not name in PortageCache._rootDirCache: for i in rootDirectories(): if os.path.exists( 
os.path.join( i, category, package ) ): PortageCache._rootDirCache[ name ] = i return PortageCache._rootDirCache[ name ] def getFullPackage( package ): """tries to find a package and returns either category / subpackage / package or category / package returns an empty list if not found """ category = PortageInstance.getCategory( package ) if not category: return [] if package in PortageInstance.subpackages: _cat, subpackage = PortageInstance.subpackages[ package ][0].split('/') if not _cat == category: return [] return [category, subpackage, package] else: return [category, package] def getDirname( category, package ): """ return absolute pathname for a given category and package """ subpackage, package = getSubPackage( category, package ) if category and package: if subpackage: return os.path.join( rootDirForPackage( category, subpackage ), category, subpackage, package ) else: return os.path.join( rootDirForPackage( category, package ), category, package ) else: utils.die( "broken category or package %s/%s" % ( category, package ) ) def getFilename( category, package ): """ return absolute filename for a given category, package """ return os.path.join( getDirname( category, package ), "%s.py" % package ) def VCSDirs(): return [ '.svn', 'CVS', '.hg', '.git' ] class Portage(object): #cache for pacages _packageDict = OrderedDict() options = "" def __init__( self ): """ """ self.categories = {} self.subpackages = {} self.portages = {} self._CURRENT_MODULE = ()#todo refactor package constructor self.ignores = set() if ("Portage", "PACKAGE_IGNORES") in emergeSettings: self.ignores.update(emergeSettings.get("Portage","PACKAGE_IGNORES").split(";")) def addPortageDir( self, directory ): """ adds the categories and packages of a portage directory """ if not os.path.exists( directory ): return categoryList = os.listdir( directory ) # remove vcs directories for vcsdir in VCSDirs(): if vcsdir in categoryList: categoryList.remove( vcsdir ) if "__pycache__" in categoryList: 
categoryList.remove( "__pycache__" ) dontBuildCategoryList = self.getDontBuildPackagesList( os.path.join( directory ) ) self.portages[ directory ] = [] for category in categoryList: if not os.path.isdir( os.path.join( directory, category ) ): continue self.portages[ directory ].append( category ) packageList = os.listdir( os.path.join( directory, category ) ) # remove vcs directories for vcsdir in VCSDirs(): if vcsdir in packageList: packageList.remove( vcsdir ) if "__pycache__" in packageList: packageList.remove( "__pycache__" ) dontBuildPackageList = self.getDontBuildPackagesList( os.path.join( directory, category ) ) if not category in list(self.categories.keys()): self.categories[ category ] = [] for package in packageList: if not os.path.isdir( os.path.join( directory, category, package ) ): continue if not package in self.categories[ category ]: _enabled = not category in dontBuildCategoryList and not package in dontBuildPackageList self.categories[ category ].append( PackageObjectBase( category=category, package=package, enabled=_enabled ) ) subPackageList = os.listdir( os.path.join( directory, category, package ) ) # remove vcs directories for vcsdir in VCSDirs(): if vcsdir in subPackageList: subPackageList.remove( vcsdir ) if "__pycache__" in subPackageList: subPackageList.remove( "__pycache__" ) for subPackage in subPackageList: if not os.path.isdir( os.path.join( directory, category, package, subPackage ) ) or subPackage in VCSDirs(): continue dontBuildSubPackageList = self.getDontBuildPackagesList( os.path.join( directory, category, package ) ) if not subPackage in list(self.subpackages.keys()): self.subpackages[ subPackage ] = [] if not subPackage in self.categories[ category ]: _enabled = not category in dontBuildCategoryList and not package in dontBuildPackageList and not subPackage in dontBuildSubPackageList self.categories[ category ].append( PackageObjectBase( category=category, subpackage=package, package=subPackage, enabled=_enabled ) ) 
self.subpackages[ subPackage ].append( category + "/" + package ) def getCategory( self, package ): """ returns the category of this package """ utils.debug( "getCategory: %s" % package, 2 ) for cat in list(self.categories.keys()): if package in self.categories[ cat ]: utils.debug( "getCategory: found category %s for package %s" % ( cat, package ), 3 ) return cat return False def isCategory( self, category ): """ returns whether a certain category exists """ return category in list(self.categories.keys()) def isPackage( self, category, package ): """ returns whether a certain package exists within a category """ return package in self.categories[ category ] def isVirtualPackage( self, category, package ): """ check if that package is of VirtualPackageBase """ if not self.isPackage( category, package ): return False mod = getPackageInstance(category,package) for baseClassObject in mod.__class__.__bases__: if baseClassObject.__name__ == 'VirtualPackageBase': return True return False def getDontBuildPackagesList( self, path ): """ get a list of packages from a dont_build file""" plist = [] if os.path.exists( os.path.join( path, "dont_build.txt" ) ): with open( os.path.join( path, "dont_build.txt" ), "r" ) as f: for line in f: if line.strip().startswith('#'): continue if not line.strip() == "": plist.append(line.strip()) return plist def getAllPackages( self, category ): """returns all packages of a category except those that are listed in a file 'dont_build.txt' in the category directory in case the category doesn't exist, nothing is returned""" if self.isCategory( category ): plist = [] for _p in self.categories[ category ]: if _p: plist.append(_p.package) return plist else: return def getPackageInstance(self, category, package): """return instance of class Package from package file""" fileName = getFilename( category, package ) pack = None mod = None if fileName.endswith(".py") and os.path.isfile(fileName): if not fileName in self._packageDict: utils.debug( "module 
to import: %s" % fileName, 2 ) if not os.path.isfile( fileName ): try: mod = builtins.__import__( fileName ) except ImportError as e: utils.warning( 'import failed for module %s: %s' % (fileName, str(e)) ) mod = None else: modulename = os.path.basename( fileName )[:-3].replace('.', '_') loader = importlib.machinery.SourceFileLoader(modulename, fileName) try: mod = loader.load_module() except Exception as e: raise PortageException("Failed to load file %s" % fileName, category, package, e) if not mod is None: subpackage, package = getSubPackage( category, package ) self._CURRENT_MODULE = ( fileName, category,subpackage, package, mod ) pack = mod.Package( ) self._packageDict[ fileName ] = pack else: raise PortageException("Failed to find package", category, package) else: pack = self._packageDict[ fileName ] return pack def getDefaultTarget( self, category, package ): """ returns the default package of a specified package """ - utils.debug( "getDefaultTarget: importing file %s" % getFilename( category, package ), 1 ) + utils.debug( "getDefaultTarget: importing file %s" % getFilename( category, package ), 2 ) if not ( category and package ): return dict() info = _getSubinfo( category, package ) if not info is None: return info.defaultTarget else: return None def getAllTargets( self, category, package ): """ returns all targets of a specified package """ - utils.debug( "getAllTargets: importing file %s" % getFilename( category, package ), 1 ) + utils.debug( "getAllTargets: importing file %s" % getFilename( category, package ), 2 ) if not ( category and package ): return dict() info = _getSubinfo( category, package ) if not info is None: tagDict = info.svnTargets tagDict.update( info.targets ) utils.debug( tagDict, 2 ) return tagDict else: return dict() def getAllVCSTargets( self, category, package ): """ returns all version control system targets of a specified package, excluding those which do contain tags """ utils.debug( "getAllVCSTargets: importing file %s" % 
getFilename( category, package ), 1 ) info = _getSubinfo( category, package ) if not info is None: tagDict = info.svnTargets for key in tagDict: utils.debug( '%s: %s' % ( key, tagDict[key] ), 2 ) return tagDict else: return dict() def getUpdatableVCSTargets( self, category, package ): """ check if the targets are tags or not """ targetDict = PortageInstance.getAllVCSTargets( category, package ) retList = [] for key in targetDict: url = targetDict[ key ] if url: sourceType = utils.getVCSType( url ) if sourceType == "svn": # for svn, ignore tags if not url.startswith( "tags/" ) and not "/tags/" in url: retList.append( key ) elif sourceType == "git": _, branch, tag = utils.splitVCSUrl( url ) if tag == "" and not branch.endswith("-patched"): retList.append( key ) elif not sourceType == "": # for all other vcs types, simply rebuild everything for now retList.append( key ) return retList def getNewestVersion( self, category, package ): """ returns the newest version of this category/package """ if( category == None ): raise PortageException( "Empty category", category, package ) if not self.isCategory( category ): raise PortageException( "Could not find category", category, package ) if not self.isPackage( category, package ): raise PortageException( "Could not find package", category, package ) installed = InstallDB.installdb.getInstalledPackages(category, package ) newest = PortageInstance.getDefaultTarget( category, package ) for pack in installed: version = pack.getVersion() if not version or not newest: continue if utils.parse_version(newest) < utils.parse_version(version): newest = version return newest def getInstallables( self ): """ get all the packages that are within the portage directory """ instList = list() for category in list(self.categories.keys()): for package in self.categories[ category ]: instList.append(package) return instList # when importing this, this static Object should get added PortageInstance = Portage() for _dir in rootDirectories(): 
def getSubPackage( category, package ):
    """Return ``(subpackage, package)`` names for *package* in *category*.

    If *package* is registered as a subpackage in this category, the first
    element is its parent package name; otherwise it is None.
    (The original had two consecutive docstrings; the second was a no-op
    bare string statement -- merged here.)
    """
    if package in list(PortageInstance.subpackages.keys()):
        for entry in PortageInstance.subpackages[ package ]:
            cat, pac = entry.split("/")
            if cat == category:
                return pac, package
    return None, package

def getPackageInstance(category, package):
    """return instance of class Package from package file"""
    return PortageInstance.getPackageInstance(category, package)

def getDependencies( category, package, runtimeOnly = False ):
    """returns the dependencies of this package as list of strings:
    category/package"""
    subpackage, package = getSubPackage( category, package )
    if subpackage:
        utils.debug( "solving package %s/%s/%s %s" % ( category, subpackage, package, getFilename( category, package ) ), 0 )
    else:
        utils.debug( "solving package %s/%s %s" % ( category, package, getFilename( category, package ) ), 0 )
        subpackage = package
    deps = []
    for pkg in [ subpackage ]:
        info = _getSubinfo(category, pkg)
        if not info is None:
            # copy before merging: package instances are cached, so updating
            # info.dependencies in place would permanently pollute the cached
            # subinfo for every later caller (bug in the original)
            depDict = dict(info.dependencies)
            depDict.update( info.runtimeDependencies )
            if not runtimeOnly:
                depDict.update( info.buildDependencies )
            for line in list(depDict.keys()):
                (category, package) = line.split( "/" )
                version = PortageInstance.getNewestVersion( category, package )
                deps.append( [ category, package, version, depDict[ line ] ] )
    return deps

def parseListFile( filename ):
    """parses a csv file used for building a list of specific packages

    Returns (categoryList, packageList, infoDict) where infoDict maps
    "category/package" to (target, packageLevel).  Lines starting with '#'
    and lines without exactly four comma-separated fields are skipped.
    """
    categoryList = []
    packageList = []
    infoDict = {}
    # 'with' guarantees the handle is closed; the original leaked it
    with open( filename, 'r' ) as listFileObject:
        for line in listFileObject:
            if line.strip().startswith('#'):
                continue
            try:
                cat, pac, tar, plvl = line.split( ',' )
            except ValueError:  # was a bare except: -- narrow to the split failure
                continue
            categoryList.append( cat )
            packageList.append( pac )
            infoDict[ cat + "/" + pac ] = (tar, plvl)
    return categoryList, packageList, infoDict

def solveDependencies( category, package, depList, dep_type = 'both', maxDetpth = -1 ):
    """Resolve the dependency list for category/package into depList.

    maxDetpth [sic -- keyword kept misspelled for caller compatibility]
    limits recursion depth, -1 means unlimited.
    """
    depList.reverse()
    if category == "":
        category = PortageInstance.getCategory( package )
        utils.debug( "found package in category %s" % category, 2 )
    pac = DependencyPackage( category, package, parent = None )
    depList = pac.getDependencies( depList, dep_type=dep_type, maxDetpth = maxDetpth, single = set() )
    depList.reverse()
    return depList

def printTargets( category, package ):
    """Print all targets of the package, marking the default one with '*'."""
    targetsDict = PortageInstance.getAllTargets( category, package )
    defaultTarget = PortageInstance.getDefaultTarget( category, package )
    if 'svnHEAD' in targetsDict and not targetsDict['svnHEAD']:
        del targetsDict['svnHEAD']
    targetsDictKeys = list(targetsDict.keys())
    targetsDictKeys.sort()
    for i in targetsDictKeys:
        if defaultTarget == i:
            print('*', end=' ')
        else:
            print(' ', end=' ')
        print(i)

def _getSubinfo( category, package ):
    """Return the subinfo object of the package instance, or None."""
    pack = getPackageInstance( category, package )
    if pack:
        return pack.subinfo
    return None

def readChildren( category, package ):
    """Return (runtimeDependencies, buildDependencies), each merged with the
    package's common dependencies."""
    utils.debug( "solving package %s/%s %s" % ( category, package, getFilename( category, package ) ), 2 )
    subinfo = _getSubinfo( category, package )
    if subinfo is None:
        return OrderedDict(), OrderedDict()
    # copy before merging: the subinfo belongs to a cached package instance,
    # so in-place update() calls would pollute it for later callers
    runtimeDependencies = OrderedDict(subinfo.runtimeDependencies)
    buildDependencies = OrderedDict(subinfo.buildDependencies)
    commonDependencies = subinfo.dependencies
    runtimeDependencies.update(commonDependencies)
    buildDependencies.update(commonDependencies)
    return runtimeDependencies, buildDependencies

def isPackageUpdateable( category, package ):
    """Return True when the package has at least one non-empty svn target."""
    utils.debug( "isPackageUpdateable: importing file %s" % getFilename( category, package ), 2 )
    subinfo = _getSubinfo( category, package )
    if subinfo is None:
        return False
    # a single target with an empty URL counts as not updateable
    if len( subinfo.svnTargets ) == 1 and not subinfo.svnTargets[ list(subinfo.svnTargets.keys())[0] ]:
        return False
    return len( subinfo.svnTargets ) > 0

def alwaysTrue( *dummyArgs ):
    """we sometimes need a function that always returns True"""
    return True

def getHostAndTarget( hostEnabled, targetEnabled ):
    """Build the '(H)', '(T)' or '(H/T)' marker used for messages."""
    msg = ""
    if hostEnabled or targetEnabled:
        msg += "("
        if hostEnabled:
            msg += "H"
        if hostEnabled and targetEnabled:
            msg += "/"
        if targetEnabled:
            msg += "T"
        msg += ")"
    return msg

def printCategoriesPackagesAndVersions( lines, condition, hostEnabled=alwaysTrue, targetEnabled=alwaysTrue ):
    """prints a number of 'lines', each consisting of category, package and
    version field"""
    def printLine( cat, pack, ver, hnt="" ):
        # fixed-width columns for category and package
        catlen = 25
        packlen = 25
        print(cat + " " * ( catlen - len( cat ) ) + pack + " " * ( packlen - len( pack ) ) + ver, hnt)
    printLine( 'Category', 'Package', 'Version' )
    printLine( '--------', '-------', '-------' )
    for category, package, version in lines:
        if condition( category, package, version ):
            printLine( category, package, version )

def printInstallables():
    """Print all the packages that can be installed."""
    data = list()
    for p in PortageInstance.getInstallables():
        data.append((p.category, p.package, p.version))
    printCategoriesPackagesAndVersions( data, alwaysTrue )

def getPackagesCategories(packageName, defaultCategory = None):
    """Resolve *packageName* ('pkg' or 'category/pkg') to parallel
    (packageList, categoryList)."""
    utils.trace("getPackagesCategories for package name %s" % packageName)
    if defaultCategory is None:
        defaultCategory = emergeSettings.get("General","EMERGE_DEFAULTCATEGORY","kde")
    packageList, categoryList = [], []
    if len( packageName.split( "/" ) ) == 1:
        if PortageInstance.isCategory( packageName ):
            utils.debug( "isCategory=True", 2 )
            packageList = PortageInstance.getAllPackages( packageName )
            categoryList = [ packageName ] * len(packageList)
        else:
            utils.debug( "isCategory=False", 2 )
            if PortageInstance.isCategory( defaultCategory ) and PortageInstance.isPackage( defaultCategory, packageName ):
                # prefer the default category
                packageList = [ packageName ]
                categoryList = [ defaultCategory ]
            else:
                if PortageInstance.getCategory( packageName ):
                    packageList = [ packageName ]
                    categoryList = [ PortageInstance.getCategory( packageName ) ]
    elif len( packageName.split( "/" ) ) == 2:
        [ cat, pac ] = packageName.split( "/" )
        if PortageInstance.isCategory( cat ):
            categoryList = [ cat ]
        else:
            return packageList, categoryList
        if len( categoryList ) > 0 and PortageInstance.isPackage( categoryList[0], pac ):
            packageList = [ pac ]
        if len( categoryList ) and len( packageList ):
            utils.debug( "added package %s/%s" % ( categoryList[0], pac ), 2 )
    else:
        utils.error( "unknown packageName" )
    return packageList, categoryList
] = packageName.split( "/" ) if PortageInstance.isCategory( cat ): categoryList = [ cat ] else: return packageList, categoryList if len( categoryList ) > 0 and PortageInstance.isPackage( categoryList[0], pac ): packageList = [ pac ] if len( categoryList ) and len( packageList ): utils.debug( "added package %s/%s" % ( categoryList[0], pac ), 2 ) else: utils.error( "unknown packageName" ) return packageList, categoryList diff --git a/bin/utils.py b/bin/utils.py index 245c869a7..d8b17a58f 100644 --- a/bin/utils.py +++ b/bin/utils.py @@ -1,1272 +1,1272 @@ # -*- coding: utf-8 -*- """@brief utilities this file contains some helper functions for emerge """ # copyright: # Holger Schroeder # Patrick Spendrin # Ralf Habacker import http.client import ftplib import urllib.request import urllib.error import urllib.parse import shutil import zipfile import tarfile import hashlib import traceback import tempfile import getpass import subprocess import re import inspect import datetime from operator import itemgetter import ctypes import Notifier.NotificationLoader from EmergeConfig import * if os.name == 'nt': import msvcrt # pylint: disable=F0401 else: import fcntl # pylint: disable=F0401 import configparser def abstract(): caller = inspect.getouterframes(inspect.currentframe())[1][3] raise NotImplementedError(caller + ' must be implemented in subclass') def getCallerFilename(): """ returns the file name of the """ filename = None try: frame=inspect.currentframe() count = 2 while count > 0 and frame: frame = frame.f_back # python 3.3 includes unnecessary importlib frames, skip them if frame and frame.f_code.co_filename != '': count -= 1 finally: if frame: filename = frame.f_code.co_filename del frame return filename ### fetch functions #FIXME: get this from somewhere else: WGetExecutable = os.path.join( EmergeStandardDirs.emergeRoot(), "bin", "wget.exe" ) if not os.path.exists( WGetExecutable ): WGetExecutable = os.path.join( EmergeStandardDirs.emergeRoot(), "dev-utils", "bin", 
"wget.exe" ) def test4application( appname): """check if the application specified by 'appname' is available""" try: f = open('NUL:') p = subprocess.Popen( appname, stdout=f, stderr=f ) p.wait() return True except OSError: debug( "could not find application %s" % appname, 1 ) return False class Verbose(object): """ This class will work on the overall output verbosity It defines the interface for the option parser but before the default value is taken from the environment variable. There is only one verbosity value for all parts of emerge. Always updates the shell variable EMERGE_VERBOSE. """ - __level = 1 + __level = 0 @staticmethod def increase(): """increase verbosity""" Verbose.setLevel(Verbose.__level + 1) @staticmethod def decrease(): """decrease verbosity""" Verbose.setLevel(Verbose.__level - 1) @staticmethod def level(): return Verbose.__level @staticmethod def setLevel(newLevel): """ set the level by hand for quick and dirty changes """ - Verbose.__level = max(0, newLevel) + Verbose.__level = max(-1, newLevel) def verbose( self ): """ returns the verbosity level for the application """ return Verbose.__level class TemporaryVerbosity(object): """Context handler for temporarily different verbosity""" def __init__(self, tempLevel): self.prevLevel = verbose() setVerbose(tempLevel) def __enter__(self): return self def __exit__(self, exc_type, exc_value, trback): setVerbose(self.prevLevel) def verbose(): """return the value of the verbose level""" return Verbose.level() def setVerbose( _verbose ): Verbose.setLevel(_verbose) def getFiles( urls, destdir, suffix='' , filenames = ''): """download files from 'url' into 'destdir'""" debug( "getfiles called. 
urls: %s, filenames: %s, suffix: %s" % ( urls, filenames, suffix ), 1 ) # make sure distfiles dir exists if ( not os.path.exists( destdir ) ): os.makedirs( destdir ) if type(urls) == list: urlList = urls else: urlList = urls.split() if filenames == '': filenames = [ os.path.basename(x) for x in urlList ] if type(filenames) == list: filenameList = filenames else: filenameList = filenames.split() dlist = list(zip( urlList , filenameList )) for url,filename in dlist: if ( not getFile( url + suffix, destdir , filename ) ): return False return True def getFile( url, destdir , filename='' ): """download file from 'url' into 'destdir'""" debug( "getFile called. url: %s" % url, 1 ) if url == "": error( "fetch: no url given" ) return False wgetpath = WGetExecutable if ( os.path.exists( wgetpath ) ): return wgetFile( url, destdir , filename ) scheme, host, path, _, _, _ = urllib.parse.urlparse( url ) filename = os.path.basename( path ) debug( "%s\n%s\n%s\n%s" % ( scheme, host, path, filename ) ) if ( scheme == "http" ): return getHttpFile( host, path, destdir, filename ) elif ( scheme == "ftp" ): return getFtpFile( host, path, destdir, filename ) else: error( "getFile: protocol not understood" ) return False def wgetFile( url, destdir, filename=''): """download file with wget from 'url' into 'destdir', if filename is given to the file specified""" compath = WGetExecutable command = "%s --no-check-certificate -c -t 10" % compath if emergeSettings.getboolean("General", "EMERGE_NO_PASSIVE_FTP", False ): command += " --no-passive-ftp " if(filename ==''): command += " -P %s" % destdir else: command += " -O %s" % os.path.join( destdir, filename ) command += " %s" % url debug( "wgetfile called", 1 ) ret = system( command ) debug( "wget ret: %s" % ret, 2) return ret def getFtpFile( host, path, destdir, filename ): """download file from a ftp host specified by 'host' and 'path' into 'destdir' using 'filename' as file name""" # FIXME check return values here (implement useful error 
handling)... debug( "FIXME getFtpFile called. %s %s" % ( host, path ), 1 ) ftp = ftplib.FTP( host ) ftp.login( "anonymous", "johndoe" ) with open( os.path.join( destdir, filename ), "wb" ) as outfile: ftp.retrbinary( "RETR " + path, outfile.write ) return True def getHttpFile( host, path, destdir, filename ): """download file from a http host specified by 'host' and 'path' into 'destdir' using 'filename' as file name""" # FIXME check return values here (implement useful error handling)... debug( "getHttpFile called. %s %s" % ( host, path ), 1 ) conn = http.client.HTTPConnection( host ) conn.request( "GET", path ) r1 = conn.getresponse() debug( "status: %s; reason: %s" % ( str( r1.status ), str( r1.reason ) ) ) count = 0 while r1.status == 302: if count > 10: debug( "Redirect loop" ) return False count += 1 _, host, path, _, _, _ = urllib.parse.urlparse( r1.getheader( "Location" ) ) debug( "Redirection: %s %s" % ( host, path ), 1 ) conn = http.client.HTTPConnection( host ) conn.request( "GET", path ) r1 = conn.getresponse() debug( "status: %s; reason: %s" % ( str( r1.status ), str( r1.reason ) ) ) data = r1.read() with open( os.path.join( destdir, filename ), "wb" ) as f: f.write( data ) return True def isCrEol(filename): with open(filename, "rb") as f: return str(f.readline(),'UTF-8').endswith("\r\n") def checkFilesDigests( downloaddir, filenames, digests=None ): """check digest of (multiple) files specified by 'filenames' from 'downloaddir'""" if digests != None: if type(digests) == list: digestList = digests elif digests.find("\n") != -1: digestList = digests.splitLines() else: digestList = [digests] i = 0 for filename in filenames: debug( "checking digest of: %s" % filename, 1 ) pathName = os.path.join( downloaddir, filename ) if digests == None: digestFileName = pathName + '.sha1' if not os.path.exists( digestFileName ): digestFileName, _ = os.path.splitext( pathName ) digestFileName += '.sha1' if not os.path.exists( digestFileName ): error( "digest validation 
request for file %s, but no digest file present" % pathName ) return False currentHash = digestFileSha1( pathName ) with open( digestFileName, "r" ) as f: line = f.readline() digest = re.search('\\b[0-9a-fA-F]{40}\\b', line) if not digest: error( " digestFile %s for file %s does not contain a valid SHA1 hash" % (digestFileName, pathName,) ) return False digest = digest.group(0) if len(digest) != len(currentHash) or digest.find(currentHash) == -1: error( "SHA1 hash for file %s (%s) does not match (%s)" % (pathName, currentHash, digest) ) return False # digest provided in digests parameter else: currentHash = digestFileSha1( pathName ) digest = digestList[i].strip() if len(digest) != len(currentHash) or digest.find(currentHash) == -1: error( "SHA1 hash for file %s (%s) does not match (%s)" % (pathName, currentHash, digest) ) return False i = i + 1 return True def createFilesDigests( downloaddir, filenames ): """create digests of (multiple) files specified by 'filenames' from 'downloaddir'""" digestList = list() for filename in filenames: pathName = os.path.join( downloaddir, filename ) digest = digestFileSha1( pathName ) entry = filename, digest digestList.append(entry) return digestList def createDigestFile(path): """creates a sha1 diget file""" digets = digestFileSha1(path) with open(path + ".sha1","wt+") as f: f.write("%s\n" % digets) def printFilesDigests( digestFiles, buildTarget=None): size = len( digestFiles ) i = 0 for (fileName, digest) in digestFiles: print("%40s %s" % ( fileName, digest ), end=' ') if size == 1: if buildTarget == None: print(" '%s'" % ( digest )) else: print("self.targetDigests['%s'] = '%s'" % ( buildTarget, digest )) else: if buildTarget == None: if i == 0: print(" ['%s'," % ( digest )) elif i == size-1: print(" '%s']" % ( digest )) else: print(" '%s'," % ( digest )) i = i + 1 else: if i == 0: print("self.targetDigests['%s'] = ['%s'," % ( buildTarget, digest )) elif i == size-1: print(" '%s']" % ( digest )) else: print(" '%s'," % ( digest 
)) i = i + 1 ### unpack functions def unpackFiles( downloaddir, filenames, workdir ): """unpack (multiple) files specified by 'filenames' from 'downloaddir' into 'workdir'""" cleanDirectory( workdir ) for filename in filenames: debug( "unpacking this file: %s" % filename, 1 ) if ( not unpackFile( downloaddir, filename, workdir ) ): return False return True def unpackFile( downloaddir, filename, workdir ): """unpack file specified by 'filename' from 'downloaddir' into 'workdir'""" ( shortname, ext ) = os.path.splitext( filename ) if ( ext == ".zip" ): return unZip( os.path.join( downloaddir, filename ), workdir ) elif ( ext == ".7z" ): return un7zip( os.path.join( downloaddir, filename ), workdir, ext ) elif ( ext == ".tgz" ): return unTar( os.path.join( downloaddir, filename ), workdir ) elif ( ext == ".gz" or ext == ".bz2" or ext == ".lzma" or ext == ".xz" ): _, myext = os.path.splitext( shortname ) if ( myext == ".tar" ): return unTar( os.path.join( downloaddir, filename ), workdir ) else: error( "unpacking %s" % myext ) return False elif ( ext == ".exe" ): warning( "unpack ignoring exe file" ) return True else: error( "dont know how to unpack this file: %s" % filename ) return False def un7zip( fileName, destdir, flag = None ): command = "7za x -r -y -o%s %s" % ( destdir, fileName ) if flag == ".7z": # Actually this is not needed for a normal archive. # But git is an exe file renamed to 7z and we need to specify the type. # Yes it is an ugly hack. command += " -t7z" if verbose() > 0: return system( command ) else: tmp = tempfile.TemporaryFile() return system( command, stdout=tmp ) def unTar( fileName, destdir ): """unpack tar file specified by 'file' into 'destdir'""" debug( "unTar called. 
file: %s, destdir: %s" % ( fileName, destdir ), 1 ) ( shortname, ext ) = os.path.splitext( fileName ) emerge_tmp = os.path.join(destdir,"emerge_tmp") mode = "r" if ( ext == ".gz" ): mode = "r:gz" #elif(ext == ".bz2"): #mode = "r:bz2" elif(ext == ".lzma" or ext == ".xz" or ext == ".bz2"): un7zip( fileName, emerge_tmp ) _, tarname = os.path.split( shortname ) fileName = os.path.join( emerge_tmp , tarname ) if not os.path.exists( fileName ): error( "couldn't find file %s" % fileName ) return False try: with tarfile.open( fileName, mode ) as tar: # FIXME how to handle errors here ? for tarMember in tar: try: if tarMember.issym(): tarDir = os.path.dirname(tarMember.name) target = tarMember.linkname if not target.startswith("/"):#abspath? target = os.path.normpath("%s/%s"%(tarDir, target)).replace("\\","/") if target in tar.getnames(): tar.extract(target, emerge_tmp ) shutil.move(os.path.join( emerge_tmp , tarDir , tarMember.linkname ),os.path.join( destdir , tarMember.name )) warning("Resolved symlink %s in tarfile %s to %s" % ( tarMember.name, fileName , tarMember.linkname)) else: warning("link target %s for %s not included in tarfile" % ( target , tarMember.name)) else: tar.extract(tarMember, destdir ) except tarfile.TarError: error( "couldn't extract file %s to directory %s" % ( fileName, destdir ) ) return False except IOError: warning("Failed to extract %s to directory %s" % ( tarMember.name, destdir ) ) return True except tarfile.TarError as e: error( "could not open existing tar archive: %s error: %s" % (fileName,e) ) return False finally: if os.path.exists(emerge_tmp): shutil.rmtree(emerge_tmp) def unZip( fileName, destdir ): """unzip file specified by 'file' into 'destdir'""" debug( "unZip called: file %s to destination %s" % ( fileName, destdir ), 1 ) if not os.path.exists( destdir ): os.makedirs( destdir ) try: zipObj = zipfile.ZipFile( fileName ) except (zipfile.BadZipfile, IOError): error( "couldn't extract file %s" % fileName ) return False for name in 
def info( message ):
    """Emit a user-facing informational message (suppressed below level 0)."""
    if verbose() >= 0:
        print("*** %s ***" % message)
    return True

def debug( message, level=1 ):
    """Emit a debug message when the verbosity exceeds *level*."""
    v = verbose()
    if v > level and v > 0:
        print("emerge debug (%s): %s" % (level, message))
        sys.stdout.flush()
    return True

def warning( message ):
    """Emit a warning; tolerate console encodings that cannot render it."""
    if verbose() <= 0:
        return True
    try:
        print("emerge warning: %s" % message)
    except UnicodeEncodeError:
        print("emerge warning: failed to print message")
    return True

def new_line( level=0 ):
    """Print an empty line when verbosity exceeds *level*."""
    v = verbose()
    if v > level and v > 0:
        print()

def debug_line( level=0 ):
    """Print a horizontal separator when verbosity exceeds *level*."""
    v = verbose()
    if v > level and v > 0:
        print("_" * 80)

def error( message ):
    """Report an error on stderr; always returns False for chaining."""
    if verbose() > 0:
        print("emerge error: %s" % message, file=sys.stderr)
    return False

def die( message ):
    """Abort by raising an exception carrying *message*."""
    raise Exception("emerge fatal error: %s" % message)

def traceMode():
    """return the value of the trace level"""
    return int(emergeSettings.get( "General", "EMERGE_TRACE", "0" ))

def trace( message, dummyLevel=0 ):
    """Emit a trace message whenever tracing is enabled."""
    if traceMode():
        print("emerge trace:", message)
        sys.stdout.flush()
    return True

def system(cmd, **kw ):
    """execute cmd in a shell. All keywords are passed to Popen. stdout
    and stderr might be changed depending on the chosen logging options."""
    kw['shell'] = True
    return systemWithoutShell(cmd, **kw)
stdout and stderr might be changed depending on the chosen logging options.""" debug( "executing command: %s" % cmd, 1 ) if kw.get('stdout') is None: kw['stdout'] = sys.stdout if kw.get('stderr') is None: kw['stderr'] = sys.stderr redirected = False prevStreams = sys.stdout, sys.stderr try: if verbose() == 0 and kw['stdout']== sys.stdout and kw['stderr'] == sys.stderr: redirected = True sys.stderr = sys.stdout = open('test.outlog', 'wb') ret = subprocess.call( cmd, **kw ) finally: if redirected: sys.stderr.close() sys.stdout, sys.stderr = prevStreams return ( ret == 0 ) def copySrcDirToDestDir( srcdir, destdir ): """ deprecated """ return copyDir( srcdir, destdir ) def moveSrcDirToDestDir( srcdir, destdir ): """ deprecated """ return moveDir( srcdir, destdir ) def getFileListFromDirectory( imagedir ): """ create a file list containing hashes """ ret = [] myimagedir = imagedir if ( not imagedir.endswith( "\\" ) ): myimagedir = myimagedir + "\\" for root, _, files in os.walk( imagedir ): for fileName in files: ret.append( ( os.path.join( root, fileName ).replace( myimagedir, "" ), digestFile( os.path.join( root, fileName ) ) ) ) return ret def unmergeFileList(rootdir, fileList, forced=False): """ delete files in the fileList if has matches or forced is True """ for filename, filehash in fileList: fullPath = os.path.join(rootdir, os.path.normcase( filename)) if os.path.isfile(fullPath): currentHash = digestFile(fullPath) if currentHash == filehash or filehash == "": - debug( "deleting file %s" % fullPath) + debug( "deleting file %s" % fullPath, 2) try: os.remove(fullPath) except OSError: system( "cmd /C \"attrib -R %s\"" % fullPath ) os.remove(fullPath) else: if forced: warning( "file %s has different hash: %s %s, deleting anyway" % \ (fullPath, currentHash, filehash ) ) try: os.remove(fullPath) except OSError: system( "cmd /C \"attrib -R %s\"" % fullPath ) os.remove(fullPath) else: warning( "file %s has different hash: %s %s, run with option --force to delete it 
anyway" % \ (fullPath, currentHash, filehash ) ) elif not os.path.isdir(fullPath): warning( "file %s does not exist" % fullPath) def mergeImageDirToRootDir( imagedir, rootdir , linkOnly = emergeSettings.getboolean("General", "UseHardlinks", False )): copyDir( imagedir, rootdir , linkOnly) def moveEntries( srcdir, destdir ): for entry in os.listdir( srcdir ): - #print "rootdir:", root - debug( "entry: %s" % entry, 1 ) src = os.path.join( srcdir, entry ) dest = os.path.join( destdir, entry ) - debug( "src: %s dest: %s" %( src, dest ), 1 ) + debug("move: %s -> %s" %( src, dest ), 1) if( os.path.isfile( dest ) ): os.remove( dest ) if( os.path.isdir( dest ) ): continue os.rename( src, dest ) def moveImageDirContents( imagedir, relSrcDir, relDestDir ): srcdir = os.path.join( imagedir, relSrcDir ) destdir = os.path.join( imagedir, relDestDir ) if ( not os.path.isdir( destdir ) ): os.mkdir( destdir ) moveEntries( srcdir, destdir ) os.chdir( imagedir ) os.removedirs( relSrcDir ) def fixCmakeImageDir( imagedir, rootdir ): """ when using DESTDIR=foo under windows, it does not _replace_ CMAKE_INSTALL_PREFIX with it, but prepends destdir to it. so when we want to be able to install imagedir into KDEROOT, we have to move things around... 
""" debug( "fixImageDir: %s %s" % ( imagedir, rootdir ), 1 ) # imagedir = e:\foo\thirdroot\tmp\dbus-0\image # rootdir = e:\foo\thirdroot # files are installed to # e:\foo\thirdroot\tmp\dbus-0\image\foo\thirdroot _, rootpath = os.path.splitdrive( rootdir ) #print "rp:", rootpath if ( rootpath.startswith( "\\" ) ): rootpath = rootpath[1:] # CMAKE_INSTALL_PREFIX = X:\ # -> files are installed to # x:\build\foo\dbus\image\ # --> all fine in this case #print "rp:", rootpath if len(rootpath) == 0: return tmp = os.path.join( imagedir, rootpath ) debug( "tmp: %s" % tmp, 1 ) tmpdir = os.path.join( imagedir, "tMpDiR" ) if ( not os.path.isdir( tmpdir ) ): os.mkdir( tmpdir ) moveEntries( tmp, tmpdir ) os.chdir( imagedir ) os.removedirs( rootpath ) moveEntries( tmpdir, imagedir ) cleanDirectory( tmpdir ) os.rmdir( tmpdir ) def cleanDirectory( directory ): debug("clean directory %s" % directory, 1) if ( os.path.exists( directory ) ): for root, dirs, files in os.walk( directory, topdown=False): for name in files: try: os.remove( os.path.join(root, name) ) except OSError: system( "cmd /C \"attrib -R %s\"" % os.path.join(root, name) ) try: os.remove( os.path.join(root, name) ) except OSError: die( "couldn't delete file %s\n ( %s )" % ( name, os.path.join( root, name ) ) ) for name in dirs: try: os.rmdir( os.path.join(root, name) ) except OSError: system( "cmd /C \"attrib -R %s\"" % os.path.join(root, name) ) try: os.rmdir( os.path.join(root, name) ) except OSError: die( "couldn't delete directory %s\n( %s )" % ( name, os.path.join( root, name ) ) ) else: os.makedirs( directory ) def sedFile( directory, fileName, sedcommand ): """ runs the given sed command on the given file """ olddir = os.getcwd() try: os.chdir( directory ) backup = "%s.orig" % fileName if( os.path.isfile( backup ) ): os.remove( backup ) command = "sed -i.orig %s %s" % ( sedcommand, fileName ) system( command ) finally: os.chdir( olddir ) def digestFile( filepath ): """ md5-digests a file """ fileHash = 
hashlib.md5() try: with open( filepath, "rb" ) as digFile: for line in digFile: fileHash.update( line ) return fileHash.hexdigest() except IOError: return "" def digestFileSha1( filepath ): """ sha1-digests a file """ fileHash = hashlib.sha1() with open( filepath, "rb" ) as hashFile: for line in hashFile: fileHash.update( line ) return fileHash.hexdigest() def getVCSType( url ): """ return the type of the vcs url """ if not url: return "" if isGitUrl( url ): return "git" elif url.find("://") == -1: return "svn" elif url.startswith("[hg]"): return "hg" elif url.find("svn:") >= 0 or url.find("https:") >= 0 or url.find("http:") >= 0: return "svn" ## \todo complete more cvs access schemes elif url.find("pserver:") >= 0: return "cvs" else: return "" def isGitUrl( Url ): """ this function returns true, if the Url given as parameter is a git url: it either starts with git:// or the first part before the first '|' ends with .git or if the url starts with the token [git] """ if Url.startswith('git://'): return True # split away branch and tags splitUrl = Url.split('|') if splitUrl[0].endswith(".git"): return True if Url.startswith("[git]"): return True return False def splitVCSUrl( Url ): """ this function splits up an url provided by Url into the server name, the path, a branch or tag; it will return a list with 3 strings according to the following scheme: git://servername/path.git|4.5branch|v4.5.1 will result in ['git://servername:path.git', '4.5branch', 'v4.5.1'] This also works for all other dvcs""" splitUrl = Url.split('|') if len(splitUrl) < 3: c = [x for x in splitUrl] for dummy in range(3 - len(splitUrl)): c.append('') else: c = splitUrl[0:3] return c def replaceVCSUrl( Url ): """ this function should be used to replace the url of a server this comes in useful if you e.g. 
need to switch the server url for a push url on gitorious.org """ configfile = os.path.join(EmergeStandardDirs.etcPortageDir(), "..", "emergehosts.conf" ) replacedict = dict() # FIXME handle svn/git usernames and settings with a distinct naming #todo WTF if ( ("General", "KDESVNUSERNAME") in emergeSettings and emergeSettings.get("General", "KDESVNUSERNAME") != "username" ) : replacedict[ "git://git.kde.org/" ] = "git@git.kde.org:" if os.path.exists( configfile ): config = configparser.ConfigParser() config.read( configfile ) # add the default KDE stuff if the KDE username is set. for section in config.sections(): host = config.get( section, "host" ) replace = config.get( section, "replace" ) replacedict[ host ] = replace for host in list(replacedict.keys()): if not Url.find( host ) == -1: Url = Url.replace( host, replacedict[ host ] ) break return Url def createImportLibs( dll_name, basepath ): """creating the import libraries for the other compiler(if ANSI-C libs)""" dst = os.path.join( basepath, "lib" ) if( not os.path.exists( dst ) ): os.mkdir( dst ) # check whether the required binary tools exist HAVE_GENDEF = test4application( "gendef" ) USE_GENDEF = HAVE_GENDEF HAVE_LIB = test4application( "lib" ) HAVE_DLLTOOL = test4application( "dlltool" ) if verbose() > 1: print("gendef found:", HAVE_GENDEF) print("gendef used:", USE_GENDEF) print("lib found:", HAVE_LIB) print("dlltool found:", HAVE_DLLTOOL) dllpath = os.path.join( basepath, "bin", "%s.dll" % dll_name ) defpath = os.path.join( basepath, "lib", "%s.def" % dll_name ) exppath = os.path.join( basepath, "lib", "%s.exp" % dll_name ) imppath = os.path.join( basepath, "lib", "%s.lib" % dll_name ) gccpath = os.path.join( basepath, "lib", "%s.dll.a" % dll_name ) if not HAVE_GENDEF and os.path.exists( defpath ): HAVE_GENDEF = True USE_GENDEF = False if not HAVE_GENDEF: warning( "system does not have gendef.exe" ) return False if not HAVE_LIB and not os.path.isfile( imppath ): warning( "system does not have lib.exe 
(from msvc)" ) if not HAVE_DLLTOOL and not os.path.isfile( gccpath ): warning( "system does not have dlltool.exe" ) # create .def if USE_GENDEF: cmd = "gendef - %s -a > %s " % ( dllpath, defpath ) system( cmd ) if( HAVE_LIB and not os.path.isfile( imppath ) ): # create .lib cmd = "lib /machine:x86 /def:%s /out:%s" % ( defpath, imppath ) system( cmd ) if( HAVE_DLLTOOL and not os.path.isfile( gccpath ) ): # create .dll.a cmd = "dlltool -d %s -l %s -k" % ( defpath, gccpath ) system( cmd ) if os.path.exists( defpath ): os.remove( defpath ) if os.path.exists( exppath ): os.remove( exppath ) return True def toMSysPath( path ): path = path.replace( '\\', '/' ) if ( len(path) > 1 and path[1] == ':' ): path = '/' + path[0].lower() + '/' + path[3:] return path def cleanPackageName( basename, packagename ): return os.path.basename( basename ).replace( packagename + "-", "" ).replace( ".py", "" ) def renameDir(src, dest): """ rename a directory """ debug("rename directory from %s to %s" % ( src, dest ), 2) if os.rename( src, dest ) == 0: return False else: return True def createDir(path): """Recursive directory creation function. 
Makes all intermediate-level directories needed to contain the leaf directory""" if not os.path.exists( path ): debug("creating directory %s " % ( path ), 2) os.makedirs( path ) return True def copyFile(src, dest,linkOnly = emergeSettings.getboolean("General", "UseHardlinks", False)): """ copy file from src to dest""" debug("copy file from %s to %s" % ( src, dest ), 2) destDir = os.path.dirname( dest ) if not os.path.exists( destDir ): os.makedirs( destDir ) if os.path.exists( dest ): warning( "Overriding %s" % dest ) os.remove( dest ) if linkOnly: try: os.link( src , dest ) return True except: warning("Failed to create hardlink %s for %s" % (dest, src)) try: shutil.copy(src,dest) except OSError: system("cmd /C \"attrib -R %s\"" % dest) shutil.copy(src,dest) return True def copyDir( srcdir, destdir,linkOnly = emergeSettings.getboolean("General", "UseHardlinks", False ) ): """ copy directory from srcdir to destdir """ debug( "copyDir called. srcdir: %s, destdir: %s" % ( srcdir, destdir ), 2) if ( not srcdir.endswith( "\\" ) ): srcdir += "\\" if ( not destdir.endswith( "\\" ) ): destdir += "\\" for root, _, files in os.walk( srcdir ): # do not copy files under .svn directories, because they are write-protected # and the they cannot easily be deleted... 
if ( root.find( ".svn" ) == -1 ): tmpdir = root.replace( srcdir, destdir ) if not os.path.exists( tmpdir ): os.makedirs( tmpdir ) for fileName in files: copyFile(os.path.join( root, fileName ),os.path.join( tmpdir, fileName ), linkOnly) debug( "copy %s to %s" % ( os.path.join( root, fileName ), os.path.join( tmpdir, fileName ) ), 2) def mergeTree(srcdir, destdir): """ copy directory from @p srcdir to @p destdir If a directory in @p destdir exists, just write into it """ fileList = os.listdir(srcdir) for i in fileList: src = os.path.join(srcdir, i) dest = os.path.join(destdir, i) if os.path.exists(dest): if os.path.isdir(dest): mergeTree(src, dest) continue else: os.remove(dest) shutil.move(src, destdir) # Cleanup (only removing empty folders) os.rmdir(srcdir) def moveDir( srcdir, destdir ): """ move directory from srcdir to destdir """ debug( "moveDir called. srcdir: %s, destdir: %s" % ( srcdir, destdir ), 1 ) try: shutil.move( srcdir, destdir ) except Exception as e: warning(e) return False return True def rmtree( directory ): """ recursively delete directory """ debug( "rmtree called. 
directory: %s" % ( directory ), 2 ) shutil.rmtree ( directory, True ) # ignore errors def moveFile(src, dest): """move file from src to dest""" debug("move file from %s to %s" % ( src, dest ), 2) shutil.move( src, dest ) return True def deleteFile(fileName): """delete file """ if not os.path.exists( fileName ): return False debug("delete file %s " % ( fileName ), 2) os.remove( fileName ) return True def findFiles( directory, pattern=None, fileNames=None): """find files recursivly""" if fileNames == None: fileNames = [] pattern = pattern.lower() for entry in os.listdir(directory): if entry.find(".svn") > -1 or entry.find(".bak") > -1: continue fileName = os.path.join(directory, entry) if os.path.isdir(fileName): findFiles(fileName, pattern, fileNames) elif os.path.isfile(fileName) and pattern == None or entry.lower().find(pattern) > -1: fileNames.append(fileName) return fileNames def putenv(name, value): """set environment variable""" debug("set environment variable -- set %s=%s" % ( name, value ), 2) os.putenv( name, value ) return True def unixToDos(filename): with open(filename, "rt+") as f: return f.read().replace('\n', '\r\n') def applyPatch(sourceDir, f, patchLevel='0'): """apply single patch""" cmd = 'patch -d "%s" -p%s < "%s"' % (sourceDir, patchLevel, f) debug("applying %s" % cmd) if not isCrEol(f): p = subprocess.Popen([ "patch", "-d", sourceDir, "-p", str(patchLevel)], stdin = subprocess.PIPE) p.communicate(bytes(unixToDos(f),'UTF-8')) result = p.wait() == 0 else: result = system( cmd ) if not result: warning( "applying %s failed!" 
% f) return result def log(fn): def inner(*args, **argv): logdir = emergeSettings.get( "General", "EMERGE_LOG_DIR", "" ) if logdir == "": return fn(*args, **argv) if os.path.isfile(logdir): die("EMERGE_LOG_DIR %s is a file" % logdir) if not os.path.exists(logdir): try: os.mkdir(logdir) except OSError: die("EMERGE_LOG_DIR %s can not be created" % logdir) logfile = "" for a in args: logfile += "%s-" % a logfile = logfile[:-1]#drop last - logfile = "%s.log" % logfile.replace("/","_").replace("\\","_") logfile = os.path.join(logdir, logfile) f = open(logfile, "at") try: old_out = sys.stdout old_err = sys.stderr sys.stdout = f sys.stderr = f return fn(*args, **argv) finally: sys.stdout = old_out sys.stderr = old_err f.close() return inner def getWinVer(): ''' Returns the Windows Version of the system returns "0" if the Version can not be determined ''' try: result = str(subprocess.Popen("cmd /C ver", stdout=subprocess.PIPE).communicate()[0],"windows-1252") except OSError: debug("Windows Version can not be determined", 1) return "0" version = re.search(r"\d+\.\d+\.\d+", result) if(version): return version.group(0) debug("Windows Version can not be determined", 1) return "0" def regQuery(key, value): ''' Query the registry key for value and return the result. 
''' query = 'reg query "%s" /v "%s"' % (key, value) debug("Executing registry query %s " % query, 2) result = subprocess.Popen(query, stdout = subprocess.PIPE).communicate()[0] # Output of this command is either an error to stderr # or the key with the value in the next line reValue = re.compile(r"(\s*%s\s*REG_\w+\s*)(.*)" % value) match = reValue.search(str(result, 'windows-1252')) if match and match.group(2): return match.group(2).rstrip() return False def embedManifest(executable, manifest): ''' Embed a manifest to an executable using either the free kdewin manifest if it exists in dev-utils/bin or the one provided by the Microsoft Platform SDK if it is installed' ''' if not os.path.isfile(executable) or not os.path.isfile(manifest): # We die here because this is a problem with the portage files die("embedManifest %s or %s do not exist" % (executable, manifest)) debug("embedding ressource manifest %s into %s" % \ (manifest, executable), 2) mtExe = None mtExe = os.path.join(EmergeStandardDirs.emergeRoot(), "dev-utils", "bin", "mt.exe") if(not os.path.isfile(mtExe)): # If there is no free manifest tool installed on the system # try to fallback on the manifest tool provided by visual studio sdkdir = regQuery("HKLM\SOFTWARE\Microsoft\Microsoft SDKs\Windows", "CurrentInstallFolder") if not sdkdir: debug("embedManifest could not find the Registry Key" " for the Windows SDK", 2) else: mtExe = r'%s' % os.path.join(sdkdir, "Bin", "mt.exe") if not os.path.isfile(os.path.normpath(mtExe)): debug("embedManifest could not find a mt.exe in\n\t %s" % \ os.path.dirname(mtExe), 2) if os.path.isfile(mtExe): return system([mtExe, "-nologo", "-manifest", manifest, "-outputresource:%s;1" % executable]) else: return system(["mt", "-nologo", "-manifest", manifest, "-outputresource:%s;1" % executable]) def getscriptname(): if __name__ == '__main__': return sys.argv[ 0 ] else: return __name__ def prependPath(*parts): """put path in front of the PATH environment variable, if it is not 
there yet. The last part must be a non empty string, otherwise we do nothing""" if parts[-1]: fullPath = os.path.join(*parts) old = os.getenv("PATH").split(';') if old[0] != fullPath: debug("adding %s to system path" % fullPath, 2) old.insert(0, fullPath) putenv( "PATH", ";".join(old)) def setConsoleTitle(title): if platform.system() == 'Windows': ctypes.windll.kernel32.SetConsoleTitleW(title) _TIMERS = dict() def startTimer(name, level = 0): """starts a timer for meassurement""" if emergeSettings.getboolean( "EmergeDebug", "MeasureTime", False ): if name in _TIMERS: die("%s already in timers" % name) _TIMERS[name] = (datetime.datetime.now() , level) #if verbose() > 0 and ( level == 0 or verbose() > level): #debug( "Task: %s started" % name ) #sys.stdout.flush() def stopTimer(name): """stops a timer for meassurement""" if emergeSettings.getboolean( "EmergeDebug", "MeasureTime", False ): if not name in _TIMERS: debug( "%s not in timers" % name ) return startTime , level = _TIMERS[name] if verbose() > 0 and (level == 0 or verbose() > level): delta = datetime.datetime.now() - startTime print( "Task: %s stopped after: %s" % (name , delta) ) sys.stdout.flush() del _TIMERS[name] def stopAllTimer(): """stops all timer for meassurement""" keys = sorted(list(_TIMERS.items()) , key=itemgetter(1) , reverse=True) for key , _ in keys: stopTimer(key) def notify(title,message,alertClass = None): + info("%s: %s" % (title, message)) + backends = emergeSettings.get( "General","EMERGE_USE_NOTIFY", "") if backends == "": return backends = Notifier.NotificationLoader.load(backends.split(";")) for backend in backends.values(): backend.notify(title,message,alertClass) def levenshtein(s1, s2): if len(s1) < len(s2): return levenshtein(s2, s1) if not s1: return len(s2) previous_row = range(len(s2) + 1) for i, c1 in enumerate(s1): current_row = [i + 1] for j, c2 in enumerate(s2): insertions = previous_row[j + 1] + 1 # j+1 instead of j since previous_row and current_row are one character 
longer deletions = current_row[j] + 1 # than s2 substitutions = previous_row[j] + (c1 != c2) current_row.append(min(insertions, deletions, substitutions)) previous_row = current_row return previous_row[-1] #taken from https://bitbucket.org/pypa/setuptools/src/a3d16c5f7443ec6e5e4d8d4791682b56130b41b5/pkg_resources.py?at=default def parse_version(s): """Convert a version string to a chronologically-sortable key This is a rough cross between distutils' StrictVersion and LooseVersion; if you give it versions that would work with StrictVersion, then it behaves the same; otherwise it acts like a slightly-smarter LooseVersion. It is *possible* to create pathological version coding schemes that will fool this parser, but they should be very rare in practice. The returned value will be a tuple of strings. Numeric portions of the version are padded to 8 digits so they will compare numerically, but without relying on how numbers compare relative to strings. Dots are dropped, but dashes are retained. Trailing zeros between alpha segments or dashes are suppressed, so that e.g. "2.4.0" is considered the same as "2.4". Alphanumeric parts are lower-cased. The algorithm assumes that strings like "-" and any alpha string that alphabetically follows "final" represents a "patch level". So, "2.4-1" is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is considered newer than "2.4-1", which in turn is newer than "2.4". Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that come before "final" alphabetically) are assumed to be pre-release versions, so that the version "2.4" is considered newer than "2.4a1". Finally, to handle miscellaneous cases, the strings "pre", "preview", and "rc" are treated as if they were "c", i.e. as though they were release candidates, and therefore are not as new as a version string that does not contain them, and "dev" is replaced with an '@' so that it sorts lower than than any other pre-release tag. 
""" def _parse_version_parts(s): component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c','dev':'@'}.get for part in component_re.split(s): part = replace(part,part) if not part or part=='.': continue if part[:1] in '0123456789': yield part.zfill(8) # pad for numeric comparison else: yield '*'+part yield '*final' # ensure that alpha/beta/candidate are before final parts = [] for part in _parse_version_parts(s.lower()): if part.startswith('*'): if part<'*final': # remove '-' before a prerelease tag while parts and parts[-1]=='*final-': parts.pop() # remove trailing zeros from each series of numeric parts while parts and parts[-1]=='00000000': parts.pop() parts.append(part) return tuple(parts) def createBat(fileName, command): with open(fileName, "wt+") as bat: bat.write("@echo off\r\n") bat.write(command) bat.write("\r\n") # TODO: clanup and speedup (see vlc and cmake) _NIGTHLY_URLS = dict() def getNightlyVersionsFromUrl(url, pattern, timeout = 10, dataLimit = 1024): global _NIGTHLY_URLS if url in _NIGTHLY_URLS: return _NIGTHLY_URLS[url] else: try: with urllib.request.urlopen(url, timeout = timeout) as fh: vers = re.findall( pattern , str(fh.read(dataLimit), "UTF-8")) _NIGTHLY_URLS[url] = vers return vers except Exception as e: print("Nightlys Unavailible for %s: %s" % (url, e)) return [None] diff --git a/kdesettings.ini b/kdesettings.ini index 5f0c4f2bf..7ae932762 100644 --- a/kdesettings.ini +++ b/kdesettings.ini @@ -1,161 +1,162 @@ ## This is the settings file for use with powershell. ## Copy it to your emerge/../etc and call ". .\kdeenv.ps1" or "kdeenv.bat" in your emerge checkout. ## You can use cmake like variables for values in the same section. ## See Paths/${DOWNLOADDIR} [General] ## Here you set the compiler to be used. 
## mingw4 - use the mingw gcc compiler (recommended) ## msvc2010, msvc2012, msvc2013 or msvc2015 - use the Microsoft Visual C++ compiler KDECOMPILER = mingw4 ## possible values x86 or x64 Architecture = x86 ## This option should be set to False if you use the msvc 201X Express Edition 64bit compiler ## in all other cases, simply keep this option commented out #Native=False ## This option can be used to override the default make program ## change the value to the path of the executable you want to use instead. EMERGE_MAKE_PROGRAM = jom ## This option can be used to enable a notification backend. ## As soon as the build process of a project has finished a notification will be displayed. ## Possible Backends: ## Snarl http://snarl.fullphat.net/ ## Toaster Toaster will display a Windows 8 toast notification ## Snore https://github.com/Snorenotify/Snorenotify. Snore supports multiple backends. You need to install snore-send using emerge. ## Pushover https://pushover.net - Pushover is a service to receive instant push notifications on your phone or tablet from a variety of sources. ## If using pushover, you may also need to set EMERGE_PUSHOVER_APP_TOKEN (emerge will use a default Token if unset) and EMERGE_PUSHOVER_USER_KEY, which is your user key #EMERGE_USE_NOTIFY = Snarl;Toaster;Pushover #EMERGE_PUSHOVER_APP_TOKEN = aJU9PRUb6nGUUM2idyLfXdU8S5q18i #EMERGE_PUSHOVER_USER_KEY = ## Speed up the merging of packages by using hard links UseHardlinks = True ## Whether to use ccache (only available with mingw compiler) #EMERGE_USE_CCACHE = True ## Whether to use ninja (default: False) #EMERGE_USE_NINJA = True ## Whether to build tests (default: True) #EMERGE_BUILDTESTS=False ## the buildtype of this installation, default is RelWithDebInfo ## Possible Values: ## Release ## RelWithDebInfo ## Debug ## MinSizeRel #EMERGE_BUILDTYPE = Debug ## Enable this option if you want to have shorter build times, and less ## disk usage. 
It will then avoid copying source code files of the KDE ## svn repository. To disable, EMERGE_NOCOPY = False. EMERGE_NOCOPY = True [Paths] ## This is the location of your python installation. ## This value must be set. Python = C:\python34 ## Some applications may need python 2.7 #Python27 = C:\python27 ## Here you change the download directory. ## If you want, you can share the same download directory between ## mingw and msvc. ## The default value is emerge/../download #DOWNLOADDIR = C:\kde\download ## This option defines the location for git checkouts. ## The default value is emerge/../download/git #KDEGITDIR = ${DOWNLOADDIR}\git ## This option defines the location for svn checkouts. ## The default value is emerge/../download/svn #KDESVNDIR = ${DOWNLOADDIR}\svn ## This option defines the location where the ccache files are stored. ## The default location is KDEROOT/build/ccache #CCACHE_DIR = C:\CCACHE\kf5 [QtSDK] ## This is highly experimental and you will miss certain features like dbus or mysql support. ## Whether to use prebuilt Qt binaries. Enabled = False ## The path to the Qt sdk. Path = D:\Qt ## The version of Qt. Version = 5.3 ## The compiler version, if you are not sure what to use, have a look into the directory set in QtSDK/Path. ## The compiler must be of the same type as General/KDECOMPILER. ## If you are using mingw please make sure you have installed the mingw using the Qt installer. 
Compiler = mingw482_32 [ShortPath] ## substitute paths by drives ## This option is needed to avoid path limit problems in case of long base paths ## and compiling big packages like qt ## If you disable it do _not_ use any paths longer than 6 letters in the ## directory settings EMERGE_USE_SHORT_PATH = True ## each drive could be commented out to skip substitution EMERGE_ROOT_DRIVE = r: EMERGE_SVN_DRIVE = s: EMERGE_GIT_DRIVE = q: EMERGE_DOWNLOAD_DRIVE = t: [Portage] ## This adds the possibility to disable certain packages ## For portage recipes configure options can be added by ## "if self.subinfo.options.isActive("binary/mysql-pkg"):" ## PACKAGE_IGNORES = dev-util/git;dev-util/msys;kdesupport/kdewin;win32libs/boost/boost-python PACKAGE_IGNORES = [PortageVersions] ## Override the default target if this version is available. #DefaultTarget = 5.0 ## Override the default version for a package. ## For a normal package add category/package, like win32libs/libpng and set the Version ## For meta packages like Qt5 you can directly set the version for the whole package #Qt5 = 5.2.1 #KF5 = 5.2.0 #KDE = 4.89.0 #Boost = 1_55_0 #win32libs/libpng = 1.2.43 binary/vlc = 3.0.0-git [Packager] ## The archive type for packages. ## Possible values are: zip, 7z ## Todo: rename 7ZipArchiveType = zip [EmergeDebug] ## If you want to have verbose output, uncomment the following option ## and set it to positive integer for verbose output and to 0 -## or disable it for normal output. Currently the highest verbosity level -## is 3 (equal to 'emerge -v -v -v'). level 0 equals 'emerge -q' -Verbose = 1 +## (or disable it) for normal output. Currently the highest verbosity level +## is 3 (equal to 'emerge -v -v -v'). 
level -1 equals 'emerge -q' +## Default is Verbose = 0 +#Verbose = 1 ## Prints time spent on various emerge tasks MeasureTime = False ## Dump internal state of emergeSettings to kdesettings.ini.dump #DumpSettings = True [Environment] ## All values defined here will be populated to the environment #GIT_COMMITTER_EMAIL = foo@bar.com [Version] EMERGE_SETTINGS_VERSION = 1