diff --git a/src/kapidox/data/templates/libinfo.html b/src/kapidox/data/templates/libinfo.html
index fc3150b..c7ca4b1 100644
--- a/src/kapidox/data/templates/libinfo.html
+++ b/src/kapidox/data/templates/libinfo.html
@@ -1,45 +1,49 @@
diff --git a/src/kapidox/models.py b/src/kapidox/models.py
index 6eeb020..7015870 100644
--- a/src/kapidox/models.py
+++ b/src/kapidox/models.py
@@ -1,250 +1,253 @@
# -*- coding: utf-8 -*-
#
# Copyright 2016 Olivier Churlaud
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

import logging
import os.path
import string

from kapidox import utils

## @package kapidox.models
#
# Contains the classes representing the objects used by kapidox
#


class Library(object):
    """ Library """

    def __init__(self, metainfo, products, platforms, all_maintainers):
        """
        Constructor of the Library object

        Args:
            metainfo:        (dict) dictionary describing a library
            products:        (dict of Products) all already created products,
                                keyed by their serialized name
            platforms:       (dict) dictionary of all platforms for which the
                                library is available, where the key is a platform
                                and the value is a restriction. For instance:
                                    {
                                        'Linux': '',
                                        'Windows': 'Tested with Windows 10 only'
                                    }
                                would work.
            all_maintainers: (dict of dict) all possible maintainers, where the
                                main key is a username/unique pseudo, and the
                                value is a dictionary with the name and email
                                address. For example:
                                    {
                                        'username01': {
                                            'name': 'Paul Developer',
                                            'email': 'mail@example.com'
                                            },
                                        'username02': {
                                            'name': 'Marc Developer2',
                                            'email': 'mail2@example.com'
                                            }
                                    }
                                would work.
        """
        self.product = None
        self.subproduct = None

        if 'group' in metainfo:
            productname = metainfo['group']
            self.part_of_group = True
        else:
            productname = metainfo['name']
            self.part_of_group = False
        if utils.serialize_name(productname) not in products:
            productname = metainfo['name']
            del metainfo['group']
            products[utils.serialize_name(metainfo['name'])] = Product(metainfo, all_maintainers)
            self.part_of_group = False
            logging.warning("Group of {} not found: dropped.".format(metainfo['fancyname']))
        self.product = products[utils.serialize_name(productname)]
        if self.product is None:
            raise ValueError("'{}' does not belong to a product."
                             .format(metainfo['name']))

        if 'subgroup' in metainfo and self.part_of_group:
            for sp in self.product.subproducts:
                if sp.name == utils.serialize_name(metainfo['subgroup']):
                    self.subproduct = sp
            if self.subproduct is None:
                logging.warning("Subgroup {} of library {} not documented, subgroup will be None"
                                .format(metainfo['subgroup'], metainfo['name']))

        if self.subproduct is not None:
            self.parent = self.subproduct
            self.subproduct.libraries.append(self)
        else:
            self.parent = self.product
            self.product.libraries.append(self)

        self.name = metainfo['name']
        self.fancyname = metainfo['fancyname']
        self.description = metainfo.get('description')
        self.maintainers = utils.set_maintainers(metainfo.get('maintainer'), all_maintainers)
        self.platforms = platforms
        self.outputdir = self._set_outputdir(self.part_of_group)
        self.href = '../' + self.outputdir.lower() + '/html/index.html'
        self.path = metainfo['path']
        self.srcdirs = utils.tolist(metainfo.get('public_source_dirs', ['src']))
        self.docdir = utils.tolist(metainfo.get('public_doc_dir', ['docs']))
        self.exampledir = utils.tolist(metainfo.get('public_example_dir', ['examples']))
        self.dependency_diagram = None
        self.type = metainfo.get('type', '')
        self.portingAid = metainfo.get('portingAid', False)
        self.deprecated = metainfo.get('deprecated', False)
        self.libraries = metainfo.get('libraries', [])
        self.cmakename = metainfo.get('cmakename', '')
        self.irc = metainfo.get('irc', self.product.irc)
        self.mailinglist = metainfo.get('mailinglist', self.product.mailinglist)
+        # Current assumption: the repo name is also used for the local clone and matches
+        # the KDE project identifier; possibly this should get an override in the metainfo.
+        self.repopath = utils.set_repopath(self.name)

    def _extend_parent(self, metainfo, key, key_obj, default):
        if key in metainfo:
            return metainfo[key]
        elif getattr(self.product, key_obj) is not None:
            return getattr(self.product, key_obj)
        else:
            return default

    def _set_outputdir(self, grouped):
        outputdir = self.name
        if grouped:
            outputdir = self.product.outputdir + '/' + outputdir
        return outputdir.lower()
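# Illustrative sketch, not part of the original file: the shape of the metainfo
# dictionary the Library constructor above expects. All names and values are
# hypothetical examples, not taken from a real metainfo.yaml.
#
#     metainfo = {
#         'name': 'klibrary',
#         'fancyname': 'KLibrary',
#         'group': 'frameworks',        # looked up in the `products` dict
#         'subgroup': 'Tier 1',         # matched against product.subproducts
#         'description': 'A short description',
#         'maintainer': 'username01',
#         'path': '/path/to/checkout/klibrary',
#         'public_source_dirs': ['src'],
#     }
#
# With a `products` dict that already contains a 'frameworks' Product, the
# constructor attaches the library to that product (and to the 'Tier 1'
# subproduct if one is declared); otherwise it drops the group and creates a
# standalone Product from the library's own metainfo.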
""" self.parent = None # if there is a group, the product is the group # else the product is directly the library if 'group_info' in metainfo: self.name = utils.serialize_name(metainfo['group_info'].get('name', metainfo.get('group'))) self.fancyname = metainfo['group_info'].get('fancyname', string.capwords(self.name)) self.description = metainfo['group_info'].get('description') self.long_description = metainfo['group_info'].get('long_description', []) self.maintainers = utils.set_maintainers(metainfo['group_info'].get('maintainer'), all_maintainers) self.platforms = metainfo['group_info'].get('platforms') self.outputdir = self.name self.href = self.outputdir + '/index.html' self.logo_url_src = self._set_logo_src(metainfo['path'], metainfo['group_info']) self.logo_url = self._set_logo() self.libraries = [] # We'll set this later self.subgroups = [] # We'll set this later self.irc = metainfo['group_info'].get('irc', 'kde-devel') self.mailinglist = metainfo['group_info'].get('mailinglist', 'kde-devel') self.subproducts = self._extract_subproducts(metainfo['group_info']) self.part_of_group = True elif 'group' not in metainfo: self.name = utils.serialize_name(metainfo['name']) self.fancyname = metainfo['fancyname'] self.description = metainfo.get('description') self.maintainers = utils.set_maintainers(metainfo.get('maintainer'), all_maintainers) self.platforms = [x['name'] for x in metainfo.get('platforms', [{'name': None}])] self.outputdir = self.name self.href = self.outputdir + '/html/index.html' self.logo_url_src = self._set_logo_src(metainfo['path'], metainfo) self.logo_url = self._set_logo() self.libraries = [] self.irc = None self.mailinglist = None self.part_of_group = False else: raise ValueError("I do not recognize a product in {}." .format(metainfo['name'])) def _extract_subproducts(self, groupinfo): subproducts = [] if 'subgroups' in groupinfo: for sg in groupinfo['subgroups']: sg if 'name' in sg: subproducts.append(Subproduct(sg, self)) return subproducts def _set_logo(self): if self.logo_url_src is not None: filename, ext = os.path.splitext(self.logo_url_src) return self.outputdir + '/' + self.name + ext else: return None def _set_logo_src(self, path, dct): if 'logo' in dct: logo_url = os.path.join(path, dct['logo']) if os.path.isfile(logo_url): return logo_url else: logging.warning("{} logo file doesn't exist, set back to None" .format(self.fancyname)) return None else: return None class Subproduct(object): """ Subproduct """ def __init__(self, spinfo, product): """ Constructor of the Subproduct object Args: spinfo: (dict) description of the subproduct. It is not more than: { 'name': 'Subproduct Name', 'description': 'This subproduct does this and that', 'order': 3, # this is optional } for example. product: (Product) the product it is part of. """ self.fancyname = spinfo['name'] self.name = utils.serialize_name(spinfo['name']) self.description = spinfo.get('description') self.order = spinfo.get('order', 99) # If no order, go to end self.libraries = [] self.product = product self.parent = product diff --git a/src/kapidox/utils.py b/src/kapidox/utils.py index ae1c958..602eb83 100644 --- a/src/kapidox/utils.py +++ b/src/kapidox/utils.py @@ -1,274 +1,292 @@ # -*- coding: utf-8 -*- # # Copyright 2014 Aurélien Gâteau # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. 
diff --git a/src/kapidox/utils.py b/src/kapidox/utils.py
index ae1c958..602eb83 100644
--- a/src/kapidox/utils.py
+++ b/src/kapidox/utils.py
@@ -1,274 +1,292 @@
# -*- coding: utf-8 -*-
#
# Copyright 2014 Aurélien Gâteau
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
#    notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
#    notice, this list of conditions and the following disclaimer in the
#    documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Python 2/3 compatibility (NB: we require at least 2.7)
from __future__ import division, absolute_import, print_function, unicode_literals

from fnmatch import fnmatch
import logging
import os
import re
import subprocess
import shutil
import sys
import tempfile
+import requests

## @package kapidox.utils
#
# Multiple usage utils.
#
# This module contains code which is shared between depdiagram-prepare and
# other components.
#
# Code in this dir should not import any module which is not shipped with
# Python because this module is used by depdiagram-prepare, which must be able
# to run on builds.kde.org, which may not have all the required dependencies.
#


def setup_logging():
    FORMAT = '%(asctime)s %(levelname)s %(message)s'
    logging.basicConfig(format=FORMAT, datefmt='%H:%M:%S', level=logging.DEBUG)


def tolist(a):
    """ Return a list based on `a`. """
    return a if type(a) is list else [a]


def serialize_name(name):
    """ Return a serialized name.

    For now it only replaces ' ' with '_' and lowers the letters.
    """
    if name is not None:
        return '_'.join(name.lower().split(' '))
    else:
        return None


+def set_repopath(id):
+    """ Return the repopath for the repo id, queried from projects.kde.org
+
+    Args:
+        id: unique KDE repo identifier
+    """
+    if id is None:
+        return None
+
+    try:
+        r = requests.get('https://projects.kde.org/api/v1/identifier/' + id)
+        return r.json()['repo']
+    except Exception as exc:
+        # Catch all exceptions here: whatever fails in this function should not
+        # cause the code to fail
+        logging.warning("Failed to get data from projects.kde.org: {}".format(exc))
+        return None
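# Illustrative note, not part of the change above: set_repopath() assumes the
# projects.kde.org v1 API returns a JSON object with a 'repo' field for a known
# identifier. The identifier and result below are hypothetical examples:
#
#     >>> set_repopath('klibrary')
#     'frameworks/klibrary'
#
# For an unknown identifier or a network failure, the helper logs a warning and
# returns None, so callers must cope with a missing repopath.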
def set_maintainers(maintainer_keys, all_maintainers):
    """ Expand the maintainer keys into the matching maintainer entries.

    Args:
        maintainer_keys: (string or list) maintainer key, or list of maintainer
                            keys, to look up.
        all_maintainers: (dict of dict) look-up table where the names and emails
                            of the maintainers are stored.

    Examples:
        >>> maintainer_keys = ['arthur', 'toto']
        >>> my_team = {'arthur': {'name': 'Arthur Pendragon',
                                  'email': 'arthur@example.com'},
                       'toto': {'name': 'Toto',
                                'email': 'toto123@example.com'}
                      }
        >>> set_maintainers(maintainer_keys, my_team)
    """

    if not maintainer_keys:
        maintainers = []
    elif isinstance(maintainer_keys, list):
        maintainers = map(lambda x: all_maintainers.get(x, None),
                          maintainer_keys)
    else:
        maintainers = [all_maintainers.get(maintainer_keys, None)]

    maintainers = [x for x in maintainers if x is not None]
    return maintainers


def parse_fancyname(fw_dir):
    """Return the framework name for a given source dir

    The framework name is the name of the toplevel CMake project
    """
    cmakelists_path = os.path.join(fw_dir, "CMakeLists.txt")
    if not os.path.exists(cmakelists_path):
        logging.error("No CMakeLists.txt in {}".format(fw_dir))
        return None

    project_re = re.compile(r"project\s*\(\s*([\w\-\_]+)", re.I)
    with open(cmakelists_path) as f:
        for line in f.readlines():
            match = project_re.search(line)
            if match:
                return match.group(1)
    logging.error("Failed to find framework name: Could not find a "
                  "'project()' command in {}.".format(cmakelists_path))
    return None
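# Illustrative sketch, not part of the original file: parse_fancyname() reads the
# first project() command of the toplevel CMakeLists.txt. For a hypothetical
# checkout whose CMakeLists.txt contains a line such as
#
#     project(KExampleLib VERSION 1.0)
#
# the call would return the project name:
#
#     >>> parse_fancyname('/path/to/kexamplelib')
#     'KExampleLib'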
def cache_dir():
    """Find/create a semi-long-term cache directory.

    We do not use tempdir, except as a fallback, because temporary directories
    are intended for files that only last for the program's execution.
    """
    cachedir = None
    if sys.platform == 'darwin':
        try:
            from AppKit import NSSearchPathForDirectoriesInDomains
            # http://developer.apple.com/DOCUMENTATION/Cocoa/Reference/Foundation/Miscellaneous/Foundation_Functions/Reference/reference.html#//apple_ref/c/func/NSSearchPathForDirectoriesInDomains
            # NSApplicationSupportDirectory = 14
            # NSUserDomainMask = 1
            # True for expanding the tilde into a fully qualified path
            cachedir = os.path.join(
                NSSearchPathForDirectoriesInDomains(14, 1, True)[0],
                'KApiDox')
        except:
            pass
    elif os.name == "posix":
        if 'HOME' in os.environ and os.path.exists(os.environ['HOME']):
            cachedir = os.path.join(os.environ['HOME'], '.cache', 'kapidox')
    elif os.name == "nt":
        if 'APPDATA' in os.environ and os.path.exists(os.environ['APPDATA']):
            cachedir = os.path.join(os.environ['APPDATA'], 'KApiDox')

    if cachedir is None:
        cachedir = os.path.join(tempfile.gettempdir(), 'kapidox')

    if not os.path.isdir(cachedir):
        os.makedirs(cachedir)

    return cachedir


def svn_export(remote, local, overwrite=False):
    """Wraps svn export.

    Args:
        remote:    (string) the remote url.
        local:     (string) the local path where to download.
        overwrite: (bool) whether to overwrite `local` or not.
                    (optional, default = False)

    Returns:
        True if success.

    Raises:
        FileNotFoundError
        subprocess.CalledProcessError
    """
    try:
        import svn.core
        import svn.client
        logging.debug("Using Python libsvn bindings to fetch %s", remote)
        ctx = svn.client.create_context()
        ctx.auth_baton = svn.core.svn_auth_open([])

        latest = svn.core.svn_opt_revision_t()
        latest.type = svn.core.svn_opt_revision_head

        svn.client.export(remote, local, latest, True, ctx)
    except ImportError:
        logging.debug("Using external svn client to fetch %s", remote)
        cmd = ['svn', 'export', '--quiet']
        if overwrite:
            cmd.append('--force')
        cmd += [remote, local]
        try:
            # Let subprocess.CalledProcessError propagate, as documented above
            subprocess.check_call(cmd, stderr=subprocess.STDOUT)
        except FileNotFoundError:
            logging.debug("External svn client not found")
            return False
    # subversion will set the timestamp to match the server
    os.utime(local, None)
    return True


def copy_dir_contents(directory, dest):
    """Copy the contents of a directory

    Args:
        directory: (string) the directory to copy the contents of.
        dest:      (string) the directory to copy them into.
    """
    ignored = ['CMakeLists.txt']
    ignore = shutil.ignore_patterns(*ignored)
    for fn in os.listdir(directory):
        f = os.path.join(directory, fn)
        if os.path.isfile(f):
            docopy = True
            for i in ignored:
                if fnmatch(fn, i):
                    docopy = False
                    break
            if docopy:
                shutil.copy(f, dest)
        elif os.path.isdir(f):
            dest_f = os.path.join(dest, fn)
            if os.path.isdir(dest_f):
                shutil.rmtree(dest_f)
            shutil.copytree(f, dest_f, ignore=ignore)


_KAPIDOX_VERSION = None

def get_kapidox_version():
    """Get commit id of running code if it is running from git repository.

    May return an empty string if it failed to extract the commit id.

    Assumes .git/HEAD looks like this:

        ref: refs/heads/master

    and assumes .git/refs/heads/master contains the commit id
    """
    global _KAPIDOX_VERSION
    if _KAPIDOX_VERSION is not None:
        return _KAPIDOX_VERSION
    _KAPIDOX_VERSION = ""
    bin_dir = os.path.dirname(sys.argv[0])
    git_dir = os.path.join(bin_dir, "..", ".git")
    if not os.path.isdir(git_dir):
        # Looks like we are not running from the git repo, exit silently
        return _KAPIDOX_VERSION

    git_HEAD = os.path.join(git_dir, "HEAD")
    if not os.path.isfile(git_HEAD):
        logging.warning("Getting git info failed: {} is not a file".format(git_HEAD))
        return _KAPIDOX_VERSION

    try:
        line = open(git_HEAD).readline()
        ref_name = line.split(": ")[1].strip()
        with open(os.path.join(git_dir, ref_name)) as f:
            _KAPIDOX_VERSION = f.read().strip()
    except Exception as exc:
        # Catch all exceptions here: whatever fails in this function should not
        # cause the code to fail
        logging.warning("Getting git info failed: {}".format(exc))
    return _KAPIDOX_VERSION


def find_dot_files(dot_dir):
    """Returns a list of path to files ending with .dot in subdirs of `dot_dir`."""
    lst = []
    for (root, dirs, files) in os.walk(dot_dir):
        lst.extend([os.path.join(root, x) for x in files if x.endswith('.dot')])
    return lst
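# Illustrative sketch, not part of the original file: how the helpers above might
# be combined to fetch content into the semi-long-term cache and copy it next to
# generated output. The URL and paths are hypothetical examples.
#
#     >>> workdir = os.path.join(cache_dir(), 'example-doc')
#     >>> svn_export('svn://anonsvn.kde.org/home/kde/trunk/some/dir', workdir,
#     ...            overwrite=True)
#     True
#     >>> copy_dir_contents(workdir, 'apidocs/example')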