diff --git a/src/kapidox/__init__.py b/src/kapidox/__init__.py index e69de29..551e26e 100644 --- a/src/kapidox/__init__.py +++ b/src/kapidox/__init__.py @@ -0,0 +1,2 @@ + +from . import generator, preprocessing, utils, argparserutils \ No newline at end of file diff --git a/src/kapidox/argparserutils.py b/src/kapidox/argparserutils.py index a7dea88..155aae8 100644 --- a/src/kapidox/argparserutils.py +++ b/src/kapidox/argparserutils.py @@ -1,100 +1,131 @@ # -*- coding: utf-8 -*- # # Copyright 2014 Aurélien Gâteau # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # Python 2/3 compatibility (NB: we require at least 2.7) from __future__ import division, absolute_import, print_function, unicode_literals +import argparse import logging import os import sys +def parse_args(depdiagram_available): + parser = argparse.ArgumentParser( + description='Generate API documentation for the KDE Frameworks' + ) + group = add_sources_group(parser) + group.add_argument('frameworksdir', + help='Location of the frameworks modules.') + group.add_argument('--depdiagram-dot-dir', + help='Generate dependency diagrams, using the .dot files from DIR.', + metavar="DIR") + add_output_group(parser) + add_qt_doc_group(parser) + add_paths_group(parser) + add_misc_group(parser) + args = parser.parse_args() + check_common_args(args) + + if args.depdiagram_dot_dir and not depdiagram_available: + logging.error('You need to install the Graphviz Python bindings to ' + 'generate dependency diagrams.\n' + 'See .') + exit(1) + + if not os.path.isdir(args.frameworksdir): + logging.error(args.frameworksdir + " is not a directory") + exit(2) + + return args + + def add_sources_group(parser): return parser.add_argument_group('sources') def add_output_group(parser): group = parser.add_argument_group('output options') group.add_argument('--title', default='KDE API Documentation', help='String to use for page titles.') group.add_argument('--man-pages', action='store_true', help='Generate man page documentation.') group.add_argument('--qhp', action='store_true', help='Generate Qt Compressed Help documentation.') group.add_argument('--searchengine', action='store_true', help="Enable Doxygen's search engine feature.") group.add_argument('--api-searchbox', action='store_true', help="Enable the API searchbox (only useful for api.kde.org).") return group def add_qt_doc_group(parser): group = 
parser.add_argument_group('Qt documentation') group.add_argument('--qtdoc-dir', help='Location of (local) Qt documentation; this is searched ' + 'for tag files to create links to Qt classes.') group.add_argument('--qtdoc-link', help='Override Qt documentation location for the links in the ' + 'html files. May be a path or URL.') group.add_argument('--qtdoc-flatten-links', action='store_true', help='Whether to assume all Qt documentation html files ' + 'are immediately under QTDOC_LINK (useful if you set ' + 'QTDOC_LINK to the online Qt documentation). Ignored ' + 'if QTDOC_LINK is not set.') return group def add_paths_group(parser): group = parser.add_argument_group('paths') group.add_argument('--doxygen', default='doxygen', help='(Path to) the doxygen executable.') group.add_argument('--qhelpgenerator', default='qhelpgenerator', help='(Path to) the qhelpgenerator executable.') return group def add_misc_group(parser): scriptdir = os.path.dirname(os.path.realpath(__file__)) doxdatadir = os.path.join(scriptdir, 'data') group = parser.add_argument_group('misc') group.add_argument('--doxdatadir', default=doxdatadir, help='Location of the HTML header files and support graphics.') group.add_argument('--keep-temp-dirs', action='store_true', help='Do not delete temporary dirs, useful for debugging.') return parser def check_common_args(args): if not _is_doxdatadir(args.doxdatadir): logging.error("{} is not a valid doxdatadir".format(args.doxdatadir)) sys.exit(1) def _is_doxdatadir(directory): for name in ['header.html', 'footer.html', 'htmlresource']: if not os.path.exists(os.path.join(directory, name)): return False return True diff --git a/src/kapidox/generator.py b/src/kapidox/generator.py index 555173b..42d2eb4 100644 --- a/src/kapidox/generator.py +++ b/src/kapidox/generator.py @@ -1,678 +1,742 @@ # -*- coding: utf-8 -*- # # Copyright 2014 Alex Merry # Copyright 2014 Aurélien Gâteau # Copyright 2014 Alex Turbov # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
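
The new parse_args() above composes the parser from per-group helpers (add_output_group, add_qt_doc_group, ...), so other kapidox entry points can reuse individual groups and run the same post-parse validation. A minimal sketch of that composition pattern; the tool description and option below are placeholders, not kapidox's real CLI:

    import argparse
    import sys

    def add_output_group(parser):
        # One helper per argument group keeps --help sections tidy and
        # lets several entry points share the same option definitions.
        group = parser.add_argument_group('output options')
        group.add_argument('--title', default='API Documentation',
                           help='String to use for page titles.')
        return group

    def parse_args(argv=None):
        parser = argparse.ArgumentParser(description='composition demo')
        add_output_group(parser)
        args = parser.parse_args(argv)
        # Post-parse validation, in the spirit of check_common_args().
        if not args.title.strip():
            sys.exit('--title must not be empty')
        return args

    print(parse_args(['--title', 'KDE API Documentation']).title)
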
# Python 2/3 compatibility (NB: we require at least 2.7) from __future__ import division, absolute_import, print_function, unicode_literals import codecs import datetime import os import logging import shutil import subprocess -import sys import tempfile -import time +import sys -from fnmatch import fnmatch -try: - from urllib.parse import urljoin -except ImportError: +import jinja2 + +if sys.version_info.major < 3: from urlparse import urljoin +else: + from urllib.parse import urljoin + +from kapidox import utils try: - from urllib.request import urlretrieve + from kapidox import depdiagram + DEPDIAGRAM_AVAILABLE = True except ImportError: - from urllib import urlretrieve + DEPDIAGRAM_AVAILABLE = False -import jinja2 - -from kapidox import utils from .doxyfilewriter import DoxyfileWriter __all__ = ( "Context", - "copy_dir_contents", "generate_apidocs", "search_for_tagfiles", - "download_kde_identities", "WARN_LOGFILE", "build_classmap", "postprocess", "create_dirs", "write_mapping_to_php", "create_jinja_environment", ) WARN_LOGFILE = 'doxygen-warnings.log' HTML_SUBDIR = 'html' class Context(object): """ Holds parameters used by the various functions of the generator """ __slots__ = ( # Names 'modulename', 'fancyname', 'title', 'fwinfo', # KApidox files 'doxdatadir', 'resourcedir', # Input 'srcdir', 'tagfiles', 'dependency_diagram', # Output 'outputdir', 'htmldir', 'tagfile', # Output options 'man_pages', 'qhp', 'searchengine', 'api_searchbox', # Binaries 'doxygen', 'qhelpgenerator', ) def __init__(self, args, **kwargs): # Names self.title = args.title # KApidox files self.doxdatadir = args.doxdatadir # Output options self.man_pages = args.man_pages self.qhp = args.qhp self.searchengine = args.searchengine self.api_searchbox = args.api_searchbox # Binaries self.doxygen = args.doxygen self.qhelpgenerator = args.qhelpgenerator for key in self.__slots__: if not hasattr(self, key): setattr(self, key, kwargs.get(key)) def create_jinja_environment(doxdatadir): loader = jinja2.FileSystemLoader(os.path.join(doxdatadir, 'templates')) return jinja2.Environment(loader=loader) +def process_toplevel_html_file(outputfile, doxdatadir, products, title, + api_searchbox=False): + + products.sort(key=lambda x: x['name'].lower()) + mapping = { + 'resources': '.', + 'api_searchbox': api_searchbox, + # steal the doxygen css from one of the frameworks + # this means that all the doxygen-provided images etc. will be found + 'doxygencss': products[0]['outputdir'] + '/html/doxygen.css', + 'title': title, + 'breadcrumbs': { + 'entries': [ + { + 'href': './index.html', + 'text': 'KDE API Reference' + } + ] + }, + 'product_list': products, + } + tmpl = create_jinja_environment(doxdatadir).get_template('frontpage.html') + with codecs.open(outputfile, 'w', 'utf-8') as outf: + outf.write(tmpl.render(mapping)) + + +def process_subgroup_html_files(outputfile, doxdatadir, groups, available_platforms, title, + api_searchbox=False): + + for group in groups: + mapping = { + 'resources': '..', + 'api_searchbox': api_searchbox, + # steal the doxygen css from one of the frameworks + # this means that all the doxygen-provided images etc. 
will be found + 'doxygencss': group['libraries'][0]['outputdir'] + '/html/doxygen.css', + 'title': title, + 'breadcrumbs': { + 'entries': [ + { + 'href': '../index.html', + 'text': 'KDE API Reference' + }, + { + 'href': './index.html', + 'text': group['fancyname'] + } + ] + }, + 'group': group, + 'available_platforms': sorted(available_platforms), + } + + if not os.path.isdir(group['name']): + os.mkdir(group['name']) + outputfile = group['name']+'/index.html' + tmpl = create_jinja_environment(doxdatadir).get_template('subgroup.html') + with codecs.open(outputfile, 'w', 'utf-8') as outf: + outf.write(tmpl.render(mapping)) + + def create_dirs(ctx): ctx.htmldir = os.path.join(ctx.outputdir, HTML_SUBDIR) ctx.tagfile = os.path.join(ctx.htmldir, ctx.modulename + '.tags') if not os.path.exists(ctx.outputdir): os.makedirs(ctx.outputdir) if os.path.exists(ctx.htmldir): # If we have files left there from a previous run but which are no # longer generated (for example because a C++ class has been removed) # then postprocess will fail because the left-over file has already been # processed. To avoid that, we delete the html dir. shutil.rmtree(ctx.htmldir) os.makedirs(ctx.htmldir) def load_template(path): # Set errors to 'ignore' because we don't want weird characters in Doxygen # output (for example source code listing) to cause a failure content = codecs.open(path, encoding='utf-8', errors='ignore').read() try: return jinja2.Template(content) except jinja2.exceptions.TemplateSyntaxError as exc: logging.error('Failed to parse template {}'.format(path)) raise -def smartjoin(pathorurl1,*args): - """Join paths or URLS - - It figures out which it is from whether the first contains a "://" - """ - if '://' in pathorurl1: - if not pathorurl1.endswith('/'): - pathorurl1 += '/' - return urljoin(pathorurl1,*args) - else: - return os.path.join(pathorurl1,*args) - - def find_tagfiles(docdir, doclink=None, flattenlinks=False, exclude=None, _depth=0): """Find Doxygen-generated tag files in a directory The tag files must have the extention .tags, and must be in the listed directory, a subdirectory or a subdirectory named html of a subdirectory. 
docdir -- the directory to search doclink -- the path or URL to use when creating the documentation links; if None, this will default to docdir flattenlinks -- if this is True, generated links will assume all the html files are directly under doclink; if False (the default), the html files are assumed to be at the same relative location to doclink as the tag file is to docdir; ignored if doclink is not set Returns a list of pairs of (tag_file,link_path) """ if not os.path.isdir(docdir): return [] if doclink is None: doclink = docdir flattenlinks = False + def smartjoin(pathorurl1, *args): + """Join paths or URLS + + It figures out which it is from whether the first contains a "://" + """ + if '://' in pathorurl1: + if not pathorurl1.endswith('/'): + pathorurl1 += '/' + return urljoin(pathorurl1, *args) + else: + return os.path.join(pathorurl1, *args) + def nestedlink(subdir): if flattenlinks: return doclink else: - return smartjoin(doclink,subdir) + return smartjoin(doclink, subdir) tagfiles = [] entries = os.listdir(docdir) for e in entries: if e == exclude: continue - path = os.path.join(docdir,e) + path = os.path.join(docdir, e) if os.path.isfile(path) and e.endswith('.tags'): - tagfiles.append((path,doclink)) + tagfiles.append((path, doclink)) elif (_depth == 0 or (_depth == 1 and e == 'html')) and os.path.isdir(path): tagfiles += find_tagfiles(path, nestedlink(e), - flattenlinks=flattenlinks, _depth=_depth+1, - exclude=exclude) + flattenlinks=flattenlinks, + _depth=_depth+1, + exclude=exclude) return tagfiles + def search_for_tagfiles(suggestion=None, doclink=None, flattenlinks=False, searchpaths=[], exclude=None): """Find Doxygen-generated tag files See the find_tagfiles documentation for how the search is carried out in each directory; this just allows a list of directories to be searched. At least one of docdir or searchpaths must be given for it to find anything. suggestion -- the first place to look (will complain if there are no documentation tag files there) doclink -- the path or URL to use when creating the documentation links; if None, this will default to docdir flattenlinks -- if this is True, generated links will assume all the html files are directly under doclink; if False (the default), the html files are assumed to be at the same relative location to doclink as the tag file is to docdir; ignored if doclink is not set searchpaths -- other places to look for documentation tag files Returns a list of pairs of (tag_file,link_path) """ if not suggestion is None: if not os.path.isdir(suggestion): logging.warning(suggestion + " is not a directory") else: tagfiles = find_tagfiles(suggestion, doclink, flattenlinks, exclude) if len(tagfiles) == 0: logging.warning(suggestion + " does not contain any tag files") else: return tagfiles for d in searchpaths: tagfiles = find_tagfiles(d, doclink, flattenlinks, exclude) if len(tagfiles) > 0: logging.info("Documentation tag files found at " + d) return tagfiles return [] -def cache_dir(): - """Find/create a semi-long-term cache directory. - - We do not use tempdir, except as a fallback, because temporary directories - are intended for files that only last for the program's execution. 
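
The smartjoin() helper above (now nested inside find_tagfiles) picks between URL and filesystem joins by looking for "://". The trailing-slash normalization is the subtle part: urljoin treats a base without a trailing slash as a file and replaces its last component instead of appending. A quick demonstration with made-up locations:

    import os
    try:
        from urllib.parse import urljoin   # Python 3
    except ImportError:
        from urlparse import urljoin       # Python 2

    def smartjoin(pathorurl, *args):
        if '://' in pathorurl:
            # Without this, urljoin('http://h/docs', 'html') would give
            # 'http://h/html', replacing 'docs' instead of appending.
            if not pathorurl.endswith('/'):
                pathorurl += '/'
            return urljoin(pathorurl, *args)
        return os.path.join(pathorurl, *args)

    print(smartjoin('http://example.org/docs', 'html'))  # .../docs/html
    print(smartjoin('/tmp/docs', 'html'))                # OS path join
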
- """ - cachedir = None - if sys.platform == 'darwin': - try: - from AppKit import NSSearchPathForDirectoriesInDomains - # http://developer.apple.com/DOCUMENTATION/Cocoa/Reference/Foundation/Miscellaneous/Foundation_Functions/Reference/reference.html#//apple_ref/c/func/NSSearchPathForDirectoriesInDomains - # NSApplicationSupportDirectory = 14 - # NSUserDomainMask = 1 - # True for expanding the tilde into a fully qualified path - cachedir = os.path.join( - NSSearchPathForDirectoriesInDomains(14, 1, True)[0], - 'KApiDox') - except: - pass - elif os.name == "posix": - if 'HOME' in os.environ and os.path.exists(os.environ['HOME']): - cachedir = os.path.join(os.environ['HOME'], '.cache', 'kapidox') - elif os.name == "nt": - if 'APPDATA' in os.environ and os.path.exists(os.environ['APPDATA']): - cachedir = os.path.join(os.environ['APPDATA'], 'KApiDox') - if cachedir is None: - cachedir = os.path.join(tempfile.gettempdir(), 'kapidox') - if not os.path.isdir(cachedir): - os.makedirs(cachedir) - return cachedir - -def svn_export(remote, local, overwrite = False): - """Wraps svn export. - - Raises an exception on failure. - """ - try: - import svn.core, svn.client - logging.debug("Using Python libsvn bindings to fetch %s", remote) - ctx = svn.client.create_context() - ctx.auth_baton = svn.core.svn_auth_open([]) - - latest = svn.core.svn_opt_revision_t() - latest.type = svn.core.svn_opt_revision_head - - svn.client.export(remote, local, latest, True, ctx) - except ImportError: - logging.debug("Using external svn client to fetch %s", remote) - cmd = ['svn', 'export', '--quiet'] - if overwrite: - cmd.append('--force') - cmd += [remote, local] - try: - subprocess.check_call(cmd, stderr=subprocess.STDOUT) - except subprocess.CalledProcessError as e: - raise StandardException(e.output) - except FileNotFoundError as e: - logging.debug("External svn client not found") - return False - # subversion will set the timestamp to match the server - os.utime(local, None) - return True - -def download_kde_identities(): - """Download the "accounts" file on the KDE SVN repository in order to get - the KDE identities with their name and e-mail address - """ - cache_file = os.path.join(cache_dir(), 'kde-accounts') - needs_download = True - if os.path.exists(cache_file): - logging.debug("Found cached identities file at %s", cache_file) - # not quite a day, so that generation on api.kde.org gets a fresh - # copy every time the daily cron job runs it - yesterday = time.time() - (23.5 * 3600) - if os.path.getmtime(cache_file) > yesterday: - needs_download = False - else: - logging.debug("Cached file too old; updating") - if needs_download: - logging.info("Downloading KDE identities") - try: - if not svn_export('svn://anonsvn.kde.org/home/kde/trunk/kde-common/accounts', - cache_file, - overwrite=True): - logging.debug("Falling back to using websvn to fetch identities file") - urlretrieve('http://websvn.kde.org/*checkout*/trunk/kde-common/accounts', - cache_file) - except Exception as e: - if os.path.exists(cache_file): - logging.error('Failed to update KDE identities: %s', e) - else: - logging.error('Failed to fetch KDE identities: %s', e) - return None - - maintainers = {} - - with codecs.open(cache_file, 'r', encoding='utf8') as f: - for line in f: - parts = line.strip().split() - if len(parts) >= 3: - maintainers[parts[0]] = {'name': ' '.join(parts[1:-1]), 'email': parts[-1]} - - return maintainers - -def copy_dir_contents(directory, dest): - """Copy the contents of a directory - - directory -- the directory to copy the 
contents of - dest -- the directory to copy them into - """ - ignored = ['CMakeLists.txt'] - ignore = shutil.ignore_patterns(*ignored) - for fn in os.listdir(directory): - f = os.path.join(directory,fn) - if os.path.isfile(f): - docopy = True - for i in ignored: - if fnmatch(fn,i): - docopy = False - break - if docopy: - shutil.copy(f,dest) - elif os.path.isdir(f): - dest_f = os.path.join(dest,fn) - if os.path.isdir(dest_f): - shutil.rmtree(dest_f) - shutil.copytree(f, dest_f, ignore=ignore) def menu_items(htmldir, modulename): """Menu items for standard Doxygen files Looks for a set of standard Doxygen files (like namespaces.html) and provides menu text for those it finds in htmldir. htmldir -- the directory the HTML files are contained in Returns a list of maps with 'text' and 'href' keys """ entries = [ {'text': 'Main Page', 'href': 'index.html'}, {'text': 'Namespace List', 'href': 'namespaces.html'}, {'text': 'Namespace Members', 'href': 'namespacemembers.html'}, {'text': 'Alphabetical List', 'href': 'classes.html'}, {'text': 'Class List', 'href': 'annotated.html'}, {'text': 'Class Hierarchy', 'href': 'hierarchy.html'}, {'text': 'File List', 'href': 'files.html'}, {'text': 'File Members', 'href': 'globals.html'}, {'text': 'Modules', 'href': 'modules.html'}, {'text': 'Directories', 'href': 'dirs.html'}, {'text': 'Dependencies', 'href': modulename + '-dependencies.html'}, {'text': 'Related Pages', 'href': 'pages.html'}, ] # NOTE In Python 3 filter() builtin returns an iterable, but not a list # type, so explicit conversion is here! return list(filter( lambda e: os.path.isfile(os.path.join(htmldir, e['href'])), entries)) def parse_dox_html(stream): """Parse the HTML files produced by Doxygen, extract the key/value block we add through header.html and return a dict ready for the Jinja template. The HTML files produced by Doxygen with our custom header and footer files look like this: ... ... The parser fills the dict from the top key/value block, and add the content of the body to the dict using the "content" key. We do not use an XML parser because the HTML file might not be well-formed, for example if the documentation contains raw HTML. The key/value block is kept in a comment so that it does not appear in Qt Compressed Help output, which is not postprocessed by ourself. """ dct = {} body = [] def parse_key_value_block(line): if line == "": return skip_head key, value = line.split(': ', 1) dct[key] = value return parse_key_value_block def skip_head(line): if line == "": return extract_body else: return skip_head def extract_body(line): if line == "": return None body.append(line) return extract_body parser = parse_key_value_block while parser is not None: line = stream.readline().rstrip() parser = parser(line) dct['content'] = '\n'.join(body) return dct def postprocess_internal(htmldir, tmpl, mapping): """Substitute text in HTML files Performs text substitutions on each line in each .html file in a directory. 
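
parse_dox_html() above is a small state machine: each state is a function that consumes one line and returns the next state (or None to finish), which keeps the three phases -- key/value block, skipped head, body -- cleanly separated. A condensed, runnable illustration on an invented input (real Doxygen output is more involved):

    import io

    FAKE_DOX = """title: KCoreAddons

    <ignored head line>

    <p>Body line 1</p>
    <p>Body line 2</p>
    """

    def parse(stream):
        dct, body = {}, []

        # Each state is a function: consume a line, return the next state.
        def key_value(line):
            if line == "":
                return skip_head
            key, value = line.split(': ', 1)
            dct[key] = value
            return key_value

        def skip_head(line):
            return body_state if line == "" else skip_head

        def body_state(line):
            if line == "":
                return None
            body.append(line)
            return body_state

        state = key_value
        while state is not None:
            state = state(stream.readline().strip())
        dct['content'] = '\n'.join(body)
        return dct

    print(parse(io.StringIO(FAKE_DOX)))
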
htmldir -- the directory containing the .html files mapping -- a dict of mappings """ for name in os.listdir(htmldir): if name.endswith('.html'): path = os.path.join(htmldir, name) newpath = path + '.new' if name != 'classes.html' and name.startswith('class'): mapping['classname'] = name[5:-5].split('_1_1')[-1] else: mapping['classname'] = None with codecs.open(path, 'r', 'utf-8', errors='ignore') as f: mapping['dox'] = parse_dox_html(f) with codecs.open(newpath, 'w', 'utf-8') as outf: try: html = tmpl.render(mapping) except Exception: logging.error('postprocessing {} failed'.format(path)) raise outf.write(html) os.remove(path) os.rename(newpath, path) + def build_classmap(tagfile): """Parses a tagfile to get a map from classes to files tagfile -- the Doxygen-generated tagfile to parse Returns a list of maps (keys: classname and filename) """ import xml.etree.ElementTree as ET tree = ET.parse(tagfile) tagfile_root = tree.getroot() mapping = [] for compound in tagfile_root: kind = compound.get('kind') if kind == 'class' or kind == 'namespace': name_el = compound.find('name') filename_el = compound.find('filename') mapping.append({'classname': name_el.text, 'filename': filename_el.text}) return mapping + def write_mapping_to_php(mapping, outputfile, varname='map'): """Write a mapping out as PHP code Creates a PHP array as described by mapping. For example, the mapping [("foo","bar"),("x","y")] would cause the file 'bar','x' => 'y') ?> to be written out. mapping -- a list of pairs of strings outputfile -- the file to write to varname -- override the PHP variable name (defaults to 'map') """ logging.info('Generating PHP mapping') - with codecs.open(outputfile,'w','utf-8') as f: + with codecs.open(outputfile, 'w', 'utf-8') as f: f.write(' '" + entry['filename'] + "'") f.write(') ?>') + def generate_dependencies_page(tmp_dir, doxdatadir, modulename, dependency_diagram): """Create `modulename`-dependencies.md in `tmp_dir`""" template_path = os.path.join(doxdatadir, 'dependencies.md.tmpl') out_path = os.path.join(tmp_dir, modulename + '-dependencies.md') tmpl = load_template(template_path) with codecs.open(out_path, 'w', 'utf-8') as outf: txt = tmpl.render({ 'modulename': modulename, 'diagramname': os.path.basename(dependency_diagram), }) outf.write(txt) return out_path + def generate_apidocs(ctx, tmp_dir, doxyfile_entries=None, keep_temp_dirs=False): """Generate the API documentation for a single directory""" def find_src_subdir(d, deeper_subd=None): pth = os.path.join(ctx.fwinfo['path'], d) if deeper_subd is not None: pth = os.path.join(pth, deeper_subd) - if os.path.isdir(pth): + if os.path.isdir(pth) or os.path.isfile(pth) : return [pth] else: return [] # Paths and basic project info # FIXME: preprocessing? # What about providing the build directory? We could get the version # as well, then. 
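
Earlier in this hunk, build_classmap() extracts class and namespace compounds from a Doxygen tag file with ElementTree; write_mapping_to_php() then serializes the result. A self-contained sketch of the extraction against a tiny hand-written tag file (the XML is illustrative, not real Doxygen output):

    import xml.etree.ElementTree as ET

    TAGFILE_XML = """
    <tagfile>
      <compound kind="class">
        <name>KJob</name>
        <filename>classKJob.html</filename>
      </compound>
      <compound kind="file">
        <name>kjob.h</name>
        <filename>kjob_8h.html</filename>
      </compound>
    </tagfile>
    """

    def build_classmap(root):
        mapping = []
        for compound in root:
            # Only class and namespace compounds end up in the class map;
            # file, group, page etc. entries are skipped.
            if compound.get('kind') in ('class', 'namespace'):
                mapping.append({'classname': compound.find('name').text,
                                'filename': compound.find('filename').text})
        return mapping

    root = ET.fromstring(TAGFILE_XML.strip())
    print(build_classmap(root))
    # -> [{'classname': 'KJob', 'filename': 'classKJob.html'}]
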
input_list = [ctx.fwinfo['path']+"/README.md"] for srcdir in ctx.fwinfo['srcdirs']: input_list.extend(find_src_subdir(srcdir)) input_list.extend(find_src_subdir(ctx.fwinfo['docdir'])) image_path_list = [] if ctx.dependency_diagram: - input_list.append(generate_dependencies_page(tmp_dir, ctx.doxdatadir, ctx.modulename, ctx.dependency_diagram)) + input_list.append(generate_dependencies_page(tmp_dir, + ctx.doxdatadir, + ctx.modulename, + ctx.dependency_diagram)) image_path_list.append(ctx.dependency_diagram) doxyfile_path = os.path.join(tmp_dir, 'Doxyfile') with codecs.open(doxyfile_path, 'w', 'utf-8') as doxyfile: # Global defaults - with codecs.open(os.path.join(ctx.doxdatadir,'Doxyfile.global'), 'r', 'utf-8') as f: + with codecs.open(os.path.join(ctx.doxdatadir, 'Doxyfile.global'), + 'r', 'utf-8') as f: for line in f: doxyfile.write(line) writer = DoxyfileWriter(doxyfile) writer.write_entry('PROJECT_NAME', ctx.fancyname) # FIXME: can we get the project version from CMake? No from GIT TAGS! # Input locations image_path_list.extend(find_src_subdir(ctx.fwinfo['docdir'], 'pics')) writer.write_entries( INPUT=input_list, DOTFILE_DIRS=find_src_subdir(ctx.fwinfo['docdir'], 'dot'), EXAMPLE_PATH=find_src_subdir(ctx.fwinfo['exampledir']), IMAGE_PATH=image_path_list) # Other input settings writer.write_entry('TAGFILES', [f + '=' + loc for f, loc in ctx.tagfiles]) # Output locations writer.write_entries( OUTPUT_DIRECTORY=ctx.outputdir, GENERATE_TAGFILE=ctx.tagfile, HTML_OUTPUT=HTML_SUBDIR, WARN_LOGFILE=os.path.join(ctx.outputdir, WARN_LOGFILE)) # Other output settings writer.write_entries( HTML_HEADER=ctx.doxdatadir + '/header.html', HTML_FOOTER=ctx.doxdatadir + '/footer.html' ) # Always write these, even if QHP is disabled, in case Doxygen.local # overrides it writer.write_entries( QHP_VIRTUAL_FOLDER=ctx.modulename, QHP_NAMESPACE="org.kde." + ctx.modulename, QHG_LOCATION=ctx.qhelpgenerator) writer.write_entries( GENERATE_MAN=ctx.man_pages, GENERATE_QHP=ctx.qhp, SEARCHENGINE=ctx.searchengine) if doxyfile_entries: writer.write_entries(**doxyfile_entries) # Module-specific overrides if find_src_subdir(ctx.fwinfo['docdir']): localdoxyfile = os.path.join(find_src_subdir(ctx.fwinfo['docdir'])[0], 'Doxyfile.local') if os.path.isfile(localdoxyfile): with codecs.open(localdoxyfile, 'r', 'utf-8') as f: for line in f: doxyfile.write(line) logging.info('Running Doxygen') subprocess.call([ctx.doxygen, doxyfile_path]) def postprocess(ctx, classmap, template_mapping=None): + # TODO: copyright must be set from outside copyright = '1996-' + str(datetime.date.today().year) + ' The KDE developers' mapping = { 'doxygencss': 'doxygen.css', 'resources': ctx.resourcedir, 'title': ctx.title, 'fwinfo': ctx.fwinfo, 'copyright': copyright, 'api_searchbox': ctx.api_searchbox, 'doxygen_menu': {'entries': menu_items(ctx.htmldir, ctx.modulename)}, 'class_map': {'classes': classmap}, 'kapidox_version': utils.get_kapidox_version(), } if template_mapping: mapping.update(template_mapping) logging.info('Postprocessing') tmpl = create_jinja_environment(ctx.doxdatadir).get_template('doxygen.html') postprocess_internal(ctx.htmldir, tmpl, mapping) + + +def generate_diagram(png_path, fancyname, dot_files, tmp_dir): + """Generate a dependency diagram for a framework. + """ + def run_cmd(cmd, **kwargs): + try: + subprocess.check_call(cmd, **kwargs) + except subprocess.CalledProcessError as exc: + logging.error('Command {exc.cmd} failed with error code {}.' 
+ .format(exc.returncode)) + return False + return True + + logging.info('Generating dependency diagram') + dot_path = os.path.join(tmp_dir, fancyname + '.dot') + + with open(dot_path, 'w') as f: + with_qt = False + ok = depdiagram.generate(f, dot_files, framework=fancyname, + with_qt=with_qt) + if not ok: + logging.error('Generating diagram failed') + return False + + logging.info('- Simplifying diagram') + simplified_dot_path = os.path.join(tmp_dir, fancyname + '-simplified.dot') + with open(simplified_dot_path, 'w') as f: + if not run_cmd(['tred', dot_path], stdout=f): + return False + + logging.info('- Generating diagram png') + if not run_cmd(['dot', '-Tpng', '-o' + png_path, simplified_dot_path]): + return False + + # These os.unlink() calls are not in a 'finally' block on purpose. + # Keeping the dot files around makes it possible to inspect their content + # when running with the --keep-temp-dirs option. If generation fails and + # --keep-temp-dirs is not set, the files will be removed when the program + # ends because they were created in `tmp_dir`. + os.unlink(dot_path) + os.unlink(simplified_dot_path) + return True + + +def create_fw_context(args, lib, tagfiles): + return Context(args, + # Names + modulename=lib['name'], + fancyname=lib['fancyname'], + fwinfo=lib, + # KApidox files + resourcedir='../..' if lib['parent'] is None else '../../..', + # Input + #srcdir=lib['srcdir'], + tagfiles=tagfiles, + dependency_diagram=lib['dependency_diagram'], + # Output + outputdir=lib['outputdir'], + ) + + +def gen_fw_apidocs(ctx, tmp_base_dir): + create_dirs(ctx) + # tmp_dir is deleted when tmp_base_dir is + tmp_dir = tempfile.mkdtemp(prefix=ctx.modulename + '-', dir=tmp_base_dir) + generate_apidocs(ctx, tmp_dir, + doxyfile_entries=dict(WARN_IF_UNDOCUMENTED=True) + ) + + +def finish_fw_apidocs(ctx, group_menu): + classmap = build_classmap(ctx.tagfile) + write_mapping_to_php(classmap, os.path.join(ctx.outputdir, 'classmap.inc')) + + entries = [{ + 'href': '../../index.html', + 'text': 'KDE API Reference' + }] + if ctx.fwinfo['parent'] is not None: + entries[0]['href'] = '../' + entries[0]['href'] + entries.append({ + 'href': '../../index.html', + 'text': ctx.fwinfo['product']['fancyname'] + }) + entries.append({ + 'href': 'index.html', + 'text': ctx.fancyname + }) + + template_mapping = { + 'breadcrumbs': { + 'entries': entries + }, + } + copyright = '1996-' + str(datetime.date.today().year) + ' The KDE developers' + mapping = { + 'doxygencss': 'doxygen.css', + 'resources': ctx.resourcedir, + 'title': ctx.title, + 'fwinfo': ctx.fwinfo, + 'copyright': copyright, + 'api_searchbox': ctx.api_searchbox, + 'doxygen_menu': {'entries': menu_items(ctx.htmldir, ctx.modulename)}, + 'class_map': {'classes': classmap}, + 'kapidox_version': utils.get_kapidox_version(), + } + if template_mapping: + mapping.update(template_mapping) + logging.info('Postprocessing') + + tmpl = create_jinja_environment(ctx.doxdatadir).get_template('doxygen2.html') + postprocess_internal(ctx.htmldir, tmpl, mapping) + + +def create_fw_tagfile_tuple(lib): + tagfile = os.path.abspath( + os.path.join( + lib['outputdir'], + 'html', + lib['fancyname']+'.tags')) + return (tagfile, '../../' + lib['outputdir'] + '/html/') diff --git a/src/kapidox/preprocessing.py b/src/kapidox/preprocessing.py new file mode 100644 index 0000000..8b34dbb --- /dev/null +++ b/src/kapidox/preprocessing.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2016 Olivier Churlaud +# +# Redistribution and use in source and binary forms, with or without 
+# modification, are permitted provided that the following conditions +# are met: +# +# 1. Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# 2. Redistributions in binary form must reproduce the above copyright +# notice, this list of conditions and the following disclaimer in the +# documentation and/or other materials provided with the distribution. +# +# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR +# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES +# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. +# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, +# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT +# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF +# THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Python 2/3 compatibility (NB: we require at least 2.7) +from __future__ import (division, absolute_import, print_function, + unicode_literals) + +import logging +import os +import string + +import yaml + +import kapidox as kdx + +__all__ = ( + "create_metainfo", + "parse_tree", + "set_maintainers") + + +PLATFORM_ALL = "All" +PLATFORM_UNKNOWN = "UNKNOWN" + + +def expand_platform_all(dct, available_platforms): + """If one of the keys of dct is PLATFORM_ALL (or PLATFORM_UNKNOWN), + remove it and add entries for all available platforms to dct""" + + add_all_platforms = False + if PLATFORM_ALL in dct: + note = dct[PLATFORM_ALL] + add_all_platforms = True + del dct[PLATFORM_ALL] + if PLATFORM_UNKNOWN in dct: + add_all_platforms = True + note = dct[PLATFORM_UNKNOWN] + del dct[PLATFORM_UNKNOWN] + if add_all_platforms: + for platform in available_platforms: + if not platform in dct: + dct[platform] = note + + +def create_metainfo(path): + if not os.path.isdir(path): + return None + + metainfo_file = os.path.join(path, 'metainfo.yaml') + if not os.path.isfile(metainfo_file): + return None + + try: + metainfo = yaml.load(open(metainfo_file)) + except: + logging.warning('Could not load metainfo.yaml for {}, skipping it' + .format(path)) + return None + + if metainfo is None: + logging.warning('Empty metainfo.yaml for {}, skipping it' + .format(path)) + return None + + if 'subgroup' in metainfo and 'group' not in metainfo: + logging.warning('Subgroup but no group in {}, skipping it' + .format(path)) + return None + + name = os.path.basename(path) + fancyname = kdx.utils.parse_fancyname(path) + if not fancyname: + logging.warning('Could not find fancy name for {}, skipping it' + .format(path)) + return None + + metainfo.update({ + 'fancyname': fancyname, + 'name': name, + 'public_lib': metainfo.get('public_lib', False), + 'dependency_diagram': None, + 'path': path, + }) + + return metainfo + + +def parse_tree(rootdir): + """Recursively call create_metainfo() in subdirs of rootdir + + Parameters + ---------- + rootdir : string + Top level directory containing the libraries + + Return + ------ + metalist : list of dictionaries + list of metainfo dictionary (see :any:`create_metainfo`) + + """ + metalist = [] + for path, dirs, _ in os.walk(rootdir): + # We don't want to do the recursion in the dotdirs + dirs[:] = [d for d in dirs if not d[0] == '.'] + 
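
The dirs[:] slice assignment above is what makes the pruning work: os.walk keeps iterating over the very list object it yielded, so it must be mutated in place; rebinding with dirs = [...] would prune nothing. A small demo with invented directory names:

    import os
    import tempfile

    root = tempfile.mkdtemp()
    os.makedirs(os.path.join(root, '.git', 'objects'))
    os.makedirs(os.path.join(root, 'src'))

    visited = []
    for path, dirs, _ in os.walk(root):
        # In-place mutation: os.walk re-reads this exact list to decide
        # where to descend, so dropping '.git' here skips its subtree.
        dirs[:] = [d for d in dirs if not d.startswith('.')]
        visited.append(os.path.relpath(path, root))

    print(visited)  # ['.', 'src'] -- neither .git nor .git/objects
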
metainfo = create_metainfo(path) + if metainfo is not None: + + if metainfo['public_lib']: + metalist.append(metainfo) + else: + logging.warning("{} has no public libraries" + .format(metainfo['name'])) + + return metalist + + +def sort_metainfo(metalist, all_maintainers): + products = [] + groups = [] + libraries = [] + available_platforms = set(['Windows', 'MacOSX', 'Linux']) + + for metainfo in metalist: + + try: + platforms = metainfo['platforms'] + platform_lst = [x['name'] for x in platforms + if x['name'] not in (PLATFORM_ALL, + PLATFORM_UNKNOWN)] + + available_platforms.update(set(platform_lst)) + except (KeyError, TypeError): + logging.warning('{} framework lacks valid platform definitions' + .format(metainfo['fancyname'])) + platforms = [dict(name=PLATFORM_UNKNOWN)] + + dct = dict((x['name'], x.get('note', '')) for x in platforms) + + expand_platform_all(dct, available_platforms) + platforms = dct + + lib = extract_lib(metainfo, platforms, all_maintainers) + libraries.append(lib) + + product = extract_product(metainfo, platforms, all_maintainers) + if product is not None: + products.append(product) + + # We have all groups and libraries, let set the parents. + # and check the platforms + for lib in libraries: + if lib['parent'].get('group') is not None: + product_list = [x for x in products if x['name'].lower() == lib['parent']['group'].lower()] + if not product_list: + continue + else: + product = product_list[0] + lib['product'] = product + product['libraries'].append(lib) + if lib['parent'].get('subgroup') is None: + lib['subgroup'] = None + else: + subgroup_list = [x for x in lib['product']['subgroups'] if x['name'].lower() == lib['parent']['subgroup'].lower()] + if not subgroup_list: + logging.warning("Subgroup {} of library {} not documentated, setting subgroup to None" + .format(lib['parent']['subgroup'], lib['name'])) + lib['subgroup'] = None + lib['parent'] = None + else: + subgroup = subgroup_list[0] + lib['subgroup'] = subgroup + subgroup['libraries'].append(lib) + groups.append(product) + else: + lib['parent'] = None + + return products, groups, libraries, available_platforms + + +def extract_lib(metainfo, platforms, all_maintainers): + outputdir = metainfo.get('name') + if 'group' in metainfo: + outputdir = metainfo.get('group') + '/' + outputdir + outputdir = kdx.utils.serialize_name(outputdir) + + lib = { + 'name': metainfo['name'], + 'fancyname': metainfo['fancyname'], + 'description': metainfo.get('description'), + 'maintainers': set_maintainers(metainfo, 'maintainer', all_maintainers), + 'platforms': platforms, + 'parent': {'group': kdx.utils.serialize_name(metainfo.get('group')), + 'subgroup': kdx.utils.serialize_name(metainfo.get('subgroup'))}, + 'href': '../'+outputdir.lower() + '/html/index.html', + 'outputdir': outputdir.lower(), + 'path': metainfo['path'], + 'srcdirs': metainfo.get('public_source_dirs', ['src']), + 'docdir': metainfo.get('public_doc_dir', 'docs'), + 'exampledir': metainfo.get('public_example_dir', 'examples'), + 'dependency_diagram': None, + 'type': metainfo.get('type', ''), + 'portingAid': metainfo.get('portingAid', False), + 'deprecated': metainfo.get('deprecated', False), + 'libraries': metainfo.get('libraries', []), + 'cmakename': metainfo.get('cmakename', '') + } + return lib + + +def extract_product(metainfo, platforms, all_maintainers): + def get_logo_url(dct, name): + # take care of the logo + if 'logo' in dct: + logo_url = os.path.join(metainfo['path'], dct['logo']) + if os.path.isfile(logo_url): + return logo_url + else: + 
logging.warning("{} logo file doesn't exist, set back to None".format(name)) + return None + else: + return None + + def set_logo(product): + if product['logo_url_src'] is not None: + filename = os.path.basename(product['logo_url_src']) + product['logo_url'] = outputdir + '/'+ product['name'] + '.' + filename.split('.')[-1] + + # if there is a group, the product is the group + # else the product is directly the library + if 'group_info' in metainfo: + outputdir = kdx.utils.serialize_name(metainfo['group']) + product = { + 'name': kdx.utils.serialize_name(metainfo['group']), + 'fancyname': metainfo['group_info'].get('fancyname', string.capwords(metainfo['group'])), + 'description': metainfo['group_info'].get('description'), + 'long_description': metainfo['group_info'].get('long_description', []), + 'maintainers': set_maintainers(metainfo['group_info'], + 'maintainer', + all_maintainers), + 'platforms': metainfo['group_info'].get('platforms'), + 'logo_url_src': get_logo_url(metainfo['group_info'], + metainfo['group']), + 'logo_url': None, # We'll set this later + 'outputdir': outputdir, + 'href': outputdir + '/index.html', + 'libraries': [], # We'll set this later + 'subgroups': [], # We'll set this later + } + + if 'subgroups' in metainfo['group_info']: + for sg in metainfo['group_info']['subgroups']: + if 'name' in sg: + product['subgroups'].append({ + 'fancyname': sg['name'], + 'name': kdx.utils.serialize_name(sg['name']), + 'description': sg.get('description'), + 'order': sg.get('order', 99), # If no order, go to end + 'libraries': [] + }) + set_logo(product) + return product + elif 'group' not in metainfo: + outputdir = metainfo['name'] + + product = { + 'name': kdx.utils.serialize_name(metainfo['name']), + 'fancyname': metainfo['fancyname'], + 'description': metainfo.get('description'), + 'maintainers': set_maintainers(metainfo, + 'maintainer', + all_maintainers), + 'platforms': platforms, + 'logo_url_src': get_logo_url(metainfo, metainfo['fancyname']), + 'logo_url': None, # We'll set that later + 'href': outputdir + '/html/index.html', + 'outputdir': outputdir + } + set_logo(product) + return product + else: + return None + +def set_maintainers(dictionary, key, maintainers): + """ Expend the name of the maintainers. + + Use + --- + metainfo = { 'key1': 'something', 'maintainers': ['arthur', 'toto']} + myteam = [{'arthur': {'name': 'Arthur Pendragon', + 'email': 'arthur@example.com'}, + 'toto': {'name': 'Toto', + 'email: 'toto123@example.com'} + }] + set_maintainers(metainfo, "maintainers", my_team) + + Parameters + ---------- + dictonary : dict + Dictionary from which the name to expend must be read. + key : string + Key of the dictionary where the name to expend is saved. + maintainers : list of dict + Look-up table where the names and emails of the maintainers are stored. 
+ """ + + if key not in dictionary: + fw_maintainers = [] + elif isinstance(dictionary[key], list): + fw_maintainers = map(lambda x: maintainers.get(x, None), + dictionary[key]) + else: + fw_maintainers = [maintainers.get(dictionary[key], None)] + + fw_maintainers = [x for x in fw_maintainers if x is not None] + return fw_maintainers \ No newline at end of file diff --git a/src/kapidox/utils.py b/src/kapidox/utils.py index 04336d8..e2ea446 100644 --- a/src/kapidox/utils.py +++ b/src/kapidox/utils.py @@ -1,105 +1,209 @@ # -*- coding: utf-8 -*- # # Copyright 2014 Aurélien Gâteau # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # Python 2/3 compatibility (NB: we require at least 2.7) from __future__ import division, absolute_import, print_function, unicode_literals + +from fnmatch import fnmatch import logging import os import re +import subprocess +import shutil import sys +import tempfile """ This module contains code which is shared between depdiagram-prepare and other components. Code in this dir should not import any module which is not shipped with Python because this module is used by depdiagram-prepare, which must be able to run on builds.kde.org, which may not have all the required dependencies. """ def setup_logging(): FORMAT = '%(asctime)s %(levelname)s %(message)s' logging.basicConfig(format=FORMAT, datefmt='%H:%M:%S', level=logging.DEBUG) +def serialize_name(name): + if name is not None: + return '_'.join(name.lower().split(' ')) + else: + return None + + def parse_fancyname(fw_dir): """Returns the framework name for a given source dir The framework name is the name of the toplevel CMake project """ cmakelists_path = os.path.join(fw_dir, "CMakeLists.txt") if not os.path.exists(cmakelists_path): logging.error("No CMakeLists.txt in {}".format(fw_dir)) return None project_re = re.compile(r"project\s*\(\s*(\w+)", re.I) with open(cmakelists_path) as f: for line in f.readlines(): match = project_re.search(line) if match: return match.group(1) - logging.error("Failed to find framework name: Could not find a 'project()' command in {}.".format(cmakelists_path)) + logging.error("Failed to find framework name: Could not find a " + "'project()' command in {}.".format(cmakelists_path)) return None +def cache_dir(): + """Find/create a semi-long-term cache directory. 
+ + We do not use tempdir, except as a fallback, because temporary directories + are intended for files that only last for the program's execution. + """ + cachedir = None + if sys.platform == 'darwin': + try: + from AppKit import NSSearchPathForDirectoriesInDomains + # http://developer.apple.com/DOCUMENTATION/Cocoa/Reference/Foundation/Miscellaneous/Foundation_Functions/Reference/reference.html#//apple_ref/c/func/NSSearchPathForDirectoriesInDomains + # NSApplicationSupportDirectory = 14 + # NSUserDomainMask = 1 + # True for expanding the tilde into a fully qualified path + cachedir = os.path.join( + NSSearchPathForDirectoriesInDomains(14, 1, True)[0], + 'KApiDox') + except: + pass + elif os.name == "posix": + if 'HOME' in os.environ and os.path.exists(os.environ['HOME']): + cachedir = os.path.join(os.environ['HOME'], '.cache', 'kapidox') + elif os.name == "nt": + if 'APPDATA' in os.environ and os.path.exists(os.environ['APPDATA']): + cachedir = os.path.join(os.environ['APPDATA'], 'KApiDox') + if cachedir is None: + cachedir = os.path.join(tempfile.gettempdir(), 'kapidox') + if not os.path.isdir(cachedir): + os.makedirs(cachedir) + return cachedir + + +def svn_export(remote, local, overwrite=False): + """Wraps svn export. + + Raises an exception on failure. + """ + try: + import svn.core + import svn.client + logging.debug("Using Python libsvn bindings to fetch %s", remote) + ctx = svn.client.create_context() + ctx.auth_baton = svn.core.svn_auth_open([]) + + latest = svn.core.svn_opt_revision_t() + latest.type = svn.core.svn_opt_revision_head + + svn.client.export(remote, local, latest, True, ctx) + except ImportError: + logging.debug("Using external svn client to fetch %s", remote) + cmd = ['svn', 'export', '--quiet'] + if overwrite: + cmd.append('--force') + cmd += [remote, local] + try: + subprocess.check_call(cmd, stderr=subprocess.STDOUT) + except subprocess.CalledProcessError as e: + raise StandardException(e.output) + except FileNotFoundError as e: + logging.debug("External svn client not found") + return False + # subversion will set the timestamp to match the server + os.utime(local, None) + return True + + +def copy_dir_contents(directory, dest): + """Copy the contents of a directory + + directory -- the directory to copy the contents of + dest -- the directory to copy them into + """ + ignored = ['CMakeLists.txt'] + ignore = shutil.ignore_patterns(*ignored) + for fn in os.listdir(directory): + f = os.path.join(directory, fn) + if os.path.isfile(f): + docopy = True + for i in ignored: + if fnmatch(fn, i): + docopy = False + break + if docopy: + shutil.copy(f, dest) + elif os.path.isdir(f): + dest_f = os.path.join(dest, fn) + if os.path.isdir(dest_f): + shutil.rmtree(dest_f) + shutil.copytree(f, dest_f, ignore=ignore) + + _KAPIDOX_VERSION = None def get_kapidox_version(): """Get commit id of running code if it is running from git repository. May return an empty string if it failed to extract the commit id. 
Assumes .git/HEAD looks like this: ref: refs/heads/master and assumes .git/refs/heads/master contains the commit id """ global _KAPIDOX_VERSION if _KAPIDOX_VERSION is not None: return _KAPIDOX_VERSION _KAPIDOX_VERSION = "" bin_dir = os.path.dirname(sys.argv[0]) git_dir = os.path.join(bin_dir, "..", ".git") if not os.path.isdir(git_dir): # Looks like we are not running from the git repo, exit silently return _KAPIDOX_VERSION git_HEAD = os.path.join(git_dir, "HEAD") if not os.path.isfile(git_HEAD): logging.warning("Getting git info failed: {} is not a file".format(git_HEAD)) return _KAPIDOX_VERSION try: line = open(git_HEAD).readline() ref_name = line.split(": ")[1].strip() with open(os.path.join(git_dir, ref_name)) as f: _KAPIDOX_VERSION = f.read().strip() except Exception as exc: # Catch all exceptions here: whatever fails in this function should not # cause the code to fail logging.warning("Getting git info failed: {}".format(exc)) return _KAPIDOX_VERSION diff --git a/src/kapidox_generate b/src/kapidox_generate index 1c0f0d5..56979cb 100755 --- a/src/kapidox_generate +++ b/src/kapidox_generate @@ -1,549 +1,186 @@ #! /usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2014 Alex Merry # Copyright 2014 Aurélien Gâteau # Copyright 2014 Alex Turbov # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # 1. Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR # IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES # OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. # IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT # NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF # THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
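
get_kapidox_version() in utils.py above resolves the running commit by hand in two file reads: .git/HEAD names a ref, and the ref file holds the hash. A trimmed sketch of that lookup; like the original it only handles loose refs (no packed-refs support), and the detached-HEAD branch is an addition for illustration:

    import os

    def git_commit_id(repo_dir):
        # Step 1: HEAD normally contains 'ref: refs/heads/<branch>'.
        head_path = os.path.join(repo_dir, '.git', 'HEAD')
        with open(head_path) as f:
            line = f.readline()
        if not line.startswith('ref: '):
            return line.strip()  # detached HEAD: the hash is right there
        ref_name = line.split(': ', 1)[1].strip()
        # Step 2: the loose ref file holds the commit hash.
        with open(os.path.join(repo_dir, '.git', ref_name)) as f:
            return f.read().strip()

    # Example (assumes the current directory is a git checkout):
    # print(git_commit_id('.'))
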
# Python 2/3 compatibility (NB: we require at least 2.7) from __future__ import division, absolute_import, print_function, unicode_literals -import argparse -import datetime import logging import codecs import os import shutil -import string -import subprocess +import sys import tempfile +import time -import yaml +if sys.version_info.major < 3: + from urllib import urlretrieve +else: + from urllib.request import urlretrieve -from kapidox import argparserutils -from kapidox import utils +import kapidox as kdx from kapidox import generator + try: from kapidox import depdiagram DEPDIAGRAM_AVAILABLE = True except ImportError: DEPDIAGRAM_AVAILABLE = False -PLATFORM_ALL = "All" -PLATFORM_UNKNOWN = "UNKNOWN" - -def serialize_name(name): - if name is not None: - return '_'.join(name.lower().split(' ')) - else: - return None - -def create_metainfo(frameworksdir, path): - if not os.path.isdir(path): - return None - - metainfo_file = os.path.join(path, 'metainfo.yaml') - if not os.path.isfile(metainfo_file): - #logging.warning('{} does not contain a library (metainfo.yml missing)'.format(path)) - return None - - # FIXME: option in yaml file to disable docs - try: - metainfo = yaml.load(open(metainfo_file)) - except: - logging.warning('Could not load config.kapidox for {}, skipping it'.format(path)) - return None - - if metainfo is None: - logging.warning('Empty metainfo.yml for {}, skipping it'.format(path)) - return None - - if 'subgroup' in metainfo and 'group' not in metainfo: - logging.warning('Subgroup but no group in {}, skipping it'.format(path)) - return None - - name = path.split('/')[-1] - fancyname = utils.parse_fancyname(path) - if not fancyname: - logging.warning('Could not find fancy name for {}, skipping it'.format(path)) - return None - - metainfo.update({ - 'fancyname': fancyname, - 'name': name, - 'public_lib': metainfo.get('public_lib', False), - 'dependency_diagram': None, - 'path': path, - }) - - return metainfo - - -def sort_metainfo(metalist, all_maintainers): - - def get_logo_url(dct, name): - # take care of the logo - if 'logo' in dct: - logo_url = os.path.join(metainfo['path'], dct['logo']) - if os.path.isfile(logo_url): - return logo_url - else: - logging.warning("{} logo file doesn't exist, set back to None".format(name)) - return None - else: - return None - - products = [] - groups = [] - libraries = [] - available_platforms = set(['Windows', 'MacOSX', 'Linux']) - - for metainfo in metalist: - outputdir = metainfo.get('name') - if 'group' in metainfo: - outputdir = metainfo.get('group') + '/' + outputdir - outputdir = serialize_name(outputdir) - - try: - platforms = metainfo['platforms'] - #platform_info = metainfo['platforms'] - platform_lst = [x['name'] for x in platforms if x['name'] not in (PLATFORM_ALL, PLATFORM_UNKNOWN)] - available_platforms.update(set(platform_lst)) - except (KeyError, TypeError): - logging.warning('{} framework lacks valid platform definitions'.format(metainfo['fancyname'])) - platforms = [dict(name=PLATFORM_UNKNOWN)] - - dct = dict((x['name'], x.get('note', '')) for x in platforms) - - expand_platform_all(dct, available_platforms) - platforms = dct - - lib = { - 'name': metainfo['name'], - 'fancyname': metainfo['fancyname'], - 'description': metainfo.get('description'), - 'maintainers': set_maintainers(metainfo, 'maintainer', all_maintainers), - 'platforms': platforms, - 'parent': {'group': serialize_name(metainfo.get('group')), 'subgroup': serialize_name(metainfo.get('subgroup'))}, - 'href': '../'+outputdir.lower() + '/html/index.html', - 
'outputdir': outputdir.lower(), - 'path': metainfo['path'], - 'srcdirs': metainfo.get('public_source_dirs', ['src']), - 'docdir': metainfo.get('public_doc_dir', 'docs'), - 'exampledir': metainfo.get('public_example_dir', 'examples'), - 'dependency_diagram': None, - 'type': metainfo.get('type', ''), - 'portingAid': metainfo.get('portingAid', False), - 'deprecated': metainfo.get('deprecated', False), - 'libraries': metainfo.get('libraries', []), - 'cmakename': metainfo.get('cmakename', '') - } - libraries.append(lib) - - # if there is a group, the product is the group - # else the product is directly the library - if 'group_info' in metainfo: - logo_url_src = get_logo_url(metainfo['group_info'], metainfo['group']) - outputdir = serialize_name(metainfo['group']) - product = { - 'name': serialize_name(metainfo['group']), - 'fancyname': metainfo['group_info'].get('fancyname', string.capwords(metainfo['group'])), - 'description': metainfo['group_info'].get('description'), - 'long_description': metainfo['group_info'].get('long_description', []), - 'maintainers': set_maintainers(metainfo['group_info'], 'maintainer', all_maintainers), - 'platforms': metainfo['group_info'].get('platforms'), - 'logo_url_src': logo_url_src, - 'logo_url': outputdir + '/' + os.path.basename(logo_url_src), - 'href': outputdir + '/index.html', - 'outputdir': outputdir, - 'libraries': [] - } - subgroups = [] - if 'subgroups' in metainfo['group_info']: - for sg in metainfo['group_info']['subgroups']: - if 'name' in sg: - subgroups.append({ - 'fancyname': sg['name'], - 'name': serialize_name(sg['name']), - 'description': sg.get('description'), - 'order': sg.get('order', 99), #if no order leave to end - 'libraries': [] - }) - product['subgroups'] = subgroups; - products.append(product) - elif 'group' not in metainfo: - logo_url_src = get_logo_url(metainfo, metainfo['fancyname']) - products.append({ - 'name': serialize_name(metainfo['name']), - 'fancyname': metainfo['fancyname'], - 'description': metainfo.get('description'), - 'maintainers': set_maintainers(metainfo, 'maintainer', all_maintainers), - 'platform': metainfo.get('platform'), - 'logo_url_src': logo_url_src, - 'logo_url': metainfo['name'] + '/' + os.path.basename(logo_url_src), - 'href': metainfo['name']+'/html/index.html', - 'outputdir': metainfo['name'] - }) - - # We have all groups and libraries, let set the parents. 
- # and check the platforms - for lib in libraries: - if lib['parent'].get('group') is not None: - product = [x for x in products if x['name'].lower() == lib['parent']['group'].lower()][0] - lib['product'] = product - product['libraries'].append(lib) - if lib['parent'].get('subgroup') is None: - lib['subgroup'] = None - else: - subgroup_list = [x for x in lib['product']['subgroups'] if x['name'].lower() == lib['parent']['subgroup'].lower()] - if not subgroup_list: - logging.warning("Subgroup {} of library {} not documentated, setting subgroup to None" - .format(lib['parent']['subgroup'], lib['name'])) - lib['subgroup'] = None - lib['parent'] = None - else: - subgroup = subgroup_list[0] - lib['subgroup'] = subgroup - subgroup['libraries'].append(lib) - else: - lib['parent'] = None - - groups.append(product) - - return products, groups, libraries, available_platforms - -def expand_platform_all(dct, available_platforms): - """If one of the keys of dct is PLATFORM_ALL (or PLATFORM_UNKNOWN), remove it and add entries for all available platforms to dct""" - add_all_platforms = False - if PLATFORM_ALL in dct: - note = dct[PLATFORM_ALL] - add_all_platforms = True - del dct[PLATFORM_ALL] - if PLATFORM_UNKNOWN in dct: - add_all_platforms = True - note = dct[PLATFORM_UNKNOWN] - del dct[PLATFORM_UNKNOWN] - if add_all_platforms: - for platform in available_platforms: - if not platform in dct: - dct[platform] = note - - -def process_toplevel_html_file(outputfile, doxdatadir, products, title, - api_searchbox=False): - - products.sort(key=lambda x: x['name'].lower()) - mapping = { - 'resources': '.', - 'api_searchbox': api_searchbox, - # steal the doxygen css from one of the frameworks - # this means that all the doxygen-provided images etc. will be found - 'doxygencss': products[0]['outputdir'] + '/html/doxygen.css', - 'title': title, - 'breadcrumbs': { - 'entries': [ - { - 'href': './index.html', - 'text': 'KDE API Reference' - } - ] - }, - 'product_list': products, - } - tmpl = generator.create_jinja_environment(doxdatadir).get_template('frontpage.html') - with codecs.open(outputfile, 'w', 'utf-8') as outf: - outf.write(tmpl.render(mapping)) - for product in products: - logodir = os.path.dirname(product['logo_url']) - if not os.path.isdir(logodir): - os.mkdir(logodir) - shutil.copy(product['logo_url_src'], product['logo_url']) - -def process_subgroup_html_files(outputfile, doxdatadir, groups, available_platforms, title, - api_searchbox=False): - - for group in groups: - mapping = { - 'resources': '..', - 'api_searchbox': api_searchbox, - # steal the doxygen css from one of the frameworks - # this means that all the doxygen-provided images etc. 
-def process_toplevel_html_file(outputfile, doxdatadir, products, title,
-                               api_searchbox=False):
-
-    products.sort(key=lambda x: x['name'].lower())
-    mapping = {
-        'resources': '.',
-        'api_searchbox': api_searchbox,
-        # steal the doxygen css from one of the frameworks;
-        # this means that all the doxygen-provided images etc. will be found
-        'doxygencss': products[0]['outputdir'] + '/html/doxygen.css',
-        'title': title,
-        'breadcrumbs': {
-            'entries': [
-                {
-                    'href': './index.html',
-                    'text': 'KDE API Reference'
-                }
-            ]
-        },
-        'product_list': products,
-    }
-    tmpl = generator.create_jinja_environment(doxdatadir).get_template('frontpage.html')
-    with codecs.open(outputfile, 'w', 'utf-8') as outf:
-        outf.write(tmpl.render(mapping))
-    for product in products:
-        logodir = os.path.dirname(product['logo_url'])
-        if not os.path.isdir(logodir):
-            os.mkdir(logodir)
-        shutil.copy(product['logo_url_src'], product['logo_url'])
-
-def process_subgroup_html_files(outputfile, doxdatadir, groups, available_platforms, title,
-                                api_searchbox=False):
-
-    for group in groups:
-        mapping = {
-            'resources': '..',
-            'api_searchbox': api_searchbox,
-            # steal the doxygen css from one of the frameworks;
-            # this means that all the doxygen-provided images etc. will be found
-            'doxygencss': group['libraries'][0]['outputdir'] + '/html/doxygen.css',
-            'title': title,
-            'breadcrumbs': {
-                'entries': [
-                    {
-                        'href': '../index.html',
-                        'text': 'KDE API Reference'
-                    },
-                    {
-                        'href': './index.html',
-                        'text': group['fancyname']
-                    }
-                ]
-            },
-            'group': group,
-            'available_platforms': sorted(available_platforms),
-        }
-        if not os.path.isdir(group['name']):
-            os.mkdir(group['name'])
-        outputfile = group['name'] + '/index.html'
-        tmpl = generator.create_jinja_environment(doxdatadir).get_template('subgroup.html')
-        with codecs.open(outputfile, 'w', 'utf-8') as outf:
-            outf.write(tmpl.render(mapping))

 def find_dot_files(dot_dir):
     """Returns a list of paths to files ending with .dot in subdirs of `dot_dir`."""
     lst = []
     for (root, dirs, files) in os.walk(dot_dir):
         lst.extend([os.path.join(root, x) for x in files if x.endswith('.dot')])
     return lst

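
find_dot_files feeds generate_diagram, which is removed just below (and called as kdx.generator.generate_diagram in the new main()). After depdiagram writes the graph, the function drives two Graphviz tools; a rough standalone equivalent of those external steps, with illustrative paths:

    import subprocess

    def render_diagram(dot_path, png_path):
        # 'tred' computes the transitive reduction, 'dot' renders the PNG;
        # the same two commands the removed run_cmd helper invoked.
        simplified = dot_path.replace('.dot', '-simplified.dot')
        with open(simplified, 'w') as f:
            subprocess.check_call(['tred', dot_path], stdout=f)
        subprocess.check_call(['dot', '-Tpng', '-o' + png_path, simplified])
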
-def generate_diagram(png_path, fancyname, dot_files, tmp_dir):
-    """Generate a dependency diagram for a framework.
+def download_kde_identities():
+    """Download the "accounts" file from the KDE SVN repository in order to
+    get the KDE identities with their names and e-mail addresses.
     """
-    def run_cmd(cmd, **kwargs):
+    cache_file = os.path.join(kdx.utils.cache_dir(), 'kde-accounts')
+    needs_download = True
+    if os.path.exists(cache_file):
+        logging.debug("Found cached identities file at %s", cache_file)
+        # not quite a day, so that generation on api.kde.org gets a fresh
+        # copy every time the daily cron job runs it
+        yesterday = time.time() - (23.5 * 3600)
+        if os.path.getmtime(cache_file) > yesterday:
+            needs_download = False
+        else:
+            logging.debug("Cached file too old; updating")
+    if needs_download:
+        logging.info("Downloading KDE identities")
         try:
-            subprocess.check_call(cmd, **kwargs)
-        except subprocess.CalledProcessError as exc:
-            logging.error(
-                'Command {exc.cmd} failed with error code {exc.returncode}.'.format(exc=exc))
-            return False
-        return True
-
-    logging.info('Generating dependency diagram')
-    dot_path = os.path.join(tmp_dir, fancyname + '.dot')
-
-    with open(dot_path, 'w') as f:
-        with_qt = False
-        ok = depdiagram.generate(f, dot_files, framework=fancyname, with_qt=with_qt)
-        if not ok:
-            logging.error('Generating diagram failed')
-            return False
-
-    logging.info('- Simplifying diagram')
-    simplified_dot_path = os.path.join(tmp_dir, fancyname + '-simplified.dot')
-    with open(simplified_dot_path, 'w') as f:
-        if not run_cmd(['tred', dot_path], stdout=f):
-            return False
-
-    logging.info('- Generating diagram png')
-    if not run_cmd(['dot', '-Tpng', '-o' + png_path, simplified_dot_path]):
-        return False
-
-    # These os.unlink() calls are not in a 'finally' block on purpose.
-    # Keeping the dot files around makes it possible to inspect their content
-    # when running with the --keep-temp-dirs option. If generation fails and
-    # --keep-temp-dirs is not set, the files will be removed when the program
-    # ends because they were created in `tmp_dir`.
-    os.unlink(dot_path)
-    os.unlink(simplified_dot_path)
-    return True
-
-
-def set_maintainers(dictionary, key, maintainers):
-    if key not in dictionary:
-        fw_maintainers = []
-    elif isinstance(dictionary[key], list):
-        fw_maintainers = map(lambda x: maintainers.get(x, None),
-                             dictionary[key])
-    else:
-        fw_maintainers = [maintainers.get(dictionary[key], None)]
-
-    fw_maintainers = [x for x in fw_maintainers if x is not None]
-    return fw_maintainers
-
-
-def create_fw_context(args, lib, tagfiles):
-    return generator.Context(args,
-                             # Names
-                             modulename=lib['name'],
-                             fancyname=lib['fancyname'],
-                             fwinfo=lib,
-                             # KApidox files
-                             resourcedir='../..' if lib['parent'] is None else '../../..',
-                             # Input
-                             #srcdir=lib['srcdir'],
-                             tagfiles=tagfiles,
-                             dependency_diagram=lib['dependency_diagram'],
-                             # Output
-                             outputdir=lib['outputdir'],
-                             )
-
-def gen_fw_apidocs(ctx, tmp_base_dir):
-    generator.create_dirs(ctx)
-    # tmp_dir is deleted when tmp_base_dir is
-    tmp_dir = tempfile.mkdtemp(prefix=ctx.modulename + '-', dir=tmp_base_dir)
-    generator.generate_apidocs(ctx, tmp_dir,
-                               doxyfile_entries=dict(WARN_IF_UNDOCUMENTED=True)
-                               )
-
-def finish_fw_apidocs(ctx, group_menu):
-    classmap = generator.build_classmap(ctx.tagfile)
-    generator.write_mapping_to_php(classmap, os.path.join(ctx.outputdir, 'classmap.inc'))
-
-    entries = [{
-        'href': '../../index.html',
-        'text': 'KDE API Reference'
-    }]
-    if ctx.fwinfo['parent'] is not None:
-        entries[0]['href'] = '../' + entries[0]['href']
-        entries.append({
-            'href': '../../index.html',
-            'text': ctx.fwinfo['product']['fancyname']
-        })
-    entries.append({
-        'href': 'index.html',
-        'text': ctx.fancyname
-    })
-
-    template_mapping = {
-        'breadcrumbs': {
-            'entries': entries
-        },
-        #'group_menu': group_menu
-    }
-    copyright = '1996-' + str(datetime.date.today().year) + ' The KDE developers'
-    mapping = {
-        'doxygencss': 'doxygen.css',
-        'resources': ctx.resourcedir,
-        'title': ctx.title,
-        'fwinfo': ctx.fwinfo,
-        'copyright': copyright,
-        'api_searchbox': ctx.api_searchbox,
-        'doxygen_menu': {'entries': generator.menu_items(ctx.htmldir, ctx.modulename)},
-        'class_map': {'classes': classmap},
-        'kapidox_version': utils.get_kapidox_version(),
-    }
-    if template_mapping:
-        mapping.update(template_mapping)
-    logging.info('Postprocessing')
-
-    tmpl = generator.create_jinja_environment(ctx.doxdatadir).get_template('doxygen2.html')
-    generator.postprocess_internal(ctx.htmldir, tmpl, mapping)
-
-def create_fw_tagfile_tuple(lib):
-    tagfile = os.path.abspath(
-        os.path.join(
-            lib['outputdir'],
-            'html',
-            lib['fancyname'] + '.tags'))
-    return (tagfile, '../../' + lib['outputdir'] + '/html/')
-
-
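
create_fw_tagfile_tuple, removed above, pairs a library's Doxygen tag file with the relative URL prefix other libraries use when linking into its documentation. For a hypothetical library dict the result looks roughly like this (values illustrative):

    lib = {'outputdir': 'kcoreaddons', 'fancyname': 'KCoreAddons'}
    # create_fw_tagfile_tuple(lib) would return something like:
    # ('/abs/path/to/kcoreaddons/html/KCoreAddons.tags', '../../kcoreaddons/html/')
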
-def parse_args():
-    parser = argparse.ArgumentParser(description='Generate API documentation for the KDE Frameworks')
-    group = argparserutils.add_sources_group(parser)
-    group.add_argument('frameworksdir',
-                       help='Location of the frameworks modules.')
-    group.add_argument('--depdiagram-dot-dir',
-                       help='Generate dependency diagrams, using the .dot files from DIR.',
-                       metavar="DIR")
-    argparserutils.add_output_group(parser)
-    argparserutils.add_qt_doc_group(parser)
-    argparserutils.add_paths_group(parser)
-    argparserutils.add_misc_group(parser)
-    args = parser.parse_args()
-    argparserutils.check_common_args(args)
+            if not kdx.utils.svn_export(
+                    'svn://anonsvn.kde.org/home/kde/trunk/kde-common/accounts',
+                    cache_file,
+                    overwrite=True):
+                logging.debug("Falling back to using websvn to fetch "
+                              "identities file")
+                urlretrieve('http://websvn.kde.org/*checkout*/trunk/kde-common/accounts',
+                            cache_file)
+        except Exception as e:
+            if os.path.exists(cache_file):
+                logging.error('Failed to update KDE identities: %s', e)
+            else:
+                logging.error('Failed to fetch KDE identities: %s', e)
+                return None

-    if args.depdiagram_dot_dir and not DEPDIAGRAM_AVAILABLE:
-        logging.error('You need to install the Graphviz Python bindings to generate dependency diagrams.\nSee .')
-        exit(1)
+    maintainers = {}

-    if not os.path.isdir(args.frameworksdir):
-        logging.error(args.frameworksdir + " is not a directory")
-        exit(2)
+    with codecs.open(cache_file, 'r', encoding='utf8') as f:
+        for line in f:
+            parts = line.strip().split()
+            if len(parts) >= 3:
+                maintainers[parts[0]] = {
+                    'name': ' '.join(parts[1:-1]),
+                    'email': parts[-1]
+                }

-    return args
+    return maintainers
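
The parsing loop added above assumes each line of the accounts file reads "<svn username> <real name> <e-mail>", where the real name may span several words; everything between the first and last fields becomes the name. An illustrative line (the account is made up) and the entry it yields:

    line = 'jdoe    John Doe    jdoe@example.org'
    parts = line.strip().split()
    entry = {'name': ' '.join(parts[1:-1]), 'email': parts[-1]}
    assert parts[0] == 'jdoe'
    assert entry == {'name': 'John Doe', 'email': 'jdoe@example.org'}
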

 def main():
-    utils.setup_logging()
-    args = parse_args()
+    kdx.utils.setup_logging()
+    args = kdx.argparserutils.parse_args(DEPDIAGRAM_AVAILABLE)

     tagfiles = generator.search_for_tagfiles(
-        suggestion = args.qtdoc_dir,
-        doclink = args.qtdoc_link,
-        flattenlinks = args.qtdoc_flatten_links,
-        searchpaths = ['/usr/share/doc/qt5', '/usr/share/doc/qt'])
-    maintainers = generator.download_kde_identities()
-    #tiers = {1:[],2:[],3:[],4:[]}
-    metalist = []
-
-    for path, dirs, _ in os.walk(args.frameworksdir):
-        # We don't want to recurse into the dot-directories
-        dirs[:] = [d for d in dirs if not d[0] == '.']
-        metainfo = create_metainfo(args.frameworksdir, path)
-        if metainfo is not None:
-            if metainfo['public_lib']:
-                metalist.append(metainfo)
-            else:
-                logging.warning("{} has no public libraries".format(metainfo['name']))
-    products, groups, libraries, available_platforms = sort_metainfo(metalist, maintainers)
-    generator.copy_dir_contents(os.path.join(args.doxdatadir, 'htmlresource'), '.')
-    #group_menu = generate_group_menu(metalist)
-
-    process_toplevel_html_file('index.html', args.doxdatadir,
-                               title=args.title, products=products, api_searchbox=args.api_searchbox)
-    process_subgroup_html_files('index.html', args.doxdatadir,
-                                title=args.title, groups=groups, available_platforms=available_platforms,
-                                api_searchbox=args.api_searchbox)
-    tmp_dir = tempfile.mkdtemp(prefix='kgenframeworksapidox-')
+        suggestion=args.qtdoc_dir,
+        doclink=args.qtdoc_link,
+        flattenlinks=args.qtdoc_flatten_links,
+        searchpaths=['/usr/share/doc/qt5', '/usr/share/doc/qt'])
+
+    maintainers = download_kde_identities()
+    rootdir = args.frameworksdir
+
+    metalist = kdx.preprocessing.parse_tree(rootdir)
+    products, groups, libraries, available_platforms = kdx.preprocessing.sort_metainfo(metalist, maintainers)
+
+    kdx.utils.copy_dir_contents(os.path.join(args.doxdatadir, 'htmlresource'), '.')
+
+    kdx.generator.process_toplevel_html_file('index.html',
+                                             args.doxdatadir,
+                                             title=args.title,
+                                             products=products,
+                                             api_searchbox=args.api_searchbox
+                                             )
+    kdx.generator.process_subgroup_html_files('index.html',
+                                              args.doxdatadir,
+                                              title=args.title,
+                                              groups=groups,
+                                              available_platforms=available_platforms,
+                                              api_searchbox=args.api_searchbox
+                                              )
+    tmp_dir = tempfile.mkdtemp(prefix='kapidox-')

     try:
         if args.depdiagram_dot_dir:
             dot_files = find_dot_files(args.depdiagram_dot_dir)
             assert(dot_files)
         for lib in libraries:
             logging.info('# Generating doc for {}'.format(lib['fancyname']))
             if args.depdiagram_dot_dir:
                 png_path = os.path.join(tmp_dir, lib['name']) + '.png'
-                ok = generate_diagram(png_path, lib['name'], dot_files, tmp_dir)
+                ok = kdx.generator.generate_diagram(png_path, lib['name'],
+                                                    dot_files, tmp_dir)
                 if ok:
                     lib['dependency_diagram'] = png_path
-            ctx = create_fw_context(args, lib, tagfiles)
-            gen_fw_apidocs(ctx, tmp_dir)
-            tagfiles.append(create_fw_tagfile_tuple(lib))
+            ctx = kdx.generator.create_fw_context(args, lib, tagfiles)
+            kdx.generator.gen_fw_apidocs(ctx, tmp_dir)
+            tagfiles.append(kdx.generator.create_fw_tagfile_tuple(lib))
+
         # Rebuild for interdependencies
         # FIXME: can we be cleverer about deps?
         for lib in libraries:
-            logging.info('# Rebuilding {} for interdependencies'.format(lib['name']))
+            logging.info('# Rebuilding {} for interdependencies'
+                         .format(lib['name']))
             shutil.rmtree(lib['outputdir'])
-            ctx = create_fw_context(args, lib, tagfiles)
-            gen_fw_apidocs(ctx, tmp_dir)
-            finish_fw_apidocs(ctx, None)
+            ctx = kdx.generator.create_fw_context(args, lib, tagfiles)
+            kdx.generator.gen_fw_apidocs(ctx, tmp_dir)
+            kdx.generator.finish_fw_apidocs(ctx, None)
         logging.info('# Done')
     finally:
+        for product in products:
+            if product['logo_url'] is not None:
+                logodir = os.path.dirname(product['logo_url'])
+                if not os.path.isdir(logodir):
+                    os.mkdir(logodir)
+                shutil.copy(product['logo_url_src'], product['logo_url'])
+
         if args.keep_temp_dirs:
             logging.info('Kept temp dir at {}'.format(tmp_dir))
         else:
             shutil.rmtree(tmp_dir)


 if __name__ == "__main__":
     main()
-
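
The two passes over libraries in main() exist because tagfiles grows while the docs are generated: the first library processed cannot yet link to any of the others. The second pass regenerates everything with the complete tag-file list so cross-library links resolve in both directions. Schematically, with hypothetical helpers:

    def build_docs(libraries, tagfiles, generate):
        # generate(lib, tagfiles) builds docs for lib and returns its tag file.
        for lib in libraries:               # first pass: accumulate tag files
            tagfiles.append(generate(lib, tagfiles))
        for lib in libraries:               # second pass: every tag file known
            generate(lib, tagfiles)
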