From fa87543e7d6bb243ff068c64069fc43342041073 Mon Sep 17 00:00:00 2001 From: David Robillard Date: Mon, 14 Oct 2019 11:01:21 +0200 Subject: Upgrade to waf 2.0.18 --- Build.py | 28 +++++-- Configure.py | 20 +++-- Context.py | 6 +- Logs.py | 5 +- Node.py | 3 +- Runner.py | 15 ++-- Scripting.py | 7 +- TaskGen.py | 2 +- Tools/asm.py | 37 ++++++++- Tools/c_aliases.py | 6 +- Tools/c_config.py | 9 +- Tools/c_tests.py | 3 +- Tools/gas.py | 1 + Tools/javaw.py | 28 +++++-- Tools/md5_tstamp.py | 5 +- Tools/msvc.py | 2 +- Tools/nasm.py | 5 ++ Tools/python.py | 35 +++++--- Tools/qt5.py | 6 +- Tools/waf_unit_test.py | 2 +- Utils.py | 8 +- extras/clang_cross.py | 92 ++++++++++++++++++++ extras/clang_cross_common.py | 113 +++++++++++++++++++++++++ extras/clangxx_cross.py | 106 +++++++++++++++++++++++ extras/color_msvc.py | 59 +++++++++++++ extras/doxygen.py | 20 +++-- extras/fast_partial.py | 29 +++++-- extras/genpybind.py | 194 +++++++++++++++++++++++++++++++++++++++++++ extras/local_rpath.py | 8 +- extras/msvcdeps.py | 73 ++++++++-------- extras/objcopy.py | 9 +- extras/protoc.py | 1 + extras/pyqt5.py | 17 ++-- extras/sphinx.py | 81 ++++++++++++++++++ extras/syms.py | 2 +- processor.py | 4 + 36 files changed, 920 insertions(+), 121 deletions(-) create mode 100644 extras/clang_cross.py create mode 100644 extras/clang_cross_common.py create mode 100644 extras/clangxx_cross.py create mode 100644 extras/color_msvc.py create mode 100644 extras/genpybind.py create mode 100644 extras/sphinx.py diff --git a/Build.py b/Build.py index 8143dbc..39f0991 100644 --- a/Build.py +++ b/Build.py @@ -1160,11 +1160,19 @@ class inst(Task.Task): # same size and identical timestamps -> make no copy if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size: if not self.generator.bld.progress_bar: - Logs.info('- install %s (from %s)', tgt, lbl) + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s- install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) return False if not self.generator.bld.progress_bar: - Logs.info('+ install %s (from %s)', tgt, lbl) + + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + + Logs.info('%s+ install %s%s%s (from %s)', c1, c2, tgt, c1, lbl) # Give best attempt at making destination overwritable, # like the 'install' utility used by 'make install' does. 
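The Build.py hunks above wrap waf's install messages in colour escape codes instead of plain strings. A minimal standalone sketch of the same pattern, assuming (as the change relies on) that Logs.colors attributes resolve to empty strings when colour output is disabled; the helper name log_install is illustrative and not part of the patch:

from waflib import Logs

def log_install(tgt, lbl):
    c1 = Logs.colors.NORMAL
    c2 = Logs.colors.BLUE
    # prints '+ install <tgt> (from <lbl>)', with the target coloured only when colours are enabled
    Logs.info('%s+ install %s%s%s (from %s)', c1, c2, tgt, c1, lbl)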
@@ -1221,14 +1229,18 @@ class inst(Task.Task): """ if os.path.islink(tgt) and os.readlink(tgt) == src: if not self.generator.bld.progress_bar: - Logs.info('- symlink %s (to %s)', tgt, src) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) else: try: os.remove(tgt) except OSError: pass if not self.generator.bld.progress_bar: - Logs.info('+ symlink %s (to %s)', tgt, src) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s+ symlink %s%s%s (to %s)', c1, c2, tgt, c1, src) os.symlink(src, tgt) self.fix_perms(tgt) @@ -1237,7 +1249,9 @@ class inst(Task.Task): See :py:meth:`waflib.Build.inst.do_install` """ if not self.generator.bld.progress_bar: - Logs.info('- remove %s', tgt) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) #self.uninstall.append(tgt) try: @@ -1257,7 +1271,9 @@ class inst(Task.Task): """ try: if not self.generator.bld.progress_bar: - Logs.info('- remove %s', tgt) + c1 = Logs.colors.NORMAL + c2 = Logs.colors.BLUE + Logs.info('%s- remove %s%s%s', c1, c2, tgt, c1) os.remove(tgt) except OSError: pass diff --git a/Configure.py b/Configure.py index db09c0e..5762eb6 100644 --- a/Configure.py +++ b/Configure.py @@ -524,7 +524,7 @@ def run_build(self, *k, **kw): Though this function returns *0* by default, the build may set an attribute named *retval* on the build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example. - This function also provides a limited cache. To use it, provide the following option:: + This function also features a cache which can be enabled by the following option:: def options(opt): opt.add_option('--confcache', dest='confcache', default=0, @@ -535,10 +535,21 @@ def run_build(self, *k, **kw): $ waf configure --confcache """ - lst = [str(v) for (p, v) in kw.items() if p != 'env'] - h = Utils.h_list(lst) + buf = [] + for key in sorted(kw.keys()): + v = kw[key] + if hasattr(v, '__call__'): + buf.append(Utils.h_fun(v)) + else: + buf.append(str(v)) + h = Utils.h_list(buf) dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' 
or '') + 'conf_check_' + Utils.to_hex(h) + cachemode = kw.get('confcache', getattr(Options.options, 'confcache', None)) + + if not cachemode and os.path.exists(dir): + shutil.rmtree(dir) + try: os.makedirs(dir) except OSError: @@ -549,7 +560,6 @@ def run_build(self, *k, **kw): except OSError: self.fatal('cannot use the configuration test folder %r' % dir) - cachemode = getattr(Options.options, 'confcache', None) if cachemode == 1: try: proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build')) @@ -589,7 +599,7 @@ def run_build(self, *k, **kw): else: ret = getattr(bld, 'retval', 0) finally: - if cachemode == 1: + if cachemode: # cache the results each time proj = ConfigSet.ConfigSet() proj['cache_run_build'] = ret diff --git a/Context.py b/Context.py index 876ea46..e3305fa 100644 --- a/Context.py +++ b/Context.py @@ -11,13 +11,13 @@ from waflib import Utils, Errors, Logs import waflib.Node # the following 3 constants are updated on each new release (do not touch) -HEXVERSION=0x2000f00 +HEXVERSION=0x2001200 """Constant updated on new releases""" -WAFVERSION="2.0.15" +WAFVERSION="2.0.18" """Constant updated on new releases""" -WAFREVISION="503db290b73ef738a495e0d116d6f8ee0b98dcc2" +WAFREVISION="314689b8994259a84f0de0aaef74d7ce91f541ad" """Git revision when the waf version is updated""" ABI = 20 diff --git a/Logs.py b/Logs.py index 11dc34f..298411d 100644 --- a/Logs.py +++ b/Logs.py @@ -237,7 +237,10 @@ class formatter(logging.Formatter): if rec.levelno >= logging.INFO: # the goal of this is to format without the leading "Logs, hour" prefix if rec.args: - return msg % rec.args + try: + return msg % rec.args + except UnicodeDecodeError: + return msg.encode('utf-8') % rec.args return msg rec.msg = msg diff --git a/Node.py b/Node.py index 4ac1ea8..2ad1846 100644 --- a/Node.py +++ b/Node.py @@ -73,7 +73,7 @@ def ant_matcher(s, ignorecase): if k == '**': accu.append(k) else: - k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+') + k = k.replace('.', '[.]').replace('*', '.*').replace('?', '.').replace('+', '\\+') k = '^%s$' % k try: exp = re.compile(k, flags=reflags) @@ -595,7 +595,6 @@ class Node(object): :rtype: iterator """ dircont = self.listdir() - dircont.sort() try: lst = set(self.children.keys()) diff --git a/Runner.py b/Runner.py index 5d27669..91d5547 100644 --- a/Runner.py +++ b/Runner.py @@ -337,11 +337,16 @@ class Parallel(object): if hasattr(tsk, 'semaphore'): sem = tsk.semaphore - sem.release(tsk) - while sem.waiting and not sem.is_locked(): - # take a frozen task, make it ready to run - x = sem.waiting.pop() - self._add_task(x) + try: + sem.release(tsk) + except KeyError: + # TODO + pass + else: + while sem.waiting and not sem.is_locked(): + # take a frozen task, make it ready to run + x = sem.waiting.pop() + self._add_task(x) def get_out(self): """ diff --git a/Scripting.py b/Scripting.py index ae17a8b..68dccf2 100644 --- a/Scripting.py +++ b/Scripting.py @@ -332,7 +332,12 @@ def distclean(ctx): else: remove_and_log(env.out_dir, shutil.rmtree) - for k in (env.out_dir, env.top_dir, env.run_dir): + env_dirs = [env.out_dir] + if not ctx.options.no_lock_in_top: + env_dirs.append(env.top_dir) + if not ctx.options.no_lock_in_run: + env_dirs.append(env.run_dir) + for k in env_dirs: p = os.path.join(k, Options.lockfile) remove_and_log(p, os.remove) diff --git a/TaskGen.py b/TaskGen.py index 532b7d5..f8f92bd 100644 --- a/TaskGen.py +++ b/TaskGen.py @@ -905,7 +905,7 @@ def process_subst(self): # paranoid safety measure for the general case foo.in->foo.h with 
ambiguous dependencies for xt in HEADER_EXTS: if b.name.endswith(xt): - tsk.ext_in = tsk.ext_in + ['.h'] + tsk.ext_out = tsk.ext_out + ['.h'] break inst_to = getattr(self, 'install_path', None) diff --git a/Tools/asm.py b/Tools/asm.py index b6f26fb..a57e83b 100644 --- a/Tools/asm.py +++ b/Tools/asm.py @@ -34,9 +34,22 @@ Support for pure asm programs and libraries should also work:: target = 'asmtest') """ -from waflib import Task +import re +from waflib import Errors, Logs, Task from waflib.Tools.ccroot import link_task, stlink_task from waflib.TaskGen import extension +from waflib.Tools import c_preproc + +re_lines = re.compile( + '^[ \t]*(?:%)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef)[ \t]*(.*)\r*$', + re.IGNORECASE | re.MULTILINE) + +class asm_parser(c_preproc.c_parser): + def filter_comments(self, node): + code = node.read() + code = c_preproc.re_nl.sub('', code) + code = c_preproc.re_cpp.sub(c_preproc.repl, code) + return re_lines.findall(code) class asm(Task.Task): """ @@ -45,6 +58,28 @@ class asm(Task.Task): color = 'BLUE' run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}' + def scan(self): + if self.env.ASM_NAME == 'gas': + return c_preproc.scan(self) + Logs.warn('There is no dependency scanner for Nasm!') + return [[], []] + elif self.env.ASM_NAME == 'nasm': + Logs.warn('The Nasm dependency scanner is incomplete!') + + try: + incn = self.generator.includes_nodes + except AttributeError: + raise Errors.WafError('%r is missing the "asm" feature' % self.generator) + + if c_preproc.go_absolute: + nodepaths = incn + else: + nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)] + + tmp = asm_parser(nodepaths) + tmp.start(self.inputs[0], self.env) + return (tmp.nodes, tmp.names) + @extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP') def asm_hook(self, node): """ diff --git a/Tools/c_aliases.py b/Tools/c_aliases.py index c9d5369..985e048 100644 --- a/Tools/c_aliases.py +++ b/Tools/c_aliases.py @@ -47,10 +47,12 @@ def sniff_features(**kw): if x in exts: feats.append('cxx') break - if 'c' in exts or 'vala' in exts or 'gs' in exts: feats.append('c') + if 's' in exts or 'S' in exts: + feats.append('asm') + for x in 'f f90 F F90 for FOR'.split(): if x in exts: feats.append('fc') @@ -66,7 +68,7 @@ def sniff_features(**kw): if typ in ('program', 'shlib', 'stlib'): will_link = False for x in feats: - if x in ('cxx', 'd', 'fc', 'c'): + if x in ('cxx', 'd', 'fc', 'c', 'asm'): feats.append(x + typ) will_link = True if not will_link and not kw.get('features', []): diff --git a/Tools/c_config.py b/Tools/c_config.py index d546be9..80580cc 100644 --- a/Tools/c_config.py +++ b/Tools/c_config.py @@ -659,20 +659,21 @@ class test_exec(Task.Task): """ color = 'PINK' def run(self): + cmd = [self.inputs[0].abspath()] + getattr(self.generator, 'test_args', []) if getattr(self.generator, 'rpath', None): if getattr(self.generator, 'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()]) + self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd) else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()]) + self.generator.bld.retval = self.generator.bld.exec_command(cmd) else: env = self.env.env or {} env.update(dict(os.environ)) for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'): env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '') if getattr(self.generator, 
'define_ret', False): - self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env) + self.generator.bld.retval = self.generator.bld.cmd_and_log(cmd, env=env) else: - self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env) + self.generator.bld.retval = self.generator.bld.exec_command(cmd, env=env) @feature('test_exec') @after_method('apply_link') diff --git a/Tools/c_tests.py b/Tools/c_tests.py index f858df5..7a4094f 100644 --- a/Tools/c_tests.py +++ b/Tools/c_tests.py @@ -224,6 +224,7 @@ def check_endianness(self): def check_msg(self): return tmp[0] self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness', - msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg) + msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, + okmsg=check_msg, confcache=None) return tmp[0] diff --git a/Tools/gas.py b/Tools/gas.py index 77afed7..4a8745a 100644 --- a/Tools/gas.py +++ b/Tools/gas.py @@ -16,3 +16,4 @@ def configure(conf): conf.env.ASLNK_TGT_F = ['-o'] conf.find_ar() conf.load('asm') + conf.env.ASM_NAME = 'gas' diff --git a/Tools/javaw.py b/Tools/javaw.py index 9daed39..ceb08c2 100644 --- a/Tools/javaw.py +++ b/Tools/javaw.py @@ -212,6 +212,11 @@ def java_use_rec(self, name, **kw): # is already guaranteed by ordering done between the single tasks if hasattr(y, 'jar_task'): self.use_lst.append(y.jar_task.outputs[0].abspath()) + else: + if hasattr(y,'outdir'): + self.use_lst.append(y.outdir.abspath()) + else: + self.use_lst.append(y.path.get_bld().abspath()) for x in self.to_list(getattr(y, 'use', [])): self.java_use_rec(x) @@ -230,16 +235,25 @@ def use_javac_files(self): get = self.bld.get_tgen_by_name for x in names: try: - y = get(x) + tg = get(x) except Errors.WafError: self.uselib.append(x) else: - y.post() - if hasattr(y, 'jar_task'): - self.use_lst.append(y.jar_task.outputs[0].abspath()) - self.javac_task.set_run_after(y.jar_task) + tg.post() + if hasattr(tg, 'jar_task'): + self.use_lst.append(tg.jar_task.outputs[0].abspath()) + self.javac_task.set_run_after(tg.jar_task) + self.javac_task.dep_nodes.extend(tg.jar_task.outputs) else: - for tsk in y.tasks: + if hasattr(tg, 'outdir'): + base_node = tg.outdir + else: + base_node = tg.path.get_bld() + + self.use_lst.append(base_node.abspath()) + self.javac_task.dep_nodes.extend([x for x in base_node.ant_glob(JAR_RE, remove=False, quiet=True)]) + + for tsk in tg.tasks: self.javac_task.set_run_after(tsk) # If recurse use scan is enabled recursively add use attribute for each used one @@ -471,7 +485,7 @@ def configure(self): self.env.JAVA_HOME = [self.environ['JAVA_HOME']] for x in 'javac java jar javadoc'.split(): - self.find_program(x, var=x.upper(), path_list=java_path) + self.find_program(x, var=x.upper(), path_list=java_path, mandatory=(x not in ('javadoc'))) if 'CLASSPATH' in self.environ: v.CLASSPATH = self.environ['CLASSPATH'] diff --git a/Tools/md5_tstamp.py b/Tools/md5_tstamp.py index 2a58792..d1569fa 100644 --- a/Tools/md5_tstamp.py +++ b/Tools/md5_tstamp.py @@ -2,7 +2,10 @@ # encoding: utf-8 """ -Re-calculate md5 hashes of files only when the file time have changed. +Re-calculate md5 hashes of files only when the file time have changed:: + + def options(opt): + opt.load('md5_tstamp') The hashes can also reflect either the file contents (STRONGEST=True) or the file time and file size. 
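The md5_tstamp docstring above now shows the one-line opt.load needed to enable the tool. A slightly fuller wscript sketch, under the assumption that STRONGEST is a module-level switch in waflib.Tools.md5_tstamp consulted on every hash, so flipping it makes signatures depend on file time and size rather than contents:

def options(opt):
    opt.load('md5_tstamp')
    # assumption: STRONGEST toggles content hashing vs. mtime+size hashing
    from waflib.Tools import md5_tstamp
    md5_tstamp.STRONGEST = False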
diff --git a/Tools/msvc.py b/Tools/msvc.py index ff58449..f169c7f 100644 --- a/Tools/msvc.py +++ b/Tools/msvc.py @@ -969,7 +969,7 @@ def apply_flags_msvc(self): if not is_static: for f in self.env.LINKFLAGS: d = f.lower() - if d[1:] == 'debug': + if d[1:] in ('debug', 'debug:full', 'debug:fastlink'): pdbnode = self.link_task.outputs[0].change_ext('.pdb') self.link_task.outputs.append(pdbnode) diff --git a/Tools/nasm.py b/Tools/nasm.py index 411d582..9c51c18 100644 --- a/Tools/nasm.py +++ b/Tools/nasm.py @@ -24,3 +24,8 @@ def configure(conf): conf.env.ASLNK_TGT_F = ['-o'] conf.load('asm') conf.env.ASMPATH_ST = '-I%s' + os.sep + txt = conf.cmd_and_log(conf.env.AS + ['--version']) + if 'yasm' in txt.lower(): + conf.env.ASM_NAME = 'yasm' + else: + conf.env.ASM_NAME = 'nasm' diff --git a/Tools/python.py b/Tools/python.py index 01a2c9a..7c45a76 100644 --- a/Tools/python.py +++ b/Tools/python.py @@ -79,14 +79,19 @@ def process_py(self, node): """ Add signature of .py file, so it will be byte-compiled when necessary """ - assert(hasattr(self, 'install_path')), 'add features="py"' + assert(hasattr(self, 'install_path')), 'add features="py" for target "%s" in "%s/wscript".' % (self.target, self.path.nice_path()) + self.install_from = getattr(self, 'install_from', None) + relative_trick = getattr(self, 'relative_trick', True) + if self.install_from: + assert isinstance(self.install_from, Node.Node), \ + 'add features="py" for target "%s" in "%s/wscript" (%s).' % (self.target, self.path.nice_path(), type(self.install_from)) # where to install the python file if self.install_path: if self.install_from: - self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True) + self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=relative_trick) else: - self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True) + self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=relative_trick) lst = [] if self.env.PYC: @@ -96,9 +101,11 @@ def process_py(self, node): if self.install_path: if self.install_from: - pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env) + target_dir = node.path_from(self.install_from) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: - pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env) + target_dir = node.path_from(self.path) if relative_trick else node.name + pyd = Utils.subst_vars("%s/%s" % (self.install_path, target_dir), self.env) else: pyd = node.abspath() @@ -115,7 +122,7 @@ def process_py(self, node): tsk.pyd = pyd if self.install_path: - self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True) + self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=relative_trick) class pyc(Task.Task): """ @@ -342,7 +349,13 @@ def check_python_headers(conf, features='pyembed pyext'): if 'pyembed' in features: for flags in all_flags: - conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags) + # Python 3.8 has different flags for pyembed, needs --embed + embedflags = flags + ['--embed'] + try: + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % 
' '.join(embedflags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=embedflags) + except conf.errors.ConfigurationError: + # However Python < 3.8 doesn't accept --embed, so we need a fallback + conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags) try: conf.test_pyembed(xx) @@ -427,11 +440,11 @@ def check_python_headers(conf, features='pyembed pyext'): # Code using the Python API needs to be compiled with -fno-strict-aliasing if env.CC_NAME == 'gcc': - env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing']) - env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing']) + env.append_unique('CFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CFLAGS_PYEXT', ['-fno-strict-aliasing']) if env.CXX_NAME == 'gcc': - env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing']) - env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing']) + env.append_unique('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing']) + env.append_unique('CXXFLAGS_PYEXT', ['-fno-strict-aliasing']) if env.CC_NAME == "msvc": from distutils.msvccompiler import MSVCCompiler diff --git a/Tools/qt5.py b/Tools/qt5.py index 9f43280..287c253 100644 --- a/Tools/qt5.py +++ b/Tools/qt5.py @@ -74,7 +74,7 @@ else: import os, sys, re from waflib.Tools import cxx -from waflib import Task, Utils, Options, Errors, Context +from waflib import Build, Task, Utils, Options, Errors, Context from waflib.TaskGen import feature, after_method, extension, before_method from waflib.Configure import conf from waflib import Logs @@ -167,6 +167,10 @@ class qxx(Task.classes['cxx']): node = self.inputs[0] bld = self.generator.bld + # skip on uninstall due to generated files + if bld.is_install == Build.UNINSTALL: + return + try: # compute the signature once to know if there is a moc file to create self.signature() diff --git a/Tools/waf_unit_test.py b/Tools/waf_unit_test.py index 74d6c05..6ff6f72 100644 --- a/Tools/waf_unit_test.py +++ b/Tools/waf_unit_test.py @@ -214,7 +214,7 @@ class utest(Task.Task): 'cmd': cmd } script_file = self.inputs[0].abspath() + '_run.py' - Utils.writef(script_file, script_code) + Utils.writef(script_file, script_code, encoding='utf-8') os.chmod(script_file, Utils.O755) if Logs.verbose > 1: Logs.info('Test debug file written as %r' % script_file) diff --git a/Utils.py b/Utils.py index 4b808a8..7472226 100644 --- a/Utils.py +++ b/Utils.py @@ -604,6 +604,12 @@ def h_list(lst): """ return md5(repr(lst).encode()).digest() +if sys.hexversion < 0x3000000: + def h_list_python2(lst): + return md5(repr(lst)).digest() + h_list_python2.__doc__ = h_list.__doc__ + h_list = h_list_python2 + def h_fun(fun): """ Hash functions @@ -879,7 +885,7 @@ def get_process(): except IndexError: filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py' cmd = [sys.executable, '-c', readf(filepath)] - return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0) + return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0, close_fds=not is_win32) def run_prefork_process(cmd, kwargs, cargs): """ diff --git a/extras/clang_cross.py b/extras/clang_cross.py new file mode 100644 index 0000000..1b51e28 --- /dev/null +++ b/extras/clang_cross.py @@ -0,0 +1,92 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Krzysztof KosiƄski 2014 +# DragoonX6 2018 + +""" +Detect the Clang C compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang. 
+""" + +from waflib.Tools import ccroot, ar, gcc +from waflib.Configure import conf +import waflib.Context +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang:: + $ waf configure --clang-target-triple=x86_64-pc-linux-gnu + """ + cc_compiler_opts = opt.add_option_group('Configuration options') + cc_compiler_opts.add_option('--clang-target-triple', default=None, + help='Target triple for clang', + dest='clang_target_triple') + cc_compiler_opts.add_option('--clang-sysroot', default=None, + help='Sysroot for clang', + dest='clang_sysroot') + +@conf +def find_clang(conf): + """ + Finds the program clang and executes it to ensure it really is clang + """ + + import os + + cc = conf.find_program('clang', var='CC') + + if conf.options.clang_target_triple != None: + conf.env.append_value('CC', ['-target', conf.options.clang_target_triple]) + + if conf.options.clang_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clang_sysroot): + sysroot = conf.options.clang_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clang_sysroot)) + + conf.env.append_value('CC', ['--sysroot', sysroot]) + + conf.get_cc_version(cc, clang=True) + conf.env.CC_NAME = 'clang' + +@conf +def clang_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clang_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clang_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clang_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clang() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gcc_common_flags() + # Allow the user to provide flags for the target platform. + conf.gcc_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. + conf.clang_modifier_target_triple() + conf.cc_load_tools() + conf.cc_add_flags() + conf.link_add_flags() diff --git a/extras/clang_cross_common.py b/extras/clang_cross_common.py new file mode 100644 index 0000000..b76a070 --- /dev/null +++ b/extras/clang_cross_common.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python +# encoding: utf-8 +# DragoonX6 2018 + +""" +Common routines for cross_clang.py and cross_clangxx.py +""" + +from waflib.Configure import conf +import waflib.Context + +def normalize_target_triple(target_triple): + target_triple = target_triple[:-1] + normalized_triple = target_triple.replace('--', '-unknown-') + + if normalized_triple.startswith('-'): + normalized_triple = 'unknown' + normalized_triple + + if normalized_triple.endswith('-'): + normalized_triple += 'unknown' + + # Normalize MinGW builds to *arch*-w64-mingw32 + if normalized_triple.endswith('windows-gnu'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-w64-mingw32' + + # Strip the vendor when doing msvc builds, since it's unused anyway. 
+ if normalized_triple.endswith('windows-msvc'): + normalized_triple = normalized_triple[:normalized_triple.index('-')] + '-windows-msvc' + + return normalized_triple.replace('-', '_') + +@conf +def clang_modifier_msvc(conf): + import os + + """ + Really basic setup to use clang in msvc mode. + We actually don't really want to do a lot, even though clang is msvc compatible + in this mode, that doesn't mean we're actually using msvc. + It's probably the best to leave it to the user, we can assume msvc mode if the user + uses the clang-cl frontend, but this module only concerns itself with the gcc-like frontend. + """ + v = conf.env + v.cprogram_PATTERN = '%s.exe' + + v.cshlib_PATTERN = '%s.dll' + v.implib_PATTERN = '%s.lib' + v.IMPLIB_ST = '-Wl,-IMPLIB:%s' + v.SHLIB_MARKER = [] + + v.CFLAGS_cshlib = [] + v.LINKFLAGS_cshlib = ['-Wl,-DLL'] + v.cstlib_PATTERN = '%s.lib' + v.STLIB_MARKER = [] + + del(v.AR) + conf.find_program(['llvm-lib', 'lib'], var='AR') + v.ARFLAGS = ['-nologo'] + v.AR_TGT_F = ['-out:'] + + # Default to the linker supplied with llvm instead of link.exe or ld + v.LINK_CC = v.CC + ['-fuse-ld=lld', '-nostdlib'] + v.CCLNK_TGT_F = ['-o'] + v.def_PATTERN = '-Wl,-def:%s' + + v.LINKFLAGS = [] + + v.LIB_ST = '-l%s' + v.LIBPATH_ST = '-Wl,-LIBPATH:%s' + v.STLIB_ST = '-l%s' + v.STLIBPATH_ST = '-Wl,-LIBPATH:%s' + + CFLAGS_CRT_COMMON = [ + '-Xclang', '--dependent-lib=oldnames', + '-Xclang', '-fno-rtti-data', + '-D_MT' + ] + + v.CFLAGS_CRT_MULTITHREADED = CFLAGS_CRT_COMMON + [ + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmt', + ] + v.CXXFLAGS_CRT_MULTITHREADED = v.CFLAGS_CRT_MULTITHREADED + + v.CFLAGS_CRT_MULTITHREADED_DBG = CFLAGS_CRT_COMMON + [ + '-D_DEBUG', + '-Xclang', '-flto-visibility-public-std', + '-Xclang', '--dependent-lib=libcmtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DBG = v.CFLAGS_CRT_MULTITHREADED_DBG + + v.CFLAGS_CRT_MULTITHREADED_DLL = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-Xclang', '--dependent-lib=msvcrt' + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL = v.CFLAGS_CRT_MULTITHREADED_DLL + + v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = CFLAGS_CRT_COMMON + [ + '-D_DLL', + '-D_DEBUG', + '-Xclang', '--dependent-lib=msvcrtd', + ] + v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CFLAGS_CRT_MULTITHREADED_DLL_DBG + +@conf +def clang_modifier_target_triple(conf, cpp=False): + compiler = conf.env.CXX if cpp else conf.env.CC + output = conf.cmd_and_log(compiler + ['-dumpmachine'], output=waflib.Context.STDOUT) + + modifier = ('clangxx' if cpp else 'clang') + '_modifier_' + clang_modifier_func = getattr(conf, modifier + normalize_target_triple(output), None) + if clang_modifier_func: + clang_modifier_func() diff --git a/extras/clangxx_cross.py b/extras/clangxx_cross.py new file mode 100644 index 0000000..0ad38ad --- /dev/null +++ b/extras/clangxx_cross.py @@ -0,0 +1,106 @@ +#!/usr/bin/env python +# encoding: utf-8 +# Thomas Nagy 2009-2018 (ita) +# DragoonX6 2018 + +""" +Detect the Clang++ C++ compiler +This version is an attempt at supporting the -target and -sysroot flag of Clang++. 
+""" + +from waflib.Tools import ccroot, ar, gxx +from waflib.Configure import conf +import waflib.extras.clang_cross_common + +def options(opt): + """ + Target triplet for clang++:: + $ waf configure --clangxx-target-triple=x86_64-pc-linux-gnu + """ + cxx_compiler_opts = opt.add_option_group('Configuration options') + cxx_compiler_opts.add_option('--clangxx-target-triple', default=None, + help='Target triple for clang++', + dest='clangxx_target_triple') + cxx_compiler_opts.add_option('--clangxx-sysroot', default=None, + help='Sysroot for clang++', + dest='clangxx_sysroot') + +@conf +def find_clangxx(conf): + """ + Finds the program clang++, and executes it to ensure it really is clang++ + """ + + import os + + cxx = conf.find_program('clang++', var='CXX') + + if conf.options.clangxx_target_triple != None: + conf.env.append_value('CXX', ['-target', conf.options.clangxx_target_triple]) + + if conf.options.clangxx_sysroot != None: + sysroot = str() + + if os.path.isabs(conf.options.clangxx_sysroot): + sysroot = conf.options.clangxx_sysroot + else: + sysroot = os.path.normpath(os.path.join(os.getcwd(), conf.options.clangxx_sysroot)) + + conf.env.append_value('CXX', ['--sysroot', sysroot]) + + conf.get_cc_version(cxx, clang=True) + conf.env.CXX_NAME = 'clang' + +@conf +def clangxx_modifier_x86_64_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_i386_w64_mingw32(conf): + conf.gcc_modifier_win32() + +@conf +def clangxx_modifier_msvc(conf): + v = conf.env + v.cxxprogram_PATTERN = v.cprogram_PATTERN + v.cxxshlib_PATTERN = v.cshlib_PATTERN + + v.CXXFLAGS_cxxshlib = [] + v.LINKFLAGS_cxxshlib = v.LINKFLAGS_cshlib + v.cxxstlib_PATTERN = v.cstlib_PATTERN + + v.LINK_CXX = v.CXX + ['-fuse-ld=lld', '-nostdlib'] + v.CXXLNK_TGT_F = v.CCLNK_TGT_F + +@conf +def clangxx_modifier_x86_64_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_x86_64_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +@conf +def clangxx_modifier_i386_windows_msvc(conf): + conf.clang_modifier_msvc() + conf.clangxx_modifier_msvc() + + # Allow the user to override any flags if they so desire. + clang_modifier_user_func = getattr(conf, 'clangxx_modifier_i386_windows_msvc_user', None) + if clang_modifier_user_func: + clang_modifier_user_func() + +def configure(conf): + conf.find_clangxx() + conf.find_program(['llvm-ar', 'ar'], var='AR') + conf.find_ar() + conf.gxx_common_flags() + # Allow the user to provide flags for the target platform. + conf.gxx_modifier_platform() + # And allow more fine grained control based on the compiler's triplet. + conf.clang_modifier_target_triple(cpp=True) + conf.cxx_load_tools() + conf.cxx_add_flags() + conf.link_add_flags() diff --git a/extras/color_msvc.py b/extras/color_msvc.py new file mode 100644 index 0000000..60bacb7 --- /dev/null +++ b/extras/color_msvc.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +# encoding: utf-8 + +# Replaces the default formatter by one which understands MSVC output and colorizes it. 
+# Modified from color_gcc.py + +__author__ = __maintainer__ = "Alibek Omarov " +__copyright__ = "Alibek Omarov, 2019" + +import sys +from waflib import Logs + +class ColorMSVCFormatter(Logs.formatter): + def __init__(self, colors): + self.colors = colors + Logs.formatter.__init__(self) + + def parseMessage(self, line, color): + # Split messaage from 'disk:filepath: type: message' + arr = line.split(':', 3) + if len(arr) < 4: + return line + + colored = self.colors.BOLD + arr[0] + ':' + arr[1] + ':' + self.colors.NORMAL + colored += color + arr[2] + ':' + self.colors.NORMAL + colored += arr[3] + return colored + + def format(self, rec): + frame = sys._getframe() + while frame: + func = frame.f_code.co_name + if func == 'exec_command': + cmd = frame.f_locals.get('cmd') + if isinstance(cmd, list): + # Fix file case, it may be CL.EXE or cl.exe + argv0 = cmd[0].lower() + if 'cl.exe' in argv0: + lines = [] + # This will not work with "localized" versions + # of MSVC + for line in rec.msg.splitlines(): + if ': warning ' in line: + lines.append(self.parseMessage(line, self.colors.YELLOW)) + elif ': error ' in line: + lines.append(self.parseMessage(line, self.colors.RED)) + elif ': fatal error ' in line: + lines.append(self.parseMessage(line, self.colors.RED + self.colors.BOLD)) + elif ': note: ' in line: + lines.append(self.parseMessage(line, self.colors.CYAN)) + else: + lines.append(line) + rec.msg = "\n".join(lines) + frame = frame.f_back + return Logs.formatter.format(self, rec) + +def options(opt): + Logs.log.handlers[0].setFormatter(ColorMSVCFormatter(Logs.colors)) + diff --git a/extras/doxygen.py b/extras/doxygen.py index 28f56e9..20cd9e1 100644 --- a/extras/doxygen.py +++ b/extras/doxygen.py @@ -27,6 +27,7 @@ When using this tool, the wscript will look like: """ import os, os.path, re +from collections import OrderedDict from waflib import Task, Utils, Node from waflib.TaskGen import feature @@ -40,7 +41,13 @@ inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx re_rl = re.compile('\\\\\r*\n', re.MULTILINE) re_nl = re.compile('\r*\n', re.M) def parse_doxy(txt): - tbl = {} + ''' + Parses a doxygen file. + Returns an ordered dictionary. We cannot return a default dictionary, as the + order in which the entries are reported does matter, especially for the + '@INCLUDE' lines. 
+ ''' + tbl = OrderedDict() txt = re_rl.sub('', txt) lines = re_nl.split(txt) for x in lines: @@ -78,6 +85,12 @@ class doxygen(Task.Task): if not getattr(self, 'pars', None): txt = self.inputs[0].read() self.pars = parse_doxy(txt) + + # Override with any parameters passed to the task generator + if getattr(self.generator, 'pars', None): + for k, v in self.generator.pars.items(): + self.pars[k] = v + if self.pars.get('OUTPUT_DIRECTORY'): # Use the path parsed from the Doxyfile as an absolute path output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY']) @@ -87,11 +100,6 @@ class doxygen(Task.Task): output_node.mkdir() self.pars['OUTPUT_DIRECTORY'] = output_node.abspath() - # Override with any parameters passed to the task generator - if getattr(self.generator, 'pars', None): - for k, v in self.generator.pars.items(): - self.pars[k] = v - self.doxy_inputs = getattr(self, 'doxy_inputs', []) if not self.pars.get('INPUT'): self.doxy_inputs.append(self.inputs[0].parent) diff --git a/extras/fast_partial.py b/extras/fast_partial.py index d5b6144..90a9472 100644 --- a/extras/fast_partial.py +++ b/extras/fast_partial.py @@ -18,7 +18,9 @@ Usage:: opt.load('fast_partial') Assumptions: +* Start with a clean build (run "waf distclean" after enabling) * Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled) + try it in the folder generated by utils/genbench.py * For full project builds: no --targets and no pruning from subfolders * The installation phase is ignored * `use=` dependencies are specified up front even across build groups @@ -130,12 +132,18 @@ class bld_proxy(object): data[x] = getattr(self, x) db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key) - try: - waflib.Node.pickle_lock.acquire() + with waflib.Node.pickle_lock: waflib.Node.Nod3 = self.node_class - x = Build.cPickle.dumps(data, Build.PROTOCOL) - finally: - waflib.Node.pickle_lock.release() + try: + x = Build.cPickle.dumps(data, Build.PROTOCOL) + except Build.cPickle.PicklingError: + root = data['root'] + for node_deps in data['node_deps'].values(): + for idx, node in enumerate(node_deps): + # there may be more cross-context Node objects to fix, + # but this should be the main source + node_deps[idx] = root.find_node(node.abspath()) + x = Build.cPickle.dumps(data, Build.PROTOCOL) Logs.debug('rev_use: storing %s', db) Utils.writef(db + '.tmp', x, m='wb') @@ -392,12 +400,17 @@ def is_stale(self): Logs.debug('rev_use: must post %r because this is a clean build') return True - # 3. check if the configuration changed - if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat: + # 3.a check if the configuration exists + cache_node = self.bld.bldnode.find_node('c4che/build.config.py') + if not cache_node: + return True + + # 3.b check if the configuration changed + if os.stat(cache_node.abspath()).st_mtime > dbstat: Logs.debug('rev_use: must post %r because the configuration has changed', self.name) return True - # 3.a any tstamp data? + # 3.c any tstamp data? 
try: f_deps = self.bld.f_deps except AttributeError: diff --git a/extras/genpybind.py b/extras/genpybind.py new file mode 100644 index 0000000..ac206ee --- /dev/null +++ b/extras/genpybind.py @@ -0,0 +1,194 @@ +import os +import pipes +import subprocess +import sys + +from waflib import Logs, Task, Context +from waflib.Tools.c_preproc import scan as scan_impl +# ^-- Note: waflib.extras.gccdeps.scan does not work for us, +# due to its current implementation: +# The -MD flag is injected into the {C,CXX}FLAGS environment variable and +# dependencies are read out in a separate step after compiling by reading +# the .d file saved alongside the object file. +# As the genpybind task refers to a header file that is never compiled itself, +# gccdeps will not be able to extract the list of dependencies. + +from waflib.TaskGen import feature, before_method + + +def join_args(args): + return " ".join(pipes.quote(arg) for arg in args) + + +def configure(cfg): + cfg.load("compiler_cxx") + cfg.load("python") + cfg.check_python_version(minver=(2, 7)) + if not cfg.env.LLVM_CONFIG: + cfg.find_program("llvm-config", var="LLVM_CONFIG") + if not cfg.env.GENPYBIND: + cfg.find_program("genpybind", var="GENPYBIND") + + # find clang reasource dir for builtin headers + cfg.env.GENPYBIND_RESOURCE_DIR = os.path.join( + cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--libdir"]).strip(), + "clang", + cfg.cmd_and_log(cfg.env.LLVM_CONFIG + ["--version"]).strip()) + if os.path.exists(cfg.env.GENPYBIND_RESOURCE_DIR): + cfg.msg("Checking clang resource dir", cfg.env.GENPYBIND_RESOURCE_DIR) + else: + cfg.fatal("Clang resource dir not found") + + +@feature("genpybind") +@before_method("process_source") +def generate_genpybind_source(self): + """ + Run genpybind on the headers provided in `source` and compile/link the + generated code instead. This works by generating the code on the fly and + swapping the source node before `process_source` is run. + """ + # name of module defaults to name of target + module = getattr(self, "module", self.target) + + # create temporary source file in build directory to hold generated code + out = "genpybind-%s.%d.cpp" % (module, self.idx) + out = self.path.get_bld().find_or_declare(out) + + task = self.create_task("genpybind", self.to_nodes(self.source), out) + # used to detect whether CFLAGS or CXXFLAGS should be passed to genpybind + task.features = self.features + task.module = module + # can be used to select definitions to include in the current module + # (when header files are shared by more than one module) + task.genpybind_tags = self.to_list(getattr(self, "genpybind_tags", [])) + # additional include directories + task.includes = self.to_list(getattr(self, "includes", [])) + task.genpybind = self.env.GENPYBIND + + # Tell waf to compile/link the generated code instead of the headers + # originally passed-in via the `source` parameter. (see `process_source`) + self.source = [out] + + +class genpybind(Task.Task): # pylint: disable=invalid-name + """ + Runs genpybind on headers provided as input to this task. + Generated code will be written to the first (and only) output node. 
+ """ + quiet = True + color = "PINK" + scan = scan_impl + + @staticmethod + def keyword(): + return "Analyzing" + + def run(self): + if not self.inputs: + return + + args = self.find_genpybind() + self._arguments( + resource_dir=self.env.GENPYBIND_RESOURCE_DIR) + + output = self.run_genpybind(args) + + # For debugging / log output + pasteable_command = join_args(args) + + # write generated code to file in build directory + # (will be compiled during process_source stage) + (output_node,) = self.outputs + output_node.write("// {}\n{}\n".format( + pasteable_command.replace("\n", "\n// "), output)) + + def find_genpybind(self): + return self.genpybind + + def run_genpybind(self, args): + bld = self.generator.bld + + kwargs = dict(cwd=bld.variant_dir) + if hasattr(bld, "log_command"): + bld.log_command(args, kwargs) + else: + Logs.debug("runner: {!r}".format(args)) + proc = subprocess.Popen( + args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, **kwargs) + stdout, stderr = proc.communicate() + + if not isinstance(stdout, str): + stdout = stdout.decode(sys.stdout.encoding, errors="replace") + if not isinstance(stderr, str): + stderr = stderr.decode(sys.stderr.encoding, errors="replace") + + if proc.returncode != 0: + bld.fatal( + "genpybind returned {code} during the following call:" + "\n{command}\n\n{stdout}\n\n{stderr}".format( + code=proc.returncode, + command=join_args(args), + stdout=stdout, + stderr=stderr, + )) + + if stderr.strip(): + Logs.debug("non-fatal warnings during genpybind run:\n{}".format(stderr)) + + return stdout + + def _include_paths(self): + return self.generator.to_incnodes(self.includes + self.env.INCLUDES) + + def _inputs_as_relative_includes(self): + include_paths = self._include_paths() + relative_includes = [] + for node in self.inputs: + for inc in include_paths: + if node.is_child_of(inc): + relative_includes.append(node.path_from(inc)) + break + else: + self.generator.bld.fatal("could not resolve {}".format(node)) + return relative_includes + + def _arguments(self, genpybind_parse=None, resource_dir=None): + args = [] + relative_includes = self._inputs_as_relative_includes() + is_cxx = "cxx" in self.features + + # options for genpybind + args.extend(["--genpybind-module", self.module]) + if self.genpybind_tags: + args.extend(["--genpybind-tag"] + self.genpybind_tags) + if relative_includes: + args.extend(["--genpybind-include"] + relative_includes) + if genpybind_parse: + args.extend(["--genpybind-parse", genpybind_parse]) + + args.append("--") + + # headers to be processed by genpybind + args.extend(node.abspath() for node in self.inputs) + + args.append("--") + + # options for clang/genpybind-parse + args.append("-D__GENPYBIND__") + args.append("-xc++" if is_cxx else "-xc") + has_std_argument = False + for flag in self.env["CXXFLAGS" if is_cxx else "CFLAGS"]: + flag = flag.replace("-std=gnu", "-std=c") + if flag.startswith("-std=c"): + has_std_argument = True + args.append(flag) + if not has_std_argument: + args.append("-std=c++14") + args.extend("-I{}".format(n.abspath()) for n in self._include_paths()) + args.extend("-D{}".format(p) for p in self.env.DEFINES) + + # point to clang resource dir, if specified + if resource_dir: + args.append("-resource-dir={}".format(resource_dir)) + + return args diff --git a/extras/local_rpath.py b/extras/local_rpath.py index b2507e1..e3923d9 100644 --- a/extras/local_rpath.py +++ b/extras/local_rpath.py @@ -2,18 +2,20 @@ # encoding: utf-8 # Thomas Nagy, 2011 (ita) +import copy from waflib.TaskGen import after_method, 
feature @after_method('propagate_uselib_vars') @feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib') def add_rpath_stuff(self): - all = self.to_list(getattr(self, 'use', [])) + all = copy.copy(self.to_list(getattr(self, 'use', []))) while all: name = all.pop() try: tg = self.bld.get_tgen_by_name(name) except: continue - self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath()) - all.extend(self.to_list(getattr(tg, 'use', []))) + if hasattr(tg, 'link_task'): + self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath()) + all.extend(self.to_list(getattr(tg, 'use', []))) diff --git a/extras/msvcdeps.py b/extras/msvcdeps.py index fc1ecd4..873a419 100644 --- a/extras/msvcdeps.py +++ b/extras/msvcdeps.py @@ -50,28 +50,35 @@ def apply_msvcdeps_flags(taskgen): if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0: taskgen.env.append_value(flag, PREPROCESSOR_FLAG) - # Figure out what casing conventions the user's shell used when - # launching Waf - (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath()) - taskgen.msvcdeps_drive_lowercase = drive == drive.lower() - def path_to_node(base_node, path, cached_nodes): - # Take the base node and the path and return a node - # Results are cached because searching the node tree is expensive - # The following code is executed by threads, it is not safe, so a lock is needed... - if getattr(path, '__hash__'): - node_lookup_key = (base_node, path) - else: - # Not hashable, assume it is a list and join into a string - node_lookup_key = (base_node, os.path.sep.join(path)) + ''' + Take the base node and the path and return a node + Results are cached because searching the node tree is expensive + The following code is executed by threads, it is not safe, so a lock is needed... + ''' + # normalize the path because ant_glob() does not understand + # parent path components (..) + path = os.path.normpath(path) + + # normalize the path case to increase likelihood of a cache hit + path = os.path.normcase(path) + + # ant_glob interprets [] and () characters, so those must be replaced + path = path.replace('[', '?').replace(']', '?').replace('(', '[(]').replace(')', '[)]') + + node_lookup_key = (base_node, path) + try: - lock.acquire() node = cached_nodes[node_lookup_key] except KeyError: - node = base_node.find_resource(path) - cached_nodes[node_lookup_key] = node - finally: - lock.release() + # retry with lock on cache miss + with lock: + try: + node = cached_nodes[node_lookup_key] + except KeyError: + node_list = base_node.ant_glob([path], ignorecase=True, remove=False, quiet=True, regex=False) + node = cached_nodes[node_lookup_key] = node_list[0] if node_list else None + return node def post_run(self): @@ -86,11 +93,6 @@ def post_run(self): unresolved_names = [] resolved_nodes = [] - lowercase = self.generator.msvcdeps_drive_lowercase - correct_case_path = bld.path.abspath() - correct_case_path_len = len(correct_case_path) - correct_case_path_norm = os.path.normcase(correct_case_path) - # Dynamically bind to the cache try: cached_nodes = bld.cached_nodes @@ -100,26 +102,15 @@ def post_run(self): for path in self.msvcdeps_paths: node = None if os.path.isabs(path): - # Force drive letter to match conventions of main source tree - drive, tail = os.path.splitdrive(path) - - if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm: - # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path. 
- path = correct_case_path + path[correct_case_path_len:] - else: - # Check the drive letter - if lowercase and (drive != drive.lower()): - path = drive.lower() + tail - elif (not lowercase) and (drive != drive.upper()): - path = drive.upper() + tail node = path_to_node(bld.root, path, cached_nodes) else: + # when calling find_resource, make sure the path does not begin with '..' base_node = bld.bldnode - # when calling find_resource, make sure the path does not begin by '..' path = [k for k in Utils.split_path(path) if k and k != '.'] while path[0] == '..': - path = path[1:] + path.pop(0) base_node = base_node.parent + path = os.sep.join(path) node = path_to_node(base_node, path, cached_nodes) @@ -213,8 +204,12 @@ def exec_command(self, cmd, **kw): raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw) ret = 0 except Errors.WafError as e: - raw_out = e.stdout - ret = e.returncode + # Use e.msg if e.stdout is not set + raw_out = getattr(e, 'stdout', e.msg) + + # Return non-zero error code even if we didn't + # get one from the exception object + ret = getattr(e, 'returncode', 1) for line in raw_out.splitlines(): if line.startswith(INCLUDE_PATTERN): diff --git a/extras/objcopy.py b/extras/objcopy.py index 82d8359..bb7ca6e 100644 --- a/extras/objcopy.py +++ b/extras/objcopy.py @@ -15,7 +15,7 @@ objcopy_flags Additional flags passed to objcopy. """ from waflib.Utils import def_attrs -from waflib import Task +from waflib import Task, Options from waflib.TaskGen import feature, after_method class objcopy(Task.Task): @@ -46,5 +46,8 @@ def map_objcopy(self): self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0]) def configure(ctx): - ctx.find_program('objcopy', var='OBJCOPY', mandatory=True) - + program_name = 'objcopy' + prefix = getattr(Options.options, 'cross_prefix', None) + if prefix: + program_name = '{}-{}'.format(prefix, program_name) + ctx.find_program(program_name, var='OBJCOPY', mandatory=True) diff --git a/extras/protoc.py b/extras/protoc.py index 839c510..4a519cc 100644 --- a/extras/protoc.py +++ b/extras/protoc.py @@ -175,6 +175,7 @@ def process_protoc(self, node): self.javac_task.srcdir.append(node.parent.get_bld()) protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath()) + node.parent.get_bld().mkdir() tsk = self.create_task('protoc', node, out_nodes) tsk.env.append_value('PROTOC_FLAGS', protoc_flags) diff --git a/extras/pyqt5.py b/extras/pyqt5.py index 80f43b8..9c94176 100644 --- a/extras/pyqt5.py +++ b/extras/pyqt5.py @@ -1,6 +1,6 @@ #!/usr/bin/env python # encoding: utf-8 -# Federico Pellegrin, 2016-2018 (fedepell) adapted for Python +# Federico Pellegrin, 2016-2019 (fedepell) adapted for Python """ This tool helps with finding Python Qt5 tools and libraries, @@ -30,7 +30,7 @@ Load the "pyqt5" tool. 
Add into the sources list also the qrc resources files or ui5 definition files and they will be translated into python code -with the system tools (PyQt5, pyside2, PyQt4 are searched in this +with the system tools (PyQt5, PySide2, PyQt4 are searched in this order) and then compiled """ @@ -207,11 +207,15 @@ def configure(self): @conf def find_pyqt5_binaries(self): """ - Detects PyQt5 or pyside2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc + Detects PyQt5 or PySide2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc """ env = self.env - if getattr(Options.options, 'want_pyside2', True): + if getattr(Options.options, 'want_pyqt5', True): + self.find_program(['pyuic5'], var='QT_PYUIC') + self.find_program(['pyrcc5'], var='QT_PYRCC') + self.find_program(['pylupdate5'], var='QT_PYLUPDATE') + elif getattr(Options.options, 'want_pyside2', True): self.find_program(['pyside2-uic'], var='QT_PYUIC') self.find_program(['pyside2-rcc'], var='QT_PYRCC') self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE') @@ -227,7 +231,7 @@ def find_pyqt5_binaries(self): if not env.QT_PYUIC: self.fatal('cannot find the uic compiler for python for qt5') - if not env.QT_PYUIC: + if not env.QT_PYRCC: self.fatal('cannot find the rcc compiler for python for qt5') self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE') @@ -237,5 +241,6 @@ def options(opt): Command-line options """ pyqt5opt=opt.add_option_group("Python QT5 Options") - pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use pyside2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after)') + pyqt5opt.add_option('--pyqt5-pyqt5', action='store_true', default=False, dest='want_pyqt5', help='use PyQt5 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') + pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use PySide2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)') diff --git a/extras/sphinx.py b/extras/sphinx.py new file mode 100644 index 0000000..ce11110 --- /dev/null +++ b/extras/sphinx.py @@ -0,0 +1,81 @@ +"""Support for Sphinx documentation + +This is a wrapper for the sphinx-build program. Please note that sphinx-build supports only one output format, which can +be passed to the build via the sphinx_output_format attribute. The default output format is html. + +Example wscript: + +def configure(cnf): + cnf.load('sphinx') + +def build(bld): + bld( + features='sphinx', + sphinx_source='sources', # path to source directory + sphinx_options='-a -v', # sphinx-build program additional options + sphinx_output_format='man' # output format of sphinx documentation + ) + +""" + +from waflib.Node import Node +from waflib import Utils +from waflib.Task import Task +from waflib.TaskGen import feature, after_method + + +def configure(cnf): + """Check that the sphinx-build program is available and load the gnu_dirs tool.""" + cnf.find_program('sphinx-build', var='SPHINX_BUILD', mandatory=False) + cnf.load('gnu_dirs') + + +@feature('sphinx') +def build_sphinx(self): + """Builds sphinx sources.
+ """ + if not self.env.SPHINX_BUILD: + self.bld.fatal('Program SPHINX_BUILD not defined.') + if not getattr(self, 'sphinx_source', None): + self.bld.fatal('Attribute sphinx_source not defined.') + if not isinstance(self.sphinx_source, Node): + self.sphinx_source = self.path.find_node(self.sphinx_source) + if not self.sphinx_source: + self.bld.fatal('Can\'t find sphinx_source: %r' % self.sphinx_source) + + Utils.def_attrs(self, sphinx_output_format='html') + self.env.SPHINX_OUTPUT_FORMAT = self.sphinx_output_format + self.env.SPHINX_OPTIONS = getattr(self, 'sphinx_options', []) + + for source_file in self.sphinx_source.ant_glob('**/*'): + self.bld.add_manual_dependency(self.sphinx_source, source_file) + + sphinx_build_task = self.create_task('SphinxBuildingTask') + sphinx_build_task.set_inputs(self.sphinx_source) + sphinx_build_task.set_outputs(self.path.get_bld()) + + # the sphinx-build results are in directory + sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT) + sphinx_output_directory.mkdir() + Utils.def_attrs(self, install_path=get_install_path(self)) + self.add_install_files(install_to=self.install_path, + install_from=sphinx_output_directory.ant_glob('**/*'), + cwd=sphinx_output_directory, + relative_trick=True) + + +def get_install_path(tg): + if tg.env.SPHINX_OUTPUT_FORMAT == 'man': + return tg.env.MANDIR + elif tg.env.SPHINX_OUTPUT_FORMAT == 'info': + return tg.env.INFODIR + else: + return tg.env.DOCDIR + + +class SphinxBuildingTask(Task): + color = 'BOLD' + run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} ${SPHINX_OPTIONS}' + + def keyword(self): + return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT diff --git a/extras/syms.py b/extras/syms.py index dfa0059..562f708 100644 --- a/extras/syms.py +++ b/extras/syms.py @@ -31,7 +31,7 @@ class gen_sym(Task): if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows re_nm = re.compile(r'(T|D)\s+_(?P%s)\b' % reg) elif self.env.DEST_BINFMT=='mac-o': - re_nm=re.compile(r'(T|D)\s+(?P_?%s)\b' % reg) + re_nm=re.compile(r'(T|D)\s+(?P_?(%s))\b' % reg) else: re_nm = re.compile(r'(T|D)\s+(?P%s)\b' % reg) cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()] diff --git a/processor.py b/processor.py index 2eecf3b..eff2e69 100755 --- a/processor.py +++ b/processor.py @@ -27,6 +27,10 @@ def run(): [cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt)) cargs = cargs or {} + if not 'close_fds' in kwargs: + # workers have no fds + kwargs['close_fds'] = False + ret = 1 out, err, ex, trace = (None, None, None, None) try: -- cgit v1.2.1