summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--Build.py6
-rw-r--r--Configure.py25
-rwxr-xr-x[-rw-r--r--]Context.py8
-rw-r--r--Options.py21
-rw-r--r--Scripting.py6
-rw-r--r--Tools/asm.py5
-rw-r--r--Tools/c_config.py6
-rw-r--r--Tools/compiler_c.py2
-rw-r--r--Tools/compiler_cxx.py2
-rw-r--r--Tools/irixcc.py14
-rw-r--r--Tools/javaw.py2
-rw-r--r--Tools/msvc.py16
-rw-r--r--Tools/qt5.py26
-rw-r--r--Tools/waf_unit_test.py10
-rw-r--r--extras/boost.py5
-rw-r--r--extras/c_dumbpreproc.py2
-rw-r--r--extras/clang_compilation_database.py2
-rw-r--r--extras/doxygen.py5
-rw-r--r--extras/file_to_object.py9
-rw-r--r--extras/gccdeps.py36
-rwxr-xr-xextras/javatest.py16
-rw-r--r--extras/msvcdeps.py27
-rw-r--r--extras/pch.py4
-rw-r--r--extras/sphinx.py40
-rw-r--r--extras/wafcache.py46
-rw-r--r--extras/xcode6.py18
26 files changed, 255 insertions, 104 deletions
diff --git a/Build.py b/Build.py
index 39f0991..5283761 100644
--- a/Build.py
+++ b/Build.py
@@ -753,10 +753,12 @@ class BuildContext(Context.Context):
else:
ln = self.launch_node()
if ln.is_child_of(self.bldnode):
- Logs.warn('Building from the build directory, forcing --targets=*')
+ if Logs.verbose > 1:
+ Logs.warn('Building from the build directory, forcing --targets=*')
ln = self.srcnode
elif not ln.is_child_of(self.srcnode):
- Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath())
+ if Logs.verbose > 1:
+ Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath())
ln = self.srcnode
def is_post(tg, ln):
diff --git a/Configure.py b/Configure.py
index 5762eb6..e733394 100644
--- a/Configure.py
+++ b/Configure.py
@@ -508,23 +508,27 @@ def find_binary(self, filenames, exts, paths):
@conf
def run_build(self, *k, **kw):
"""
- Create a temporary build context to execute a build. A reference to that build
- context is kept on self.test_bld for debugging purposes, and you should not rely
- on it too much (read the note on the cache below).
- The parameters given in the arguments to this function are passed as arguments for
- a single task generator created in the build. Only three parameters are obligatory:
+ Create a temporary build context to execute a build. A temporary reference to that build
+ context is kept on self.test_bld for debugging purposes.
+ The arguments to this function are passed to a single task generator for that build.
+ Only three parameters are mandatory:
:param features: features to pass to a task generator created in the build
:type features: list of string
:param compile_filename: file to create for the compilation (default: *test.c*)
:type compile_filename: string
- :param code: code to write in the filename to compile
+ :param code: input file contents
:type code: string
- Though this function returns *0* by default, the build may set an attribute named *retval* on the
+ Though this function returns *0* by default, the build may bind an attribute named *retval* on the
build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
- This function also features a cache which can be enabled by the following option::
+ The temporary build creates a temporary folder; the name of that folder is calculated
+ by hashing input arguments to this function, with the exception of :py:class:`waflib.ConfigSet.ConfigSet`
+ objects which are used for both reading and writing values.
+
+ This function also features a cache which is disabled by default; that cache relies
+ on the hash value calculated as indicated above::
def options(opt):
opt.add_option('--confcache', dest='confcache', default=0,
@@ -538,7 +542,10 @@ def run_build(self, *k, **kw):
buf = []
for key in sorted(kw.keys()):
v = kw[key]
- if hasattr(v, '__call__'):
+ if isinstance(v, ConfigSet.ConfigSet):
+ # values are being written to, so they are excluded from contributing to the hash
+ continue
+ elif hasattr(v, '__call__'):
buf.append(Utils.h_fun(v))
else:
buf.append(str(v))
diff --git a/Context.py b/Context.py
index ec3cb66..0ce9df6 100644..100755
--- a/Context.py
+++ b/Context.py
@@ -18,13 +18,13 @@ else:
import imp
# the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2001300
+HEXVERSION=0x2001500
"""Constant updated on new releases"""
-WAFVERSION="2.0.19"
+WAFVERSION="2.0.21"
"""Constant updated on new releases"""
-WAFREVISION="e83405712e95b47c040763fdfa468c04dfe72e4b"
+WAFREVISION="edde20a6425a5c3eb6b47d5f3f5c4fbc93fed5f4"
"""Git revision when the waf version is updated"""
WAFNAME="waf"
@@ -530,7 +530,7 @@ class Context(ctx):
"""
Prints a configuration message of the form ``msg: result``.
The second part of the message will be in colors. The output
- can be disabled easly by setting ``in_msg`` to a positive value::
+ can be disabled easily by setting ``in_msg`` to a positive value::
def configure(conf):
self.in_msg = 1
diff --git a/Options.py b/Options.py
index db67953..d410491 100644
--- a/Options.py
+++ b/Options.py
@@ -62,6 +62,21 @@ class opt_parser(optparse.OptionParser):
else:
self.error(str(e))
+ def _process_long_opt(self, rargs, values):
+ # --custom-option=-ftxyz is interpreted as -f -t... see #2280
+ if self.allow_unknown:
+ back = [] + rargs
+ try:
+ optparse.OptionParser._process_long_opt(self, rargs, values)
+ except optparse.BadOptionError:
+ while rargs:
+ rargs.pop()
+ rargs.extend(back)
+ rargs.pop(0)
+ raise
+ else:
+ optparse.OptionParser._process_long_opt(self, rargs, values)
+
def print_usage(self, file=None):
return self.print_help(file)
@@ -141,9 +156,9 @@ class OptionsContext(Context.Context):
gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')
- gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
- gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
- gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
+ gr.add_option('--no-lock-in-run', action='store_true', default=os.environ.get('NO_LOCK_IN_RUN', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
+ gr.add_option('--no-lock-in-out', action='store_true', default=os.environ.get('NO_LOCK_IN_OUT', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
+ gr.add_option('--no-lock-in-top', action='store_true', default=os.environ.get('NO_LOCK_IN_TOP', ''), help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')
default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
if not default_prefix:
diff --git a/Scripting.py b/Scripting.py
index 68dccf2..da83a21 100644
--- a/Scripting.py
+++ b/Scripting.py
@@ -306,7 +306,7 @@ def distclean(ctx):
# remove a build folder, if any
cur = '.'
- if ctx.options.no_lock_in_top:
+ if os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top:
cur = ctx.options.out
try:
@@ -333,9 +333,9 @@ def distclean(ctx):
remove_and_log(env.out_dir, shutil.rmtree)
env_dirs = [env.out_dir]
- if not ctx.options.no_lock_in_top:
+ if not (os.environ.get('NO_LOCK_IN_TOP') or ctx.options.no_lock_in_top):
env_dirs.append(env.top_dir)
- if not ctx.options.no_lock_in_run:
+ if not (os.environ.get('NO_LOCK_IN_RUN') or ctx.options.no_lock_in_run):
env_dirs.append(env.run_dir)
for k in env_dirs:
p = os.path.join(k, Options.lockfile)
diff --git a/Tools/asm.py b/Tools/asm.py
index a57e83b..1d34dda 100644
--- a/Tools/asm.py
+++ b/Tools/asm.py
@@ -56,13 +56,11 @@ class asm(Task.Task):
Compiles asm files by gas/nasm/yasm/...
"""
color = 'BLUE'
- run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
+ run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${ASMDEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
def scan(self):
if self.env.ASM_NAME == 'gas':
return c_preproc.scan(self)
- Logs.warn('There is no dependency scanner for Nasm!')
- return [[], []]
elif self.env.ASM_NAME == 'nasm':
Logs.warn('The Nasm dependency scanner is incomplete!')
@@ -106,3 +104,4 @@ class asmstlib(stlink_task):
def configure(conf):
conf.env.ASMPATH_ST = '-I%s'
+ conf.env.ASMDEFINES_ST = '-D%s'
diff --git a/Tools/c_config.py b/Tools/c_config.py
index 537af03..03b6bf6 100644
--- a/Tools/c_config.py
+++ b/Tools/c_config.py
@@ -68,6 +68,7 @@ MACRO_TO_DEST_CPU = {
'__s390__' : 's390',
'__sh__' : 'sh',
'__xtensa__' : 'xtensa',
+'__e2k__' : 'e2k',
}
@conf
@@ -150,7 +151,7 @@ def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=No
elif x.startswith('-std='):
prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
app(prefix, x)
- elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'):
+ elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie', '-flto', '-fno-lto'):
app('CFLAGS', x)
app('CXXFLAGS', x)
app('LINKFLAGS', x)
@@ -1282,10 +1283,11 @@ def multicheck(self, *k, **kw):
tasks = []
id_to_task = {}
- for dct in k:
+ for counter, dct in enumerate(k):
x = Task.classes['cfgtask'](bld=bld, env=None)
tasks.append(x)
x.args = dct
+ x.args['multicheck_counter'] = counter
x.bld = bld
x.conf = self
x.args = dct
diff --git a/Tools/compiler_c.py b/Tools/compiler_c.py
index 2dba3f8..931dc57 100644
--- a/Tools/compiler_c.py
+++ b/Tools/compiler_c.py
@@ -37,7 +37,7 @@ from waflib.Logs import debug
c_compiler = {
'win32': ['msvc', 'gcc', 'clang'],
-'cygwin': ['gcc'],
+'cygwin': ['gcc', 'clang'],
'darwin': ['clang', 'gcc'],
'aix': ['xlc', 'gcc', 'clang'],
'linux': ['gcc', 'clang', 'icc'],
diff --git a/Tools/compiler_cxx.py b/Tools/compiler_cxx.py
index 1af65a2..09fca7e 100644
--- a/Tools/compiler_cxx.py
+++ b/Tools/compiler_cxx.py
@@ -38,7 +38,7 @@ from waflib.Logs import debug
cxx_compiler = {
'win32': ['msvc', 'g++', 'clang++'],
-'cygwin': ['g++'],
+'cygwin': ['g++', 'clang++'],
'darwin': ['clang++', 'g++'],
'aix': ['xlc++', 'g++', 'clang++'],
'linux': ['g++', 'clang++', 'icpc'],
diff --git a/Tools/irixcc.py b/Tools/irixcc.py
index c3ae1ac..0335c13 100644
--- a/Tools/irixcc.py
+++ b/Tools/irixcc.py
@@ -13,22 +13,11 @@ from waflib.Configure import conf
@conf
def find_irixcc(conf):
v = conf.env
- cc = None
- if v.CC:
- cc = v.CC
- elif 'CC' in conf.environ:
- cc = conf.environ['CC']
- if not cc:
- cc = conf.find_program('cc', var='CC')
- if not cc:
- conf.fatal('irixcc was not found')
-
+ cc = conf.find_program('cc', var='CC')
try:
conf.cmd_and_log(cc + ['-version'])
except Errors.WafError:
conf.fatal('%r -version could not be executed' % cc)
-
- v.CC = cc
v.CC_NAME = 'irix'
@conf
@@ -57,7 +46,6 @@ def irixcc_common_flags(conf):
def configure(conf):
conf.find_irixcc()
- conf.find_cpp()
conf.find_ar()
conf.irixcc_common_flags()
conf.cc_load_tools()
diff --git a/Tools/javaw.py b/Tools/javaw.py
index ceb08c2..b7f5dd1 100644
--- a/Tools/javaw.py
+++ b/Tools/javaw.py
@@ -251,7 +251,7 @@ def use_javac_files(self):
base_node = tg.path.get_bld()
self.use_lst.append(base_node.abspath())
- self.javac_task.dep_nodes.extend([x for x in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+ self.javac_task.dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
for tsk in tg.tasks:
self.javac_task.set_run_after(tsk)
diff --git a/Tools/msvc.py b/Tools/msvc.py
index 26ca7b2..37233be 100644
--- a/Tools/msvc.py
+++ b/Tools/msvc.py
@@ -99,7 +99,13 @@ all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'),
"""List of icl platforms"""
def options(opt):
- opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
+ default_ver = ''
+ vsver = os.getenv('VSCMD_VER')
+ if vsver:
+ m = re.match(r'(^\d+\.\d+).*', vsver)
+ if m:
+ default_ver = 'msvc %s' % m.group(1)
+ opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default=default_ver)
opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
@@ -723,10 +729,6 @@ def libname_msvc(self, libname, is_static=False):
_libpaths = self.env.LIBPATH
static_libs=[
- 'lib%ss.a' % lib,
- 'lib%s.a' % lib,
- '%ss.a' % lib,
- '%s.a' %lib,
'lib%ss.lib' % lib,
'lib%s.lib' % lib,
'%ss.lib' % lib,
@@ -926,7 +928,7 @@ def msvc_common_flags(conf):
v.LIB_ST = '%s.lib'
v.LIBPATH_ST = '/LIBPATH:%s'
- v.STLIB_ST = '%s.a'
+ v.STLIB_ST = '%s.lib'
v.STLIBPATH_ST = '/LIBPATH:%s'
if v.MSVC_MANIFEST:
@@ -940,7 +942,7 @@ def msvc_common_flags(conf):
v.IMPLIB_ST = '/IMPLIB:%s'
v.LINKFLAGS_cstlib = []
- v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.a'
+ v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib'
v.cprogram_PATTERN = v.cxxprogram_PATTERN = '%s.exe'
diff --git a/Tools/qt5.py b/Tools/qt5.py
index 99e021b..cff2028 100644
--- a/Tools/qt5.py
+++ b/Tools/qt5.py
@@ -57,7 +57,23 @@ A few options (--qt{dir,bin,...}) and environment variables
(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
tool path selection, etc; please read the source for more info.
-The detection uses pkg-config on Linux by default. To force static library detection use:
+The detection uses pkg-config on Linux by default. The list of
+libraries to be requested to pkg-config is formulated by scanning
+in the QTLIBS directory (that can be passed via --qtlibs or by
+setting the environment variable QT5_LIBDIR otherwise is derived
+by querying qmake for QT_INSTALL_LIBS directory) for shared/static
+libraries present.
+Alternatively the list of libraries to be requested via pkg-config
+can be set using the qt5_vars attribute, ie:
+
+ conf.qt5_vars = ['Qt5Core', 'Qt5Gui', 'Qt5Widgets', 'Qt5Test'];
+
+This can speed up configuration phase if needed libraries are
+known beforehand, can improve detection on systems with a
+sparse QT5 libraries installation (ie. NIX) and can improve
+detection of some header-only Qt modules (ie. Qt5UiPlugin).
+
+To force static library detection use:
QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
"""
@@ -466,6 +482,9 @@ def configure(self):
The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
"""
+ if 'COMPILER_CXX' not in self.env:
+ self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
+
self.find_qt5_binaries()
self.set_qt5_libs_dir()
self.set_qt5_libs_to_check()
@@ -478,9 +497,6 @@ def configure(self):
if not has_xml:
Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
- if 'COMPILER_CXX' not in self.env:
- self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
-
# Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
frag = '#include <QMap>\nint main(int argc, char **argv) {QMap<int,int> m;return m.keys().size();}\n'
uses = 'QT5CORE'
@@ -637,7 +653,7 @@ def set_qt5_libs_dir(self):
except Errors.WafError:
qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
qtlibs = os.path.join(qtdir, 'lib')
- self.msg('Found the Qt5 libraries in', qtlibs)
+ self.msg('Found the Qt5 library path', qtlibs)
env.QTLIBS = qtlibs
@conf
diff --git a/Tools/waf_unit_test.py b/Tools/waf_unit_test.py
index 6ff6f72..dc66fe9 100644
--- a/Tools/waf_unit_test.py
+++ b/Tools/waf_unit_test.py
@@ -97,6 +97,7 @@ def make_interpreted_test(self):
if isinstance(v, str):
v = v.split(os.pathsep)
self.ut_env[k] = os.pathsep.join(p + v)
+ self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
@feature('test')
@after_method('apply_link', 'process_use')
@@ -108,7 +109,8 @@ def make_test(self):
tsk = self.create_task('utest', self.link_task.outputs)
if getattr(self, 'ut_str', None):
self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
- tsk.vars = lst + tsk.vars
+ tsk.vars = tsk.vars + lst
+ self.env.append_value('UT_DEPS', self.ut_str)
self.handle_ut_cwd('ut_cwd')
@@ -139,6 +141,10 @@ def make_test(self):
if not hasattr(self, 'ut_cmd'):
self.ut_cmd = getattr(Options.options, 'testcmd', False)
+ self.env.append_value('UT_DEPS', str(self.ut_cmd))
+ self.env.append_value('UT_DEPS', self.ut_paths)
+ self.env.append_value('UT_DEPS', ['%r%r' % (key, self.ut_env[key]) for key in self.ut_env])
+
@taskgen_method
def add_test_results(self, tup):
"""Override and return tup[1] to interrupt the build immediately if a test does not run"""
@@ -159,7 +165,7 @@ class utest(Task.Task):
"""
color = 'PINK'
after = ['vnum', 'inst']
- vars = []
+ vars = ['UT_DEPS']
def runnable_status(self):
"""
diff --git a/extras/boost.py b/extras/boost.py
index c2aaaa9..93b312a 100644
--- a/extras/boost.py
+++ b/extras/boost.py
@@ -270,10 +270,12 @@ def boost_get_libs(self, *k, **kw):
return file
return None
+ # extensions from Tools.ccroot.lib_patterns
+ wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$")
def format_lib_name(name):
if name.startswith('lib') and self.env.CC_NAME != 'msvc':
name = name[3:]
- return name[:name.rfind('.')]
+ return wo_ext.sub("", name)
def match_libs(lib_names, is_static):
libs = []
@@ -522,4 +524,3 @@ def install_boost(self):
except:
continue
install_boost.done = False
-
diff --git a/extras/c_dumbpreproc.py b/extras/c_dumbpreproc.py
index ce9e1a4..1fdd5c3 100644
--- a/extras/c_dumbpreproc.py
+++ b/extras/c_dumbpreproc.py
@@ -66,7 +66,7 @@ class dumb_parser(parser):
if x == c_preproc.POPFILE:
self.currentnode_stack.pop()
continue
- self.tryfind(y)
+ self.tryfind(y, env=env)
c_preproc.c_parser = dumb_parser
diff --git a/extras/clang_compilation_database.py b/extras/clang_compilation_database.py
index 1398b0a..ff71f22 100644
--- a/extras/clang_compilation_database.py
+++ b/extras/clang_compilation_database.py
@@ -123,7 +123,7 @@ def patch_execute():
"""
Invoke clangdb command before build
"""
- if type(self) == Build.BuildContext:
+ if self.cmd.startswith('build'):
Scripting.run_command('clangdb')
old_execute_build(self)
diff --git a/extras/doxygen.py b/extras/doxygen.py
index 20cd9e1..0fda703 100644
--- a/extras/doxygen.py
+++ b/extras/doxygen.py
@@ -69,6 +69,7 @@ def parse_doxy(txt):
class doxygen(Task.Task):
vars = ['DOXYGEN', 'DOXYFLAGS']
color = 'BLUE'
+ ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ]
def runnable_status(self):
'''
@@ -207,10 +208,10 @@ def process_doxy(self):
self.bld.fatal('doxygen file %s not found' % self.doxyfile)
# the task instance
- dsk = self.create_task('doxygen', node)
+ dsk = self.create_task('doxygen', node, always_run=getattr(self, 'always', False))
if getattr(self, 'doxy_tar', None):
- tsk = self.create_task('tar')
+ tsk = self.create_task('tar', always_run=getattr(self, 'always', False))
tsk.input_tasks = [dsk]
tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
if self.doxy_tar.endswith('bz2'):
diff --git a/extras/file_to_object.py b/extras/file_to_object.py
index 1393b51..13d2aef 100644
--- a/extras/file_to_object.py
+++ b/extras/file_to_object.py
@@ -31,7 +31,7 @@ Known issues:
"""
-import os
+import os, sys
from waflib import Task, TaskGen, Errors
def filename_c_escape(x):
@@ -95,12 +95,17 @@ class file_to_object_c(Task.Task):
name = "_binary_" + "".join(name)
+ def char_to_num(ch):
+ if sys.version_info[0] < 3:
+ return ord(ch)
+ return ch
+
data = self.inputs[0].read('rb')
lines, line = [], []
for idx_byte, byte in enumerate(data):
line.append(byte)
if len(line) > 15 or idx_byte == size-1:
- lines.append(", ".join(("0x%02x" % ord(x)) for x in line))
+ lines.append(", ".join(("0x%02x" % char_to_num(x)) for x in line))
line = []
data = ",\n ".join(lines)
diff --git a/extras/gccdeps.py b/extras/gccdeps.py
index bfabe72..1fc9373 100644
--- a/extras/gccdeps.py
+++ b/extras/gccdeps.py
@@ -27,7 +27,7 @@ if not c_preproc.go_absolute:
gccdeps_flags = ['-MMD']
# Third-party tools are allowed to add extra names in here with append()
-supported_compilers = ['gcc', 'icc', 'clang']
+supported_compilers = ['gas', 'gcc', 'icc', 'clang']
def scan(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
@@ -163,10 +163,25 @@ def post_run(self):
def sig_implicit_deps(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return super(self.derived_gccdeps, self).sig_implicit_deps()
+ bld = self.generator.bld
+
try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
+ return self.compute_sig_implicit_deps()
+ except Errors.TaskNotReady:
+ raise ValueError("Please specify the build order precisely with gccdeps (asm/c/c++ tasks)")
+ except EnvironmentError:
+ # If a file is renamed, assume the dependencies are stale and must be recalculated
+ for x in bld.node_deps.get(self.uid(), []):
+ if not x.is_bld() and not x.exists():
+ try:
+ del x.parent.children[x.name]
+ except KeyError:
+ pass
+
+ key = self.uid()
+ bld.node_deps[key] = []
+ bld.raw_deps[key] = []
+ return Utils.SIG_NIL
def wrap_compiled_task(classname):
derived_class = type(classname, (Task.classes[classname],), {})
@@ -175,14 +190,14 @@ def wrap_compiled_task(classname):
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
-for k in ('c', 'cxx'):
+for k in ('asm', 'c', 'cxx'):
if k in Task.classes:
wrap_compiled_task(k)
@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
- self.env.ENABLE_GCCDEPS = ['c', 'cxx']
+ self.env.ENABLE_GCCDEPS = ['asm', 'c', 'cxx']
def configure(conf):
# in case someone provides a --enable-gccdeps command-line option
@@ -191,6 +206,15 @@ def configure(conf):
global gccdeps_flags
flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
+ if conf.env.ASM_NAME in supported_compilers:
+ try:
+ conf.check(fragment='', features='asm force_gccdeps', asflags=flags, compile_filename='test.S', msg='Checking for asm flags %r' % ''.join(flags))
+ except Errors.ConfigurationError:
+ pass
+ else:
+ conf.env.append_value('ASFLAGS', flags)
+ conf.env.append_unique('ENABLE_GCCDEPS', 'asm')
+
if conf.env.CC_NAME in supported_compilers:
try:
conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
diff --git a/extras/javatest.py b/extras/javatest.py
index f3c6cbf..76d40ed 100755
--- a/extras/javatest.py
+++ b/extras/javatest.py
@@ -60,6 +60,8 @@ import os
from waflib import Task, TaskGen, Options, Errors, Utils, Logs
from waflib.Tools import ccroot
+JAR_RE = '**/*'
+
def _process_use_rec(self, name):
"""
Recursively process ``use`` for task generator with name ``name``..
@@ -139,6 +141,20 @@ def javatest_process_use(self):
# Only add to libpath if the link task is not a Python extension
extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+ if 'javac' in tg.features or 'jar' in tg.features:
+ if hasattr(tg, 'jar_task'):
+ # For Java JAR tasks depend on generated JAR
+ extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs)
+ else:
+ # For Java non-JAR ones we need to glob generated files (Java output files are not predictable)
+ if hasattr(tg, 'outdir'):
+ base_node = tg.outdir
+ else:
+ base_node = tg.path.get_bld()
+
+ self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+
+
@TaskGen.feature('javatest')
@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use')
diff --git a/extras/msvcdeps.py b/extras/msvcdeps.py
index 873a419..52985dc 100644
--- a/extras/msvcdeps.py
+++ b/extras/msvcdeps.py
@@ -150,11 +150,25 @@ def scan(self):
def sig_implicit_deps(self):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).sig_implicit_deps()
+ bld = self.generator.bld
try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
+ return self.compute_sig_implicit_deps()
+ except Errors.TaskNotReady:
+ raise ValueError("Please specify the build order precisely with msvcdeps (c/c++ tasks)")
+ except EnvironmentError:
+ # If a file is renamed, assume the dependencies are stale and must be recalculated
+ for x in bld.node_deps.get(self.uid(), []):
+ if not x.is_bld() and not x.exists():
+ try:
+ del x.parent.children[x.name]
+ except KeyError:
+ pass
+
+ key = self.uid()
+ bld.node_deps[key] = []
+ bld.raw_deps[key] = []
+ return Utils.SIG_NIL
def exec_command(self, cmd, **kw):
if self.env.CC_NAME not in supported_compilers:
@@ -211,11 +225,14 @@ def exec_command(self, cmd, **kw):
# get one from the exception object
ret = getattr(e, 'returncode', 1)
+ Logs.debug('msvcdeps: Running for: %s' % self.inputs[0])
for line in raw_out.splitlines():
if line.startswith(INCLUDE_PATTERN):
- inc_path = line[len(INCLUDE_PATTERN):].strip()
+ # Only strip whitespace after log to preserve
+ # dependency structure in debug output
+ inc_path = line[len(INCLUDE_PATTERN):]
Logs.debug('msvcdeps: Regex matched %s', inc_path)
- self.msvcdeps_paths.append(inc_path)
+ self.msvcdeps_paths.append(inc_path.strip())
else:
out.append(line)
diff --git a/extras/pch.py b/extras/pch.py
index 103e752..b44c7a2 100644
--- a/extras/pch.py
+++ b/extras/pch.py
@@ -90,7 +90,7 @@ def apply_pch(self):
if getattr(self, 'name', None):
try:
- task = self.bld.pch_tasks["%s.%s" % (self.name, self.idx)]
+ task = self.bld.pch_tasks[self.name]
self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx))
except KeyError:
pass
@@ -104,7 +104,7 @@ def apply_pch(self):
self.pch_task = task
if getattr(self, 'name', None):
- self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] = task
+ self.bld.pch_tasks[self.name] = task
@TaskGen.feature('cxx')
@TaskGen.after_method('process_source', 'propagate_uselib_vars')
diff --git a/extras/sphinx.py b/extras/sphinx.py
index ce11110..71d1028 100644
--- a/extras/sphinx.py
+++ b/extras/sphinx.py
@@ -20,7 +20,7 @@ def build(bld):
from waflib.Node import Node
from waflib import Utils
-from waflib.Task import Task
+from waflib import Task
from waflib.TaskGen import feature, after_method
@@ -55,13 +55,9 @@ def build_sphinx(self):
sphinx_build_task.set_outputs(self.path.get_bld())
# the sphinx-build results are in <build + output_format> directory
- sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
- sphinx_output_directory.mkdir()
+ self.sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
+ self.sphinx_output_directory.mkdir()
Utils.def_attrs(self, install_path=get_install_path(self))
- self.add_install_files(install_to=self.install_path,
- install_from=sphinx_output_directory.ant_glob('**/*'),
- cwd=sphinx_output_directory,
- relative_trick=True)
def get_install_path(tg):
@@ -73,9 +69,37 @@ def get_install_path(tg):
return tg.env.DOCDIR
-class SphinxBuildingTask(Task):
+class SphinxBuildingTask(Task.Task):
color = 'BOLD'
run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} ${SPHINX_OPTIONS}'
def keyword(self):
return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT
+
+ def runnable_status(self):
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ self.signature()
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.SKIP_ME:
+ # in case the files were removed
+ self.add_install()
+ return ret
+
+
+ def post_run(self):
+ self.add_install()
+ return Task.Task.post_run(self)
+
+
+ def add_install(self):
+ nodes = self.generator.sphinx_output_directory.ant_glob('**/*', quiet=True)
+ self.outputs += nodes
+ self.generator.add_install_files(install_to=self.generator.install_path,
+ install_from=nodes,
+ postpone=False,
+ cwd=self.generator.sphinx_output_directory,
+ relative_trick=True)
diff --git a/extras/wafcache.py b/extras/wafcache.py
index 8b9567f..088fd0d 100644
--- a/extras/wafcache.py
+++ b/extras/wafcache.py
@@ -16,10 +16,19 @@ The following environment variables may be set:
- URL to a cache server, for example:
export WAFCACHE=http://localhost:8080/files/
in that case, GET/POST requests are made to urls of the form
- http://localhost:8080/files/000000000/0 (cache management is then up to the server)
- - GCS or S3 bucket
- gs://my-bucket/
- s3://my-bucket/
+ http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
+ - GCS, S3 or MINIO bucket
+ gs://my-bucket/ (uses gsutil command line tool or WAFCACHE_CMD)
+ s3://my-bucket/ (uses aws command line tool or WAFCACHE_CMD)
+ minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
+* WAFCACHE_CMD: bucket upload/download command, for example:
+ WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
+ Note that the WAFCACHE bucket value is used for the source or destination
+ depending on the operation (upload or download). For example, with:
+ WAFCACHE="gs://mybucket/"
+ the following commands may be run:
+ gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1
+ gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
@@ -30,6 +39,7 @@ File cache specific options:
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
and trim the cache (3 minutess)
+
Usage::
def build(bld):
@@ -41,7 +51,7 @@ To troubleshoot::
waf clean build --zones=wafcache
"""
-import atexit, base64, errno, fcntl, getpass, os, shutil, sys, time, traceback, urllib3
+import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, traceback, urllib3, shlex
try:
import subprocess32 as subprocess
except ImportError:
@@ -53,6 +63,7 @@ if not os.path.isdir(base_cache):
default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())
CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)
+WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD')
TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))
EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))
EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
@@ -60,6 +71,8 @@ WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
OK = "ok"
+re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
+
try:
import cPickle
except ImportError:
@@ -233,8 +246,9 @@ def build(bld):
# already called once
return
- for x in range(bld.jobs):
- process_pool.append(get_process())
+ # pre-allocation
+ processes = [get_process() for x in range(bld.jobs)]
+ process_pool.extend(processes)
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
@@ -449,10 +463,20 @@ class fcache(object):
class bucket_cache(object):
def bucket_copy(self, source, target):
- if CACHE_DIR.startswith('s3://'):
+ if WAFCACHE_CMD:
+ def replacer(match):
+ if match.group('src'):
+ return source
+ elif match.group('tgt'):
+ return target
+ cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)]
+ elif CACHE_DIR.startswith('s3://'):
cmd = ['aws', 's3', 'cp', source, target]
- else:
+ elif CACHE_DIR.startswith('gs://'):
cmd = ['gsutil', 'cp', source, target]
+ else:
+ cmd = ['mc', 'cp', source, target]
+
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode:
@@ -510,7 +534,9 @@ def loop(service):
sys.stdout.flush()
if __name__ == '__main__':
- if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://'):
+ if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'):
+ if CACHE_DIR.startswith('minio://'):
+ CACHE_DIR = CACHE_DIR[8:] # minio doesn't need the protocol part, uses config aliases
service = bucket_cache()
elif CACHE_DIR.startswith('http'):
service = netcache()
diff --git a/extras/xcode6.py b/extras/xcode6.py
index 91bbff1..c5b3091 100644
--- a/extras/xcode6.py
+++ b/extras/xcode6.py
@@ -99,7 +99,7 @@ env.PROJ_CONFIGURATION = {
...
}
'Release': {
- 'ARCHS' x86_64'
+ 'ARCHS': 'x86_64'
...
}
}
@@ -163,12 +163,12 @@ class XCodeNode(object):
result = result + "\t\t}"
return result
elif isinstance(value, str):
- return "\"%s\"" % value
+ return '"%s"' % value.replace('"', '\\\\\\"')
elif isinstance(value, list):
result = "(\n"
for i in value:
- result = result + "\t\t\t%s,\n" % self.tostring(i)
- result = result + "\t\t)"
+ result = result + "\t\t\t\t%s,\n" % self.tostring(i)
+ result = result + "\t\t\t)"
return result
elif isinstance(value, XCodeNode):
return value._id
@@ -565,13 +565,13 @@ def process_xcode(self):
# Override target specific build settings
bldsettings = {
'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
- 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
+ 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR),
'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
- 'OTHER_LDFLAGS': libs + ' ' + frameworks,
- 'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
+ 'OTHER_LDFLAGS': libs + ' ' + frameworks + ' ' + ' '.join(bld.env['LINKFLAGS']),
'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
- 'INSTALL_PATH': []
+ 'INSTALL_PATH': [],
+ 'GCC_PREPROCESSOR_DEFINITIONS': self.env['DEFINES']
}
# Install path
@@ -591,7 +591,7 @@ def process_xcode(self):
# The keys represents different build configuration, e.g. Debug, Release and so on..
# Insert our generated build settings to all configuration names
- keys = set(settings.keys() + bld.env.PROJ_CONFIGURATION.keys())
+ keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
for k in keys:
if k in settings:
settings[k].update(bldsettings)