summaryrefslogtreecommitdiffstats
path: root/extras
diff options
context:
space:
mode:
Diffstat (limited to 'extras')
-rw-r--r--extras/boost.py5
-rw-r--r--extras/c_dumbpreproc.py2
-rw-r--r--extras/clang_compilation_database.py2
-rw-r--r--extras/doxygen.py5
-rw-r--r--extras/file_to_object.py9
-rw-r--r--extras/gccdeps.py36
-rwxr-xr-xextras/javatest.py16
-rw-r--r--extras/msvcdeps.py27
-rw-r--r--extras/pch.py4
-rw-r--r--extras/sphinx.py40
-rw-r--r--extras/wafcache.py46
-rw-r--r--extras/xcode6.py18
12 files changed, 162 insertions, 48 deletions
diff --git a/extras/boost.py b/extras/boost.py
index c2aaaa9..93b312a 100644
--- a/extras/boost.py
+++ b/extras/boost.py
@@ -270,10 +270,12 @@ def boost_get_libs(self, *k, **kw):
return file
return None
+ # extensions from Tools.ccroot.lib_patterns
+ wo_ext = re.compile(r"\.(a|so|lib|dll|dylib)(\.[0-9\.]+)?$")
def format_lib_name(name):
if name.startswith('lib') and self.env.CC_NAME != 'msvc':
name = name[3:]
- return name[:name.rfind('.')]
+ return wo_ext.sub("", name)
def match_libs(lib_names, is_static):
libs = []
@@ -522,4 +524,3 @@ def install_boost(self):
except:
continue
install_boost.done = False
-
diff --git a/extras/c_dumbpreproc.py b/extras/c_dumbpreproc.py
index ce9e1a4..1fdd5c3 100644
--- a/extras/c_dumbpreproc.py
+++ b/extras/c_dumbpreproc.py
@@ -66,7 +66,7 @@ class dumb_parser(parser):
if x == c_preproc.POPFILE:
self.currentnode_stack.pop()
continue
- self.tryfind(y)
+ self.tryfind(y, env=env)
c_preproc.c_parser = dumb_parser
diff --git a/extras/clang_compilation_database.py b/extras/clang_compilation_database.py
index 1398b0a..ff71f22 100644
--- a/extras/clang_compilation_database.py
+++ b/extras/clang_compilation_database.py
@@ -123,7 +123,7 @@ def patch_execute():
"""
Invoke clangdb command before build
"""
- if type(self) == Build.BuildContext:
+ if self.cmd.startswith('build'):
Scripting.run_command('clangdb')
old_execute_build(self)
diff --git a/extras/doxygen.py b/extras/doxygen.py
index 20cd9e1..0fda703 100644
--- a/extras/doxygen.py
+++ b/extras/doxygen.py
@@ -69,6 +69,7 @@ def parse_doxy(txt):
class doxygen(Task.Task):
vars = ['DOXYGEN', 'DOXYFLAGS']
color = 'BLUE'
+ ext_in = [ '.py', '.c', '.h', '.java', '.pb.cc' ]
def runnable_status(self):
'''
@@ -207,10 +208,10 @@ def process_doxy(self):
self.bld.fatal('doxygen file %s not found' % self.doxyfile)
# the task instance
- dsk = self.create_task('doxygen', node)
+ dsk = self.create_task('doxygen', node, always_run=getattr(self, 'always', False))
if getattr(self, 'doxy_tar', None):
- tsk = self.create_task('tar')
+ tsk = self.create_task('tar', always_run=getattr(self, 'always', False))
tsk.input_tasks = [dsk]
tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
if self.doxy_tar.endswith('bz2'):
diff --git a/extras/file_to_object.py b/extras/file_to_object.py
index 1393b51..13d2aef 100644
--- a/extras/file_to_object.py
+++ b/extras/file_to_object.py
@@ -31,7 +31,7 @@ Known issues:
"""
-import os
+import os, sys
from waflib import Task, TaskGen, Errors
def filename_c_escape(x):
@@ -95,12 +95,17 @@ class file_to_object_c(Task.Task):
name = "_binary_" + "".join(name)
+ def char_to_num(ch):
+ if sys.version_info[0] < 3:
+ return ord(ch)
+ return ch
+
data = self.inputs[0].read('rb')
lines, line = [], []
for idx_byte, byte in enumerate(data):
line.append(byte)
if len(line) > 15 or idx_byte == size-1:
- lines.append(", ".join(("0x%02x" % ord(x)) for x in line))
+ lines.append(", ".join(("0x%02x" % char_to_num(x)) for x in line))
line = []
data = ",\n ".join(lines)
diff --git a/extras/gccdeps.py b/extras/gccdeps.py
index bfabe72..1fc9373 100644
--- a/extras/gccdeps.py
+++ b/extras/gccdeps.py
@@ -27,7 +27,7 @@ if not c_preproc.go_absolute:
gccdeps_flags = ['-MMD']
# Third-party tools are allowed to add extra names in here with append()
-supported_compilers = ['gcc', 'icc', 'clang']
+supported_compilers = ['gas', 'gcc', 'icc', 'clang']
def scan(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
@@ -163,10 +163,25 @@ def post_run(self):
def sig_implicit_deps(self):
if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
return super(self.derived_gccdeps, self).sig_implicit_deps()
+ bld = self.generator.bld
+
try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
+ return self.compute_sig_implicit_deps()
+ except Errors.TaskNotReady:
+ raise ValueError("Please specify the build order precisely with gccdeps (asm/c/c++ tasks)")
+ except EnvironmentError:
+ # If a file is renamed, assume the dependencies are stale and must be recalculated
+ for x in bld.node_deps.get(self.uid(), []):
+ if not x.is_bld() and not x.exists():
+ try:
+ del x.parent.children[x.name]
+ except KeyError:
+ pass
+
+ key = self.uid()
+ bld.node_deps[key] = []
+ bld.raw_deps[key] = []
+ return Utils.SIG_NIL
def wrap_compiled_task(classname):
derived_class = type(classname, (Task.classes[classname],), {})
@@ -175,14 +190,14 @@ def wrap_compiled_task(classname):
derived_class.scan = scan
derived_class.sig_implicit_deps = sig_implicit_deps
-for k in ('c', 'cxx'):
+for k in ('asm', 'c', 'cxx'):
if k in Task.classes:
wrap_compiled_task(k)
@before_method('process_source')
@feature('force_gccdeps')
def force_gccdeps(self):
- self.env.ENABLE_GCCDEPS = ['c', 'cxx']
+ self.env.ENABLE_GCCDEPS = ['asm', 'c', 'cxx']
def configure(conf):
# in case someone provides a --enable-gccdeps command-line option
@@ -191,6 +206,15 @@ def configure(conf):
global gccdeps_flags
flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
+ if conf.env.ASM_NAME in supported_compilers:
+ try:
+ conf.check(fragment='', features='asm force_gccdeps', asflags=flags, compile_filename='test.S', msg='Checking for asm flags %r' % ''.join(flags))
+ except Errors.ConfigurationError:
+ pass
+ else:
+ conf.env.append_value('ASFLAGS', flags)
+ conf.env.append_unique('ENABLE_GCCDEPS', 'asm')
+
if conf.env.CC_NAME in supported_compilers:
try:
conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
diff --git a/extras/javatest.py b/extras/javatest.py
index f3c6cbf..76d40ed 100755
--- a/extras/javatest.py
+++ b/extras/javatest.py
@@ -60,6 +60,8 @@ import os
from waflib import Task, TaskGen, Options, Errors, Utils, Logs
from waflib.Tools import ccroot
+JAR_RE = '**/*'
+
def _process_use_rec(self, name):
"""
Recursively process ``use`` for task generator with name ``name``.
@@ -139,6 +141,20 @@ def javatest_process_use(self):
# Only add to libpath if the link task is not a Python extension
extend_unique(self.javatest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
+ if 'javac' in tg.features or 'jar' in tg.features:
+ if hasattr(tg, 'jar_task'):
+ # For Java JAR tasks depend on generated JAR
+ extend_unique(self.javatest_dep_nodes, tg.jar_task.outputs)
+ else:
+ # For Java non-JAR ones we need to glob generated files (Java output files are not predictable)
+ if hasattr(tg, 'outdir'):
+ base_node = tg.outdir
+ else:
+ base_node = tg.path.get_bld()
+
+ self.javatest_dep_nodes.extend([dx for dx in base_node.ant_glob(JAR_RE, remove=False, quiet=True)])
+
+
@TaskGen.feature('javatest')
@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath', 'javatest_process_use')
diff --git a/extras/msvcdeps.py b/extras/msvcdeps.py
index 873a419..52985dc 100644
--- a/extras/msvcdeps.py
+++ b/extras/msvcdeps.py
@@ -150,11 +150,25 @@ def scan(self):
def sig_implicit_deps(self):
if self.env.CC_NAME not in supported_compilers:
return super(self.derived_msvcdeps, self).sig_implicit_deps()
+ bld = self.generator.bld
try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
+ return self.compute_sig_implicit_deps()
+ except Errors.TaskNotReady:
+ raise ValueError("Please specify the build order precisely with msvcdeps (c/c++ tasks)")
+ except EnvironmentError:
+ # If a file is renamed, assume the dependencies are stale and must be recalculated
+ for x in bld.node_deps.get(self.uid(), []):
+ if not x.is_bld() and not x.exists():
+ try:
+ del x.parent.children[x.name]
+ except KeyError:
+ pass
+
+ key = self.uid()
+ bld.node_deps[key] = []
+ bld.raw_deps[key] = []
+ return Utils.SIG_NIL
def exec_command(self, cmd, **kw):
if self.env.CC_NAME not in supported_compilers:
@@ -211,11 +225,14 @@ def exec_command(self, cmd, **kw):
# get one from the exception object
ret = getattr(e, 'returncode', 1)
+ Logs.debug('msvcdeps: Running for: %s' % self.inputs[0])
for line in raw_out.splitlines():
if line.startswith(INCLUDE_PATTERN):
- inc_path = line[len(INCLUDE_PATTERN):].strip()
+ # Only strip whitespace after log to preserve
+ # dependency structure in debug output
+ inc_path = line[len(INCLUDE_PATTERN):]
Logs.debug('msvcdeps: Regex matched %s', inc_path)
- self.msvcdeps_paths.append(inc_path)
+ self.msvcdeps_paths.append(inc_path.strip())
else:
out.append(line)
diff --git a/extras/pch.py b/extras/pch.py
index 103e752..b44c7a2 100644
--- a/extras/pch.py
+++ b/extras/pch.py
@@ -90,7 +90,7 @@ def apply_pch(self):
if getattr(self, 'name', None):
try:
- task = self.bld.pch_tasks["%s.%s" % (self.name, self.idx)]
+ task = self.bld.pch_tasks[self.name]
self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx))
except KeyError:
pass
@@ -104,7 +104,7 @@ def apply_pch(self):
self.pch_task = task
if getattr(self, 'name', None):
- self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] = task
+ self.bld.pch_tasks[self.name] = task
@TaskGen.feature('cxx')
@TaskGen.after_method('process_source', 'propagate_uselib_vars')
diff --git a/extras/sphinx.py b/extras/sphinx.py
index ce11110..71d1028 100644
--- a/extras/sphinx.py
+++ b/extras/sphinx.py
@@ -20,7 +20,7 @@ def build(bld):
from waflib.Node import Node
from waflib import Utils
-from waflib.Task import Task
+from waflib import Task
from waflib.TaskGen import feature, after_method
@@ -55,13 +55,9 @@ def build_sphinx(self):
sphinx_build_task.set_outputs(self.path.get_bld())
# the sphinx-build results are in <build + output_format> directory
- sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
- sphinx_output_directory.mkdir()
+ self.sphinx_output_directory = self.path.get_bld().make_node(self.env.SPHINX_OUTPUT_FORMAT)
+ self.sphinx_output_directory.mkdir()
Utils.def_attrs(self, install_path=get_install_path(self))
- self.add_install_files(install_to=self.install_path,
- install_from=sphinx_output_directory.ant_glob('**/*'),
- cwd=sphinx_output_directory,
- relative_trick=True)
def get_install_path(tg):
@@ -73,9 +69,37 @@ def get_install_path(tg):
return tg.env.DOCDIR
-class SphinxBuildingTask(Task):
+class SphinxBuildingTask(Task.Task):
color = 'BOLD'
run_str = '${SPHINX_BUILD} -M ${SPHINX_OUTPUT_FORMAT} ${SRC} ${TGT} ${SPHINX_OPTIONS}'
def keyword(self):
return 'Compiling (%s)' % self.env.SPHINX_OUTPUT_FORMAT
+
+ def runnable_status(self):
+
+ for x in self.run_after:
+ if not x.hasrun:
+ return Task.ASK_LATER
+
+ self.signature()
+ ret = Task.Task.runnable_status(self)
+ if ret == Task.SKIP_ME:
+ # in case the files were removed
+ self.add_install()
+ return ret
+
+
+ def post_run(self):
+ self.add_install()
+ return Task.Task.post_run(self)
+
+
+ def add_install(self):
+ nodes = self.generator.sphinx_output_directory.ant_glob('**/*', quiet=True)
+ self.outputs += nodes
+ self.generator.add_install_files(install_to=self.generator.install_path,
+ install_from=nodes,
+ postpone=False,
+ cwd=self.generator.sphinx_output_directory,
+ relative_trick=True)
diff --git a/extras/wafcache.py b/extras/wafcache.py
index 8b9567f..088fd0d 100644
--- a/extras/wafcache.py
+++ b/extras/wafcache.py
@@ -16,10 +16,19 @@ The following environment variables may be set:
- URL to a cache server, for example:
export WAFCACHE=http://localhost:8080/files/
in that case, GET/POST requests are made to urls of the form
- http://localhost:8080/files/000000000/0 (cache management is then up to the server)
- - GCS or S3 bucket
- gs://my-bucket/
- s3://my-bucket/
+ http://localhost:8080/files/000000000/0 (cache management is delegated to the server)
+ - GCS, S3 or MINIO bucket
+ gs://my-bucket/ (uses gsutil command line tool or WAFCACHE_CMD)
+ s3://my-bucket/ (uses aws command line tool or WAFCACHE_CMD)
+ minio://my-bucket/ (uses mc command line tool or WAFCACHE_CMD)
+* WAFCACHE_CMD: bucket upload/download command, for example:
+ WAFCACHE_CMD="gsutil cp %{SRC} %{TGT}"
+ Note that the WAFCACHE bucket value is used for the source or destination
+ depending on the operation (upload or download). For example, with:
+ WAFCACHE="gs://mybucket/"
+ the following commands may be run:
+ gsutil cp build/myprogram gs://mybucket/aa/aaaaa/1
+ gsutil cp gs://mybucket/bb/bbbbb/2 build/somefile
* WAFCACHE_NO_PUSH: if set, disables pushing to the cache
* WAFCACHE_VERBOSITY: if set, displays more detailed cache operations
@@ -30,6 +39,7 @@ File cache specific options:
* WAFCACHE_EVICT_MAX_BYTES: maximum amount of cache size in bytes (10GB)
* WAFCACHE_EVICT_INTERVAL_MINUTES: minimum time interval to try
and trim the cache (3 minutes)
+
Usage::
def build(bld):
@@ -41,7 +51,7 @@ To troubleshoot::
waf clean build --zones=wafcache
"""
-import atexit, base64, errno, fcntl, getpass, os, shutil, sys, time, traceback, urllib3
+import atexit, base64, errno, fcntl, getpass, os, re, shutil, sys, time, traceback, urllib3, shlex
try:
import subprocess32 as subprocess
except ImportError:
@@ -53,6 +63,7 @@ if not os.path.isdir(base_cache):
default_wafcache_dir = os.path.join(base_cache, 'wafcache_' + getpass.getuser())
CACHE_DIR = os.environ.get('WAFCACHE', default_wafcache_dir)
+WAFCACHE_CMD = os.environ.get('WAFCACHE_CMD')
TRIM_MAX_FOLDERS = int(os.environ.get('WAFCACHE_TRIM_MAX_FOLDER', 1000000))
EVICT_INTERVAL_MINUTES = int(os.environ.get('WAFCACHE_EVICT_INTERVAL_MINUTES', 3))
EVICT_MAX_BYTES = int(os.environ.get('WAFCACHE_EVICT_MAX_BYTES', 10**10))
@@ -60,6 +71,8 @@ WAFCACHE_NO_PUSH = 1 if os.environ.get('WAFCACHE_NO_PUSH') else 0
WAFCACHE_VERBOSITY = 1 if os.environ.get('WAFCACHE_VERBOSITY') else 0
OK = "ok"
+re_waf_cmd = re.compile('(?P<src>%{SRC})|(?P<tgt>%{TGT})')
+
try:
import cPickle
except ImportError:
@@ -233,8 +246,9 @@ def build(bld):
# already called once
return
- for x in range(bld.jobs):
- process_pool.append(get_process())
+ # pre-allocation
+ processes = [get_process() for x in range(bld.jobs)]
+ process_pool.extend(processes)
Task.Task.can_retrieve_cache = can_retrieve_cache
Task.Task.put_files_cache = put_files_cache
@@ -449,10 +463,20 @@ class fcache(object):
class bucket_cache(object):
def bucket_copy(self, source, target):
- if CACHE_DIR.startswith('s3://'):
+ if WAFCACHE_CMD:
+ def replacer(match):
+ if match.group('src'):
+ return source
+ elif match.group('tgt'):
+ return target
+ cmd = [re_waf_cmd.sub(replacer, x) for x in shlex.split(WAFCACHE_CMD)]
+ elif CACHE_DIR.startswith('s3://'):
cmd = ['aws', 's3', 'cp', source, target]
- else:
+ elif CACHE_DIR.startswith('gs://'):
cmd = ['gsutil', 'cp', source, target]
+ else:
+ cmd = ['mc', 'cp', source, target]
+
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = proc.communicate()
if proc.returncode:
@@ -510,7 +534,9 @@ def loop(service):
sys.stdout.flush()
if __name__ == '__main__':
- if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://'):
+ if CACHE_DIR.startswith('s3://') or CACHE_DIR.startswith('gs://') or CACHE_DIR.startswith('minio://'):
+ if CACHE_DIR.startswith('minio://'):
+ CACHE_DIR = CACHE_DIR[8:] # minio doesn't need the protocol part, uses config aliases
service = bucket_cache()
elif CACHE_DIR.startswith('http'):
service = netcache()
diff --git a/extras/xcode6.py b/extras/xcode6.py
index 91bbff1..c5b3091 100644
--- a/extras/xcode6.py
+++ b/extras/xcode6.py
@@ -99,7 +99,7 @@ env.PROJ_CONFIGURATION = {
...
}
'Release': {
- 'ARCHS' x86_64'
+ 'ARCHS': 'x86_64'
...
}
}
@@ -163,12 +163,12 @@ class XCodeNode(object):
result = result + "\t\t}"
return result
elif isinstance(value, str):
- return "\"%s\"" % value
+ return '"%s"' % value.replace('"', '\\\\\\"')
elif isinstance(value, list):
result = "(\n"
for i in value:
- result = result + "\t\t\t%s,\n" % self.tostring(i)
- result = result + "\t\t)"
+ result = result + "\t\t\t\t%s,\n" % self.tostring(i)
+ result = result + "\t\t\t)"
return result
elif isinstance(value, XCodeNode):
return value._id
@@ -565,13 +565,13 @@ def process_xcode(self):
# Override target specific build settings
bldsettings = {
'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
- 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
+ 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR),
'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
- 'OTHER_LDFLAGS': libs + ' ' + frameworks,
- 'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
+ 'OTHER_LDFLAGS': libs + ' ' + frameworks + ' ' + ' '.join(bld.env['LINKFLAGS']),
'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
- 'INSTALL_PATH': []
+ 'INSTALL_PATH': [],
+ 'GCC_PREPROCESSOR_DEFINITIONS': self.env['DEFINES']
}
# Install path
@@ -591,7 +591,7 @@ def process_xcode(self):
# The keys represents different build configuration, e.g. Debug, Release and so on..
# Insert our generated build settings to all configuration names
- keys = set(settings.keys() + bld.env.PROJ_CONFIGURATION.keys())
+ keys = set(settings.keys()) | set(bld.env.PROJ_CONFIGURATION.keys())
for k in keys:
if k in settings:
settings[k].update(bldsettings)