summaryrefslogtreecommitdiffstats
diff options
context:
space:
mode:
-rw-r--r--.clang-format27
-rw-r--r--.clang-tidy55
-rw-r--r--.clant.json6
-rw-r--r--.gitattributes1
-rw-r--r--.gitignore4
-rw-r--r--.includes.imp17
-rw-r--r--.reuse/dep532
-rw-r--r--INSTALL59
-rw-r--r--INSTALL.md74
-rw-r--r--LICENSES/0BSD.txt12
-rw-r--r--LICENSES/CC-BY-SA-4.0.txt425
l---------LICENSES/GPL-3.0-or-later.txt1
-rw-r--r--NEWS134
-rw-r--r--README.md (renamed from README)3
-rw-r--r--doc/patchage.169
-rw-r--r--icons/meson.build40
-rw-r--r--meson.build288
-rw-r--r--meson/suppressions/meson.build72
-rw-r--r--meson_options.txt26
-rw-r--r--osx/Info.plist.in4
-rwxr-xr-xosx/bundleify.sh3
-rw-r--r--patchage.desktop.in13
-rw-r--r--po/LINGUAS4
-rw-r--r--po/POTFILES60
-rw-r--r--po/de.po153
-rw-r--r--po/fr.po154
-rw-r--r--po/ko.po155
-rw-r--r--po/meson.build27
-rw-r--r--po/patchage.pot154
-rw-r--r--src/Action.hpp90
-rw-r--r--src/ActionSink.hpp18
-rw-r--r--src/AlsaDriver.cpp974
-rw-r--r--src/AlsaDriver.hpp116
-rw-r--r--src/AlsaStubDriver.cpp18
-rw-r--r--src/AudioDriver.hpp40
-rw-r--r--src/Canvas.cpp342
-rw-r--r--src/Canvas.hpp81
-rw-r--r--src/CanvasModule.cpp155
-rw-r--r--src/CanvasModule.hpp74
-rw-r--r--src/CanvasPort.hpp108
-rw-r--r--src/ClientID.hpp123
-rw-r--r--src/ClientInfo.hpp18
-rw-r--r--src/ClientType.hpp42
-rw-r--r--src/Configuration.cpp607
-rw-r--r--src/Configuration.hpp222
-rw-r--r--src/Coord.hpp28
-rw-r--r--src/Driver.hpp77
-rw-r--r--src/Drivers.cpp56
-rw-r--r--src/Drivers.hpp52
-rw-r--r--src/Event.hpp71
-rw-r--r--src/ILog.hpp32
-rw-r--r--src/JackDbusDriver.cpp1661
-rw-r--r--src/JackDbusDriver.hpp161
-rw-r--r--src/JackDriver.cpp588
-rw-r--r--src/JackDriver.hpp109
-rw-r--r--src/JackLibDriver.cpp490
-rw-r--r--src/JackStubDriver.cpp18
-rw-r--r--src/Legend.cpp82
-rw-r--r--src/Legend.hpp79
-rw-r--r--src/Metadata.cpp72
-rw-r--r--src/Metadata.hpp42
-rw-r--r--src/Options.hpp16
-rw-r--r--src/Patchage.cpp1579
-rw-r--r--src/Patchage.hpp385
-rw-r--r--src/PatchageCanvas.cpp338
-rw-r--r--src/PatchageCanvas.hpp85
-rw-r--r--src/PatchageEvent.cpp110
-rw-r--r--src/PatchageEvent.hpp87
-rw-r--r--src/PatchageModule.cpp157
-rw-r--r--src/PatchageModule.hpp67
-rw-r--r--src/PatchagePort.hpp104
-rw-r--r--src/PortID.hpp260
-rw-r--r--src/PortInfo.hpp26
-rw-r--r--src/PortNames.hpp44
-rw-r--r--src/PortType.hpp50
-rw-r--r--src/Queue.hpp131
-rw-r--r--src/Reactor.cpp216
-rw-r--r--src/Reactor.hpp75
-rw-r--r--src/Setting.hpp88
-rw-r--r--src/SignalDirection.hpp44
-rw-r--r--src/TextViewLog.cpp81
-rw-r--r--src/TextViewLog.hpp54
-rw-r--r--src/UIFile.hpp103
-rw-r--r--src/Widget.hpp62
-rw-r--r--src/binary_location.h71
-rw-r--r--src/event_to_string.cpp110
-rw-r--r--src/event_to_string.hpp22
-rw-r--r--src/handle_event.cpp149
-rw-r--r--src/handle_event.hpp26
-rw-r--r--src/i18n.hpp12
-rw-r--r--src/jackey.h20
-rw-r--r--src/main.cpp204
-rw-r--r--src/make_alsa_driver.hpp20
-rw-r--r--src/make_jack_driver.hpp21
-rw-r--r--src/patchage.ui.in (renamed from src/patchage.ui)145
-rw-r--r--src/patchage_config.h112
-rw-r--r--src/warnings.hpp45
-rw-r--r--subprojects/fmt/include/fmt/core.h3277
-rw-r--r--subprojects/fmt/include/fmt/format-inl.h1733
-rw-r--r--subprojects/fmt/include/fmt/format.h4192
-rw-r--r--subprojects/fmt/include/fmt/ostream.h213
-rw-r--r--subprojects/fmt/meson.build19
-rwxr-xr-xwaf16
-rw-r--r--waflib/.gitignore2
-rw-r--r--waflib/Build.py1491
-rw-r--r--waflib/COPYING25
-rw-r--r--waflib/ConfigSet.py361
-rw-r--r--waflib/Configure.py638
-rw-r--r--waflib/Context.py737
-rw-r--r--waflib/Errors.py68
-rw-r--r--waflib/Logs.py379
-rw-r--r--waflib/Node.py970
-rw-r--r--waflib/Options.py342
-rw-r--r--waflib/README.md24
-rw-r--r--waflib/Runner.py611
-rw-r--r--waflib/Scripting.py614
-rw-r--r--waflib/Task.py1394
-rw-r--r--waflib/TaskGen.py917
-rw-r--r--waflib/Tools/__init__.py3
-rw-r--r--waflib/Tools/ar.py24
-rw-r--r--waflib/Tools/asm.py73
-rw-r--r--waflib/Tools/bison.py49
-rw-r--r--waflib/Tools/c.py39
-rw-r--r--waflib/Tools/c_aliases.py144
-rw-r--r--waflib/Tools/c_config.py1351
-rw-r--r--waflib/Tools/c_osx.py193
-rw-r--r--waflib/Tools/c_preproc.py1091
-rw-r--r--waflib/Tools/c_tests.py229
-rw-r--r--waflib/Tools/ccroot.py776
-rw-r--r--waflib/Tools/clang.py29
-rw-r--r--waflib/Tools/clangxx.py30
-rw-r--r--waflib/Tools/compiler_c.py110
-rw-r--r--waflib/Tools/compiler_cxx.py111
-rw-r--r--waflib/Tools/compiler_d.py85
-rw-r--r--waflib/Tools/compiler_fc.py73
-rw-r--r--waflib/Tools/cs.py211
-rw-r--r--waflib/Tools/cxx.py40
-rw-r--r--waflib/Tools/d.py97
-rw-r--r--waflib/Tools/d_config.py64
-rw-r--r--waflib/Tools/d_scan.py211
-rw-r--r--waflib/Tools/dbus.py70
-rw-r--r--waflib/Tools/dmd.py80
-rw-r--r--waflib/Tools/errcheck.py237
-rw-r--r--waflib/Tools/fc.py189
-rw-r--r--waflib/Tools/fc_config.py488
-rw-r--r--waflib/Tools/fc_scan.py114
-rw-r--r--waflib/Tools/flex.py62
-rw-r--r--waflib/Tools/g95.py66
-rw-r--r--waflib/Tools/gas.py18
-rw-r--r--waflib/Tools/gcc.py156
-rw-r--r--waflib/Tools/gdc.py55
-rw-r--r--waflib/Tools/gfortran.py93
-rw-r--r--waflib/Tools/glib2.py489
-rw-r--r--waflib/Tools/gnu_dirs.py131
-rw-r--r--waflib/Tools/gxx.py157
-rw-r--r--waflib/Tools/icc.py30
-rw-r--r--waflib/Tools/icpc.py30
-rw-r--r--waflib/Tools/ifort.py413
-rw-r--r--waflib/Tools/intltool.py231
-rw-r--r--waflib/Tools/irixcc.py66
-rw-r--r--waflib/Tools/javaw.py464
-rw-r--r--waflib/Tools/ldc2.py56
-rw-r--r--waflib/Tools/lua.py38
-rw-r--r--waflib/Tools/md5_tstamp.py39
-rw-r--r--waflib/Tools/msvc.py1020
-rw-r--r--waflib/Tools/nasm.py26
-rw-r--r--waflib/Tools/nobuild.py24
-rw-r--r--waflib/Tools/perl.py156
-rw-r--r--waflib/Tools/python.py627
-rw-r--r--waflib/Tools/qt5.py796
-rw-r--r--waflib/Tools/ruby.py186
-rw-r--r--waflib/Tools/suncc.py67
-rw-r--r--waflib/Tools/suncxx.py67
-rw-r--r--waflib/Tools/tex.py543
-rw-r--r--waflib/Tools/vala.py355
-rw-r--r--waflib/Tools/waf_unit_test.py296
-rw-r--r--waflib/Tools/winres.py78
-rw-r--r--waflib/Tools/xlc.py65
-rw-r--r--waflib/Tools/xlcxx.py65
-rw-r--r--waflib/Utils.py1021
-rw-r--r--waflib/__init__.py3
-rw-r--r--waflib/ansiterm.py342
-rw-r--r--waflib/extras/__init__.py3
-rw-r--r--waflib/extras/autowaf.py1244
-rw-r--r--waflib/extras/batched_cc.py173
-rw-r--r--waflib/extras/biber.py58
-rw-r--r--waflib/extras/bjam.py128
-rw-r--r--waflib/extras/blender.py108
-rw-r--r--waflib/extras/boo.py81
-rw-r--r--waflib/extras/boost.py525
-rw-r--r--waflib/extras/build_file_tracker.py28
-rw-r--r--waflib/extras/build_logs.py110
-rw-r--r--waflib/extras/buildcopy.py82
-rw-r--r--waflib/extras/c_bgxlc.py32
-rw-r--r--waflib/extras/c_dumbpreproc.py72
-rw-r--r--waflib/extras/c_emscripten.py87
-rw-r--r--waflib/extras/c_nec.py74
-rw-r--r--waflib/extras/cabal.py152
-rw-r--r--waflib/extras/cfg_altoptions.py110
-rw-r--r--waflib/extras/clang_compilation_database.py85
-rw-r--r--waflib/extras/codelite.py875
-rw-r--r--waflib/extras/color_gcc.py39
-rw-r--r--waflib/extras/color_rvct.py51
-rw-r--r--waflib/extras/compat15.py406
-rw-r--r--waflib/extras/cppcheck.py591
-rw-r--r--waflib/extras/cpplint.py209
-rw-r--r--waflib/extras/cross_gnu.py227
-rw-r--r--waflib/extras/cython.py146
-rw-r--r--waflib/extras/dcc.py72
-rw-r--r--waflib/extras/distnet.py430
-rw-r--r--waflib/extras/doxygen.py227
-rw-r--r--waflib/extras/dpapi.py87
-rw-r--r--waflib/extras/eclipse.py431
-rw-r--r--waflib/extras/erlang.py110
-rw-r--r--waflib/extras/fast_partial.py518
-rw-r--r--waflib/extras/fc_bgxlf.py32
-rw-r--r--waflib/extras/fc_cray.py51
-rw-r--r--waflib/extras/fc_nag.py61
-rw-r--r--waflib/extras/fc_nec.py60
-rw-r--r--waflib/extras/fc_open64.py58
-rw-r--r--waflib/extras/fc_pgfortran.py68
-rw-r--r--waflib/extras/fc_solstudio.py62
-rw-r--r--waflib/extras/fc_xlf.py63
-rw-r--r--waflib/extras/file_to_object.py137
-rw-r--r--waflib/extras/fluid.py30
-rw-r--r--waflib/extras/freeimage.py74
-rw-r--r--waflib/extras/fsb.py31
-rw-r--r--waflib/extras/fsc.py64
-rw-r--r--waflib/extras/gccdeps.py214
-rw-r--r--waflib/extras/gdbus.py87
-rw-r--r--waflib/extras/gob2.py17
-rw-r--r--waflib/extras/halide.py151
-rwxr-xr-xwaflib/extras/javatest.py118
-rw-r--r--waflib/extras/kde4.py93
-rw-r--r--waflib/extras/local_rpath.py19
-rw-r--r--waflib/extras/lv2.py47
-rw-r--r--waflib/extras/make.py142
-rw-r--r--waflib/extras/midl.py69
-rw-r--r--waflib/extras/msvcdeps.py256
-rw-r--r--waflib/extras/msvs.py1048
-rw-r--r--waflib/extras/netcache_client.py390
-rw-r--r--waflib/extras/objcopy.py50
-rw-r--r--waflib/extras/ocaml.py348
-rw-r--r--waflib/extras/package.py76
-rw-r--r--waflib/extras/parallel_debug.py459
-rw-r--r--waflib/extras/pch.py148
-rw-r--r--waflib/extras/pep8.py106
-rw-r--r--waflib/extras/pgicc.py75
-rw-r--r--waflib/extras/pgicxx.py20
-rw-r--r--waflib/extras/proc.py54
-rw-r--r--waflib/extras/protoc.py243
-rw-r--r--waflib/extras/pyqt5.py241
-rw-r--r--waflib/extras/pytest.py225
-rw-r--r--waflib/extras/qnxnto.py72
-rw-r--r--waflib/extras/qt4.py695
-rw-r--r--waflib/extras/relocation.py85
-rw-r--r--waflib/extras/remote.py327
-rw-r--r--waflib/extras/resx.py35
-rw-r--r--waflib/extras/review.py325
-rw-r--r--waflib/extras/rst.py260
-rw-r--r--waflib/extras/run_do_script.py139
-rw-r--r--waflib/extras/run_m_script.py88
-rw-r--r--waflib/extras/run_py_script.py104
-rw-r--r--waflib/extras/run_r_script.py86
-rw-r--r--waflib/extras/sas.py71
-rw-r--r--waflib/extras/satellite_assembly.py57
-rw-r--r--waflib/extras/scala.py128
-rw-r--r--waflib/extras/slow_qt4.py96
-rw-r--r--waflib/extras/softlink_libs.py76
-rw-r--r--waflib/extras/stale.py98
-rw-r--r--waflib/extras/stracedeps.py174
-rw-r--r--waflib/extras/swig.py237
-rw-r--r--waflib/extras/syms.py84
-rw-r--r--waflib/extras/ticgt.py300
-rw-r--r--waflib/extras/unity.py108
-rw-r--r--waflib/extras/use_config.py185
-rw-r--r--waflib/extras/valadoc.py140
-rw-r--r--waflib/extras/waf_xattr.py150
-rw-r--r--waflib/extras/why.py78
-rw-r--r--waflib/extras/win32_opts.py170
-rw-r--r--waflib/extras/wix.py87
-rw-r--r--waflib/extras/xcode6.py727
-rw-r--r--waflib/fixpy2.py64
-rwxr-xr-xwaflib/processor.py64
-rwxr-xr-xwaflib/waf16
-rw-r--r--wscript225
286 files changed, 17748 insertions, 47263 deletions
diff --git a/.clang-format b/.clang-format
new file mode 100644
index 0000000..40d32ed
--- /dev/null
+++ b/.clang-format
@@ -0,0 +1,27 @@
+# Copyright 2020-2021 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+---
+AlignConsecutiveAssignments: true
+AlignConsecutiveDeclarations: true
+AlignEscapedNewlinesLeft: true
+BasedOnStyle: Mozilla
+BraceWrapping:
+ AfterNamespace: false
+ AfterClass: true
+ AfterEnum: false
+ AfterExternBlock: false
+ AfterFunction: true
+ AfterStruct: false
+ SplitEmptyFunction: false
+ SplitEmptyRecord: false
+BreakBeforeBraces: Custom
+Cpp11BracedListStyle: true
+FixNamespaceComments: true
+IndentCaseLabels: false
+IndentPPDirectives: AfterHash
+KeepEmptyLinesAtTheStartOfBlocks: false
+SpacesInContainerLiterals: false
+StatementMacros:
+ - _Pragma
+...
diff --git a/.clang-tidy b/.clang-tidy
new file mode 100644
index 0000000..cec10cc
--- /dev/null
+++ b/.clang-tidy
@@ -0,0 +1,55 @@
+# Copyright 2020-2023 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+Checks: >
+ *,
+ -*-c-arrays,
+ -*-magic-numbers,
+ -*-named-parameter,
+ -*-narrowing-conversions,
+ -*-no-malloc,
+ -*-non-private-member-variables-in-classes,
+ -*-unused-macros,
+ -*-uppercase-literal-suffix,
+ -*-vararg,
+ -abseil-string-find-str-contains,
+ -altera-*,
+ -bugprone-assignment-in-if-condition,
+ -bugprone-easily-swappable-parameters,
+ -cert-dcl21-cpp,
+ -cert-dcl50-cpp,
+ -cert-msc30-c,
+ -cert-msc50-cpp,
+ -clang-analyzer-optin.cplusplus.VirtualCall,
+ -concurrency-mt-unsafe,
+ -cppcoreguidelines-avoid-const-or-ref-data-members,
+ -cppcoreguidelines-macro-usage,
+ -cppcoreguidelines-owning-memory,
+ -cppcoreguidelines-pro-bounds-array-to-pointer-decay,
+ -cppcoreguidelines-pro-bounds-constant-array-index,
+ -cppcoreguidelines-pro-bounds-pointer-arithmetic,
+ -cppcoreguidelines-pro-type-const-cast,
+ -cppcoreguidelines-pro-type-cstyle-cast,
+ -cppcoreguidelines-pro-type-reinterpret-cast,
+ -cppcoreguidelines-pro-type-union-access,
+ -fuchsia-default-arguments-calls,
+ -fuchsia-default-arguments-declarations,
+ -fuchsia-multiple-inheritance,
+ -fuchsia-overloaded-operator,
+ -google-readability-casting,
+ -google-readability-todo,
+ -google-runtime-references,
+ -hicpp-no-array-decay,
+ -hicpp-signed-bitwise,
+ -llvm-header-guard,
+ -llvmlibc-*,
+ -misc-no-recursion,
+ -modernize-use-nodiscard,
+ -modernize-use-trailing-return-type,
+ -readability-convert-member-functions-to-static,
+ -readability-function-cognitive-complexity,
+ -readability-identifier-length,
+ -readability-implicit-bool-conversion,
+FormatStyle: file
+WarningsAsErrors: '*'
+HeaderFilterRegex: '.*'
diff --git a/.clant.json b/.clant.json
new file mode 100644
index 0000000..23d71fd
--- /dev/null
+++ b/.clant.json
@@ -0,0 +1,6 @@
+{
+ "version": "1.0.0",
+ "mapping_files": [
+ ".includes.imp"
+ ]
+}
diff --git a/.gitattributes b/.gitattributes
deleted file mode 100644
index f063da3..0000000
--- a/.gitattributes
+++ /dev/null
@@ -1 +0,0 @@
-waf binary \ No newline at end of file
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..c41acb9
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+# Copyright 2020-2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+build/
diff --git a/.includes.imp b/.includes.imp
new file mode 100644
index 0000000..d79b174
--- /dev/null
+++ b/.includes.imp
@@ -0,0 +1,17 @@
+[
+ { "symbol": [ "GdkEvent", "private", "<gdk/gdk.h>", "public" ] },
+ { "symbol": [ "fmt::format", "private", "<fmt/core.h>", "public" ] },
+ { "symbol": [ "std::exception", "private", "<exception>", "public" ] },
+ { "symbol": [ "std::ifstream", "private", "<fstream>", "public" ] },
+ { "symbol": [ "std::ofstream", "private", "<fstream>", "public" ] },
+ { "symbol": [ "std::ostream", "private", "<iosfwd>", "public" ] },
+ { "symbol": [ "std::ostream", "private", "<ostream>", "public" ] },
+ { "symbol": [ "std::stringstream", "private", "<sstream>", "public" ] },
+
+ { "include": [ "<alsa/error.h>", "public", "<alsa/asoundlib.h>", "public" ] },
+ { "include": [ "<alsa/seq.h>", "public", "<alsa/asoundlib.h>", "public" ] },
+ { "include": [ "<alsa/seq_event.h>", "public", "<alsa/asoundlib.h>", "public" ] },
+ { "include": [ "<alsa/seqmid.h>", "public", "<alsa/asoundlib.h>", "public" ] },
+ { "include": [ "<bits/stdint-uintn.h>", "public", "<stdint.h>", "public" ] },
+ { "include": [ "<sigc++/type_traits.h>", "public", "<sigc++/adaptors/bind.h>", "public" ] }
+]
diff --git a/.reuse/dep5 b/.reuse/dep5
new file mode 100644
index 0000000..0685ea3
--- /dev/null
+++ b/.reuse/dep5
@@ -0,0 +1,32 @@
+Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
+Upstream-Name: patchage
+Upstream-Contact: David Robillard <d@drobilla.net>
+Source: https://gitlab.com/drobilla/patchage
+
+Files: .clant.json .includes.imp INSTALL.md NEWS
+Copyright: 2020-2021 David Robillard <d@drobilla.net>
+License: 0BSD OR GPL-3.0-or-later
+
+Files: AUTHORS README.md patchage.desktop.in
+Copyright: 2006-2014 David Robillard <d@drobilla.net>
+License: 0BSD OR GPL-3.0-or-later
+
+Files: src/patchage.gladep src/patchage.ui.in
+Copyright: 2006-2014 David Robillard <d@drobilla.net>
+License: GPL-3.0-or-later
+
+Files: osx/Info.plist.in osx/gtkrc osx/loaders.cache osx/pango.modules osx/pangorc
+Copyright: 2011-2017 David Robillard <d@drobilla.net>
+License: 0BSD OR GPL-3.0-or-later
+
+Files: osx/Patchage.icns
+Copyright: 2011 David Robillard <d@drobilla.net>
+License: CC-BY-SA-4.0 OR GPL-3.0-or-later
+
+Files: icons/*.png icons/*.svg
+Copyright: 2007-2011 David Robillard <d@drobilla.net>
+License: CC-BY-SA-4.0 OR GPL-3.0-or-later
+
+Files: po/patchage.pot po/*.po
+Copyright: 2022 David Robillard <d@drobilla.net>
+License: GPL-3.0-or-later
diff --git a/INSTALL b/INSTALL
deleted file mode 100644
index 623cddd..0000000
--- a/INSTALL
+++ /dev/null
@@ -1,59 +0,0 @@
-Installation Instructions
-=========================
-
-Basic Installation
-------------------
-
-Building this software requires only Python. To install with default options:
-
- ./waf configure
- ./waf
- ./waf install
-
-You may need to become root for the install stage, for example:
-
- sudo ./waf install
-
-Configuration Options
----------------------
-
-All supported options can be viewed using the command:
-
- ./waf --help
-
-Most options only need to be passed during the configure stage, for example:
-
- ./waf configure --prefix=/usr
- ./waf
- ./waf install
-
-Compiler Configuration
-----------------------
-
-Several standard environment variables can be used to control how compilers are
-invoked:
-
- * CC: Path to C compiler
- * CFLAGS: C compiler options
- * CXX: Path to C++ compiler
- * CXXFLAGS: C++ compiler options
- * CPPFLAGS: C preprocessor options
- * LINKFLAGS: Linker options
-
-Installation Directories
-------------------------
-
-The --prefix option (or the PREFIX environment variable) can be used to change
-the prefix which all files are installed under. There are also several options
-allowing for more fine-tuned control, see the --help output for details.
-
-Packaging
----------
-
-Everything can be installed to a specific root directory by passing a --destdir
-option to the install stage (or setting the DESTDIR environment variable),
-which adds a prefix to all install paths. For example:
-
- ./waf configure --prefix=/usr
- ./waf
- ./waf install --destdir=/tmp/package
diff --git a/INSTALL.md b/INSTALL.md
new file mode 100644
index 0000000..36bca7c
--- /dev/null
+++ b/INSTALL.md
@@ -0,0 +1,74 @@
+Installation Instructions
+=========================
+
+Prerequisites
+-------------
+
+To build from source, you will need:
+
+ * A relatively modern C compiler (GCC, Clang, and MSVC are known to work).
+
+ * [Meson](http://mesonbuild.com/), which depends on
+ [Python](http://python.org/).
+
+This is a brief overview of building this project with meson. See the meson
+documentation for more detailed information.
+
+Configuration
+-------------
+
+The build is configured with the `setup` command, which creates a new build
+directory with the given name:
+
+ meson setup build
+
+Some environment variables are read during `setup` and stored with the
+configuration:
+
+ * `CXX`: Path to C++ compiler.
+ * `CXXFLAGS`: C++ compiler options.
+ * `LDFLAGS`: Linker options.
+
+However, it is better to use meson options for configuration. All options can
+be inspected with the `configure` command from within the build directory:
+
+ cd build
+ meson configure
+
+Options can be set by passing C-style "define" options to `configure`:
+
+ meson configure -Dcpp_args="-march=native" -Dprefix="/opt/mypackage/"
+
+Note that some options, such as `strict` and `werror` are for
+developer/maintainer use only. Please don't file issues about anything that
+happens when they are enabled.
+
+Building
+--------
+
+From within a configured build directory, everything can be built with the
+`compile` command:
+
+ meson compile
+
+Similarly, tests can be run with the `test` command:
+
+ meson test
+
+Meson can also generate a project for several popular IDEs, see the `backend`
+option for details.
+
+Installation
+------------
+
+A compiled project can be installed with the `install` command:
+
+ meson install
+
+You may need to acquire root permissions to install to a system-wide prefix.
+For packaging, the installation may be staged to a directory using the
+`DESTDIR` environment variable or the `--destdir` option:
+
+ DESTDIR=/tmp/mypackage/ meson install
+
+ meson install --destdir=/tmp/mypackage/
diff --git a/LICENSES/0BSD.txt b/LICENSES/0BSD.txt
new file mode 100644
index 0000000..8a1f019
--- /dev/null
+++ b/LICENSES/0BSD.txt
@@ -0,0 +1,12 @@
+Copyright 2006-2022 David Robillard <d@drobilla.net>
+
+Permission to use, copy, modify, and/or distribute this software for any
+purpose with or without fee is hereby granted.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
+REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
+INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
+LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
+OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+PERFORMANCE OF THIS SOFTWARE.
diff --git a/LICENSES/CC-BY-SA-4.0.txt b/LICENSES/CC-BY-SA-4.0.txt
new file mode 100644
index 0000000..1912a89
--- /dev/null
+++ b/LICENSES/CC-BY-SA-4.0.txt
@@ -0,0 +1,425 @@
+Creative Commons Attribution-ShareAlike 4.0 International
+
+=======================================================================
+
+Creative Commons Corporation ("Creative Commons") is not a law firm and
+does not provide legal services or legal advice. Distribution of
+Creative Commons public licenses does not create a lawyer-client or
+other relationship. Creative Commons makes its licenses and related
+information available on an "as-is" basis. Creative Commons gives no
+warranties regarding its licenses, any material licensed under their
+terms and conditions, or any related information. Creative Commons
+disclaims all liability for damages resulting from their use to the
+fullest extent possible.
+
+Using Creative Commons Public Licenses
+
+Creative Commons public licenses provide a standard set of terms and
+conditions that creators and other rights holders may use to share
+original works of authorship and other material subject to copyright
+and certain other rights specified in the public license below. The
+following considerations are for informational purposes only, are not
+exhaustive, and do not form part of our licenses.
+
+ Considerations for licensors: Our public licenses are intended for
+ use by those authorized to give the public permission to use
+ material in ways otherwise restricted by copyright and certain
+ other rights. Our licenses are irrevocable. Licensors should
+ read and understand the terms and conditions of the license they
+ choose before applying it. Licensors should also secure all
+ rights necessary before applying our licenses so that the public
+ can reuse the material as expected. Licensors should clearly mark
+ any material not subject to the license. This includes other CC-
+ licensed material, or material used under an exception or
+ limitation to copyright. More considerations for licensors:
+ wiki.creativecommons.org/Considerations_for_licensors
+
+ Considerations for the public: By using one of our public
+ licenses, a licensor grants the public permission to use the
+ licensed material under specified terms and conditions. If the
+ licensor's permission is not necessary for any reason--for
+ example, because of any applicable exception or limitation to
+ copyright--then that use is not regulated by the license. Our
+ licenses grant only permissions under copyright and certain other
+ rights that a licensor has authority to grant. Use of the
+ licensed material may still be restricted for other reasons,
+ including because others have copyright or other rights in the
+ material. A licensor may make special requests, such as asking
+ that all changes be marked or described. Although not required by
+ our licenses, you are encouraged to respect those requests where
+ reasonable. More considerations for the public:
+ wiki.creativecommons.org/Considerations_for_licensees
+
+=======================================================================
+
+Creative Commons Attribution-ShareAlike 4.0 International Public
+License
+
+By exercising the Licensed Rights (defined below), You accept and agree
+to be bound by the terms and conditions of this Creative Commons
+Attribution-ShareAlike 4.0 International Public License ("Public
+License"). To the extent this Public License may be interpreted as a
+contract, You are granted the Licensed Rights in consideration of Your
+acceptance of these terms and conditions, and the Licensor grants You
+such rights in consideration of benefits the Licensor receives from
+making the Licensed Material available under these terms and
+conditions.
+
+
+Section 1 -- Definitions.
+
+ a. Adapted Material means material subject to Copyright and Similar
+ Rights that is derived from or based upon the Licensed Material
+ and in which the Licensed Material is translated, altered,
+ arranged, transformed, or otherwise modified in a manner requiring
+ permission under the Copyright and Similar Rights held by the
+ Licensor. For purposes of this Public License, where the Licensed
+ Material is a musical work, performance, or sound recording,
+ Adapted Material is always produced where the Licensed Material is
+ synched in timed relation with a moving image.
+
+ b. Adapter's License means the license You apply to Your Copyright
+ and Similar Rights in Your contributions to Adapted Material in
+ accordance with the terms and conditions of this Public License.
+
+ c. BY-SA Compatible License means a license listed at
+ creativecommons.org/compatiblelicenses, approved by Creative
+ Commons as essentially the equivalent of this Public License.
+
+ d. Copyright and Similar Rights means copyright and/or similar rights
+ closely related to copyright including, without limitation,
+ performance, broadcast, sound recording, and Sui Generis Database
+ Rights, without regard to how the rights are labeled or
+ categorized. For purposes of this Public License, the rights
+ specified in Section 2(b)(1)-(2) are not Copyright and Similar
+ Rights.
+
+ e. Effective Technological Measures means those measures that, in the
+ absence of proper authority, may not be circumvented under laws
+ fulfilling obligations under Article 11 of the WIPO Copyright
+ Treaty adopted on December 20, 1996, and/or similar international
+ agreements.
+
+ f. Exceptions and Limitations means fair use, fair dealing, and/or
+ any other exception or limitation to Copyright and Similar Rights
+ that applies to Your use of the Licensed Material.
+
+ g. License Elements means the license attributes listed in the name
+ of a Creative Commons Public License. The License Elements of
+ this Public License are Attribution and ShareAlike.
+
+ h. Licensed Material means the artistic or literary work, database,
+ or other material to which the Licensor applied this Public
+ License.
+
+ i. Licensed Rights means the rights granted to You subject to the
+ terms and conditions of this Public License, which are limited to
+ all Copyright and Similar Rights that apply to Your use of the
+ Licensed Material and that the Licensor has authority to license.
+
+ j. Licensor means the individual(s) or entity(ies) granting rights
+ under this Public License.
+
+ k. Share means to provide material to the public by any means or
+ process that requires permission under the Licensed Rights, such
+ as reproduction, public display, public performance, distribution,
+ dissemination, communication, or importation, and to make material
+ available to the public including in ways that members of the
+ public may access the material from a place and at a time
+ individually chosen by them.
+
+ l. Sui Generis Database Rights means rights other than copyright
+ resulting from Directive 96/9/EC of the European Parliament and of
+ the Council of 11 March 1996 on the legal protection of databases,
+ as amended and/or succeeded, as well as other essentially
+ equivalent rights anywhere in the world.
+
+ m. You means the individual or entity exercising the Licensed Rights
+ under this Public License. Your has a corresponding meaning.
+
+
+Section 2 -- Scope.
+
+ a. License grant.
+
+ 1. Subject to the terms and conditions of this Public License,
+ the Licensor hereby grants You a worldwide, royalty-free,
+ non-sublicensable, non-exclusive, irrevocable license to
+ exercise the Licensed Rights in the Licensed Material to:
+
+ a. reproduce and Share the Licensed Material, in whole or
+ in part; and
+
+ b. produce, reproduce, and Share Adapted Material.
+
+ 2. Exceptions and Limitations. For the avoidance of doubt,
+ where Exceptions and Limitations apply to Your use, this
+ Public License does not apply, and You do not need to comply
+ with its terms and conditions.
+
+ 3. Term. The term of this Public License is specified in
+ Section 6(a).
+
+ 4. Media and formats; technical modifications allowed. The
+ Licensor authorizes You to exercise the Licensed Rights in
+ all media and formats whether now known or hereafter created,
+ and to make technical modifications necessary to do so. The
+ Licensor waives and/or agrees not to assert any right or
+ authority to forbid You from making technical modifications
+ necessary to exercise the Licensed Rights, including
+ technical modifications necessary to circumvent Effective
+ Technological Measures. For purposes of this Public License,
+ simply making modifications authorized by this Section 2(a)
+ (4) never produces Adapted Material.
+
+ 5. Downstream recipients.
+
+ a. Offer from the Licensor -- Licensed Material. Every
+ recipient of the Licensed Material automatically
+ receives an offer from the Licensor to exercise the
+ Licensed Rights under the terms and conditions of this
+ Public License.
+
+ b. Additional offer from the Licensor -- Adapted Material.
+ Every recipient of Adapted Material from You
+ automatically receives an offer from the Licensor to
+ exercise the Licensed Rights in the Adapted Material
+ under the conditions of the Adapter's License You apply.
+
+ c. No downstream restrictions. You may not offer or impose
+ any additional or different terms or conditions on, or
+ apply any Effective Technological Measures to, the
+ Licensed Material if doing so restricts exercise of the
+ Licensed Rights by any recipient of the Licensed
+ Material.
+
+ 6. No endorsement. Nothing in this Public License constitutes
+ or may be construed as permission to assert or imply that You
+ are, or that Your use of the Licensed Material is, connected
+ with, or sponsored, endorsed, or granted official status by,
+ the Licensor or others designated to receive attribution as
+ provided in Section 3(a)(1)(A)(i).
+
+ b. Other rights.
+
+ 1. Moral rights, such as the right of integrity, are not
+ licensed under this Public License, nor are publicity,
+ privacy, and/or other similar personality rights; however, to
+ the extent possible, the Licensor waives and/or agrees not to
+ assert any such rights held by the Licensor to the limited
+ extent necessary to allow You to exercise the Licensed
+ Rights, but not otherwise.
+
+ 2. Patent and trademark rights are not licensed under this
+ Public License.
+
+ 3. To the extent possible, the Licensor waives any right to
+ collect royalties from You for the exercise of the Licensed
+ Rights, whether directly or through a collecting society
+ under any voluntary or waivable statutory or compulsory
+ licensing scheme. In all other cases the Licensor expressly
+ reserves any right to collect such royalties.
+
+
+Section 3 -- License Conditions.
+
+Your exercise of the Licensed Rights is expressly made subject to the
+following conditions.
+
+ a. Attribution.
+
+ 1. If You Share the Licensed Material (including in modified
+ form), You must:
+
+ a. retain the following if it is supplied by the Licensor
+ with the Licensed Material:
+
+ i. identification of the creator(s) of the Licensed
+ Material and any others designated to receive
+ attribution, in any reasonable manner requested by
+ the Licensor (including by pseudonym if
+ designated);
+
+ ii. a copyright notice;
+
+ iii. a notice that refers to this Public License;
+
+ iv. a notice that refers to the disclaimer of
+ warranties;
+
+ v. a URI or hyperlink to the Licensed Material to the
+ extent reasonably practicable;
+
+ b. indicate if You modified the Licensed Material and
+ retain an indication of any previous modifications; and
+
+ c. indicate the Licensed Material is licensed under this
+ Public License, and include the text of, or the URI or
+ hyperlink to, this Public License.
+
+ 2. You may satisfy the conditions in Section 3(a)(1) in any
+ reasonable manner based on the medium, means, and context in
+ which You Share the Licensed Material. For example, it may
+ be reasonable to satisfy the conditions by providing a URI or
+ hyperlink to a resource that includes the required
+ information.
+
+ 3. If requested by the Licensor, You must remove any of the
+ information required by Section 3(a)(1)(A) to the extent
+ reasonably practicable.
+
+ b. ShareAlike.
+
+ In addition to the conditions in Section 3(a), if You Share
+ Adapted Material You produce, the following conditions also apply.
+
+ 1. The Adapter's License You apply must be a Creative Commons
+ license with the same License Elements, this version or
+ later, or a BY-SA Compatible License.
+
+ 2. You must include the text of, or the URI or hyperlink to, the
+ Adapter's License You apply. You may satisfy this condition
+ in any reasonable manner based on the medium, means, and
+ context in which You Share Adapted Material.
+
+ 3. You may not offer or impose any additional or different terms
+ or conditions on, or apply any Effective Technological
+ Measures to, Adapted Material that restrict exercise of the
+ rights granted under the Adapter's License You apply.
+
+
+Section 4 -- Sui Generis Database Rights.
+
+Where the Licensed Rights include Sui Generis Database Rights that
+apply to Your use of the Licensed Material:
+
+ a. for the avoidance of doubt, Section 2(a)(1) grants You the right
+ to extract, reuse, reproduce, and Share all or a substantial
+ portion of the contents of the database;
+
+ b. if You include all or a substantial portion of the database
+ contents in a database in which You have Sui Generis Database
+ Rights, then the database in which You have Sui Generis Database
+ Rights (but not its individual contents) is Adapted Material,
+ including for purposes of Section 3(b); and
+
+ c. You must comply with the conditions in Section 3(a) if You Share
+ all or a substantial portion of the contents of the database.
+
+For the avoidance of doubt, this Section 4 supplements and does not
+replace Your obligations under this Public License where the Licensed
+Rights include other Copyright and Similar Rights.
+
+
+Section 5 -- Disclaimer of Warranties and Limitation of Liability.
+
+ a. UNLESS OTHERWISE SEPARATELY UNDERTAKEN BY THE LICENSOR, TO THE
+ EXTENT POSSIBLE, THE LICENSOR OFFERS THE LICENSED MATERIAL AS-IS
+ AND AS-AVAILABLE, AND MAKES NO REPRESENTATIONS OR WARRANTIES OF
+ ANY KIND CONCERNING THE LICENSED MATERIAL, WHETHER EXPRESS,
+ IMPLIED, STATUTORY, OR OTHER. THIS INCLUDES, WITHOUT LIMITATION,
+ WARRANTIES OF TITLE, MERCHANTABILITY, FITNESS FOR A PARTICULAR
+ PURPOSE, NON-INFRINGEMENT, ABSENCE OF LATENT OR OTHER DEFECTS,
+ ACCURACY, OR THE PRESENCE OR ABSENCE OF ERRORS, WHETHER OR NOT
+ KNOWN OR DISCOVERABLE. WHERE DISCLAIMERS OF WARRANTIES ARE NOT
+ ALLOWED IN FULL OR IN PART, THIS DISCLAIMER MAY NOT APPLY TO YOU.
+
+ b. TO THE EXTENT POSSIBLE, IN NO EVENT WILL THE LICENSOR BE LIABLE TO
+ YOU ON ANY LEGAL THEORY (INCLUDING, WITHOUT LIMITATION,
+ NEGLIGENCE) OR OTHERWISE FOR ANY DIRECT, SPECIAL, INDIRECT,
+ INCIDENTAL, CONSEQUENTIAL, PUNITIVE, EXEMPLARY, OR OTHER LOSSES,
+ COSTS, EXPENSES, OR DAMAGES ARISING OUT OF THIS PUBLIC LICENSE OR
+ USE OF THE LICENSED MATERIAL, EVEN IF THE LICENSOR HAS BEEN
+ ADVISED OF THE POSSIBILITY OF SUCH LOSSES, COSTS, EXPENSES, OR
+ DAMAGES. WHERE A LIMITATION OF LIABILITY IS NOT ALLOWED IN FULL
+ OR IN PART, THIS LIMITATION MAY NOT APPLY TO YOU.
+
+ c. The disclaimer of warranties and limitation of liability provided
+ above shall be interpreted in a manner that, to the extent
+ possible, most closely approximates an absolute disclaimer and
+ waiver of all liability.
+
+
+Section 6 -- Term and Termination.
+
+ a. This Public License applies for the term of the Copyright and
+ Similar Rights licensed here. However, if You fail to comply with
+ this Public License, then Your rights under this Public License
+ terminate automatically.
+
+ b. Where Your right to use the Licensed Material has terminated under
+ Section 6(a), it reinstates:
+
+ 1. automatically as of the date the violation is cured, provided
+ it is cured within 30 days of Your discovery of the
+ violation; or
+
+ 2. upon express reinstatement by the Licensor.
+
+ For the avoidance of doubt, this Section 6(b) does not affect any
+ right the Licensor may have to seek remedies for Your violations
+ of this Public License.
+
+ c. For the avoidance of doubt, the Licensor may also offer the
+ Licensed Material under separate terms or conditions or stop
+ distributing the Licensed Material at any time; however, doing so
+ will not terminate this Public License.
+
+ d. Sections 1, 5, 6, 7, and 8 survive termination of this Public
+ License.
+
+
+Section 7 -- Other Terms and Conditions.
+
+ a. The Licensor shall not be bound by any additional or different
+ terms or conditions communicated by You unless expressly agreed.
+
+ b. Any arrangements, understandings, or agreements regarding the
+ Licensed Material not stated herein are separate from and
+ independent of the terms and conditions of this Public License.
+
+
+Section 8 -- Interpretation.
+
+ a. For the avoidance of doubt, this Public License does not, and
+ shall not be interpreted to, reduce, limit, restrict, or impose
+ conditions on any use of the Licensed Material that could lawfully
+ be made without permission under this Public License.
+
+ b. To the extent possible, if any provision of this Public License is
+ deemed unenforceable, it shall be automatically reformed to the
+ minimum extent necessary to make it enforceable. If the provision
+ cannot be reformed, it shall be severed from this Public License
+ without affecting the enforceability of the remaining terms and
+ conditions.
+
+ c. No term or condition of this Public License will be waived and no
+ failure to comply consented to unless expressly agreed to by the
+ Licensor.
+
+ d. Nothing in this Public License constitutes or may be interpreted
+ as a limitation upon, or waiver of, any privileges and immunities
+ that apply to the Licensor or You, including from the legal
+ processes of any jurisdiction or authority.
+
+
+=======================================================================
+
+Creative Commons is not a party to its public licenses.
+Notwithstanding, Creative Commons may elect to apply one of its public
+licenses to material it publishes and in those instances will be
+considered the “Licensor.” The text of the Creative Commons public
+licenses is dedicated to the public domain under the CC0 Public Domain
+Dedication. Except for the limited purpose of indicating that material
+is shared under a Creative Commons public license or as otherwise
+permitted by the Creative Commons policies published at
+creativecommons.org/policies, Creative Commons does not authorize the
+use of the trademark "Creative Commons" or any other trademark or logo
+of Creative Commons without its prior written consent including,
+without limitation, in connection with any unauthorized modifications
+to any of its public licenses or any other arrangements,
+understandings, or agreements concerning use of licensed material. For
+the avoidance of doubt, this paragraph does not form part of the public
+licenses.
+
+Creative Commons may be contacted at creativecommons.org.
diff --git a/LICENSES/GPL-3.0-or-later.txt b/LICENSES/GPL-3.0-or-later.txt
new file mode 120000
index 0000000..012065c
--- /dev/null
+++ b/LICENSES/GPL-3.0-or-later.txt
@@ -0,0 +1 @@
+../COPYING \ No newline at end of file
diff --git a/NEWS b/NEWS
index 7d91a7a..02f681b 100644
--- a/NEWS
+++ b/NEWS
@@ -1,89 +1,139 @@
-patchage (1.0.1) unstable;
+patchage (1.0.11) stable; urgency=medium
+
+ * Convert man page to mdoc
+ * Explicitly disable sprung layout menu item by default
+
+ -- David Robillard <d@drobilla.net> Mon, 11 Dec 2023 17:26:40 +0000
+
+patchage (1.0.10) stable; urgency=medium
+
+ * Add German translation
+ * Add Korean translation from Junghee Lee
+ * Add i18n support
+ * Replace boost with standard C++17 facilities
+ * Upgrade to fmt 9.0.0
+
+ -- David Robillard <d@drobilla.net> Sat, 10 Sep 2022 00:59:46 +0000
+
+patchage (1.0.8) stable; urgency=medium
+
+ * Switch to C++17 and modernize code
+ * Switch to meson build system
+
+ -- David Robillard <d@drobilla.net> Sat, 13 Aug 2022 22:15:26 +0000
+
+patchage (1.0.6) stable; urgency=medium
+
+ * Fix ALSA sequencer port subscriptions
+ * Fix crash on client disconnection
+ * Fix initial display with no configuration file
+ * Fix various minor bugs
+ * Rework code architecture to be more decoupled and data-driven
+ * Save "human names" setting in configuration
+ * Show latency in toolbar with 2 decimal places
+ * Switch to C++14
+
+ -- David Robillard <d@drobilla.net> Fri, 27 May 2022 03:08:31 +0000
+
+patchage (1.0.4) stable; urgency=medium
+
+ * Add NSMicrophoneUsageDescription for MacOS 10.15 Catalina
+ * Add command line option to print version
+ * Fix making and breaking connections with Jack DBus
+ * Fix sample rate with Jack DBus
+ * Fix unstable module positions
+ * Improve man page
+ * Remove Jack session support
+ * Remove flaky DSP load meter
+
+ -- David Robillard <d@drobilla.net> Thu, 07 Jan 2021 21:59:02 +0000
+
+patchage (1.0.2) stable; urgency=medium
- * Support Jack CV and OSC via metadata
* Add support for exporting canvas as PDF
- * Save window size and position when closed via window manager
- * Order ports deterministically
* Bring back Jack buffer size selector
- * Style messages pane to match canvas
- * Don't install 512x512 icons
- * Restore messages pane visibility and height
* Configure based on compiler target OS for cross-compilation
+ * Don't install 512x512 icons
* Fix compilation with Jack DBus
- * Upgrade to waf 1.8.14
+ * Order ports deterministically
+ * Restore messages pane visibility and height
+ * Save window size and position when closed via window manager
+ * Style messages pane to match canvas
+ * Support Jack CV and OSC via metadata
+ * Upgrade to waf 2.0.19
- -- David Robillard <d@drobilla.net> Fri, 14 Oct 2016 19:06:17 -0400
+ -- David Robillard <d@drobilla.net> Thu, 09 Apr 2020 12:29:59 +0000
-patchage (1.0.0) stable;
+patchage (1.0.0) stable; urgency=medium
* Allow removing connections by selecting their handle and pressing delete
- * Remove Raul dependency
- * Switch from FlowCanvas to Ganv (much improved looks and performance)
- * Remove LASH support and simplify UI
* Fix font configuration on OSX
- * Use Mac style key bindings on OSX
* Integrate with Mac menu bar on OSX
- * Support for DOT export for rendering with GraphViz
- * Use XDG_CONFIG_HOME instead of ~/.patchagerc
* Make port colours configurable
+ * Remove LASH support and simplify UI
+ * Remove Raul dependency
+ * Support for DOT export for rendering with GraphViz
* Support port pretty names via new Jack metadata API
+ * Switch from FlowCanvas to Ganv (much improved looks and performance)
+ * Use Mac style key bindings on OSX
+ * Use XDG_CONFIG_HOME instead of ~/.patchagerc
- -- David Robillard <d@drobilla.net> Sun, 27 Apr 2014 23:46:10 -0400
+ -- David Robillard <d@drobilla.net> Mon, 28 Apr 2014 03:46:10 +0000
-patchage (0.5.0) stable;
+patchage (0.5.0) stable; urgency=medium
- * Auto-arrange interface modules sanely (align corresponding inputs/outputs)
* Add -J (--no-jack) command line option
+ * Add missing COPYING file to distribution
+ * Add more/larger icons
* Add proper --help output and man page
- * Improve performance (dramatically for large setups)
+ * Auto-arrange interface modules sanely (align corresponding inputs/outputs)
+ * Build system and code quality improvements
+ * Bump FlowCanvas dependency to 0.7.0
* Fancy console info/warning/error logging
- * Fix minor memory leaks and reduce memory consumption
+ * Fix Jack D-Bus support
* Fix handling of ALSA duplex ports
+ * Fix minor memory leaks and reduce memory consumption
* Hide "split" module menu item when it is useless
- * Fix Jack D-Bus support
+ * Improve performance (dramatically for large setups)
* Mac OS X .app bundle port
- * Bump FlowCanvas dependency to 0.7.0
- * Add more/larger icons
- * Add missing COPYING file to distribution
- * Build system and code quality improvements
- -- David Robillard <d@drobilla.net> Tue, 11 Jan 2011 17:42:07 -0500
+ -- David Robillard <d@drobilla.net> Tue, 11 Jan 2011 22:42:07 +0000
-patchage (0.4.5) stable;
+patchage (0.4.5) stable; urgency=medium
- * Install SVG icon
+ * Bump FlowCanvas dependency to 0.6.0
* Fix compilation without Jack
* Improve performance when dragging modules
- * Bump FlowCanvas dependency to 0.6.0
+ * Install SVG icon
* Upgrade to waf 1.5.18
- -- David Robillard <d@drobilla.net> Fri, 03 Sep 2010 20:24:36 -0400
+ -- David Robillard <d@drobilla.net> Sat, 04 Sep 2010 00:24:36 +0000
-patchage (0.4.4) stable;
+patchage (0.4.4) stable; urgency=medium
* Fix incorrect icon install paths
- -- David Robillard <d@drobilla.net> Wed, 09 Dec 2009 10:17:37 -0500
+ -- David Robillard <d@drobilla.net> Wed, 09 Dec 2009 15:17:37 +0000
-patchage (0.4.3) stable;
+patchage (0.4.3) stable; urgency=medium
- * Switch to waf build system
- * Fix compilation with GCC 4.4
* Better ALSA support
- * Massive performance improvements when ALSA is enabled
* Center on startup
+ * Fix compilation with GCC 4.4
+ * Massive performance improvements when ALSA is enabled
+ * Switch to waf build system
- -- David Robillard <d@drobilla.net> Tue, 08 Dec 2009 21:13:37 -0500
+ -- David Robillard <d@drobilla.net> Wed, 09 Dec 2009 02:13:37 +0000
-patchage (0.4.2) stable;
+patchage (0.4.2) stable; urgency=medium
* LASH support via D-BUS from ladi-patchage branch
* Remove old LASH support via liblash
- -- David Robillard <d@drobilla.net> Tue, 09 Sep 2008 15:41:04 -0400
+ -- David Robillard <d@drobilla.net> Tue, 09 Sep 2008 19:41:04 +0000
-patchage (0.4.1) stable;
+patchage (0.4.1) stable; urgency=medium
* Initial release
- -- David Robillard <d@drobilla.net> Sun, 06 Jul 2008 17:19:55 -0400
+ -- David Robillard <d@drobilla.net> Sun, 06 Jul 2008 21:19:55 +0000
diff --git a/README b/README.md
index 33af97f..de04877 100644
--- a/README
+++ b/README.md
@@ -2,6 +2,5 @@ Patchage
========
Patchage is a modular patch bay for Jack and ALSA based audio/MIDI systems.
-For more information, see <http://drobilla.net/software/patchage>.
-
+
-- David Robillard <d@drobilla.net>
diff --git a/doc/patchage.1 b/doc/patchage.1
index 07cdabe..3269561 100644
--- a/doc/patchage.1
+++ b/doc/patchage.1
@@ -1,27 +1,46 @@
-.TH PATCHAGE 1 "15 Dec 2010"
-
-.SH NAME
-.B patchage \- Graphically connect JACK and ALSA Audio/MIDI ports
-
-.SH SYNOPSIS
-.B Patchage
-provides a graphical interface to connect Jack/Alsa Audio/MIDI inputs
-and outputs. Each application is represented on a canvas as a "module"
-with inputs on the left and outputs on the right. Modules can be arranged
-manually or automatically to have a clear display of the current setup.
-
-.SH OPTIONS
-.TP
-\fB\-h\fR, \fB\-\-help\fR
-Print the command line options.
-
-.TP
-\fB\-A\fR, \fB\-\-no\-alsa\fR
+.\" # Copyright 2010-2022 David Robillard <d@drobilla.net>
+.\" # SPDX-License-Identifier: CC-BY-SA-4.0 or GPL-3.0-or-later
+.Dd Nov 30, 2022
+.Dt PATCHAGE 1
+.Os Patchage 1.0.11
+.Sh NAME
+.Nm patchage
+.Nd graphically connect JACK/ALSA audio/MIDI applications
+.Sh SYNOPSIS
+.Nm patchage
+.Op Fl AJVh
+.Op Fl Fl help
+.Op Fl Fl no-alsa
+.Op Fl Fl no-jack
+.Op Fl Fl version
+.Sh DESCRIPTION
+.Nm
+provides a graphical interface to connect JACK and ALSA inputs and outputs.
+Each application is represented on a canvas as a
+.Dq module ,
+with inputs on the left and outputs on the right.
+Modules can be arranged manually or automatically to have a clear display of the current setup.
+.Pp
+The options are as follows:
+.Pp
+.Bl -tag -compact -width 3n
+.It Fl A , Fl Fl no-alsa
Do not automatically attach to ALSA.
-
-.TP
-\fB\-J\fR, \fB\-\-no\-jack\fR
+.Pp
+.It Fl J , Fl Fl no-jack
Do not automatically attach to JACK.
-
-.SH AUTHOR
-Patchage was written by David Robillard <d@drobilla.net>
+.Pp
+.It Fl V , Fl Fl version
+Display version information and exit.
+.Pp
+.It Fl h , Fl Fl help
+Print the command line options.
+.El
+.Sh EXIT STATUS
+.Nm
+exits with a status of 0, or non-zero if an error occurred.
+.Sh AUTHORS
+.Nm
+was written by
+.An David Robillard
+.Mt d@drobilla.net .
diff --git a/icons/meson.build b/icons/meson.build
new file mode 100644
index 0000000..3de7d1c
--- /dev/null
+++ b/icons/meson.build
@@ -0,0 +1,40 @@
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+svg_icon_sizes = [
+ '16x16',
+ '22x22',
+ '32x32',
+ '48x48',
+]
+
+png_icon_sizes = [
+ '16x16',
+ '22x22',
+ '24x24',
+ '32x32',
+ '48x48',
+ '128x128',
+ '256x256',
+]
+
+icons_dir = get_option('prefix') / get_option('datadir') / 'icons' / 'hicolor'
+
+install_data(
+ 'scalable/patchage.svg',
+ install_dir: icons_dir / 'scalable' / 'apps',
+)
+
+foreach size : svg_icon_sizes
+ install_data(
+ files(size / 'patchage.svg'),
+ install_dir: icons_dir / size / 'apps',
+ )
+endforeach
+
+foreach size : png_icon_sizes
+ install_data(
+ files(size / 'patchage.png'),
+ install_dir: icons_dir / size / 'apps',
+ )
+endforeach
diff --git a/meson.build b/meson.build
new file mode 100644
index 0000000..32c79d4
--- /dev/null
+++ b/meson.build
@@ -0,0 +1,288 @@
+# Copyright 2020-2023 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+project(
+ 'patchage',
+ ['c', 'cpp'],
+ default_options: [
+ 'b_ndebug=if-release',
+ 'buildtype=release',
+ 'cpp_std=c++17',
+ ],
+ license: 'GPLv3+',
+ meson_version: '>= 0.56.0',
+ version: '1.0.11',
+)
+
+patchage_src_root = meson.current_source_dir()
+major_version = meson.project_version().split('.')[0]
+version_suffix = '@0@-@1@'.format(meson.project_name(), major_version)
+
+#######################
+# Compilers and Flags #
+#######################
+
+# Required tools
+cpp = meson.get_compiler('cpp')
+
+# Set global warning suppressions
+subdir('meson/suppressions')
+add_project_arguments(cpp_suppressions, language: ['cpp'])
+add_project_arguments(['-DFMT_HEADER_ONLY'], language: ['cpp'])
+
+##########################
+# Platform Configuration #
+##########################
+
+patchage_datadir = get_option('prefix') / get_option('datadir') / 'patchage'
+patchage_localedir = get_option('prefix') / get_option('localedir')
+
+platform_defines = [
+ '-DPATCHAGE_VERSION="@0@"'.format(meson.project_version()),
+ '-DPATCHAGE_DATA_DIR="@0@"'.format(patchage_datadir),
+ '-DPATCHAGE_LOCALE_DIR="@0@"'.format(patchage_localedir),
+]
+
+if host_machine.system() in ['gnu', 'linux']
+ platform_defines += ['-D_GNU_SOURCE']
+endif
+
+if get_option('checks').disabled()
+ # Generic build without platform-specific features
+ platform_defines += ['-DPATCHAGE_NO_DEFAULT_CONFIG']
+elif get_option('checks').enabled()
+ # Only use the features detected by the build system
+ platform_defines += ['-DPATCHAGE_NO_DEFAULT_CONFIG']
+
+ dladdr_code = '''#include <dlfcn.h>
+int main(void) { Dl_info info; return dladdr(&info, &info); }'''
+
+ gettext_code = '''#include <libintl.h>
+int main(void) { return !!gettext("hello"); }'''
+
+ jack_metadata_code = '''#include <jack/metadata.h>
+int main(void) { return !!&jack_set_property; }'''
+
+ if cpp.compiles(dladdr_code, args: platform_defines, name: 'dladdr')
+ platform_defines += [
+ '-DHAVE_DLADDR=1',
+ '-DPATCHAGE_BUNDLED=1',
+ ]
+ else
+ platform_defines += ['-DHAVE_DLADDR=0']
+ endif
+
+ platform_defines += '-DHAVE_GETTEXT=@0@'.format(
+ cpp.compiles(gettext_code, args: platform_defines, name: 'gettext').to_int(),
+ )
+
+ platform_defines += '-DHAVE_JACK_METADATA=@0@'.format(
+ cpp.compiles(
+ jack_metadata_code,
+ args: platform_defines,
+ name: 'jack_metadata',
+ ).to_int(),
+ )
+
+endif
+
+################
+# Dependencies #
+################
+
+m_dep = cpp.find_library('m', required: false)
+dl_dep = cpp.find_library('dl', required: false)
+thread_dep = dependency('threads', include_type: 'system')
+
+fmt_dep = dependency(
+ 'fmt',
+ fallback: ['fmt', 'fmt_dep'],
+ include_type: 'system',
+ version: ['>= 9.0.0', '<= 10.0.0'],
+)
+
+gthread_dep = dependency(
+ 'gthread-2.0',
+ include_type: 'system',
+ version: '>= 2.14.0',
+)
+
+glibmm_dep = dependency(
+ 'glibmm-2.4',
+ include_type: 'system',
+ version: '>= 2.14.0',
+)
+
+gtkmm_dep = dependency(
+ 'gtkmm-2.4',
+ include_type: 'system',
+ version: '>= 2.12.0',
+)
+
+ganv_dep = dependency(
+ 'ganv-1',
+ include_type: 'system',
+ version: '>= 1.8.2',
+)
+
+dependencies = [
+ dl_dep,
+ fmt_dep,
+ ganv_dep,
+ glibmm_dep,
+ gthread_dep,
+ gtkmm_dep,
+ m_dep,
+ thread_dep,
+]
+
+#######################
+# Driver Dependencies #
+#######################
+
+# Optional ALSA sequencer support
+alsa_dep = dependency(
+ 'alsa',
+ include_type: 'system',
+ required: get_option('alsa'),
+)
+
+# Optional JACK support
+jack_dep = dependency(
+ 'jack',
+ include_type: 'system',
+ required: get_option('jack'),
+ version: '>= 0.120.0',
+)
+
+# Optional JACK D-Bus support
+
+dbus_dep = dependency(
+ 'dbus-1',
+ include_type: 'system',
+ required: get_option('jack_dbus'),
+)
+
+dbus_glib_dep = dependency(
+ 'dbus-glib-1',
+ include_type: 'system',
+ required: get_option('jack_dbus'),
+)
+
+if jack_dep.found() and dbus_dep.found() and dbus_glib_dep.found()
+ message('Both libjack and D-Bus available, defaulting to libjack')
+endif
+
+#######################
+# Translations (i18n) #
+#######################
+
+subdir('po')
+
+###########
+# Program #
+###########
+
+sources = files(
+ 'src/Canvas.cpp',
+ 'src/CanvasModule.cpp',
+ 'src/Configuration.cpp',
+ 'src/Drivers.cpp',
+ 'src/Legend.cpp',
+ 'src/Metadata.cpp',
+ 'src/Patchage.cpp',
+ 'src/Reactor.cpp',
+ 'src/TextViewLog.cpp',
+ 'src/event_to_string.cpp',
+ 'src/handle_event.cpp',
+ 'src/main.cpp',
+)
+
+if alsa_dep.found()
+ sources += files('src/AlsaDriver.cpp')
+ dependencies += [alsa_dep]
+else
+ sources += files('src/AlsaStubDriver.cpp')
+endif
+
+if jack_dep.found()
+ sources += files('src/JackLibDriver.cpp')
+ dependencies += [jack_dep]
+elif dbus_dep.found() and dbus_glib_dep.found()
+ sources += files('src/JackDbusDriver.cpp')
+ dependencies += [dbus_dep, dbus_glib_dep]
+else
+ sources += files('src/JackStubDriver.cpp')
+endif
+
+executable(
+ 'patchage',
+ sources,
+ cpp_args: cpp_suppressions + platform_defines,
+ dependencies: dependencies,
+ install: true,
+)
+
+########
+# Data #
+########
+
+subdir('icons')
+
+config = configuration_data()
+config.set('PATCHAGE_VERSION', meson.project_version())
+config.set('BINDIR', get_option('prefix') / get_option('bindir'))
+
+configure_file(
+ configuration: config,
+ input: files('src/patchage.ui.in'),
+ install: true,
+ install_dir: get_option('datadir') / 'patchage',
+ output: 'patchage.ui',
+)
+
+configure_file(
+ configuration: config,
+ input: files('patchage.desktop.in'),
+ install: true,
+ install_dir: get_option('datadir') / 'applications',
+ output: 'patchage.desktop',
+)
+
+install_man(files('doc/patchage.1'))
+
+#########
+# Tests #
+#########
+
+if get_option('lint')
+ if not meson.is_subproject()
+ # Check release metadata
+ autoship = find_program('autoship', required: false)
+ if autoship.found()
+ test(
+ 'autoship',
+ autoship,
+ args: ['test', patchage_src_root],
+ suite: 'data',
+ )
+ endif
+ endif
+
+ # Check licensing metadata
+ reuse = find_program('reuse', required: false)
+ if reuse.found()
+ test(
+ 'REUSE',
+ reuse,
+ args: ['--root', patchage_src_root, 'lint'],
+ suite: 'data',
+ )
+ endif
+endif
+
+if not meson.is_subproject()
+ summary('Install prefix', get_option('prefix'))
+ summary('Executables', get_option('prefix') / get_option('bindir'))
+ summary('Man pages', get_option('prefix') / get_option('mandir'))
+endif
diff --git a/meson/suppressions/meson.build b/meson/suppressions/meson.build
new file mode 100644
index 0000000..eea8bb0
--- /dev/null
+++ b/meson/suppressions/meson.build
@@ -0,0 +1,72 @@
+# Copyright 2020-2023 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+# Project-specific warning suppressions.
+#
+# This should be used in conjunction with the generic "warnings" sibling that
+# enables all reasonable warnings for the compiler. It lives here just to keep
+# the top-level meson.build more readable.
+
+#######
+# C++ #
+#######
+
+if is_variable('cpp')
+ cpp_suppressions = []
+
+ if get_option('warning_level') == 'everything'
+ if cpp.get_id() == 'clang'
+ cpp_suppressions += [
+ '-Wno-alloca',
+ '-Wno-c++20-compat',
+ '-Wno-c++98-compat',
+ '-Wno-c++98-compat-pedantic',
+ '-Wno-cast-function-type-strict',
+ '-Wno-cast-qual',
+ '-Wno-double-promotion',
+ '-Wno-float-conversion',
+ '-Wno-float-equal',
+ '-Wno-implicit-float-conversion',
+ '-Wno-padded',
+ '-Wno-pedantic',
+ '-Wno-shorten-64-to-32',
+ '-Wno-sign-conversion',
+ '-Wno-unsafe-buffer-usage',
+ '-Wno-weak-vtables',
+ ]
+
+ if host_machine.system() == 'darwin'
+ cpp_suppressions += [
+ '-Wno-documentation', # JACK
+ '-Wno-documentation-deprecated-sync', # JACK
+ '-Wno-documentation-unknown-command', # JACK
+ '-Wno-reserved-id-macro', # JACK
+ ]
+ endif
+
+ elif cpp.get_id() == 'gcc'
+ cpp_suppressions += [
+ '-Wno-abi-tag',
+ '-Wno-alloca',
+ '-Wno-conditionally-supported',
+ '-Wno-conversion',
+ '-Wno-effc++',
+ '-Wno-float-equal',
+ '-Wno-inline',
+ '-Wno-null-dereference',
+ '-Wno-padded',
+ '-Wno-pedantic',
+ '-Wno-stack-protector',
+ '-Wno-strict-overflow',
+ '-Wno-suggest-attribute=const',
+ '-Wno-suggest-attribute=noreturn',
+ '-Wno-suggest-attribute=pure',
+ '-Wno-suggest-final-methods',
+ '-Wno-suggest-final-types',
+ '-Wno-switch-default',
+ ]
+ endif
+ endif
+
+ cpp_suppressions = cpp.get_supported_arguments(cpp_suppressions)
+endif
diff --git a/meson_options.txt b/meson_options.txt
new file mode 100644
index 0000000..edc88cd
--- /dev/null
+++ b/meson_options.txt
@@ -0,0 +1,26 @@
+# Copyright 2022-2023 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+option('alsa', type: 'feature', value: 'auto', yield: true,
+ description: 'Build ALSA sequencer support')
+
+option('checks', type: 'feature', value: 'enabled', yield: true,
+ description: 'Check for platform-specific features')
+
+option('jack', type: 'feature', value: 'auto', yield: true,
+ description: 'Build JACK audio and MIDI support')
+
+option('jack_dbus', type: 'feature', value: 'auto', yield: true,
+ description: 'Use JACK via D-Bus')
+
+option('lint', type: 'boolean', value: false, yield: true,
+ description: 'Run code quality checks')
+
+option('strict', type: 'boolean', value: false, yield: true,
+ description: 'Enable ultra-strict warnings')
+
+option('tests', type: 'feature', value: 'auto', yield: true,
+ description: 'Build tests')
+
+option('title', type: 'string', value: 'Patchage',
+ description: 'Project title')
diff --git a/osx/Info.plist.in b/osx/Info.plist.in
index 0d8714b..1457536 100644
--- a/osx/Info.plist.in
+++ b/osx/Info.plist.in
@@ -9,7 +9,7 @@
<key>CFBundleExecutable</key>
<string>patchage</string>
<key>CFBundleGetInfoString</key>
- <string>@PATCHAGE_VERSION@, Copyright © 2014 David Robillard</string>
+ <string>@PATCHAGE_VERSION@, Copyright © 2020 David Robillard</string>
<key>CFBundleIconFile</key>
<string>Patchage</string>
<key>CFBundleIconFile</key>
@@ -41,5 +41,7 @@
<key>PANGO_RC_FILE</key>
<string>Resources/pangorc</string>
</dict>
+ <key>NSMicrophoneUsageDescription</key>
+ <string>Patchage needs access to the microphone in order to work with the JACK server.</string>
</dict>
</plist>
diff --git a/osx/bundleify.sh b/osx/bundleify.sh
index 55ff57d..559795b 100755
--- a/osx/bundleify.sh
+++ b/osx/bundleify.sh
@@ -1,5 +1,8 @@
#!/bin/bash
+# Copyright 2011-2017 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
if [ "$#" != 3 ]; then
echo "USAGE: $0 LIB_PREFIX BUNDLE EXE";
exit 1;
diff --git a/patchage.desktop.in b/patchage.desktop.in
index b2f2809..efceb0d 100644
--- a/patchage.desktop.in
+++ b/patchage.desktop.in
@@ -1,9 +1,12 @@
[Desktop Entry]
-Name=@APP_HUMAN_NAME@
-Comment=Connect audio and MIDI applications together and manage audio sessions
-Comment[fr]=Connecter des applications audio et MIDI entre elles, et gérer les sessions audio
-Exec=@BINDIR@/@APP_INSTALL_NAME@
+Name=Patchage
+Comment=Connect audio and MIDI applications together
+Comment[de]=Audio und MIDI Anwendungen verdrahten
+Comment[fr]=Connecter des applications audio et MIDI entre elles
+Comment[ko]=오디오 및 MIDI 응용프로그램을 함께 연결합니다
+Exec=@BINDIR@/patchage
Terminal=false
-Icon=@APP_INSTALL_NAME@
+Icon=patchage
Type=Application
Categories=AudioVideo;Audio;
+Keywords=music;midi;alsa;jack;
diff --git a/po/LINGUAS b/po/LINGUAS
new file mode 100644
index 0000000..d7e570d
--- /dev/null
+++ b/po/LINGUAS
@@ -0,0 +1,4 @@
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+de fr ko
diff --git a/po/POTFILES b/po/POTFILES
new file mode 100644
index 0000000..a5b038c
--- /dev/null
+++ b/po/POTFILES
@@ -0,0 +1,60 @@
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+src/Action.hpp
+src/ActionSink.hpp
+src/AlsaDriver.cpp
+src/AlsaStubDriver.cpp
+src/AudioDriver.hpp
+src/Canvas.cpp
+src/Canvas.hpp
+src/CanvasModule.cpp
+src/CanvasModule.hpp
+src/CanvasPort.hpp
+src/ClientID.hpp
+src/ClientInfo.hpp
+src/ClientType.hpp
+src/Configuration.cpp
+src/Configuration.hpp
+src/Coord.hpp
+src/Driver.hpp
+src/Drivers.cpp
+src/Drivers.hpp
+src/Event.hpp
+src/ILog.hpp
+src/JackDbusDriver.cpp
+src/JackLibDriver.cpp
+src/JackStubDriver.cpp
+src/Legend.cpp
+src/Legend.hpp
+src/Metadata.cpp
+src/Metadata.hpp
+src/Options.hpp
+src/Patchage.cpp
+src/Patchage.hpp
+src/PortID.hpp
+src/PortInfo.hpp
+src/PortNames.hpp
+src/PortType.hpp
+src/Reactor.cpp
+src/Reactor.hpp
+src/Setting.hpp
+src/SignalDirection.hpp
+src/TextViewLog.cpp
+src/TextViewLog.hpp
+src/UIFile.hpp
+src/Widget.hpp
+src/binary_location.h
+src/event_to_string.cpp
+src/event_to_string.hpp
+src/handle_event.cpp
+src/handle_event.hpp
+src/jackey.h
+src/main.cpp
+src/make_alsa_driver.hpp
+src/make_jack_driver.hpp
+# src/patchage.gladep
+# src/patchage.svg
+src/patchage.ui.in
+src/patchage_config.h
+src/warnings.hpp
diff --git a/po/de.po b/po/de.po
new file mode 100644
index 0000000..275319d
--- /dev/null
+++ b/po/de.po
@@ -0,0 +1,153 @@
+# Copyright (C) 2022 David Robillard <d@drobilla.net>
+# This file is distributed under the same license as the patchage package.
+# David Robillard <d@drobilla.net>, 2022.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: patchage 1.0.10\n"
+"Report-Msgid-Bugs-To: https://gitlab.com/drobilla/patchage/issues/new\n"
+"POT-Creation-Date: 2022-08-23 00:52-0400\n"
+"PO-Revision-Date: 2022-08-22 23:24-0000\n"
+"Last-Translator: David Robillard <d@drobilla.net>\n"
+"Language-Team: \n"
+"Language: de\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: src/CanvasModule.cpp:93
+msgid "_Split"
+msgstr "_Aufteilen"
+
+#: src/CanvasModule.cpp:97
+msgid "_Join"
+msgstr "_Vereinen"
+
+#: src/CanvasModule.cpp:101
+msgid "_Disconnect"
+msgstr "_Trennen"
+
+#: src/CanvasPort.hpp:86
+msgid "Disconnect"
+msgstr "Trennen"
+
+#: src/Legend.cpp:27
+msgid "Audio"
+msgstr "Audio"
+
+#: src/Patchage.cpp:453
+msgid "frames at {} kHz ({:0.2f} ms)"
+msgstr "Frames bei {} kHz ({:0.2f} ms)"
+
+#: src/Patchage.cpp:475 src/Patchage.cpp:510 src/patchage.ui.in:391
+msgid "Dropouts: {}"
+msgstr "Aussetzer: {}"
+
+#: src/Patchage.cpp:838
+msgid "Export Image"
+msgstr "Bild exportieren"
+
+#: src/Patchage.cpp:860
+msgid "Draw _Background"
+msgstr "_Hintergrund zeichnen"
+
+#: src/Patchage.cpp:871
+msgid "File exists! Overwrite {}?"
+msgstr "Datei {} existiert! Überschreiben?"
+
+#: src/patchage.ui.in:21
+msgid "_File"
+msgstr "_Datei"
+
+#: src/patchage.ui.in:30
+msgid "_Export Image…"
+msgstr "Bild _exportieren…"
+
+#: src/patchage.ui.in:60
+msgid "_System"
+msgstr "_System"
+
+#: src/patchage.ui.in:67
+msgid "Connect to _JACK"
+msgstr "Verbinde zu _JACK"
+
+#: src/patchage.ui.in:78
+msgid "Disconnect from JACK"
+msgstr "Trenne von JACK"
+
+#: src/patchage.ui.in:95
+msgid "Connect to _ALSA"
+msgstr "Verbinde zu _ALSA"
+
+#: src/patchage.ui.in:106
+msgid "Disconnect from ALSA"
+msgstr "Trenne von ALSA"
+
+#: src/patchage.ui.in:123
+msgid "_View"
+msgstr "_Ansicht"
+
+#: src/patchage.ui.in:132
+msgid "_Messages"
+msgstr "_Nachrichten"
+
+#: src/patchage.ui.in:141
+msgid "Tool_bar"
+msgstr "Werkzeug_leiste"
+
+#: src/patchage.ui.in:157
+msgid "_Human Names"
+msgstr "_Freundliche Namen"
+
+#: src/patchage.ui.in:167
+msgid "_Sort Ports by Name"
+msgstr "Ports nach Namen _sortieren"
+
+#: src/patchage.ui.in:230
+msgid "_Increase Font Size"
+msgstr "Vergrößern"
+
+#: src/patchage.ui.in:239
+msgid "_Decrease Font Size"
+msgstr "Verkleinern"
+
+#: src/patchage.ui.in:248
+msgid "_Normal Font Size"
+msgstr "Normale Schriftgröße"
+
+#: src/patchage.ui.in:272
+msgid "_Arrange"
+msgstr "An_ordnen"
+
+#: src/patchage.ui.in:285
+msgid "Sprung Layou_t"
+msgstr "Elastisches Layout"
+
+#: src/patchage.ui.in:298
+msgid "_Help"
+msgstr "_Hilfe"
+
+#: src/patchage.ui.in:341
+msgid "JACK buffer size and sample rate."
+msgstr "JACK Buffergröße und Samplerate."
+
+#: src/patchage.ui.in:353
+msgid "JACK buffer length in frames."
+msgstr "JACK Buffergröße in Frames."
+
+#: src/patchage.ui.in:365
+msgid "frames at ? kHz (? ms)"
+msgstr "Frames bei ? kHz (? ms)"
+
+#: src/patchage.ui.in:405
+msgid "Clear dropout indicator."
+msgstr "Aussetzerindikator zurücksetzen."
+
+#: src/patchage.ui.in:505
+msgid "A modular patchbay for JACK and ALSA applications."
+msgstr "Eine modulare Patchbay für JACK- und ALSA-Anwendungen."
+
+#. TRANSLATORS: Replace this string with your names, one name per line.
+#: src/patchage.ui.in:1183
+msgid "translator-credits"
+msgstr "David Robillard"
diff --git a/po/fr.po b/po/fr.po
new file mode 100644
index 0000000..fa33621
--- /dev/null
+++ b/po/fr.po
@@ -0,0 +1,154 @@
+# Copyright (C) 2022 Olivier Humbert <trebmuh@tuxfamily.org>
+# This file is distributed under the same license as the patchage package.
+# Olivier Humbert <trebmuh@tuxfamily.org>, 2022.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: patchage 1.0.10\n"
+"Report-Msgid-Bugs-To: https://gitlab.com/drobilla/patchage/issues/new\n"
+"POT-Creation-Date: 2022-08-23 00:52-0400\n"
+"PO-Revision-Date: 2022-08-24 19:35+0900\n"
+"Last-Translator: Olivier Humbert (trebmuh/olinuxx) <trebmuh@tuxfamily.org>\n"
+"Language-Team: French\n"
+"Language: fr\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=(n > 1);\n"
+
+#: src/CanvasModule.cpp:93
+msgid "_Split"
+msgstr "_Séparer"
+
+#: src/CanvasModule.cpp:97
+msgid "_Join"
+msgstr "_Joindre"
+
+#: src/CanvasModule.cpp:101
+msgid "_Disconnect"
+msgstr "_Déconnecter"
+
+#: src/CanvasPort.hpp:86
+msgid "Disconnect"
+msgstr "Déconnecter"
+
+#: src/Legend.cpp:27
+msgid "Audio"
+msgstr "Audio"
+
+#: src/Patchage.cpp:453
+msgid "frames at {} kHz ({:0.2f} ms)"
+msgstr "trames à {} kHz ({:0.2f} ms)"
+
+#: src/Patchage.cpp:475 src/Patchage.cpp:510 src/patchage.ui.in:391
+msgid "Dropouts: {}"
+msgstr "Décrochage : {}"
+
+#: src/Patchage.cpp:838
+msgid "Export Image"
+msgstr "Exporter une image"
+
+#: src/Patchage.cpp:860
+msgid "Draw _Background"
+msgstr "_Dessiner l'arrière-plan"
+
+#: src/Patchage.cpp:871
+msgid "File exists! Overwrite {}?"
+msgstr "Le fichier existe ! Écraser {} ?"
+
+#: src/patchage.ui.in:21
+msgid "_File"
+msgstr "_Fichier"
+
+#: src/patchage.ui.in:30
+msgid "_Export Image…"
+msgstr "_Exporter une image…"
+
+#: src/patchage.ui.in:60
+msgid "_System"
+msgstr "_Système"
+
+#: src/patchage.ui.in:67
+msgid "Connect to _JACK"
+msgstr "Connecter à _JACK"
+
+#: src/patchage.ui.in:78
+msgid "Disconnect from JACK"
+msgstr "Déconnecter de JACK"
+
+#: src/patchage.ui.in:95
+msgid "Connect to _ALSA"
+msgstr "Connecter à _ALSA"
+
+#: src/patchage.ui.in:106
+msgid "Disconnect from ALSA"
+msgstr "Déconnecter d'ALSA"
+
+#: src/patchage.ui.in:123
+msgid "_View"
+msgstr "_Affichage"
+
+#: src/patchage.ui.in:132
+msgid "_Messages"
+msgstr "_Messages"
+
+#: src/patchage.ui.in:141
+msgid "Tool_bar"
+msgstr "_Barre d'outils"
+
+#: src/patchage.ui.in:157
+msgid "_Human Names"
+msgstr "Noms _humains"
+
+#: src/patchage.ui.in:167
+msgid "_Sort Ports by Name"
+msgstr "Trier les port_s par nom"
+
+#: src/patchage.ui.in:230
+msgid "_Increase Font Size"
+msgstr "Augmenter la ta_ille de la police"
+
+#: src/patchage.ui.in:239
+msgid "_Decrease Font Size"
+msgstr "_Diminuer la taille de la police"
+
+#: src/patchage.ui.in:248
+msgid "_Normal Font Size"
+msgstr "Taille de police _normale"
+
+#: src/patchage.ui.in:272
+msgid "_Arrange"
+msgstr "_Arranger"
+
+#: src/patchage.ui.in:285
+msgid "Sprung Layou_t"
+msgstr "Rafraîchir la disposi_tion"
+
+#: src/patchage.ui.in:298
+msgid "_Help"
+msgstr "Aid_e"
+
+#: src/patchage.ui.in:341
+msgid "JACK buffer size and sample rate."
+msgstr "Taille du tampon et du taux d'échantillonnage de JACK."
+
+#: src/patchage.ui.in:353
+msgid "JACK buffer length in frames."
+msgstr "Taille du tampon JACK en trames."
+
+#: src/patchage.ui.in:365
+msgid "frames at ? kHz (? ms)"
+msgstr "trames à ? kHz (? ms)"
+
+#: src/patchage.ui.in:405
+msgid "Clear dropout indicator."
+msgstr "Nettoyer l'indicateur de désynchronisation."
+
+#: src/patchage.ui.in:505
+msgid "A modular patchbay for JACK and ALSA applications."
+msgstr "Une baie de brassage modulaire pour les applications JACK et ALSA."
+
+#. TRANSLATORS: Replace this string with your names, one name per line.
+#: src/patchage.ui.in:1183
+msgid "translator-credits"
+msgstr "Olivier Humbert (trebmuh/olinuxx) <trebmuh@tuxfamily.org>"
diff --git a/po/ko.po b/po/ko.po
new file mode 100644
index 0000000..1d0492a
--- /dev/null
+++ b/po/ko.po
@@ -0,0 +1,155 @@
+# Copyright (C) 2022 Junghee Lee <daemul72@gmail.com>
+# This file is distributed under the same license as the patchage package.
+# Junghee Lee <daemul72@gmail.com>, 2022.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: patchage 1.0.10\n"
+"Report-Msgid-Bugs-To: https://gitlab.com/drobilla/patchage/issues/new\n"
+"POT-Creation-Date: 2022-08-23 00:52-0400\n"
+"PO-Revision-Date: 2022-08-24 03:11+0900\n"
+"Last-Translator: Junghee Lee <daemul72@gmail.com>\n"
+"Language-Team: \n"
+"Language: ko\n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=1; plural=0;\n"
+"X-Generator: Poedit 3.1\n"
+
+#: src/CanvasModule.cpp:93
+msgid "_Split"
+msgstr "분리하기(_S)"
+
+#: src/CanvasModule.cpp:97
+msgid "_Join"
+msgstr "결합하기(_J)"
+
+#: src/CanvasModule.cpp:101
+msgid "_Disconnect"
+msgstr "연결끊기(_D)"
+
+#: src/CanvasPort.hpp:86
+msgid "Disconnect"
+msgstr "연결끊기"
+
+#: src/Legend.cpp:27
+msgid "Audio"
+msgstr "오디오"
+
+#: src/Patchage.cpp:453
+msgid "frames at {} kHz ({:0.2f} ms)"
+msgstr "{} kHz의 프레임 ({:0.2f} ms)"
+
+#: src/Patchage.cpp:475 src/Patchage.cpp:510 src/patchage.ui.in:391
+msgid "Dropouts: {}"
+msgstr "드롭아웃: {}"
+
+#: src/Patchage.cpp:838
+msgid "Export Image"
+msgstr "이미지 내보내기"
+
+#: src/Patchage.cpp:860
+msgid "Draw _Background"
+msgstr "배경 그리기(_B)"
+
+#: src/Patchage.cpp:871
+msgid "File exists! Overwrite {}?"
+msgstr "파일이 존재합니다! {}을(를) 덮어쓰시겠습니까?"
+
+#: src/patchage.ui.in:21
+msgid "_File"
+msgstr "파일(_F)"
+
+#: src/patchage.ui.in:30
+msgid "_Export Image…"
+msgstr "이미지 내보내기(_E)…"
+
+#: src/patchage.ui.in:60
+msgid "_System"
+msgstr "시스템(_S)"
+
+#: src/patchage.ui.in:67
+msgid "Connect to _JACK"
+msgstr "JACK에 연결하기(_J)"
+
+#: src/patchage.ui.in:78
+msgid "Disconnect from JACK"
+msgstr "JACK에서 연결끊기"
+
+#: src/patchage.ui.in:95
+msgid "Connect to _ALSA"
+msgstr "ALSA에 연결하기(_A)"
+
+#: src/patchage.ui.in:106
+msgid "Disconnect from ALSA"
+msgstr "ALSA에서 연결끊기"
+
+#: src/patchage.ui.in:123
+msgid "_View"
+msgstr "보기(_V)"
+
+#: src/patchage.ui.in:132
+msgid "_Messages"
+msgstr "메시지(_M)"
+
+#: src/patchage.ui.in:141
+msgid "Tool_bar"
+msgstr "도구모음(_B)"
+
+#: src/patchage.ui.in:157
+msgid "_Human Names"
+msgstr "사람 이름(_H)"
+
+#: src/patchage.ui.in:167
+msgid "_Sort Ports by Name"
+msgstr "이름순으로 포트 정렬하기(_S)"
+
+#: src/patchage.ui.in:230
+msgid "_Increase Font Size"
+msgstr "글꼴 크기 늘리기(_I)"
+
+#: src/patchage.ui.in:239
+msgid "_Decrease Font Size"
+msgstr "글꼴 크기 줄이기(_D)"
+
+#: src/patchage.ui.in:248
+msgid "_Normal Font Size"
+msgstr "보통 글꼴 크기(_N)"
+
+#: src/patchage.ui.in:272
+msgid "_Arrange"
+msgstr "배열하기(_A)"
+
+#: src/patchage.ui.in:285
+msgid "Sprung Layou_t"
+msgstr "스프링 레이아웃(_T)"
+
+#: src/patchage.ui.in:298
+msgid "_Help"
+msgstr "도움말(_H)"
+
+#: src/patchage.ui.in:341
+msgid "JACK buffer size and sample rate."
+msgstr "JACK 버퍼 크기 및 샘플 속도입니다."
+
+#: src/patchage.ui.in:353
+msgid "JACK buffer length in frames."
+msgstr "프레임 단위의 JACK 버퍼 길이입니다."
+
+#: src/patchage.ui.in:365
+msgid "frames at ? kHz (? ms)"
+msgstr "? kHz의 프레임 (? ms)"
+
+#: src/patchage.ui.in:405
+msgid "Clear dropout indicator."
+msgstr "드롭아웃 표시기를 지웁니다."
+
+#: src/patchage.ui.in:505
+msgid "A modular patchbay for JACK and ALSA applications."
+msgstr "JACK 및 ALSA 응용프로그램을 위한 모듈식 패치베이입니다."
+
+#. TRANSLATORS: Replace this string with your names, one name per line.
+#: src/patchage.ui.in:1183
+msgid "translator-credits"
+msgstr "Junghee Lee <daemul72@gmail.com>"
diff --git a/po/meson.build b/po/meson.build
new file mode 100644
index 0000000..26c1bf4
--- /dev/null
+++ b/po/meson.build
@@ -0,0 +1,27 @@
+# Copyright 2020-2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+i18n = import('i18n')
+
+add_project_arguments(
+ ['-DGETTEXT_PACKAGE="@0@"'.format(meson.project_name())],
+ language: 'cpp',
+)
+
+i18n.gettext(
+ meson.project_name(),
+ args: [
+ '--add-comments',
+ '--check=bullet-unicode',
+ '--check=ellipsis-unicode',
+ '--check=quote-unicode',
+ '--check=space-ellipsis',
+ '--copyright-holder=FULL NAME <EMAIL@ADDRESS>',
+ '--from-code=UTF-8',
+ '--msgid-bugs-address=https://gitlab.com/drobilla/patchage/issues/new',
+ '--package-version=@0@'.format(meson.project_version()),
+ '--sentence-end=double-space',
+ '--sort-by-file',
+ '--width=80',
+ ],
+)
diff --git a/po/patchage.pot b/po/patchage.pot
new file mode 100644
index 0000000..3b465cc
--- /dev/null
+++ b/po/patchage.pot
@@ -0,0 +1,154 @@
+# SOME DESCRIPTIVE TITLE.
+# Copyright (C) YEAR FULL NAME <EMAIL@ADDRESS>
+# This file is distributed under the same license as the patchage package.
+# FIRST AUTHOR <EMAIL@ADDRESS>, YEAR.
+#
+msgid ""
+msgstr ""
+"Project-Id-Version: patchage 1.0.10\n"
+"Report-Msgid-Bugs-To: https://gitlab.com/drobilla/patchage/issues/new\n"
+"POT-Creation-Date: 2022-08-23 00:52-0400\n"
+"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
+"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
+"Language-Team: LANGUAGE <LL@li.org>\n"
+"Language: \n"
+"MIME-Version: 1.0\n"
+"Content-Type: text/plain; charset=UTF-8\n"
+"Content-Transfer-Encoding: 8bit\n"
+
+#: src/CanvasModule.cpp:93
+msgid "_Split"
+msgstr ""
+
+#: src/CanvasModule.cpp:97
+msgid "_Join"
+msgstr ""
+
+#: src/CanvasModule.cpp:101
+msgid "_Disconnect"
+msgstr ""
+
+#: src/CanvasPort.hpp:86
+msgid "Disconnect"
+msgstr ""
+
+#: src/Legend.cpp:27
+msgid "Audio"
+msgstr ""
+
+#: src/Patchage.cpp:453
+msgid "frames at {} kHz ({:0.2f} ms)"
+msgstr ""
+
+#: src/Patchage.cpp:475 src/Patchage.cpp:510 src/patchage.ui.in:391
+msgid "Dropouts: {}"
+msgstr ""
+
+#: src/Patchage.cpp:838
+msgid "Export Image"
+msgstr ""
+
+#: src/Patchage.cpp:860
+msgid "Draw _Background"
+msgstr ""
+
+#: src/Patchage.cpp:871
+msgid "File exists! Overwrite {}?"
+msgstr ""
+
+#: src/patchage.ui.in:21
+msgid "_File"
+msgstr ""
+
+#: src/patchage.ui.in:30
+msgid "_Export Image…"
+msgstr ""
+
+#: src/patchage.ui.in:60
+msgid "_System"
+msgstr ""
+
+#: src/patchage.ui.in:67
+msgid "Connect to _JACK"
+msgstr ""
+
+#: src/patchage.ui.in:78
+msgid "Disconnect from JACK"
+msgstr ""
+
+#: src/patchage.ui.in:95
+msgid "Connect to _ALSA"
+msgstr ""
+
+#: src/patchage.ui.in:106
+msgid "Disconnect from ALSA"
+msgstr ""
+
+#: src/patchage.ui.in:123
+msgid "_View"
+msgstr ""
+
+#: src/patchage.ui.in:132
+msgid "_Messages"
+msgstr ""
+
+#: src/patchage.ui.in:141
+msgid "Tool_bar"
+msgstr ""
+
+#: src/patchage.ui.in:157
+msgid "_Human Names"
+msgstr ""
+
+#: src/patchage.ui.in:167
+msgid "_Sort Ports by Name"
+msgstr ""
+
+#: src/patchage.ui.in:230
+msgid "_Increase Font Size"
+msgstr ""
+
+#: src/patchage.ui.in:239
+msgid "_Decrease Font Size"
+msgstr ""
+
+#: src/patchage.ui.in:248
+msgid "_Normal Font Size"
+msgstr ""
+
+#: src/patchage.ui.in:272
+msgid "_Arrange"
+msgstr ""
+
+#: src/patchage.ui.in:285
+msgid "Sprung Layou_t"
+msgstr ""
+
+#: src/patchage.ui.in:298
+msgid "_Help"
+msgstr ""
+
+#: src/patchage.ui.in:341
+msgid "JACK buffer size and sample rate."
+msgstr ""
+
+#: src/patchage.ui.in:353
+msgid "JACK buffer length in frames."
+msgstr ""
+
+#: src/patchage.ui.in:365
+msgid "frames at ? kHz (? ms)"
+msgstr ""
+
+#: src/patchage.ui.in:405
+msgid "Clear dropout indicator."
+msgstr ""
+
+#: src/patchage.ui.in:505
+msgid "A modular patchbay for JACK and ALSA applications."
+msgstr ""
+
+#. TRANSLATORS: Replace this string with your names, one name per line.
+#: src/patchage.ui.in:1183
+msgid "translator-credits"
+msgstr ""
diff --git a/src/Action.hpp b/src/Action.hpp
new file mode 100644
index 0000000..c791445
--- /dev/null
+++ b/src/Action.hpp
@@ -0,0 +1,90 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_ACTION_HPP
+#define PATCHAGE_ACTION_HPP
+
+#include "ClientID.hpp"
+#include "PortID.hpp"
+#include "Setting.hpp"
+#include "SignalDirection.hpp"
+
+#include <variant>
+
+namespace patchage {
+namespace action {
+
+struct ChangeSetting {
+ Setting setting;
+};
+
+struct ConnectPorts {
+ PortID tail;
+ PortID head;
+};
+
+struct DecreaseFontSize {};
+
+struct DisconnectClient {
+ ClientID client;
+ SignalDirection direction;
+};
+
+struct DisconnectPort {
+ PortID port;
+};
+
+struct DisconnectPorts {
+ PortID tail;
+ PortID head;
+};
+
+struct IncreaseFontSize {};
+
+struct MoveModule {
+ ClientID client;
+ SignalDirection direction;
+ double x;
+ double y;
+};
+
+struct Refresh {};
+
+struct ResetFontSize {};
+
+struct SplitModule {
+ ClientID client;
+};
+
+struct UnsplitModule {
+ ClientID client;
+};
+
+struct ZoomFull {};
+struct ZoomIn {};
+struct ZoomNormal {};
+struct ZoomOut {};
+
+} // namespace action
+
+/// A high-level action from the user
+using Action = std::variant<action::ChangeSetting,
+ action::ConnectPorts,
+ action::DecreaseFontSize,
+ action::DisconnectClient,
+ action::DisconnectPort,
+ action::DisconnectPorts,
+ action::IncreaseFontSize,
+ action::MoveModule,
+ action::Refresh,
+ action::ResetFontSize,
+ action::SplitModule,
+ action::UnsplitModule,
+ action::ZoomFull,
+ action::ZoomIn,
+ action::ZoomNormal,
+ action::ZoomOut>;
+
+} // namespace patchage
+
+#endif // PATCHAGE_ACTION_HPP
diff --git a/src/ActionSink.hpp b/src/ActionSink.hpp
new file mode 100644
index 0000000..7e023f8
--- /dev/null
+++ b/src/ActionSink.hpp
@@ -0,0 +1,18 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_ACTION_SINK_HPP
+#define PATCHAGE_ACTION_SINK_HPP
+
+#include "Action.hpp"
+
+#include <functional>
+
+namespace patchage {
+
+/// Sink function for user actions
+using ActionSink = std::function<void(Action)>;
+
+} // namespace patchage
+
+#endif // PATCHAGE_ACTION_SINK_HPP
diff --git a/src/AlsaDriver.cpp b/src/AlsaDriver.cpp
index 1ebd12d..2771abd 100644
--- a/src/AlsaDriver.cpp
+++ b/src/AlsaDriver.cpp
@@ -1,585 +1,531 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "make_alsa_driver.hpp"
+
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "ClientType.hpp"
+#include "Driver.hpp"
+#include "Event.hpp"
+#include "ILog.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+#include "PortType.hpp"
+#include "SignalDirection.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <alsa/asoundlib.h>
+#include <pthread.h>
#include <cassert>
+#include <cstdint>
+#include <functional>
+#include <limits>
+#include <memory>
+#include <optional>
#include <set>
-#include <string>
#include <utility>
-#include <boost/format.hpp>
+namespace patchage {
+namespace {
-#include "AlsaDriver.hpp"
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageModule.hpp"
-#include "PatchagePort.hpp"
+/// Driver for ALSA Sequencer ports
+class AlsaDriver : public Driver
+{
+public:
+ explicit AlsaDriver(ILog& log, EventSink emit_event);
-using std::endl;
-using std::string;
-using boost::format;
+ AlsaDriver(const AlsaDriver&) = delete;
+ AlsaDriver& operator=(const AlsaDriver&) = delete;
-AlsaDriver::AlsaDriver(Patchage* app)
- : _app(app)
- , _seq(NULL)
-{
-}
+ AlsaDriver(AlsaDriver&&) = delete;
+ AlsaDriver& operator=(AlsaDriver&&) = delete;
-AlsaDriver::~AlsaDriver()
-{
- detach();
-}
+ ~AlsaDriver() override;
-/** Attach to ALSA. */
-void
-AlsaDriver::attach(bool /*launch_daemon*/)
-{
- int ret = snd_seq_open(&_seq, "default", SND_SEQ_OPEN_DUPLEX, 0);
- if (ret) {
- _app->error_msg("Alsa: Unable to attach.");
- _seq = NULL;
- } else {
- _app->info_msg("Alsa: Attached.");
+ void attach(bool launch_daemon) override;
+ void detach() override;
- snd_seq_set_client_name(_seq, "Patchage");
+ bool is_attached() const override { return (_seq != nullptr); }
- pthread_attr_t attr;
- pthread_attr_init(&attr);
- pthread_attr_setstacksize(&attr, 50000);
+ void refresh(const EventSink& sink) override;
- ret = pthread_create(&_refresh_thread, &attr, &AlsaDriver::refresh_main, this);
- if (ret)
- _app->error_msg("Alsa: Failed to start refresh thread.");
+ bool connect(const PortID& tail_id, const PortID& head_id) override;
- signal_attached.emit();
- }
-}
+ bool disconnect(const PortID& tail_id, const PortID& head_id) override;
-void
-AlsaDriver::detach()
-{
- if (_seq) {
- pthread_cancel(_refresh_thread);
- pthread_join(_refresh_thread, NULL);
- snd_seq_close(_seq);
- _seq = NULL;
- signal_detached.emit();
- _app->info_msg("Alsa: Detached.");
- }
-}
+private:
+ bool create_refresh_port();
+ static void* refresh_main(void* me);
+ void _refresh_main();
+
+ ILog& _log;
+ snd_seq_t* _seq{nullptr};
+ pthread_t _refresh_thread{};
+
+ struct SeqAddrComparator {
+ bool operator()(const snd_seq_addr_t& a, const snd_seq_addr_t& b) const
+ {
+ return ((a.client < b.client) ||
+ ((a.client == b.client) && a.port < b.port));
+ }
+ };
+
+ using Ignored = std::set<snd_seq_addr_t, SeqAddrComparator>;
+
+ Ignored _ignored;
+
+ bool ignore(const snd_seq_addr_t& addr, bool add = true);
+};
-static bool
-is_alsa_port(const PatchagePort* port)
+PortID
+addr_to_id(const snd_seq_addr_t& addr, const bool is_input)
{
- return port->type() == ALSA_MIDI;
+ return PortID::alsa(addr.client, addr.port, is_input);
}
-/** Destroy all JACK (canvas) ports.
- */
-void
-AlsaDriver::destroy_all()
+SignalDirection
+port_direction(const snd_seq_port_info_t* const pinfo)
{
- _app->canvas()->remove_ports(is_alsa_port);
- _modules.clear();
- _port_addrs.clear();
+ const int caps = snd_seq_port_info_get_capability(pinfo);
+
+ if ((caps & SND_SEQ_PORT_CAP_READ) && (caps & SND_SEQ_PORT_CAP_WRITE)) {
+ return SignalDirection::duplex;
+ }
+
+ if (caps & SND_SEQ_PORT_CAP_READ) {
+ return SignalDirection::output;
+ }
+
+ if (caps & SND_SEQ_PORT_CAP_WRITE) {
+ return SignalDirection::input;
+ }
+
+ return SignalDirection::duplex;
}
-/** Refresh all Alsa Midi ports and connections.
- */
-void
-AlsaDriver::refresh()
+ClientInfo
+client_info(snd_seq_client_info_t* const cinfo)
{
- if (!is_attached())
- return;
-
- assert(_seq);
-
- _modules.clear();
- _ignored.clear();
- _port_addrs.clear();
-
- snd_seq_client_info_t* cinfo;
- snd_seq_client_info_alloca(&cinfo);
- snd_seq_client_info_set_client(cinfo, -1);
-
- snd_seq_port_info_t* pinfo;
- snd_seq_port_info_alloca(&pinfo);
-
- PatchageModule* parent = NULL;
- PatchagePort* port = NULL;
-
- // Create port views
- while (snd_seq_query_next_client(_seq, cinfo) >= 0) {
- snd_seq_port_info_set_client(pinfo, snd_seq_client_info_get_client(cinfo));
- snd_seq_port_info_set_port(pinfo, -1);
- while (snd_seq_query_next_port(_seq, pinfo) >= 0) {
- const snd_seq_addr_t& addr = *snd_seq_port_info_get_addr(pinfo);
- if (ignore(addr)) {
- continue;
- }
-
- create_port_view_internal(_app, addr, parent, port);
- }
- }
-
- // Create connections
- snd_seq_client_info_set_client(cinfo, -1);
- while (snd_seq_query_next_client(_seq, cinfo) >= 0) {
- snd_seq_port_info_set_client(pinfo, snd_seq_client_info_get_client(cinfo));
- snd_seq_port_info_set_port(pinfo, -1);
- while (snd_seq_query_next_port(_seq, pinfo) >= 0) {
- const snd_seq_addr_t* addr = snd_seq_port_info_get_addr(pinfo);
- if (ignore(*addr)) {
- continue;
- }
-
- PatchagePort* port = _app->canvas()->find_port(PortID(*addr, false));
- if (!port) {
- continue;
- }
-
- snd_seq_query_subscribe_t* subsinfo;
- snd_seq_query_subscribe_alloca(&subsinfo);
- snd_seq_query_subscribe_set_root(subsinfo, addr);
- snd_seq_query_subscribe_set_index(subsinfo, 0);
- while (!snd_seq_query_port_subscribers(_seq, subsinfo)) {
- const snd_seq_addr_t* addr2 = snd_seq_query_subscribe_get_addr(subsinfo);
- if (addr2) {
- const PortID id2(*addr2, true);
- PatchagePort* port2 = _app->canvas()->find_port(id2);
- if (port2 && !_app->canvas()->get_edge(port, port2)) {
- _app->canvas()->make_connection(port, port2);
- }
- }
-
- snd_seq_query_subscribe_set_index(
- subsinfo, snd_seq_query_subscribe_get_index(subsinfo) + 1);
- }
- }
- }
+ return {snd_seq_client_info_get_name(cinfo)};
}
-PatchagePort*
-AlsaDriver::create_port_view(Patchage* patchage,
- const PortID& id)
+PortInfo
+port_info(const snd_seq_port_info_t* const pinfo)
{
- PatchageModule* parent = NULL;
- PatchagePort* port = NULL;
- create_port_view_internal(patchage, id.id.alsa_addr, parent, port);
- return port;
+ const int type = snd_seq_port_info_get_type(pinfo);
+
+ return {snd_seq_port_info_get_name(pinfo),
+ PortType::alsa_midi,
+ port_direction(pinfo),
+ snd_seq_port_info_get_port(pinfo),
+ (type & SND_SEQ_PORT_TYPE_APPLICATION) == 0};
}
-PatchageModule*
-AlsaDriver::find_module(uint8_t client_id, ModuleType type)
+AlsaDriver::AlsaDriver(ILog& log, EventSink emit_event)
+ : Driver{std::move(emit_event)}
+ , _log(log)
+{}
+
+AlsaDriver::~AlsaDriver()
{
- const Modules::const_iterator i = _modules.find(client_id);
- if (i == _modules.end())
- return NULL;
-
- PatchageModule* io_module = NULL;
- for (Modules::const_iterator j = i;
- j != _modules.end() && j->first == client_id;
- ++j) {
- if (j->second->type() == type) {
- return j->second;
- } else if (j->second->type() == InputOutput) {
- io_module = j->second;
- }
- }
-
- // Return InputOutput module for Input or Output, or NULL if not found
- return io_module;
+ detach();
}
-PatchageModule*
-AlsaDriver::find_or_create_module(
- Patchage* patchage,
- uint8_t client_id,
- const std::string& client_name,
- ModuleType type)
+void
+AlsaDriver::attach(bool /*launch_daemon*/)
{
- PatchageModule* m = find_module(client_id, type);
- if (!m) {
- m = new PatchageModule(patchage, client_name, type);
- m->load_location();
- _app->canvas()->add_module(client_name, m);
- _modules.insert(std::make_pair(client_id, m));
- }
- return m;
+ int ret = snd_seq_open(&_seq, "default", SND_SEQ_OPEN_DUPLEX, 0);
+ if (ret) {
+ _log.error("[ALSA] Unable to attach");
+ _seq = nullptr;
+ } else {
+ _emit_event(event::DriverAttached{ClientType::alsa});
+
+ snd_seq_set_client_name(_seq, "Patchage");
+
+ pthread_attr_t attr;
+ pthread_attr_init(&attr);
+ pthread_attr_setstacksize(&attr, 50000);
+
+ ret =
+ pthread_create(&_refresh_thread, &attr, &AlsaDriver::refresh_main, this);
+ if (ret) {
+ _log.error("[ALSA] Failed to start refresh thread");
+ }
+ }
}
void
-AlsaDriver::create_port_view_internal(
- Patchage* patchage,
- snd_seq_addr_t addr,
- PatchageModule*& m,
- PatchagePort*& port)
+AlsaDriver::detach()
{
- if (ignore(addr))
- return;
-
- snd_seq_client_info_t* cinfo;
- snd_seq_client_info_alloca(&cinfo);
- snd_seq_client_info_set_client(cinfo, addr.client);
- snd_seq_get_any_client_info(_seq, addr.client, cinfo);
-
- snd_seq_port_info_t* pinfo;
- snd_seq_port_info_alloca(&pinfo);
- snd_seq_port_info_set_client(pinfo, addr.client);
- snd_seq_port_info_set_port(pinfo, addr.port);
- snd_seq_get_any_port_info(_seq, addr.client, addr.port, pinfo);
-
- const string client_name = snd_seq_client_info_get_name(cinfo);
- const string port_name = snd_seq_port_info_get_name(pinfo);
- bool is_input = false;
- bool is_duplex = false;
- bool is_application = true;
-
- int caps = snd_seq_port_info_get_capability(pinfo);
- int type = snd_seq_port_info_get_type(pinfo);
-
- // Figure out direction
- if ((caps & SND_SEQ_PORT_CAP_READ) && (caps & SND_SEQ_PORT_CAP_WRITE))
- is_duplex = true;
- else if (caps & SND_SEQ_PORT_CAP_READ)
- is_input = false;
- else if (caps & SND_SEQ_PORT_CAP_WRITE)
- is_input = true;
-
- is_application = (type & SND_SEQ_PORT_TYPE_APPLICATION);
-
- // Because there would be name conflicts, we must force a split if (stupid)
- // alsa duplex ports are present on the client
- bool split = false;
- if (is_duplex) {
- split = true;
- if (!_app->conf()->get_module_split(client_name, !is_application)) {
- _app->conf()->set_module_split(client_name, true);
- }
- } else {
- split = _app->conf()->get_module_split(client_name, !is_application);
- }
-
- /*cout << "ALSA PORT: " << client_name << " : " << port_name
- << " is_application = " << is_application
- << " is_duplex = " << is_duplex
- << " split = " << split << endl;*/
-
- if (!split) {
- m = find_or_create_module(_app, addr.client, client_name, InputOutput);
- if (!m->get_port(port_name)) {
- port = create_port(*m, port_name, is_input, addr);
- port->show();
- }
-
- } else { // split
- ModuleType type = ((is_input) ? Input : Output);
- m = find_or_create_module(_app, addr.client, client_name, type);
- if (!m->get_port(port_name)) {
- port = create_port(*m, port_name, is_input, addr);
- port->show();
- }
-
- if (is_duplex) {
- type = ((!is_input) ? Input : Output);
- m = find_or_create_module(_app, addr.client, client_name, type);
- if (!m->get_port(port_name)) {
- port = create_port(*m, port_name, !is_input, addr);
- port->show();
- }
- }
- }
+ if (_seq) {
+ pthread_cancel(_refresh_thread);
+ pthread_join(_refresh_thread, nullptr);
+ snd_seq_close(_seq);
+ _seq = nullptr;
+ _emit_event(event::DriverDetached{ClientType::alsa});
+ }
}
-PatchagePort*
-AlsaDriver::create_port(PatchageModule& parent,
- const string& name, bool is_input, snd_seq_addr_t addr)
+void
+AlsaDriver::refresh(const EventSink& sink)
{
- PatchagePort* ret = new PatchagePort(
- parent, ALSA_MIDI, name, "", is_input,
- _app->conf()->get_port_color(ALSA_MIDI),
- _app->show_human_names());
-
- dynamic_cast<PatchageCanvas*>(parent.canvas())->index_port(
- PortID(addr, is_input), ret);
-
- _app->canvas()->index_port(PortID(addr, is_input), ret);
- _port_addrs.insert(std::make_pair(ret, PortID(addr, is_input)));
- return ret;
+ if (!is_attached() || !_seq) {
+ return;
+ }
+
+ _ignored.clear();
+
+ snd_seq_client_info_t* cinfo = nullptr;
+ snd_seq_client_info_alloca(&cinfo);
+ snd_seq_client_info_set_client(cinfo, -1);
+
+ snd_seq_port_info_t* pinfo = nullptr;
+ snd_seq_port_info_alloca(&pinfo);
+
+ // Emit all clients
+ snd_seq_client_info_set_client(cinfo, -1);
+ while (snd_seq_query_next_client(_seq, cinfo) >= 0) {
+ const auto client_id = snd_seq_client_info_get_client(cinfo);
+
+ assert(client_id < std::numeric_limits<uint8_t>::max());
+ sink({event::ClientCreated{ClientID::alsa(static_cast<uint8_t>(client_id)),
+ client_info(cinfo)}});
+ }
+
+ // Emit all ports
+ snd_seq_client_info_set_client(cinfo, -1);
+ while (snd_seq_query_next_client(_seq, cinfo) >= 0) {
+ const auto client_id = snd_seq_client_info_get_client(cinfo);
+
+ snd_seq_port_info_set_client(pinfo, client_id);
+ snd_seq_port_info_set_port(pinfo, -1);
+ while (snd_seq_query_next_port(_seq, pinfo) >= 0) {
+ const auto addr = *snd_seq_port_info_get_addr(pinfo);
+ if (!ignore(addr)) {
+ const auto caps = snd_seq_port_info_get_capability(pinfo);
+ auto info = port_info(pinfo);
+
+ if (caps & SND_SEQ_PORT_CAP_READ) {
+ info.direction = SignalDirection::output;
+ sink({event::PortCreated{addr_to_id(addr, false), info}});
+ }
+
+ if (caps & SND_SEQ_PORT_CAP_WRITE) {
+ info.direction = SignalDirection::input;
+ sink({event::PortCreated{addr_to_id(addr, true), info}});
+ }
+ }
+ }
+ }
+
+ // Emit all connections
+ snd_seq_client_info_set_client(cinfo, -1);
+ while (snd_seq_query_next_client(_seq, cinfo) >= 0) {
+ const auto client_id = snd_seq_client_info_get_client(cinfo);
+
+ snd_seq_port_info_set_client(pinfo, client_id);
+ snd_seq_port_info_set_port(pinfo, -1);
+ while (snd_seq_query_next_port(_seq, pinfo) >= 0) {
+ const auto port_addr = *snd_seq_port_info_get_addr(pinfo);
+ const auto caps = snd_seq_port_info_get_capability(pinfo);
+
+ if (!ignore(port_addr) && (caps & SND_SEQ_PORT_CAP_READ)) {
+ const auto tail_id = addr_to_id(port_addr, false);
+
+ snd_seq_query_subscribe_t* sinfo = nullptr;
+ snd_seq_query_subscribe_alloca(&sinfo);
+ snd_seq_query_subscribe_set_type(sinfo, SND_SEQ_QUERY_SUBS_READ);
+ snd_seq_query_subscribe_set_root(sinfo, &port_addr);
+ snd_seq_query_subscribe_set_index(sinfo, 0);
+ while (!snd_seq_query_port_subscribers(_seq, sinfo)) {
+ const auto head_addr = *snd_seq_query_subscribe_get_addr(sinfo);
+ const auto head_id = addr_to_id(head_addr, true);
+
+ sink({event::PortsConnected{tail_id, head_id}});
+
+ snd_seq_query_subscribe_set_index(
+ sinfo, snd_seq_query_subscribe_get_index(sinfo) + 1);
+ }
+ }
+ }
+ }
}
bool
AlsaDriver::ignore(const snd_seq_addr_t& addr, bool add)
{
- if (_ignored.find(addr) != _ignored.end())
- return true;
-
- if (!add)
- return false;
-
- snd_seq_client_info_t* cinfo;
- snd_seq_client_info_alloca(&cinfo);
- snd_seq_client_info_set_client(cinfo, addr.client);
- snd_seq_get_any_client_info(_seq, addr.client, cinfo);
-
- snd_seq_port_info_t* pinfo;
- snd_seq_port_info_alloca(&pinfo);
- snd_seq_port_info_set_client(pinfo, addr.client);
- snd_seq_port_info_set_port(pinfo, addr.port);
- snd_seq_get_any_port_info(_seq, addr.client, addr.port, pinfo);
-
- const int type = snd_seq_port_info_get_type(pinfo);
- const int caps = snd_seq_port_info_get_capability(pinfo);
-
- if (caps & SND_SEQ_PORT_CAP_NO_EXPORT) {
- _ignored.insert(addr);
- return true;
- } else if ( !( (caps & SND_SEQ_PORT_CAP_READ)
- || (caps & SND_SEQ_PORT_CAP_WRITE)
- || (caps & SND_SEQ_PORT_CAP_DUPLEX))) {
- _ignored.insert(addr);
- return true;
- } else if ((snd_seq_client_info_get_type(cinfo) != SND_SEQ_USER_CLIENT)
- && ((type == SND_SEQ_PORT_SYSTEM_TIMER
- || type == SND_SEQ_PORT_SYSTEM_ANNOUNCE))) {
- _ignored.insert(addr);
- return true;
- }
-
- return false;
+ if (_ignored.find(addr) != _ignored.end()) {
+ return true;
+ }
+
+ if (!add) {
+ return false;
+ }
+
+ snd_seq_client_info_t* cinfo = nullptr;
+ snd_seq_client_info_alloca(&cinfo);
+ snd_seq_client_info_set_client(cinfo, addr.client);
+ snd_seq_get_any_client_info(_seq, addr.client, cinfo);
+
+ snd_seq_port_info_t* pinfo = nullptr;
+ snd_seq_port_info_alloca(&pinfo);
+ snd_seq_port_info_set_client(pinfo, addr.client);
+ snd_seq_port_info_set_port(pinfo, addr.port);
+ snd_seq_get_any_port_info(_seq, addr.client, addr.port, pinfo);
+
+ const int type = snd_seq_port_info_get_type(pinfo);
+ const int caps = snd_seq_port_info_get_capability(pinfo);
+
+ if (caps & SND_SEQ_PORT_CAP_NO_EXPORT) {
+ _ignored.insert(addr);
+ return true;
+ }
+
+ if (!((caps & SND_SEQ_PORT_CAP_READ) || (caps & SND_SEQ_PORT_CAP_WRITE) ||
+ (caps & SND_SEQ_PORT_CAP_DUPLEX))) {
+ _ignored.insert(addr);
+ return true;
+ }
+
+ if ((snd_seq_client_info_get_type(cinfo) != SND_SEQ_USER_CLIENT) &&
+ ((type == SND_SEQ_PORT_SYSTEM_TIMER ||
+ type == SND_SEQ_PORT_SYSTEM_ANNOUNCE))) {
+ _ignored.insert(addr);
+ return true;
+ }
+
+ return false;
}
-/** Connects two Alsa Midi ports.
- *
- * \return Whether connection succeeded.
- */
bool
-AlsaDriver::connect(PatchagePort* src_port,
- PatchagePort* dst_port)
+AlsaDriver::connect(const PortID& tail_id, const PortID& head_id)
{
- PortAddrs::const_iterator s = _port_addrs.find(src_port);
- PortAddrs::const_iterator d = _port_addrs.find(dst_port);
-
- if (s == _port_addrs.end() || d == _port_addrs.end()) {
- _app->error_msg("Alsa: Attempt to connect port with no address.");
- return false;
- }
-
- const PortID src = s->second;
- const PortID dst = d->second;
-
- if (src.id.alsa_addr.client == dst.id.alsa_addr.client
- && src.id.alsa_addr.port == dst.id.alsa_addr.port) {
- _app->warning_msg("Alsa: Refusing to connect port to itself.");
- return false;
- }
-
- bool result = true;
-
- snd_seq_port_subscribe_t* subs;
- snd_seq_port_subscribe_malloc(&subs);
- snd_seq_port_subscribe_set_sender(subs, &src.id.alsa_addr);
- snd_seq_port_subscribe_set_dest(subs, &dst.id.alsa_addr);
- snd_seq_port_subscribe_set_exclusive(subs, 0);
- snd_seq_port_subscribe_set_time_update(subs, 0);
- snd_seq_port_subscribe_set_time_real(subs, 0);
-
- // Already connected (shouldn't happen)
- if (!snd_seq_get_port_subscription(_seq, subs)) {
- _app->error_msg("Alsa: Attempt to double subscribe ports.");
- result = false;
- }
-
- int ret = snd_seq_subscribe_port(_seq, subs);
- if (ret < 0) {
- _app->error_msg((format("Alsa: Subscription failed (%1%).")
- % snd_strerror(ret)).str());
- result = false;
- }
-
- if (result)
- _app->info_msg(string("Alsa: Connected ")
- + src_port->full_name() + " => " + dst_port->full_name());
- else
- _app->error_msg(string("Alsa: Unable to connect ")
- + src_port->full_name() + " => " + dst_port->full_name());
-
- return (!result);
+ if (tail_id.type() != PortID::Type::alsa ||
+ head_id.type() != PortID::Type::alsa) {
+ _log.error("[ALSA] Attempt to connect non-ALSA ports");
+ return false;
+ }
+
+ const snd_seq_addr_t tail_addr = {tail_id.alsa_client(), tail_id.alsa_port()};
+ const snd_seq_addr_t head_addr = {head_id.alsa_client(), head_id.alsa_port()};
+
+ if (tail_addr.client == head_addr.client &&
+ tail_addr.port == head_addr.port) {
+ _log.warning("[ALSA] Refusing to connect port to itself");
+ return false;
+ }
+
+ bool result = true;
+
+ snd_seq_port_subscribe_t* subs = nullptr;
+ snd_seq_port_subscribe_malloc(&subs);
+ snd_seq_port_subscribe_set_sender(subs, &tail_addr);
+ snd_seq_port_subscribe_set_dest(subs, &head_addr);
+ snd_seq_port_subscribe_set_exclusive(subs, 0);
+ snd_seq_port_subscribe_set_time_update(subs, 0);
+ snd_seq_port_subscribe_set_time_real(subs, 0);
+
+ // Already connected (shouldn't happen)
+ if (!snd_seq_get_port_subscription(_seq, subs)) {
+ _log.error("[ALSA] Attempt to double subscribe ports");
+ result = false;
+ }
+
+ const int ret = snd_seq_subscribe_port(_seq, subs);
+ if (ret < 0) {
+ _log.error(
+ fmt::format("[ALSA] Subscription failed ({})", snd_strerror(ret)));
+ result = false;
+ }
+
+ if (!result) {
+ _log.error(
+ fmt::format("[ALSA] Failed to connect {} => {}", tail_id, head_id));
+ }
+
+ return (!result);
}
-/** Disconnects two Alsa Midi ports.
- *
- * \return Whether disconnection succeeded.
- */
bool
-AlsaDriver::disconnect(PatchagePort* src_port,
- PatchagePort* dst_port)
+AlsaDriver::disconnect(const PortID& tail_id, const PortID& head_id)
{
- PortAddrs::const_iterator s = _port_addrs.find(src_port);
- PortAddrs::const_iterator d = _port_addrs.find(dst_port);
-
- if (s == _port_addrs.end() || d == _port_addrs.end()) {
- _app->error_msg("Alsa: Attempt to connect port with no address");
- return false;
- }
-
- const PortID src = s->second;
- const PortID dst = d->second;
-
- snd_seq_port_subscribe_t* subs;
- snd_seq_port_subscribe_malloc(&subs);
- snd_seq_port_subscribe_set_sender(subs, &src.id.alsa_addr);
- snd_seq_port_subscribe_set_dest(subs, &dst.id.alsa_addr);
- snd_seq_port_subscribe_set_exclusive(subs, 0);
- snd_seq_port_subscribe_set_time_update(subs, 0);
- snd_seq_port_subscribe_set_time_real(subs, 0);
-
- // Not connected (shouldn't happen)
- if (snd_seq_get_port_subscription(_seq, subs) != 0) {
- _app->error_msg("Alsa: Attempt to unsubscribe ports that are not subscribed.");
- return false;
- }
-
- int ret = snd_seq_unsubscribe_port(_seq, subs);
- if (ret < 0) {
- _app->error_msg(string("Alsa: Unable to disconnect ")
- + src_port->full_name() + " => " + dst_port->full_name()
- + "(" + snd_strerror(ret) + ")");
- return false;
- }
-
- _app->info_msg(string("Alsa: Disconnected ")
- + src_port->full_name() + " => " + dst_port->full_name());
-
- return true;
+ if (tail_id.type() != PortID::Type::alsa ||
+ head_id.type() != PortID::Type::alsa) {
+ _log.error("[ALSA] Attempt to disconnect non-ALSA ports");
+ return false;
+ }
+
+ const snd_seq_addr_t tail_addr = {tail_id.alsa_client(), tail_id.alsa_port()};
+ const snd_seq_addr_t head_addr = {head_id.alsa_client(), head_id.alsa_port()};
+
+ snd_seq_port_subscribe_t* subs = nullptr;
+ snd_seq_port_subscribe_malloc(&subs);
+ snd_seq_port_subscribe_set_sender(subs, &tail_addr);
+ snd_seq_port_subscribe_set_dest(subs, &head_addr);
+ snd_seq_port_subscribe_set_exclusive(subs, 0);
+ snd_seq_port_subscribe_set_time_update(subs, 0);
+ snd_seq_port_subscribe_set_time_real(subs, 0);
+
+ // Not connected (shouldn't happen)
+ if (snd_seq_get_port_subscription(_seq, subs) != 0) {
+ _log.error("[ALSA] Attempt to unsubscribe ports that are not subscribed");
+ return false;
+ }
+
+ const int ret = snd_seq_unsubscribe_port(_seq, subs);
+ if (ret < 0) {
+ _log.error(fmt::format("[ALSA] Failed to disconnect {} => {} ({})",
+ tail_id,
+ head_id,
+ snd_strerror(ret)));
+ return false;
+ }
+
+ return true;
}
bool
AlsaDriver::create_refresh_port()
{
- snd_seq_port_info_t* port_info;
- snd_seq_port_info_alloca(&port_info);
- snd_seq_port_info_set_name(port_info, "System Announcement Reciever");
- snd_seq_port_info_set_type(port_info, SND_SEQ_PORT_TYPE_APPLICATION);
- snd_seq_port_info_set_capability(port_info,
- SND_SEQ_PORT_CAP_WRITE|SND_SEQ_PORT_CAP_SUBS_WRITE|SND_SEQ_PORT_CAP_NO_EXPORT);
-
- int ret = snd_seq_create_port(_seq, port_info);
- if (ret) {
- _app->error_msg((format("Alsa: Error creating port (%1%): ")
- % snd_strerror(ret)).str());
- return false;
- }
-
- // Subscribe the port to the system announcer
- ret = snd_seq_connect_from(_seq,
- snd_seq_port_info_get_port(port_info),
- SND_SEQ_CLIENT_SYSTEM,
- SND_SEQ_PORT_SYSTEM_ANNOUNCE);
- if (ret) {
- _app->error_msg((format("Alsa: Failed to connect to system announce port (%1%)")
- % snd_strerror(ret)).str());
- return false;
- }
-
- return true;
+ snd_seq_port_info_t* port_info = nullptr;
+ snd_seq_port_info_alloca(&port_info);
+ snd_seq_port_info_set_name(port_info, "System Announcement Receiver");
+ snd_seq_port_info_set_type(port_info, SND_SEQ_PORT_TYPE_APPLICATION);
+ snd_seq_port_info_set_capability(port_info,
+ SND_SEQ_PORT_CAP_WRITE |
+ SND_SEQ_PORT_CAP_SUBS_WRITE |
+ SND_SEQ_PORT_CAP_NO_EXPORT);
+
+ int ret = snd_seq_create_port(_seq, port_info);
+ if (ret) {
+ _log.error(
+ fmt::format("[ALSA] Error creating port ({})", snd_strerror(ret)));
+ return false;
+ }
+
+ // Subscribe the port to the system announcer
+ ret = snd_seq_connect_from(_seq,
+ snd_seq_port_info_get_port(port_info),
+ SND_SEQ_CLIENT_SYSTEM,
+ SND_SEQ_PORT_SYSTEM_ANNOUNCE);
+ if (ret) {
+ _log.error(
+ fmt::format("[ALSA] Failed to connect to system announce port ({})",
+ snd_strerror(ret)));
+ return false;
+ }
+
+ return true;
}
void*
AlsaDriver::refresh_main(void* me)
{
- AlsaDriver* ad = (AlsaDriver*)me;
- ad->_refresh_main();
- return NULL;
+ auto* ad = static_cast<AlsaDriver*>(me);
+ ad->_refresh_main();
+ return nullptr;
}
void
AlsaDriver::_refresh_main()
{
- if (!create_refresh_port()) {
- _app->error_msg("Alsa: Could not create listen port, auto-refresh disabled.");
- return;
- }
-
- int caps = 0;
-
- snd_seq_client_info_t* cinfo;
- snd_seq_client_info_alloca(&cinfo);
-
- snd_seq_port_info_t* pinfo;
- snd_seq_port_info_alloca(&pinfo);
-
- snd_seq_event_t* ev;
- while (snd_seq_event_input(_seq, &ev) > 0) {
- assert(ev);
-
- Glib::Mutex::Lock lock(_events_mutex);
-
- switch (ev->type) {
- case SND_SEQ_EVENT_PORT_SUBSCRIBED:
- if (!ignore(ev->data.connect.sender) && !ignore(ev->data.connect.dest))
- _events.push(PatchageEvent(PatchageEvent::CONNECTION,
- ev->data.connect.sender, ev->data.connect.dest));
- break;
- case SND_SEQ_EVENT_PORT_UNSUBSCRIBED:
- if (!ignore(ev->data.connect.sender) && !ignore(ev->data.connect.dest))
- _events.push(PatchageEvent(PatchageEvent::DISCONNECTION,
- ev->data.connect.sender, ev->data.connect.dest));
- break;
- case SND_SEQ_EVENT_PORT_START:
- snd_seq_get_any_client_info(_seq, ev->data.addr.client, cinfo);
- snd_seq_get_any_port_info(_seq, ev->data.addr.client, ev->data.addr.port, pinfo);
- caps = snd_seq_port_info_get_capability(pinfo);
-
- if (!ignore(ev->data.addr))
- _events.push(PatchageEvent(PatchageEvent::PORT_CREATION,
- PortID(ev->data.addr, (caps & SND_SEQ_PORT_CAP_READ))));
- break;
- case SND_SEQ_EVENT_PORT_EXIT:
- if (!ignore(ev->data.addr, false)) {
- // Note: getting caps at this point does not work
- // Delete both inputs and outputs (in case this is a duplex port)
- _events.push(PatchageEvent(PatchageEvent::PORT_DESTRUCTION,
- PortID(ev->data.addr, true)));
- _events.push(PatchageEvent(PatchageEvent::PORT_DESTRUCTION,
- PortID(ev->data.addr, false)));
- _port_addrs.erase(_app->canvas()->find_port(
- PortID(ev->data.addr, false)));
- _port_addrs.erase(_app->canvas()->find_port(
- PortID(ev->data.addr, true)));
- }
- break;
- case SND_SEQ_EVENT_CLIENT_CHANGE:
- case SND_SEQ_EVENT_CLIENT_EXIT:
- case SND_SEQ_EVENT_CLIENT_START:
- case SND_SEQ_EVENT_PORT_CHANGE:
- case SND_SEQ_EVENT_RESET:
- default:
- //_events.push(PatchageEvent(PatchageEvent::REFRESH));
- break;
- }
- }
+ if (!create_refresh_port()) {
+ _log.error("[ALSA] Could not create listen port, auto-refresh disabled");
+ return;
+ }
+
+ int caps = 0;
+
+ snd_seq_client_info_t* cinfo = nullptr;
+ snd_seq_client_info_alloca(&cinfo);
+
+ snd_seq_port_info_t* pinfo = nullptr;
+ snd_seq_port_info_alloca(&pinfo);
+
+ snd_seq_event_t* ev = nullptr;
+ while (snd_seq_event_input(_seq, &ev) > 0) {
+ assert(ev);
+
+ switch (ev->type) {
+ case SND_SEQ_EVENT_CLIENT_START:
+ snd_seq_get_any_client_info(_seq, ev->data.addr.client, cinfo);
+ _emit_event(event::ClientCreated{
+ ClientID::alsa(ev->data.addr.client),
+ client_info(cinfo),
+ });
+ break;
+
+ case SND_SEQ_EVENT_CLIENT_EXIT:
+ _emit_event(event::ClientDestroyed{
+ ClientID::alsa(ev->data.addr.client),
+ });
+ break;
+
+ case SND_SEQ_EVENT_CLIENT_CHANGE:
+ break;
+
+ case SND_SEQ_EVENT_PORT_START:
+ snd_seq_get_any_client_info(_seq, ev->data.addr.client, cinfo);
+ snd_seq_get_any_port_info(
+ _seq, ev->data.addr.client, ev->data.addr.port, pinfo);
+ caps = snd_seq_port_info_get_capability(pinfo);
+
+ if (!ignore(ev->data.addr)) {
+ _emit_event(event::PortCreated{
+ addr_to_id(ev->data.addr, (caps & SND_SEQ_PORT_CAP_WRITE)),
+ port_info(pinfo),
+ });
+ }
+ break;
+
+ case SND_SEQ_EVENT_PORT_EXIT:
+ if (!ignore(ev->data.addr, false)) {
+ // Note: getting caps at this point does not work
+ // Delete both inputs and outputs (to handle duplex ports)
+ _emit_event(event::PortDestroyed{addr_to_id(ev->data.addr, true)});
+ _emit_event(event::PortDestroyed{addr_to_id(ev->data.addr, false)});
+ }
+ break;
+
+ case SND_SEQ_EVENT_PORT_CHANGE:
+ break;
+
+ case SND_SEQ_EVENT_PORT_SUBSCRIBED:
+ if (!ignore(ev->data.connect.sender) && !ignore(ev->data.connect.dest)) {
+ _emit_event(
+ event::PortsConnected{addr_to_id(ev->data.connect.sender, false),
+ addr_to_id(ev->data.connect.dest, true)});
+ }
+ break;
+
+ case SND_SEQ_EVENT_PORT_UNSUBSCRIBED:
+ if (!ignore(ev->data.connect.sender) && !ignore(ev->data.connect.dest)) {
+ _emit_event(
+ event::PortsDisconnected{addr_to_id(ev->data.connect.sender, false),
+ addr_to_id(ev->data.connect.dest, true)});
+ }
+ break;
+
+ case SND_SEQ_EVENT_RESET:
+ default:
+ break;
+ }
+ }
}
-void
-AlsaDriver::process_events(Patchage* app)
+} // namespace
+
+std::unique_ptr<Driver>
+make_alsa_driver(ILog& log, Driver::EventSink emit_event)
{
- Glib::Mutex::Lock lock(_events_mutex);
- while (!_events.empty()) {
- PatchageEvent& ev = _events.front();
- ev.execute(app);
- _events.pop();
- }
+ return std::unique_ptr<Driver>{new AlsaDriver{log, std::move(emit_event)}};
}
+
+} // namespace patchage
diff --git a/src/AlsaDriver.hpp b/src/AlsaDriver.hpp
deleted file mode 100644
index 8bf837a..0000000
--- a/src/AlsaDriver.hpp
+++ /dev/null
@@ -1,116 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_ALSADRIVER_HPP
-#define PATCHAGE_ALSADRIVER_HPP
-
-#include <queue>
-#include <set>
-#include <string>
-#include <map>
-
-#include <alsa/asoundlib.h>
-#include <pthread.h>
-
-#include "Driver.hpp"
-#include "PatchageModule.hpp"
-
-class Patchage;
-class PatchagePort;
-
-/** Handles all externally driven functionality, registering ports etc.
- */
-class AlsaDriver : public Driver
-{
-public:
- explicit AlsaDriver(Patchage* app);
- ~AlsaDriver();
-
- void attach(bool launch_daemon = false);
- void detach();
-
- bool is_attached() const { return (_seq != NULL); }
-
- void refresh();
- void destroy_all();
-
- PatchagePort* create_port_view(
- Patchage* patchage,
- const PortID& id);
-
- bool connect(PatchagePort* src_port,
- PatchagePort* dst_port);
-
- bool disconnect(PatchagePort* src_port,
- PatchagePort* dst_port);
-
- void print_addr(snd_seq_addr_t addr);
-
- void process_events(Patchage* app);
-
-private:
- bool create_refresh_port();
- static void* refresh_main(void* me);
- void _refresh_main();
-
- PatchageModule* find_module(uint8_t client_id, ModuleType type);
-
- PatchageModule*
- find_or_create_module(
- Patchage* patchage,
- uint8_t client_id,
- const std::string& client_name,
- ModuleType type);
-
- void
- create_port_view_internal(
- Patchage* patchage,
- snd_seq_addr_t addr,
- PatchageModule*& parent,
- PatchagePort*& port);
-
- PatchagePort* create_port(
- PatchageModule& parent,
- const std::string& name,
- bool is_input,
- snd_seq_addr_t addr);
-
- Patchage* _app;
- snd_seq_t* _seq;
- pthread_t _refresh_thread;
-
- Glib::Mutex _events_mutex;
- std::queue<PatchageEvent> _events;
-
- struct SeqAddrComparator {
- bool operator() (const snd_seq_addr_t& a, const snd_seq_addr_t& b) const {
- return ((a.client < b.client) || ((a.client == b.client) && a.port < b.port));
- }
- };
-
- typedef std::set<snd_seq_addr_t, SeqAddrComparator> Ignored;
- Ignored _ignored;
-
- typedef std::multimap<uint8_t, PatchageModule*> Modules;
- Modules _modules;
-
- typedef std::map<PatchagePort*, PortID> PortAddrs;
- PortAddrs _port_addrs;
-
- bool ignore(const snd_seq_addr_t& addr, bool add=true);
-};
-
-#endif // PATCHAGE_ALSADRIVER_HPP
diff --git a/src/AlsaStubDriver.cpp b/src/AlsaStubDriver.cpp
new file mode 100644
index 0000000..5dc8f0b
--- /dev/null
+++ b/src/AlsaStubDriver.cpp
@@ -0,0 +1,18 @@
+// Copyright 2007-2022 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "AudioDriver.hpp"
+#include "Driver.hpp"
+#include "make_alsa_driver.hpp"
+
+#include <memory>
+
+namespace patchage {
+
+std::unique_ptr<Driver>
+make_alsa_driver(ILog&, Driver::EventSink)
+{
+ return nullptr;
+}
+
+} // namespace patchage
diff --git a/src/AudioDriver.hpp b/src/AudioDriver.hpp
new file mode 100644
index 0000000..ac3d5ec
--- /dev/null
+++ b/src/AudioDriver.hpp
@@ -0,0 +1,40 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_AUDIODRIVER_HPP
+#define PATCHAGE_AUDIODRIVER_HPP
+
+#include "Driver.hpp"
+
+#include <cstdint>
+#include <utility>
+
+namespace patchage {
+
+/// Base class for drivers that work with an audio system
+class AudioDriver : public Driver
+{
+public:
+ explicit AudioDriver(EventSink emit_event)
+ : Driver{std::move(emit_event)}
+ {}
+
+ /// Return the number of xruns (dropouts) since the last reset
+ virtual uint32_t xruns() = 0;
+
+ /// Reset the xrun count
+ virtual void reset_xruns() = 0;
+
+ /// Return the current buffer size in frames
+ virtual uint32_t buffer_size() = 0;
+
+ /// Try to set the current buffer size in frames, return true on success
+ virtual bool set_buffer_size(uint32_t frames) = 0;
+
+ /// Return the current sample rate in Hz
+ virtual uint32_t sample_rate() = 0;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_AUDIODRIVER_HPP
diff --git a/src/Canvas.cpp b/src/Canvas.cpp
new file mode 100644
index 0000000..3624933
--- /dev/null
+++ b/src/Canvas.cpp
@@ -0,0 +1,342 @@
+// Copyright 2007-2022 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "Canvas.hpp"
+
+#include "Action.hpp"
+#include "ActionSink.hpp"
+#include "CanvasModule.hpp"
+#include "CanvasPort.hpp"
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "ClientType.hpp"
+#include "Configuration.hpp"
+#include "Coord.hpp"
+#include "ILog.hpp"
+#include "Metadata.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+#include "PortNames.hpp"
+#include "Setting.hpp"
+#include "SignalDirection.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Canvas.hpp"
+#include "ganv/Edge.hpp"
+#include "ganv/Module.hpp"
+#include "ganv/Node.hpp"
+#include "ganv/Port.hpp"
+#include "ganv/module.h"
+#include "ganv/types.h"
+PATCHAGE_RESTORE_WARNINGS
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <gdk/gdkkeysyms.h>
+#include <sigc++/functors/mem_fun.h>
+#include <sigc++/signal.h>
+
+#include <cassert>
+#include <cstdlib>
+#include <functional>
+#include <optional>
+#include <set>
+#include <string>
+#include <utility>
+
+namespace patchage {
+namespace {
+
+struct RemovePortsData {
+ using Predicate = bool (*)(const CanvasPort*);
+
+ explicit RemovePortsData(Predicate p)
+ : pred(p)
+ {}
+
+ Predicate pred;
+ std::set<ClientID> empty_clients;
+};
+
+void
+delete_port_if_matches(GanvPort* port, void* cdata)
+{
+ auto* data = static_cast<RemovePortsData*>(cdata);
+ auto* pport = dynamic_cast<CanvasPort*>(Glib::wrap(port));
+ if (pport && data->pred(pport)) {
+ delete pport;
+ }
+}
+
+void
+remove_ports_matching(GanvNode* node, void* cdata)
+{
+ if (!GANV_IS_MODULE(node)) {
+ return;
+ }
+
+ Ganv::Module* cmodule = Glib::wrap(GANV_MODULE(node));
+ auto* pmodule = dynamic_cast<CanvasModule*>(cmodule);
+ if (!pmodule) {
+ return;
+ }
+
+ auto* data = static_cast<RemovePortsData*>(cdata);
+
+ pmodule->for_each_port(delete_port_if_matches, data);
+
+ if (pmodule->num_ports() == 0) {
+ data->empty_clients.insert(pmodule->id());
+ }
+}
+
+} // namespace
+
+Canvas::Canvas(ILog& log, ActionSink& action_sink, int width, int height)
+ : Ganv::Canvas(width, height)
+ , _log(log)
+ , _action_sink(action_sink)
+{
+ signal_event.connect(sigc::mem_fun(this, &Canvas::on_event));
+ signal_connect.connect(sigc::mem_fun(this, &Canvas::on_connect));
+ signal_disconnect.connect(sigc::mem_fun(this, &Canvas::on_disconnect));
+}
+
+CanvasPort*
+Canvas::create_port(Configuration& conf,
+ const Metadata& metadata,
+ const PortID& id,
+ const PortInfo& info)
+{
+ const auto client_id = id.client();
+
+ const auto port_name =
+ ((id.type() == PortID::Type::alsa) ? info.label : PortNames(id).port());
+
+ // Figure out the client name, for ALSA we need the metadata cache
+ std::string client_name;
+ if (id.type() == PortID::Type::alsa) {
+ const auto client_info = metadata.client(client_id);
+ if (!client_info) {
+ _log.error(fmt::format(
+ u8"(Unable to add port “{}”, client “{}” is unknown)", id, client_id));
+
+ return nullptr;
+ }
+
+ client_name = client_info->label;
+ } else {
+ client_name = PortNames(id).client();
+ }
+
+ // Determine the module type to place the port on in case of splitting
+ SignalDirection module_type = SignalDirection::duplex;
+ if (conf.get_module_split(client_name, info.is_terminal)) {
+ module_type = info.direction;
+ }
+
+ // Find or create parent module
+ CanvasModule* parent = find_module(client_id, module_type);
+ if (!parent) {
+ // Determine initial position
+ Coord loc;
+ if (!conf.get_module_location(client_name, module_type, loc)) {
+ // No position saved, come up with a pseudo-random one
+ loc.x = 20 + rand() % 640;
+ loc.y = 20 + rand() % 480;
+
+ conf.set_module_location(client_name, module_type, loc);
+ }
+
+ parent = new CanvasModule(
+ *this, _action_sink, client_name, module_type, client_id, loc.x, loc.y);
+
+ add_module(client_id, parent);
+ }
+
+ if (parent->get_port(id)) {
+ // TODO: Update existing port?
+ _log.error(fmt::format(
+ u8"(Module “{}” already has port “{}”)", client_name, port_name));
+ return nullptr;
+ }
+
+ auto* const port = new CanvasPort(*parent,
+ info.type,
+ id,
+ port_name,
+ info.label,
+ info.direction == SignalDirection::input,
+ conf.get_port_color(info.type),
+ conf.get<setting::HumanNames>(),
+ info.order);
+
+ _port_index.insert(std::make_pair(id, port));
+
+ return port;
+}
+
+CanvasModule*
+Canvas::find_module(const ClientID& id, const SignalDirection type)
+{
+ auto i = _module_index.find(id);
+
+ CanvasModule* io_module = nullptr;
+ for (; i != _module_index.end() && i->first == id; ++i) {
+ if (i->second->type() == type) {
+ return i->second;
+ }
+
+ if (i->second->type() == SignalDirection::duplex) {
+ io_module = i->second;
+ }
+ }
+
+ // Return duplex module for input or output (or nullptr if not found)
+ return io_module;
+}
+
+void
+Canvas::remove_module(const ClientID& id)
+{
+ auto i = _module_index.find(id);
+ while (i != _module_index.end() && i->first == id) {
+ delete i->second;
+ i = _module_index.erase(i);
+ }
+}
+
+CanvasPort*
+Canvas::find_port(const PortID& id)
+{
+ auto i = _port_index.find(id);
+ if (i != _port_index.end()) {
+ assert(i->second->get_module());
+ return i->second;
+ }
+
+ return nullptr;
+}
+
+void
+Canvas::remove_port(const PortID& id)
+{
+ CanvasPort* const port = find_port(id);
+ _port_index.erase(id);
+ delete port;
+}
+
+void
+Canvas::remove_ports(bool (*pred)(const CanvasPort*))
+{
+ RemovePortsData data(pred);
+
+ for_each_node(remove_ports_matching, &data);
+
+ for (auto i = _port_index.begin(); i != _port_index.end();) {
+ auto next = i;
+ ++next;
+ if (pred(i->second)) {
+ _port_index.erase(i);
+ }
+ i = next;
+ }
+
+ for (const ClientID& id : data.empty_clients) {
+ remove_module(id);
+ }
+}
+
+void
+Canvas::on_connect(Ganv::Node* port1, Ganv::Node* port2)
+{
+ auto* const p1 = dynamic_cast<CanvasPort*>(port1);
+ auto* const p2 = dynamic_cast<CanvasPort*>(port2);
+
+ if (p1 && p2) {
+ if (p1->is_output() && p2->is_input()) {
+ _action_sink(action::ConnectPorts{p1->id(), p2->id()});
+ } else if (p2->is_output() && p1->is_input()) {
+ _action_sink(action::ConnectPorts{p2->id(), p1->id()});
+ }
+ }
+}
+
+void
+Canvas::on_disconnect(Ganv::Node* port1, Ganv::Node* port2)
+{
+ auto* const p1 = dynamic_cast<CanvasPort*>(port1);
+ auto* const p2 = dynamic_cast<CanvasPort*>(port2);
+
+ if (p1 && p2) {
+ if (p1->is_output() && p2->is_input()) {
+ _action_sink(action::DisconnectPorts{p1->id(), p2->id()});
+ } else if (p2->is_output() && p1->is_input()) {
+ _action_sink(action::DisconnectPorts{p2->id(), p1->id()});
+ }
+ }
+}
+
+void
+Canvas::add_module(const ClientID& id, CanvasModule* module)
+{
+ _module_index.emplace(id, module);
+
+ // Join partners, if applicable
+ CanvasModule* in_module = nullptr;
+ CanvasModule* out_module = nullptr;
+ if (module->type() == SignalDirection::input) {
+ in_module = module;
+ out_module = find_module(id, SignalDirection::output);
+ } else if (module->type() == SignalDirection::output) {
+ in_module = find_module(id, SignalDirection::input);
+ out_module = module;
+ }
+
+ if (in_module && out_module) {
+ out_module->set_partner(in_module);
+ }
+}
+
+void
+disconnect_edge(GanvEdge* edge, void* data)
+{
+ auto* canvas = static_cast<Canvas*>(data);
+ Ganv::Edge* edgemm = Glib::wrap(edge);
+
+ if (canvas && edgemm) {
+ canvas->on_disconnect(edgemm->get_tail(), edgemm->get_head());
+ }
+}
+
+bool
+Canvas::on_event(GdkEvent* ev)
+{
+ if (ev->type == GDK_KEY_PRESS && ev->key.keyval == GDK_KEY_Delete) {
+ for_each_selected_edge(disconnect_edge, this);
+ clear_selection();
+ return true;
+ }
+
+ return false;
+}
+
+bool
+Canvas::make_connection(Ganv::Node* tail, Ganv::Node* head)
+{
+ new Ganv::Edge(*this, tail, head);
+ return true;
+}
+
+void
+Canvas::clear()
+{
+ _port_index.clear();
+ _module_index.clear();
+ Ganv::Canvas::clear();
+}
+
+} // namespace patchage
diff --git a/src/Canvas.hpp b/src/Canvas.hpp
new file mode 100644
index 0000000..81e4d61
--- /dev/null
+++ b/src/Canvas.hpp
@@ -0,0 +1,81 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CANVAS_HPP
+#define PATCHAGE_CANVAS_HPP
+
+#include "ActionSink.hpp"
+#include "ClientID.hpp"
+#include "PortID.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Canvas.hpp"
+#include "ganv/types.h"
+PATCHAGE_RESTORE_WARNINGS
+
+#include <gdk/gdk.h>
+
+#include <map>
+
+namespace Ganv {
+class Node;
+} // namespace Ganv
+
+namespace patchage {
+
+enum class SignalDirection;
+
+struct PortInfo;
+
+class CanvasModule;
+class CanvasPort;
+class ILog;
+class Metadata;
+class Configuration;
+
+class Canvas : public Ganv::Canvas
+{
+public:
+ Canvas(ILog& log, ActionSink& action_sink, int width, int height);
+
+ CanvasPort* create_port(Configuration& conf,
+ const Metadata& metadata,
+ const PortID& id,
+ const PortInfo& info);
+
+ CanvasModule* find_module(const ClientID& id, SignalDirection type);
+ CanvasPort* find_port(const PortID& id);
+
+ void remove_module(const ClientID& id);
+
+ void remove_ports(bool (*pred)(const CanvasPort*));
+
+ void add_module(const ClientID& id, CanvasModule* module);
+
+ bool make_connection(Ganv::Node* tail, Ganv::Node* head);
+
+ void remove_port(const PortID& id);
+
+ void clear() override;
+
+private:
+ using PortIndex = std::map<const PortID, CanvasPort*>;
+ using ModuleIndex = std::multimap<const ClientID, CanvasModule*>;
+
+ friend void disconnect_edge(GanvEdge*, void*);
+
+ bool on_event(GdkEvent* ev);
+
+ void on_connect(Ganv::Node* port1, Ganv::Node* port2);
+ void on_disconnect(Ganv::Node* port1, Ganv::Node* port2);
+
+ ILog& _log;
+ ActionSink& _action_sink;
+ PortIndex _port_index;
+ ModuleIndex _module_index;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_CANVAS_HPP
diff --git a/src/CanvasModule.cpp b/src/CanvasModule.cpp
new file mode 100644
index 0000000..4015819
--- /dev/null
+++ b/src/CanvasModule.cpp
@@ -0,0 +1,155 @@
+// Copyright 2010-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "CanvasModule.hpp"
+
+#include "Action.hpp"
+#include "ActionSink.hpp"
+#include "Canvas.hpp"
+#include "CanvasPort.hpp"
+#include "ClientID.hpp"
+#include "PortID.hpp"
+#include "SignalDirection.hpp"
+#include "i18n.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Module.hpp"
+#include "ganv/Port.hpp"
+PATCHAGE_RESTORE_WARNINGS
+
+#include <gtkmm/menu.h>
+#include <gtkmm/menu_elems.h>
+#include <gtkmm/menuitem.h>
+#include <gtkmm/menushell.h>
+#include <sigc++/functors/mem_fun.h>
+#include <sigc++/signal.h>
+
+#include <cassert>
+#include <functional>
+#include <memory>
+#include <utility>
+
+namespace patchage {
+
+CanvasModule::CanvasModule(Canvas& canvas,
+ ActionSink& action_sink,
+ const std::string& name,
+ SignalDirection type,
+ ClientID id,
+ double x,
+ double y)
+ : Module(canvas, name, x, y)
+ , _action_sink(action_sink)
+ , _name(name)
+ , _type(type)
+ , _id(std::move(id))
+{
+ signal_event().connect(sigc::mem_fun(this, &CanvasModule::on_event));
+ signal_moved().connect(sigc::mem_fun(this, &CanvasModule::on_moved));
+}
+
+void
+CanvasModule::update_menu()
+{
+ if (!_menu) {
+ return;
+ }
+
+ if (_type == SignalDirection::duplex) {
+ bool has_in = false;
+ bool has_out = false;
+ for (const auto* p : *this) {
+ if (p) {
+ if (p->is_input()) {
+ has_in = true;
+ } else {
+ has_out = true;
+ }
+
+ if (has_in && has_out) {
+ break;
+ }
+ }
+ }
+
+ if (has_in && has_out) {
+ _menu->items()[0].show(); // Show "Split" menu item
+ } else {
+ _menu->items()[0].hide(); // Hide "Split" menu item
+ }
+ }
+}
+
+bool
+CanvasModule::show_menu(GdkEventButton* ev)
+{
+ _menu = std::make_unique<Gtk::Menu>();
+
+ Gtk::Menu::MenuList& items = _menu->items();
+
+ if (_type == SignalDirection::duplex) {
+ items.push_back(Gtk::Menu_Helpers::MenuElem(
+ T("_Split"), sigc::mem_fun(this, &CanvasModule::on_split)));
+ update_menu();
+ } else {
+ items.push_back(Gtk::Menu_Helpers::MenuElem(
+ T("_Join"), sigc::mem_fun(this, &CanvasModule::on_join)));
+ }
+
+ items.push_back(Gtk::Menu_Helpers::MenuElem(
+ T("_Disconnect"), sigc::mem_fun(this, &CanvasModule::on_disconnect)));
+
+ _menu->popup(ev->button, ev->time);
+ return true;
+}
+
+bool
+CanvasModule::on_event(GdkEvent* ev)
+{
+ if (ev->type == GDK_BUTTON_PRESS && ev->button.button == 3) {
+ return show_menu(&ev->button);
+ }
+ return false;
+}
+
+void
+CanvasModule::on_moved(double x, double y)
+{
+ _action_sink(action::MoveModule{_id, _type, x, y});
+}
+
+void
+CanvasModule::on_split()
+{
+ assert(_type == SignalDirection::duplex);
+ _action_sink(action::SplitModule{_id});
+}
+
+void
+CanvasModule::on_join()
+{
+ assert(_type != SignalDirection::duplex);
+ _action_sink(action::UnsplitModule{_id});
+}
+
+void
+CanvasModule::on_disconnect()
+{
+ _action_sink(action::DisconnectClient{_id, _type});
+}
+
+CanvasPort*
+CanvasModule::get_port(const PortID& id)
+{
+ for (Ganv::Port* p : *this) {
+ auto* pport = dynamic_cast<CanvasPort*>(p);
+ if (pport && pport->id() == id) {
+ return pport;
+ }
+ }
+
+ return nullptr;
+}
+
+} // namespace patchage
diff --git a/src/CanvasModule.hpp b/src/CanvasModule.hpp
new file mode 100644
index 0000000..e15a6b8
--- /dev/null
+++ b/src/CanvasModule.hpp
@@ -0,0 +1,74 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CANVASMODULE_HPP
+#define PATCHAGE_CANVASMODULE_HPP
+
+#include "ActionSink.hpp"
+#include "ClientID.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Module.hpp"
+PATCHAGE_RESTORE_WARNINGS
+
+#include <gdk/gdk.h>
+#include <gtkmm/menu.h>
+
+#include <memory>
+#include <string>
+
+namespace patchage {
+
+enum class SignalDirection;
+
+struct PortID;
+
+class Canvas;
+class CanvasPort;
+
+class CanvasModule : public Ganv::Module
+{
+public:
+ CanvasModule(Canvas& canvas,
+ ActionSink& action_sink,
+ const std::string& name,
+ SignalDirection type,
+ ClientID id,
+ double x,
+ double y);
+
+ CanvasModule(const CanvasModule&) = delete;
+ CanvasModule& operator=(const CanvasModule&) = delete;
+
+ CanvasModule(CanvasModule&&) = delete;
+ CanvasModule& operator=(CanvasModule&&) = delete;
+
+ ~CanvasModule() override = default;
+
+ bool show_menu(GdkEventButton* ev);
+ void update_menu();
+
+ CanvasPort* get_port(const PortID& id);
+
+ SignalDirection type() const { return _type; }
+ ClientID id() const { return _id; }
+ const std::string& name() const { return _name; }
+
+protected:
+ bool on_event(GdkEvent* ev) override;
+ void on_moved(double x, double y);
+ void on_split();
+ void on_join();
+ void on_disconnect();
+
+ ActionSink& _action_sink;
+ std::unique_ptr<Gtk::Menu> _menu;
+ std::string _name;
+ SignalDirection _type;
+ ClientID _id;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_CANVASMODULE_HPP
diff --git a/src/CanvasPort.hpp b/src/CanvasPort.hpp
new file mode 100644
index 0000000..0fc2f04
--- /dev/null
+++ b/src/CanvasPort.hpp
@@ -0,0 +1,108 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CANVASPORT_HPP
+#define PATCHAGE_CANVASPORT_HPP
+
+#include "PortID.hpp"
+#include "PortType.hpp"
+#include "i18n.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Port.hpp"
+PATCHAGE_RESTORE_WARNINGS
+
+#include <gdk/gdk.h>
+#include <gtkmm/menu.h>
+#include <gtkmm/menu_elems.h>
+#include <gtkmm/menushell.h>
+#include <gtkmm/object.h>
+#include <sigc++/functors/mem_fun.h>
+#include <sigc++/signal.h>
+
+#include <cstdint>
+#include <optional>
+#include <string>
+#include <utility>
+
+namespace Ganv {
+class Module;
+} // namespace Ganv
+
+namespace patchage {
+
+/// A port on a CanvasModule
+class CanvasPort : public Ganv::Port
+{
+public:
+ CanvasPort(Ganv::Module& module,
+ PortType type,
+ PortID id,
+ const std::string& name,
+ const std::string& human_name,
+ bool is_input,
+ uint32_t color,
+ bool show_human_name,
+ std::optional<int> order = std::optional<int>())
+ : Port(module,
+ (show_human_name && !human_name.empty()) ? human_name : name,
+ is_input,
+ color)
+ , _type(type)
+ , _id(std::move(id))
+ , _name(name)
+ , _human_name(human_name)
+ , _order(order)
+ {
+ signal_event().connect(sigc::mem_fun(this, &CanvasPort::on_event));
+ }
+
+ CanvasPort(const CanvasPort&) = delete;
+ CanvasPort& operator=(const CanvasPort&) = delete;
+
+ CanvasPort(CanvasPort&&) = delete;
+ CanvasPort& operator=(CanvasPort&&) = delete;
+
+ ~CanvasPort() override = default;
+
+ void show_human_name(bool human)
+ {
+ if (human && !_human_name.empty()) {
+ set_label(_human_name.c_str());
+ } else {
+ set_label(_name.c_str());
+ }
+ }
+
+ bool on_event(GdkEvent* ev) override
+ {
+ if (ev->type != GDK_BUTTON_PRESS || ev->button.button != 3) {
+ return false;
+ }
+
+ Gtk::Menu* menu = Gtk::manage(new Gtk::Menu());
+ menu->items().push_back(Gtk::Menu_Helpers::MenuElem(
+ T("Disconnect"), sigc::mem_fun(this, &Port::disconnect)));
+
+ menu->popup(ev->button.button, ev->button.time);
+ return true;
+ }
+
+ PortType type() const { return _type; }
+ PortID id() const { return _id; }
+ const std::string& name() const { return _name; }
+ const std::string& human_name() const { return _human_name; }
+ const std::optional<int>& order() const { return _order; }
+
+private:
+ PortType _type;
+ PortID _id;
+ std::string _name;
+ std::string _human_name;
+ std::optional<int> _order;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_CANVASPORT_HPP
diff --git a/src/ClientID.hpp b/src/ClientID.hpp
new file mode 100644
index 0000000..d73e45e
--- /dev/null
+++ b/src/ClientID.hpp
@@ -0,0 +1,123 @@
+// Copyright 2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CLIENTID_HPP
+#define PATCHAGE_CLIENTID_HPP
+
+#include "ClientType.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+#include <fmt/ostream.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <cassert>
+#include <cstdint>
+#include <ostream>
+#include <string>
+#include <utility>
+
+namespace patchage {
+
+/// An ID for some client (program) that has ports
+struct ClientID {
+ using Type = ClientType;
+
+ ClientID(const ClientID& copy) = default;
+ ClientID& operator=(const ClientID& copy) = default;
+
+ ClientID(ClientID&& id) = default;
+ ClientID& operator=(ClientID&& id) = default;
+
+ ~ClientID() = default;
+
+ /// Return an ID for a JACK client by name
+ static ClientID jack(std::string name)
+ {
+ return ClientID{Type::jack, std::move(name)};
+ }
+
+ /// Return an ID for an ALSA Sequencer client by ID
+ static ClientID alsa(const uint8_t id) { return ClientID{Type::alsa, id}; }
+
+ Type type() const { return _type; }
+ const std::string& jack_name() const { return _jack_name; }
+ uint8_t alsa_id() const { return _alsa_id; }
+
+private:
+ ClientID(const Type type, std::string jack_name)
+ : _type{type}
+ , _jack_name{std::move(jack_name)}
+ {
+ assert(_type == Type::jack);
+ }
+
+ ClientID(const Type type, const uint8_t alsa_id)
+ : _type{type}
+ , _alsa_id{alsa_id}
+ {
+ assert(_type == Type::alsa);
+ }
+
+ Type _type; ///< Determines which field is active
+ std::string _jack_name{}; ///< Client name for Type::jack
+ uint8_t _alsa_id{}; ///< Client ID for Type::alsa
+};
+
+inline std::ostream&
+operator<<(std::ostream& os, const ClientID& id)
+{
+ switch (id.type()) {
+ case ClientID::Type::jack:
+ return os << "jack:" << id.jack_name();
+ case ClientID::Type::alsa:
+ return os << "alsa:" << int(id.alsa_id());
+ }
+
+ assert(false);
+ return os;
+}
+
+inline bool
+operator==(const ClientID& lhs, const ClientID& rhs)
+{
+ if (lhs.type() != rhs.type()) {
+ return false;
+ }
+
+ switch (lhs.type()) {
+ case ClientID::Type::jack:
+ return lhs.jack_name() == rhs.jack_name();
+ case ClientID::Type::alsa:
+ return lhs.alsa_id() == rhs.alsa_id();
+ }
+
+ assert(false);
+ return false;
+}
+
+inline bool
+operator<(const ClientID& lhs, const ClientID& rhs)
+{
+ if (lhs.type() != rhs.type()) {
+ return lhs.type() < rhs.type();
+ }
+
+ switch (lhs.type()) {
+ case ClientID::Type::jack:
+ return lhs.jack_name() < rhs.jack_name();
+ case ClientID::Type::alsa:
+ return lhs.alsa_id() < rhs.alsa_id();
+ }
+
+ assert(false);
+ return false;
+}
+
+} // namespace patchage
+
+template<>
+struct fmt::formatter<patchage::ClientID> : fmt::ostream_formatter {};
+
+#endif // PATCHAGE_CLIENTID_HPP
diff --git a/src/ClientInfo.hpp b/src/ClientInfo.hpp
new file mode 100644
index 0000000..2dd755e
--- /dev/null
+++ b/src/ClientInfo.hpp
@@ -0,0 +1,18 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CLIENTINFO_HPP
+#define PATCHAGE_CLIENTINFO_HPP
+
+#include <string>
+
+namespace patchage {
+
+/// Extra information about a client (program) not expressed in its ID
+struct ClientInfo {
+ std::string label; ///< Human-friendly label
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_CLIENTINFO_HPP
diff --git a/src/ClientType.hpp b/src/ClientType.hpp
new file mode 100644
index 0000000..7a3f87a
--- /dev/null
+++ b/src/ClientType.hpp
@@ -0,0 +1,42 @@
+// Copyright 2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_CLIENTTYPE_HPP
+#define PATCHAGE_CLIENTTYPE_HPP
+
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+#include <fmt/ostream.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <ostream>
+
+namespace patchage {
+
+/// A type of client (program) with supported ports
+enum class ClientType {
+ jack,
+ alsa,
+};
+
+inline std::ostream&
+operator<<(std::ostream& os, const ClientType type)
+{
+ switch (type) {
+ case ClientType::jack:
+ return os << "JACK";
+ case ClientType::alsa:
+ return os << "ALSA";
+ }
+
+ return os;
+}
+
+} // namespace patchage
+
+template<>
+struct fmt::formatter<patchage::ClientType> : fmt::ostream_formatter {};
+
+#endif // PATCHAGE_CLIENTTYPE_HPP
diff --git a/src/Configuration.cpp b/src/Configuration.cpp
index d9537c0..9bb2ac6 100644
--- a/src/Configuration.cpp
+++ b/src/Configuration.cpp
@@ -1,333 +1,374 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
-#include <ctype.h>
-#include <stdlib.h>
+#include "Configuration.hpp"
+#include "Coord.hpp"
+#include "PortType.hpp"
+#include "Setting.hpp"
+#include "SignalDirection.hpp"
+#include "patchage_config.h"
+
+#include <cctype>
+#include <cstdlib>
#include <fstream>
-#include <ios>
#include <iostream>
#include <limits>
-#include <stdexcept>
+#include <utility>
#include <vector>
-#include "Configuration.hpp"
-#include "Patchage.hpp"
-
-static const char* port_type_names[N_PORT_TYPES] = {
- "JACK_AUDIO",
- "JACK_MIDI",
- "ALSA_MIDI",
- "JACK_OSC",
- "JACK_CV"
-};
-
-Configuration::Configuration()
- : _window_location(0, 0)
- , _window_size(640, 480)
- , _zoom(1.0)
- , _font_size(12.0)
- , _messages_height(0)
- , _show_toolbar(true)
- , _show_messages(false)
- , _sort_ports(true)
+// IWYU pragma: no_include <algorithm>
+
+namespace patchage {
+namespace {
+
+/// Return a vector of filenames in descending order by preference
+std::vector<std::string>
+get_filenames()
+{
+ std::vector<std::string> filenames;
+ const std::string prefix;
+
+ const char* xdg_config_home = getenv("XDG_CONFIG_HOME");
+ const char* home = getenv("HOME");
+
+ // XDG spec
+ if (xdg_config_home) {
+ filenames.push_back(std::string(xdg_config_home) + "/patchagerc");
+ } else if (home) {
+ filenames.push_back(std::string(home) + "/.config/patchagerc");
+ }
+
+ // Old location
+ if (home) {
+ filenames.push_back(std::string(home) + "/.patchagerc");
+ }
+
+ // Current directory (bundle or last-ditch effort)
+ filenames.emplace_back("patchagerc");
+
+ return filenames;
+}
+
+} // namespace
+
+static const char* const port_type_names[Configuration::n_port_types] =
+ {"JACK_AUDIO", "JACK_MIDI", "ALSA_MIDI", "JACK_OSC", "JACK_CV"};
+
+Configuration::Configuration(std::function<void(const Setting&)> on_change)
+ : _on_change(std::move(on_change))
{
-#ifdef PATCHAGE_USE_LIGHT_THEME
- _port_colors[JACK_AUDIO] = _default_port_colors[JACK_AUDIO] = 0xA4BC8CFF;
- _port_colors[JACK_MIDI] = _default_port_colors[JACK_MIDI] = 0xC89595FF;
- _port_colors[ALSA_MIDI] = _default_port_colors[ALSA_MIDI] = 0x8F7198FF;
- _port_colors[JACK_OSC] = _default_port_colors[JACK_OSC] = 0x7E8EAAFF;
- _port_colors[JACK_CV] = _default_port_colors[JACK_CV] = 0x83AFABFF;
+ std::get<setting::FontSize>(_settings).value = 12.0f;
+ std::get<setting::WindowLocation>(_settings).value = Coord{0.0, 0.0};
+ std::get<setting::WindowSize>(_settings).value = Coord{960.0, 540.0};
+ std::get<setting::Zoom>(_settings).value = 1.0f;
+
+#if PATCHAGE_USE_LIGHT_THEME
+ _port_colors[static_cast<unsigned>(PortType::jack_audio)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_audio)] =
+ 0xA4BC8CFF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_midi)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_midi)] =
+ 0xC89595FF;
+
+ _port_colors[static_cast<unsigned>(PortType::alsa_midi)] =
+ _default_port_colors[static_cast<unsigned>(PortType::alsa_midi)] =
+ 0x8F7198FF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_osc)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_osc)] =
+ 0x7E8EAAFF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_cv)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_cv)] = 0x83AFABFF;
#else
- _port_colors[JACK_AUDIO] = _default_port_colors[JACK_AUDIO] = 0x3E5E00FF;
- _port_colors[JACK_MIDI] = _default_port_colors[JACK_MIDI] = 0x650300FF;
- _port_colors[ALSA_MIDI] = _default_port_colors[ALSA_MIDI] = 0x2D0043FF;
- _port_colors[JACK_OSC] = _default_port_colors[JACK_OSC] = 0x4100FEFF;
- _port_colors[JACK_CV] = _default_port_colors[JACK_CV] = 0x005E4EFF;
+ _port_colors[static_cast<unsigned>(PortType::jack_audio)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_audio)] =
+ 0x3E5E00FF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_midi)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_midi)] =
+ 0x650300FF;
+
+ _port_colors[static_cast<unsigned>(PortType::alsa_midi)] =
+ _default_port_colors[static_cast<unsigned>(PortType::alsa_midi)] =
+ 0x2D0043FF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_osc)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_osc)] =
+ 0x4100FEFF;
+
+ _port_colors[static_cast<unsigned>(PortType::jack_cv)] =
+ _default_port_colors[static_cast<unsigned>(PortType::jack_cv)] = 0x005E4EFF;
#endif
}
bool
-Configuration::get_module_location(const std::string& name, ModuleType type, Coord& loc)
+Configuration::get_module_location(const std::string& name,
+ SignalDirection type,
+ Coord& loc) const
{
- std::map<std::string, ModuleSettings>::const_iterator i = _module_settings.find(name);
- if (i == _module_settings.end()) {
- return false;
- }
-
- const ModuleSettings& settings = (*i).second;
- if (type == Input && settings.input_location) {
- loc = *settings.input_location;
- } else if (type == Output && settings.output_location) {
- loc = *settings.output_location;
- } else if (type == InputOutput && settings.inout_location) {
- loc = *settings.inout_location;
- } else {
- return false;
- }
-
- return true;
+ auto i = _module_settings.find(name);
+ if (i == _module_settings.end()) {
+ return false;
+ }
+
+ const ModuleSettings& settings = (*i).second;
+ if (type == SignalDirection::input && settings.input_location) {
+ loc = *settings.input_location;
+ } else if (type == SignalDirection::output && settings.output_location) {
+ loc = *settings.output_location;
+ } else if (type == SignalDirection::duplex && settings.inout_location) {
+ loc = *settings.inout_location;
+ } else {
+ return false;
+ }
+
+ return true;
}
void
-Configuration::set_module_location(const std::string& name, ModuleType type, Coord loc)
+Configuration::set_module_location(const std::string& name,
+ SignalDirection type,
+ Coord loc)
{
- std::map<std::string, ModuleSettings>::iterator i = _module_settings.find(name);
- if (i == _module_settings.end()) {
- i = _module_settings.insert(
- std::make_pair(name, ModuleSettings(type != InputOutput))).first;
- }
-
- ModuleSettings& settings = (*i).second;
- switch (type) {
- case Input:
- settings.input_location = loc;
- break;
- case Output:
- settings.output_location = loc;
- break;
- case InputOutput:
- settings.inout_location = loc;
- break;
- default:
- break; // shouldn't reach here
- }
+ if (name.empty()) {
+ return;
+ }
+
+ auto i = _module_settings.find(name);
+ if (i == _module_settings.end()) {
+ i = _module_settings
+ .insert(std::make_pair(
+ name, ModuleSettings(type != SignalDirection::duplex)))
+ .first;
+ }
+
+ ModuleSettings& settings = (*i).second;
+ switch (type) {
+ case SignalDirection::input:
+ settings.input_location = loc;
+ break;
+ case SignalDirection::output:
+ settings.output_location = loc;
+ break;
+ case SignalDirection::duplex:
+ settings.inout_location = loc;
+ break;
+ }
}
/** Returns whether or not this module should be split.
*
- * If nothing is known about the given module, `default_val` is returned (this is
- * to allow driver's to request terminal ports get split by default).
+ * If nothing is known about the given module, `default_val` is returned (this
 + * is to allow drivers to request that terminal ports be split by default).
*/
bool
Configuration::get_module_split(const std::string& name, bool default_val) const
{
- std::map<std::string, ModuleSettings>::const_iterator i = _module_settings.find(name);
- if (i == _module_settings.end()) {
- return default_val;
- }
+ auto i = _module_settings.find(name);
+ if (i == _module_settings.end()) {
+ return default_val;
+ }
- return (*i).second.split;
+ return (*i).second.split;
}
void
Configuration::set_module_split(const std::string& name, bool split)
{
- _module_settings[name].split = split;
-}
-
-/** Return a vector of filenames in descending order by preference. */
-static std::vector<std::string>
-get_filenames()
-{
- std::vector<std::string> filenames;
- std::string prefix;
-
- const char* xdg_config_home = getenv("XDG_CONFIG_HOME");
- const char* home = getenv("HOME");
-
- // XDG spec
- if (xdg_config_home) {
- filenames.push_back(std::string(xdg_config_home) + "/patchagerc");
- } else if (home) {
- filenames.push_back(std::string(home) + "/.config/patchagerc");
- }
-
- // Old location
- if (home) {
- filenames.push_back(std::string(home) + "/.patchagerc");
- }
-
- // Current directory (bundle or last-ditch effort)
- filenames.push_back("patchagerc");
-
- return filenames;
+ if (!name.empty()) {
+ _module_settings[name].split = split;
+ }
}
void
Configuration::load()
{
- // Try to find a readable configuration file
- const std::vector<std::string> filenames = get_filenames();
- std::ifstream file;
- for (size_t i = 0; i < filenames.size(); ++i) {
- file.open(filenames[i].c_str(), std::ios::in);
- if (file.good()) {
- std::cout << "Loading configuration from " << filenames[i] << std::endl;
- break;
- }
- }
-
- if (!file.good()) {
- std::cout << "No configuration file present" << std::endl;
- return;
- }
-
- _module_settings.clear();
- while (file.good()) {
- std::string key;
- if (file.peek() == '\"') {
- /* Old versions omitted the module_position key and listed
- positions starting with module name in quotes. */
- key = "module_position";
- } else {
- file >> key;
- }
-
- if (key == "window_location") {
- file >> _window_location.x >> _window_location.y;
- } else if (key == "window_size") {
- file >> _window_size.x >> _window_size.y;
- } else if (key == "zoom_level") {
- file >> _zoom;
- } else if (key == "font_size") {
- file >> _font_size;
- } else if (key == "show_toolbar") {
- file >> _show_toolbar;
- } else if (key == "sprung_layout") {
- file >> _sprung_layout;
- } else if (key == "show_messages") {
- file >> _show_messages;
- } else if (key == "sort_ports") {
- file >> _sort_ports;
- } else if (key == "messages_height") {
- file >> _messages_height;
- } else if (key == "port_color") {
- std::string type_name;
- uint32_t rgba;
- file >> type_name;
- file.ignore(1, '#');
- file >> std::hex >> std::uppercase;
- file >> rgba;
- file >> std::dec >> std::nouppercase;
-
- bool found = false;
- for (int i = 0; i < N_PORT_TYPES; ++i) {
- if (type_name == port_type_names[i]) {
- _port_colors[i] = rgba;
- found = true;
- break;
- }
- }
- if (!found) {
- std::cerr << "error: color for unknown port type `"
- << type_name << "'" << std::endl;
- }
- } else if (key == "module_position" || key[0] == '\"') {
- Coord loc;
- std::string name;
- file.ignore(1, '\"');
- std::getline(file, name, '\"');
-
- ModuleType type;
- std::string type_str;
- file >> type_str;
- if (type_str == "input") {
- type = Input;
- } else if (type_str == "output") {
- type = Output;
- } else if (type_str == "inputoutput") {
- type = InputOutput;
- } else {
- std::cerr << "error: bad position type `" << type_str
- << "' for module `" << name << "'" << std::endl;
- file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
- continue;
- }
-
- file >> loc.x;
- file >> loc.y;
-
- set_module_location(name, type, loc);
- } else {
- std::cerr << "warning: unknown configuration key `" << key << "'"
- << std::endl;
- file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
- }
-
- // Skip trailing whitespace, including newline
- while (file.good() && isspace(file.peek())) {
- file.ignore(1);
- }
- }
-
- file.close();
+ // Try to find a readable configuration file
+ const std::vector<std::string> filenames = get_filenames();
+ std::ifstream file;
+ for (const auto& filename : filenames) {
+ file.open(filename.c_str(), std::ios::in);
+ if (file.good()) {
+ std::cout << "Loading configuration from " << filename << std::endl;
+ break;
+ }
+ }
+
+ if (!file.good()) {
+ std::cout << "No configuration file present" << std::endl;
+ return;
+ }
+
+ _module_settings.clear();
+ while (file.good()) {
+ std::string key;
+ if (file.peek() == '\"') {
+ /* Old versions omitted the module_position key and listed
+ positions starting with module name in quotes. */
+ key = "module_position";
+ } else {
+ file >> key;
+ }
+
+ if (key == "window_location") {
+ auto& setting = std::get<setting::WindowLocation>(_settings);
+ file >> setting.value.x >> setting.value.y;
+ } else if (key == "window_size") {
+ auto& setting = std::get<setting::WindowSize>(_settings);
+ file >> setting.value.x >> setting.value.y;
+ } else if (key == "zoom_level") {
+ file >> std::get<setting::Zoom>(_settings).value;
+ } else if (key == "font_size") {
+ file >> std::get<setting::FontSize>(_settings).value;
+ } else if (key == "show_toolbar") {
+ file >> std::get<setting::ToolbarVisible>(_settings).value;
+ } else if (key == "sprung_layout") {
+ file >> std::get<setting::SprungLayout>(_settings).value;
+ } else if (key == "show_messages") {
+ file >> std::get<setting::MessagesVisible>(_settings).value;
+ } else if (key == "sort_ports") {
+ file >> std::get<setting::SortedPorts>(_settings).value;
+ } else if (key == "messages_height") {
+ file >> std::get<setting::MessagesHeight>(_settings).value;
+ } else if (key == "human_names") {
+ file >> std::get<setting::HumanNames>(_settings).value;
+ } else if (key == "port_color") {
+ std::string type_name;
+ uint32_t rgba = 0u;
+ file >> type_name;
+ file.ignore(1, '#');
+ file >> std::hex >> std::uppercase;
+ file >> rgba;
+ file >> std::dec >> std::nouppercase;
+
+ bool found = false;
+ for (unsigned i = 0U; i < n_port_types; ++i) {
+ if (type_name == port_type_names[i]) {
+ _port_colors[i] = rgba;
+ found = true;
+ break;
+ }
+ }
+ if (!found) {
+ std::cerr << "error: color for unknown port type `" << type_name << "'"
+ << std::endl;
+ }
+ } else if (key == "module_position") {
+ Coord loc;
+ std::string name;
+ file.ignore(std::numeric_limits<std::streamsize>::max(), '\"');
+ std::getline(file, name, '\"');
+
+ SignalDirection type = SignalDirection::input;
+ std::string type_str;
+ file >> type_str;
+ if (type_str == "input") {
+ type = SignalDirection::input;
+ } else if (type_str == "output") {
+ type = SignalDirection::output;
+ } else if (type_str == "inputoutput") {
+ type = SignalDirection::duplex;
+ } else {
+ std::cerr << "error: bad position type `" << type_str
+ << "' for module `" << name << "'" << std::endl;
+ file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
+ continue;
+ }
+
+ file >> loc.x;
+ file >> loc.y;
+
+ set_module_location(name, type, loc);
+ } else {
+ std::cerr << "warning: unknown configuration key `" << key << "'"
+ << std::endl;
+ file.ignore(std::numeric_limits<std::streamsize>::max(), '\n');
+ }
+
+ // Skip trailing whitespace, including newline
+ while (file.good() && isspace(file.peek())) {
+ file.ignore(1);
+ }
+ }
+
+ file.close();
}
-static inline void
+inline void
write_module_position(std::ofstream& os,
const std::string& name,
const char* type,
const Coord& loc)
{
- os << "module_position \"" << name << "\""
- << " " << type << " " << loc.x << " " << loc.y << std::endl;
+ os << "module_position \"" << name << "\""
+ << " " << type << " " << loc.x << " " << loc.y << std::endl;
}
void
Configuration::save()
{
- // Try to find a writable configuration file
- const std::vector<std::string> filenames = get_filenames();
- std::ofstream file;
- for (size_t i = 0; i < filenames.size(); ++i) {
- file.open(filenames[i].c_str(), std::ios::out);
- if (file.good()) {
- std::cout << "Writing configuration to " << filenames[i] << std::endl;
- break;
- }
- }
-
- if (!file.good()) {
- std::cout << "Unable to open configuration file to write" << std::endl;
- return;
- }
-
- file << "window_location " << _window_location.x << " " << _window_location.y << std::endl;
- file << "window_size " << _window_size.x << " " << _window_size.y << std::endl;
- file << "zoom_level " << _zoom << std::endl;
- file << "font_size " << _font_size << std::endl;
- file << "show_toolbar " << _show_toolbar << std::endl;
- file << "sprung_layout " << _sprung_layout << std::endl;
- file << "show_messages " << _show_messages << std::endl;
- file << "sort_ports " << _sort_ports << std::endl;
- file << "messages_height " << _messages_height << std::endl;
-
- file << std::hex << std::uppercase;
- for (int i = 0; i < N_PORT_TYPES; ++i) {
- if (_port_colors[i] != _default_port_colors[i]) {
- file << "port_color " << port_type_names[i] << " " << _port_colors[i] << std::endl;
- }
- }
- file << std::dec << std::nouppercase;
-
- for (std::map<std::string, ModuleSettings>::iterator i = _module_settings.begin();
- i != _module_settings.end(); ++i) {
- const ModuleSettings& settings = (*i).second;
- const std::string& name = (*i).first;
-
- if (settings.split) {
- if (settings.input_location && settings.output_location) {
- write_module_position(file, name, "input", *settings.input_location);
- write_module_position(file, name, "output", *settings.output_location);
- }
- } else if (settings.inout_location) {
- write_module_position(file, name, "inputoutput", *settings.inout_location);
- }
- }
-
- file.close();
+ // Try to find a writable configuration file
+ const std::vector<std::string> filenames = get_filenames();
+ std::ofstream file;
+ for (const std::string& filename : filenames) {
+ file.open(filename.c_str(), std::ios::out);
+ if (file.good()) {
+ std::cout << "Writing configuration to " << filename << std::endl;
+ break;
+ }
+ }
+
+ if (!file.good()) {
+ std::cout << "Unable to open configuration file to write" << std::endl;
+ return;
+ }
+
+ file << "window_location " << get<setting::WindowLocation>().x << " "
+ << get<setting::WindowLocation>().y << std::endl;
+
+ file << "window_size " << get<setting::WindowSize>().x << " "
+ << get<setting::WindowSize>().y << std::endl;
+
+ file << "zoom_level " << get<setting::Zoom>() << std::endl;
+ file << "font_size " << get<setting::FontSize>() << std::endl;
+ file << "show_toolbar " << get<setting::ToolbarVisible>() << std::endl;
+ file << "sprung_layout " << get<setting::SprungLayout>() << std::endl;
+ file << "show_messages " << get<setting::MessagesVisible>() << std::endl;
+ file << "sort_ports " << get<setting::SortedPorts>() << std::endl;
+ file << "messages_height " << get<setting::MessagesHeight>() << std::endl;
+ file << "human_names " << get<setting::HumanNames>() << std::endl;
+
+ file << std::hex << std::uppercase;
+ for (unsigned i = 0U; i < n_port_types; ++i) {
+ if (_port_colors[i] != _default_port_colors[i]) {
+ file << "port_color " << port_type_names[i] << " " << _port_colors[i]
+ << std::endl;
+ }
+ }
+ file << std::dec << std::nouppercase;
+
+ for (const auto& s : _module_settings) {
+ const std::string& name = s.first;
+ const ModuleSettings& settings = s.second;
+
+ if (settings.split) {
+ if (settings.input_location) {
+ write_module_position(file, name, "input", *settings.input_location);
+ }
+
+ if (settings.output_location) {
+ write_module_position(file, name, "output", *settings.output_location);
+ }
+ } else if (settings.inout_location) {
+ write_module_position(
+ file, name, "inputoutput", *settings.inout_location);
+ }
+ }
+
+ file.close();
}
+
+} // namespace patchage
diff --git a/src/Configuration.hpp b/src/Configuration.hpp
index 127a4a8..a92d4af 100644
--- a/src/Configuration.hpp
+++ b/src/Configuration.hpp
@@ -1,109 +1,147 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_CONFIGURATION_HPP
#define PATCHAGE_CONFIGURATION_HPP
-#include <stdint.h>
+#include "Coord.hpp"
+#include "Setting.hpp"
-#include <string>
-#include <list>
+#include <cstdint>
+#include <functional>
#include <map>
+#include <optional>
+#include <string>
+#include <tuple>
-#include <boost/optional.hpp>
-
-enum ModuleType { Input, Output, InputOutput };
-
-enum PortType { JACK_AUDIO, JACK_MIDI, ALSA_MIDI, JACK_OSC, JACK_CV };
+namespace patchage {
-#define N_PORT_TYPES 5
-
-struct Coord {
- Coord(double x_=0, double y_=0) : x(x_), y(y_) {}
- double x;
- double y;
-};
+enum class SignalDirection;
+enum class PortType;
class Configuration
{
public:
- Configuration();
-
- void load();
- void save();
-
- bool get_module_location(const std::string& name, ModuleType type, Coord& loc);
- void set_module_location(const std::string& name, ModuleType type, Coord loc);
-
- void set_module_split(const std::string& name, bool split);
- bool get_module_split(const std::string& name, bool default_val) const;
-
- float get_zoom() const { return _zoom; }
- void set_zoom(float zoom) { _zoom = zoom; }
- float get_font_size() const { return _font_size; }
- void set_font_size(float font_size) { _font_size = font_size; }
-
- float get_show_toolbar() const { return _show_toolbar; }
- void set_show_toolbar(float show_toolbar) { _show_toolbar = show_toolbar; }
-
- float get_sprung_layout() const { return _sprung_layout; }
- void set_sprung_layout(float sprung_layout) { _sprung_layout = sprung_layout; }
-
- bool get_show_messages() const { return _show_messages; }
- void set_show_messages(bool show_messages) { _show_messages = show_messages; }
-
- bool get_sort_ports() const { return _sort_ports; }
- void set_sort_ports(bool sort_ports) { _sort_ports = sort_ports; }
-
- int get_messages_height() const { return _messages_height; }
- void set_messages_height(int height) { _messages_height = height; }
-
- uint32_t get_port_color(PortType type) const { return _port_colors[type]; }
- void set_port_color(PortType type, uint32_t rgba) {
- _port_colors[type] = rgba;
- }
-
- Coord get_window_location() { return _window_location; }
- void set_window_location(Coord loc) { _window_location = loc; }
- Coord get_window_size() { return _window_size; }
- void set_window_size(Coord size) { _window_size = size; }
+ static constexpr unsigned n_port_types = 5U;
+
+ explicit Configuration(std::function<void(const Setting&)> on_change);
+
+ void load();
+ void save();
+
+ bool get_module_location(const std::string& name,
+ SignalDirection type,
+ Coord& loc) const;
+
+ void set_module_location(const std::string& name,
+ SignalDirection type,
+ Coord loc);
+
+ void set_module_split(const std::string& name, bool split);
+ bool get_module_split(const std::string& name, bool default_val) const;
+
+ uint32_t get_port_color(PortType type) const
+ {
+ return _port_colors[static_cast<unsigned>(type)];
+ }
+
+ void set_port_color(PortType type, uint32_t rgba)
+ {
+ _port_colors[static_cast<unsigned>(type)] = rgba;
+ _on_change(setting::PortColor{type, rgba});
+ }
+
+ // Set a global configuration setting
+ template<class S>
+ void set(decltype(S::value) value)
+ {
+ S& setting = std::get<S>(_settings);
+
+ if (setting.value != value) {
+ setting.value = std::move(value);
+ _on_change(setting);
+ }
+ }
+
+ // Set a global configuration setting
+ template<class S>
+ void set_setting(S new_setting)
+ {
+ set<S>(new_setting.value);
+ }
+
+ // Set a global port color setting
+ void set_setting(setting::PortColor new_setting)
+ {
+ auto& color = _port_colors[static_cast<unsigned>(new_setting.type)];
+
+ if (color != new_setting.color) {
+ set_port_color(new_setting.type, new_setting.color);
+ }
+ }
+
+ // Get a global configuration setting
+ template<class S>
+ decltype(S::value) get() const
+ {
+ return std::get<S>(_settings).value;
+ }
+
+ /// Call `visitor` once with each configuration setting
+ template<class Visitor>
+ void each(Visitor visitor)
+ {
+ visitor(std::get<setting::FontSize>(_settings));
+ visitor(std::get<setting::HumanNames>(_settings));
+ visitor(std::get<setting::MessagesHeight>(_settings));
+ visitor(std::get<setting::MessagesVisible>(_settings));
+ visitor(std::get<setting::SortedPorts>(_settings));
+ visitor(std::get<setting::SprungLayout>(_settings));
+ visitor(std::get<setting::ToolbarVisible>(_settings));
+ visitor(std::get<setting::WindowLocation>(_settings));
+ visitor(std::get<setting::WindowSize>(_settings));
+ visitor(std::get<setting::Zoom>(_settings));
+
+ for (auto i = 0u; i < n_port_types; ++i) {
+ visitor(setting::PortColor{static_cast<PortType>(i), _port_colors[i]});
+ }
+ }
private:
- struct ModuleSettings {
- ModuleSettings(bool s=false) : split(s) {}
- boost::optional<Coord> input_location;
- boost::optional<Coord> output_location;
- boost::optional<Coord> inout_location;
- bool split;
- };
-
- std::map<std::string, ModuleSettings> _module_settings;
-
- uint32_t _default_port_colors[N_PORT_TYPES];
- uint32_t _port_colors[N_PORT_TYPES];
-
- Coord _window_location;
- Coord _window_size;
- float _zoom;
- float _font_size;
- int _messages_height;
- bool _show_toolbar;
- bool _sprung_layout;
- bool _show_messages;
- bool _sort_ports;
+ struct ModuleSettings {
+ explicit ModuleSettings(bool s = false)
+ : split(s)
+ {}
+
+ std::optional<Coord> input_location;
+ std::optional<Coord> output_location;
+ std::optional<Coord> inout_location;
+ bool split;
+ };
+
+ std::map<std::string, ModuleSettings> _module_settings;
+
+ uint32_t _default_port_colors[n_port_types] = {};
+ uint32_t _port_colors[n_port_types] = {};
+
+ using Settings = std::tuple<setting::AlsaAttached,
+ setting::FontSize,
+ setting::HumanNames,
+ setting::JackAttached,
+ setting::MessagesHeight,
+ setting::MessagesVisible,
+ setting::SortedPorts,
+ setting::SprungLayout,
+ setting::ToolbarVisible,
+ setting::WindowLocation,
+ setting::WindowSize,
+ setting::Zoom>;
+
+ Settings _settings;
+
+ std::function<void(const Setting&)> _on_change;
};
+} // namespace patchage
+
#endif // PATCHAGE_CONFIGURATION_HPP
diff --git a/src/Coord.hpp b/src/Coord.hpp
new file mode 100644
index 0000000..b24d7dc
--- /dev/null
+++ b/src/Coord.hpp
@@ -0,0 +1,28 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_COORD_HPP
+#define PATCHAGE_COORD_HPP
+
+namespace patchage {
+
+struct Coord {
+ double x{0.0};
+ double y{0.0};
+};
+
+inline bool
+operator==(const Coord& lhs, const Coord& rhs)
+{
+ return lhs.x == rhs.x && lhs.y == rhs.y;
+}
+
+inline bool
+operator!=(const Coord& lhs, const Coord& rhs)
+{
+ return !(lhs == rhs);
+}
+
+} // namespace patchage
+
+#endif // PATCHAGE_COORD_HPP
diff --git a/src/Driver.hpp b/src/Driver.hpp
index 3837382..4cb890b 100644
--- a/src/Driver.hpp
+++ b/src/Driver.hpp
@@ -1,55 +1,58 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_DRIVER_HPP
#define PATCHAGE_DRIVER_HPP
-#include <boost/shared_ptr.hpp>
-#include <sigc++/sigc++.h>
+#include "Event.hpp"
-#include "PatchageEvent.hpp"
+#include <functional>
+#include <utility>
-class PatchagePort;
-class PatchageCanvas;
+namespace patchage {
-/** Trival driver base class */
-class Driver {
+struct PortID;
+
+/// Base class for drivers that handle system clients and ports
+class Driver
+{
public:
- virtual ~Driver() {}
+ using EventSink = std::function<void(const Event&)>;
+
+ explicit Driver(EventSink emit_event)
+ : _emit_event{std::move(emit_event)}
+ {}
+
+ Driver(const Driver&) = delete;
+ Driver& operator=(const Driver&) = delete;
- virtual void process_events(Patchage* app) = 0;
+ Driver(Driver&&) = delete;
+ Driver& operator=(Driver&&) = delete;
- virtual void attach(bool launch_daemon) = 0;
- virtual void detach() = 0;
- virtual bool is_attached() const = 0;
+ virtual ~Driver() = default;
- virtual void refresh() = 0;
- virtual void destroy_all() {}
+ /// Connect to the underlying system API
+ virtual void attach(bool launch_daemon) = 0;
- virtual PatchagePort* create_port_view(Patchage* patchage,
- const PortID& id) = 0;
+ /// Disconnect from the underlying system API
+ virtual void detach() = 0;
- virtual bool connect(PatchagePort* src_port,
- PatchagePort* dst_port) = 0;
+ /// Return true iff the driver is active and connected to the system
+ virtual bool is_attached() const = 0;
- virtual bool disconnect(PatchagePort* src_port,
- PatchagePort* dst_port) = 0;
+ /// Send events to `sink` that describe the complete current system state
+ virtual void refresh(const EventSink& sink) = 0;
- sigc::signal<void> signal_attached;
- sigc::signal<void> signal_detached;
+ /// Make a connection between ports
+ virtual bool connect(const PortID& tail_id, const PortID& head_id) = 0;
+
+ /// Remove a connection between ports
+ virtual bool disconnect(const PortID& tail_id, const PortID& head_id) = 0;
+
+protected:
+ EventSink _emit_event; ///< Sink for emitting "live" events
};
+} // namespace patchage
+
#endif // PATCHAGE_DRIVER_HPP
diff --git a/src/Drivers.cpp b/src/Drivers.cpp
new file mode 100644
index 0000000..6de2459
--- /dev/null
+++ b/src/Drivers.cpp
@@ -0,0 +1,56 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "Drivers.hpp"
+
+#include "AudioDriver.hpp"
+#include "ClientType.hpp"
+#include "Driver.hpp"
+#include "Event.hpp"
+#include "make_alsa_driver.hpp"
+#include "make_jack_driver.hpp"
+
+#include <functional>
+#include <utility>
+
+namespace patchage {
+
+Drivers::Drivers(ILog& log, Driver::EventSink emit_event)
+ : _log{log}
+ , _emit_event{std::move(emit_event)}
+ , _alsa_driver{make_alsa_driver(
+ log,
+ [this](const Event& event) { _emit_event(event); })}
+ , _jack_driver{make_jack_driver(_log, [this](const Event& event) {
+ _emit_event(event);
+ })}
+{}
+
+void
+Drivers::refresh()
+{
+ _emit_event(event::Cleared{});
+
+ if (_alsa_driver) {
+ _alsa_driver->refresh(_emit_event);
+ }
+
+ if (_jack_driver) {
+ _jack_driver->refresh(_emit_event);
+ }
+}
+
+Driver*
+Drivers::driver(const ClientType type)
+{
+ switch (type) {
+ case ClientType::jack:
+ return _jack_driver.get();
+ case ClientType::alsa:
+ return _alsa_driver.get();
+ }
+
+ return nullptr;
+}
+
+} // namespace patchage
diff --git a/src/Drivers.hpp b/src/Drivers.hpp
new file mode 100644
index 0000000..6bb9eb4
--- /dev/null
+++ b/src/Drivers.hpp
@@ -0,0 +1,52 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_DRIVERS_HPP
+#define PATCHAGE_DRIVERS_HPP
+
+#include "AudioDriver.hpp"
+#include "Driver.hpp"
+
+#include <memory>
+
+namespace patchage {
+
+class ILog;
+enum class ClientType;
+
+/// Manager for all drivers
+class Drivers
+{
+public:
+ Drivers(ILog& log, Driver::EventSink emit_event);
+
+ Drivers(const Drivers&) = delete;
+ Drivers& operator=(const Drivers&) = delete;
+
+ Drivers(Drivers&&) = delete;
+ Drivers& operator=(Drivers&&) = delete;
+
+ ~Drivers() = default;
+
+ /// Refresh all drivers and emit results to the event sink
+ void refresh();
+
+ /// Return a pointer to the driver for the given client type (or null)
+ Driver* driver(ClientType type);
+
+ /// Return a pointer to the ALSA driver (or null)
+ const std::unique_ptr<Driver>& alsa() { return _alsa_driver; }
+
+ /// Return a pointer to the JACK driver (or null)
+ const std::unique_ptr<AudioDriver>& jack() { return _jack_driver; }
+
+protected:
+ ILog& _log;
+ Driver::EventSink _emit_event;
+ std::unique_ptr<Driver> _alsa_driver;
+ std::unique_ptr<AudioDriver> _jack_driver;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_DRIVER_HPP
diff --git a/src/Event.hpp b/src/Event.hpp
new file mode 100644
index 0000000..36166e7
--- /dev/null
+++ b/src/Event.hpp
@@ -0,0 +1,71 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_EVENT_HPP
+#define PATCHAGE_EVENT_HPP
+
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "ClientType.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+
+#include <variant>
+
+namespace patchage {
+namespace event {
+
+struct Cleared {};
+
+struct ClientCreated {
+ ClientID id;
+ ClientInfo info;
+};
+
+struct ClientDestroyed {
+ ClientID id;
+};
+
+struct DriverAttached {
+ ClientType type;
+};
+
+struct DriverDetached {
+ ClientType type;
+};
+
+struct PortCreated {
+ PortID id;
+ PortInfo info;
+};
+
+struct PortDestroyed {
+ PortID id;
+};
+
+struct PortsConnected {
+ PortID tail;
+ PortID head;
+};
+
+struct PortsDisconnected {
+ PortID tail;
+ PortID head;
+};
+
+} // namespace event
+
+/// An event from drivers that represents a change to the system
+using Event = std::variant<event::Cleared,
+ event::ClientCreated,
+ event::ClientDestroyed,
+ event::DriverAttached,
+ event::DriverDetached,
+ event::PortCreated,
+ event::PortDestroyed,
+ event::PortsConnected,
+ event::PortsDisconnected>;
+
+} // namespace patchage
+
+#endif // PATCHAGE_EVENT_HPP
diff --git a/src/ILog.hpp b/src/ILog.hpp
new file mode 100644
index 0000000..5e00602
--- /dev/null
+++ b/src/ILog.hpp
@@ -0,0 +1,32 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_ILOG_HPP
+#define PATCHAGE_ILOG_HPP
+
+#include <string>
+
+namespace patchage {
+
+/// Interface for writing log messages
+class ILog
+{
+public:
+ ILog() = default;
+
+ ILog(const ILog&) = default;
+ ILog& operator=(const ILog&) = default;
+
+ ILog(ILog&&) = default;
+ ILog& operator=(ILog&&) = default;
+
+ virtual ~ILog() = default;
+
+ virtual void info(const std::string& msg) = 0;
+ virtual void warning(const std::string& msg) = 0;
+ virtual void error(const std::string& msg) = 0;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_ILOG_HPP
diff --git a/src/JackDbusDriver.cpp b/src/JackDbusDriver.cpp
index 7953051..81aca11 100644
--- a/src/JackDbusDriver.cpp
+++ b/src/JackDbusDriver.cpp
@@ -1,1048 +1,957 @@
-/* This file is part of Patchage.
- * Copyright 2008 Nedko Arnaudov <nedko@arnaudov.name>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2008-2020 David Robillard <d@drobilla.net>
+// Copyright 2008 Nedko Arnaudov <nedko@arnaudov.name>
+// SPDX-License-Identifier: GPL-3.0-or-later
-#include <cassert>
-#include <cstring>
-#include <string>
-#include <set>
-
-#include "patchage_config.h"
+#include "AudioDriver.hpp"
+#include "ClientType.hpp"
+#include "Driver.hpp"
+#include "Event.hpp"
+#include "ILog.hpp"
+#include "PortNames.hpp"
+#include "PortType.hpp"
+#include "SignalDirection.hpp"
+#include "make_jack_driver.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
-#include <glib.h>
-#include <dbus/dbus.h>
-#include <dbus/dbus-glib.h>
#include <dbus/dbus-glib-lowlevel.h>
+#include <dbus/dbus-glib.h>
+#include <dbus/dbus.h>
+#include <glib.h>
-#include <boost/format.hpp>
+#include <cassert>
+#include <cstdint>
+#include <cstring>
+#include <set>
+#include <string>
-#include "Driver.hpp"
-#include "JackDbusDriver.hpp"
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageEvent.hpp"
-#include "PatchageModule.hpp"
-
-#define JACKDBUS_SERVICE "org.jackaudio.service"
-#define JACKDBUS_OBJECT "/org/jackaudio/Controller"
-#define JACKDBUS_IFACE_CONTROL "org.jackaudio.JackControl"
+#define JACKDBUS_SERVICE "org.jackaudio.service"
+#define JACKDBUS_OBJECT "/org/jackaudio/Controller"
+#define JACKDBUS_IFACE_CONTROL "org.jackaudio.JackControl"
#define JACKDBUS_IFACE_PATCHBAY "org.jackaudio.JackPatchbay"
#define JACKDBUS_CALL_DEFAULT_TIMEOUT 1000 // in milliseconds
-#define JACKDBUS_PORT_FLAG_INPUT 0x00000001
-#define JACKDBUS_PORT_FLAG_OUTPUT 0x00000002
-#define JACKDBUS_PORT_FLAG_PHYSICAL 0x00000004
-#define JACKDBUS_PORT_FLAG_CAN_MONITOR 0x00000008
-#define JACKDBUS_PORT_FLAG_TERMINAL 0x00000010
+#define JACKDBUS_PORT_FLAG_INPUT 0x00000001
+#define JACKDBUS_PORT_FLAG_TERMINAL 0x00000010
#define JACKDBUS_PORT_TYPE_AUDIO 0
-#define JACKDBUS_PORT_TYPE_MIDI 1
+#define JACKDBUS_PORT_TYPE_MIDI 1
-//#define USE_FULL_REFRESH
+namespace patchage {
+namespace {
-JackDriver::JackDriver(Patchage* app)
- : _app(app)
- , _dbus_connection(0)
- , _max_dsp_load(0)
- , _server_responding(false)
- , _server_started(false)
- , _graph_version(0)
+/// Driver for JACK audio and midi ports that uses D-Bus
+class JackDriver : public AudioDriver
{
- dbus_error_init(&_dbus_error);
-}
+public:
+ explicit JackDriver(ILog& log, EventSink emit_event);
-JackDriver::~JackDriver()
-{
- if (_dbus_connection) {
- dbus_connection_flush(_dbus_connection);
- }
+ JackDriver(const JackDriver&) = delete;
+ JackDriver& operator=(const JackDriver&) = delete;
- if (dbus_error_is_set(&_dbus_error)) {
- dbus_error_free(&_dbus_error);
- }
-}
+ JackDriver(JackDriver&&) = delete;
+ JackDriver& operator=(JackDriver&&) = delete;
+
+ ~JackDriver() override;
+
+ // Driver interface
+ void attach(bool launch_daemon) override;
+ void detach() override;
+ bool is_attached() const override;
+ void refresh(const EventSink& sink) override;
+ bool connect(const PortID& tail_id, const PortID& head_id) override;
+ bool disconnect(const PortID& tail_id, const PortID& head_id) override;
+
+ // AudioDriver interface
+ uint32_t xruns() override;
+ void reset_xruns() override;
+ uint32_t buffer_size() override;
+ bool set_buffer_size(uint32_t frames) override;
+ uint32_t sample_rate() override;
+
+private:
+ PortType patchage_port_type(dbus_uint32_t dbus_port_type) const;
+
+ PortInfo port_info(const std::string& port_name,
+ dbus_uint32_t port_type,
+ dbus_uint32_t port_flags) const;
+
+ void error_msg(const std::string& msg) const;
+ void info_msg(const std::string& msg) const;
+
+ bool call(bool response_expected,
+ const char* iface,
+ const char* method,
+ DBusMessage** reply_ptr_ptr,
+ int in_type,
+ ...);
+
+ void update_attached();
+
+ bool is_started();
-static bool
-is_jack_port(const PatchagePort* port)
+ void start_server();
+
+ void stop_server();
+
+ static DBusHandlerResult dbus_message_hook(DBusConnection* connection,
+ DBusMessage* message,
+ void* jack_driver);
+
+ void on_jack_appeared();
+
+ void on_jack_disappeared();
+
+ ILog& _log;
+ DBusError _dbus_error;
+ DBusConnection* _dbus_connection;
+
+ mutable bool _server_responding;
+ bool _server_started;
+
+ dbus_uint64_t _graph_version;
+};
+
+JackDriver::JackDriver(ILog& log, EventSink emit_event)
+ : AudioDriver{std::move(emit_event)}
+ , _log(log)
+ , _dbus_error()
+ , _dbus_connection(nullptr)
+ , _server_responding(false)
+ , _server_started(false)
+ , _graph_version(0)
{
- return port->type() == JACK_AUDIO || port->type() == JACK_MIDI;
+ dbus_error_init(&_dbus_error);
}
-/** Destroy all JACK (canvas) ports.
- */
-void
-JackDriver::destroy_all()
+JackDriver::~JackDriver()
{
- _app->canvas()->remove_ports(is_jack_port);
+ if (_dbus_connection) {
+ dbus_connection_flush(_dbus_connection);
+ }
+
+ if (dbus_error_is_set(&_dbus_error)) {
+ dbus_error_free(&_dbus_error);
+ }
}
void
JackDriver::update_attached()
{
- bool was_attached = _server_started;
- _server_started = is_started();
-
- if (!_server_responding) {
- if (was_attached) {
- signal_detached.emit();
- }
- return;
- }
-
- if (_server_started && !was_attached) {
- signal_attached.emit();
- return;
- }
-
- if (!_server_started && was_attached) {
- signal_detached.emit();
- return;
- }
+ bool was_attached = _server_started;
+ _server_started = is_started();
+
+ if (!_server_responding) {
+ if (was_attached) {
+ _emit_event(event::DriverDetached{ClientType::jack});
+ }
+ return;
+ }
+
+ if (_server_started && !was_attached) {
+ _emit_event(event::DriverAttached{ClientType::jack});
+ return;
+ }
+
+ if (!_server_started && was_attached) {
+ _emit_event(event::DriverDetached{ClientType::jack});
+ return;
+ }
}
void
JackDriver::on_jack_appeared()
{
- info_msg("JACK appeared.");
- update_attached();
+ info_msg("Server appeared");
+ update_attached();
}
void
JackDriver::on_jack_disappeared()
{
- info_msg("JACK disappeared.");
+ info_msg("Server disappeared");
- // we are not calling update_attached() here, because it will activate jackdbus
+ // we are not calling update_attached() here, because it will activate
+ // jackdbus
- _server_responding = false;
+ _server_responding = false;
- if (_server_started) {
- signal_detached.emit();
- }
+ if (_server_started) {
+ _emit_event(event::DriverDetached{ClientType::jack});
+ }
- _server_started = false;
+ _server_started = false;
}
-/** Handle signals we have subscribed for in attach(). */
DBusHandlerResult
-JackDriver::dbus_message_hook(DBusConnection* connection,
- DBusMessage* message,
- void* jack_driver)
-{
- const char* client2_name;
- const char* client_name;
- const char* new_owner;
- const char* object_name;
- const char* old_owner;
- const char* port2_name;
- const char* port_name;
- dbus_uint32_t port_flags;
- dbus_uint32_t port_type;
- dbus_uint64_t client2_id;
- dbus_uint64_t client_id;
- dbus_uint64_t connection_id;
- dbus_uint64_t new_graph_version;
- dbus_uint64_t port2_id;
- dbus_uint64_t port_id;
-
- assert(jack_driver);
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- assert(me->_dbus_connection);
-
- if (dbus_message_is_signal(message, DBUS_INTERFACE_DBUS, "NameOwnerChanged")) {
- if (!dbus_message_get_args( message, &me->_dbus_error,
- DBUS_TYPE_STRING, &object_name,
- DBUS_TYPE_STRING, &old_owner,
- DBUS_TYPE_STRING, &new_owner,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "NameOwnerChanged signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (old_owner[0] == '\0') {
- me->on_jack_appeared();
- } else if (new_owner[0] == '\0') {
- me->on_jack_disappeared();
- }
- }
-
-#if defined(USE_FULL_REFRESH)
- if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "GraphChanged")) {
- if (!dbus_message_get_args(message, &me->_dbus_error,
- DBUS_TYPE_UINT64, &new_graph_version,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "GraphChanged signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (!me->_server_started) {
- me->_server_started = true;
- me->signal_attached.emit();
- }
-
- if (new_graph_version > me->_graph_version) {
- me->refresh_internal(false);
- }
+JackDriver::dbus_message_hook(DBusConnection* /*connection*/,
+ DBusMessage* message,
+ void* jack_driver)
+{
+ const char* client2_name = nullptr;
+ const char* client_name = nullptr;
+ const char* new_owner = nullptr;
+ const char* object_name = nullptr;
+ const char* old_owner = nullptr;
+ const char* port2_name = nullptr;
+ const char* port_name = nullptr;
+ dbus_uint32_t port_flags = 0u;
+ dbus_uint32_t port_type = 0u;
+ dbus_uint64_t client2_id = 0u;
+ dbus_uint64_t client_id = 0u;
+ dbus_uint64_t connection_id = 0u;
+ dbus_uint64_t new_graph_version = 0u;
+ dbus_uint64_t port2_id = 0u;
+ dbus_uint64_t port_id = 0u;
+
+ assert(jack_driver);
+ auto* me = static_cast<JackDriver*>(jack_driver);
+ assert(me->_dbus_connection);
+
+ if (dbus_message_is_signal(
+ message, DBUS_INTERFACE_DBUS, "NameOwnerChanged")) {
+ if (!dbus_message_get_args(message,
+ &me->_dbus_error,
+ DBUS_TYPE_STRING,
+ &object_name,
+ DBUS_TYPE_STRING,
+ &old_owner,
+ DBUS_TYPE_STRING,
+ &new_owner,
+ DBUS_TYPE_INVALID)) {
+ me->error_msg(fmt::format("dbus_message_get_args() failed to extract "
+ "NameOwnerChanged signal arguments ({})",
+ me->_dbus_error.message));
+
+ dbus_error_free(&me->_dbus_error);
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (old_owner[0] == '\0') {
+ me->on_jack_appeared();
+ } else if (new_owner[0] == '\0') {
+ me->on_jack_disappeared();
+ }
+ }
+
+ if (dbus_message_is_signal(
+ message, JACKDBUS_IFACE_PATCHBAY, "PortAppeared")) {
+ if (!dbus_message_get_args(message,
+ &me->_dbus_error,
+ DBUS_TYPE_UINT64,
+ &new_graph_version,
+ DBUS_TYPE_UINT64,
+ &client_id,
+ DBUS_TYPE_STRING,
+ &client_name,
+ DBUS_TYPE_UINT64,
+ &port_id,
+ DBUS_TYPE_STRING,
+ &port_name,
+ DBUS_TYPE_UINT32,
+ &port_flags,
+ DBUS_TYPE_UINT32,
+ &port_type,
+ DBUS_TYPE_INVALID)) {
+ me->error_msg(fmt::format("dbus_message_get_args() failed to extract "
+ "PortAppeared signal arguments ({})",
+ me->_dbus_error.message));
+ dbus_error_free(&me->_dbus_error);
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (!me->_server_started) {
+ me->_server_started = true;
+ me->_emit_event(event::DriverAttached{ClientType::jack});
+ }
+
+ me->_emit_event(
+ event::PortCreated{PortID::jack(client_name, port_name),
+ me->port_info(port_name, port_type, port_flags)});
return DBUS_HANDLER_RESULT_HANDLED;
- }
-#else
-// if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "ClientAppeared")) {
-// me->info_msg("ClientAppeared");
-// return DBUS_HANDLER_RESULT_HANDLED;
-// }
-
-// if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "ClientDisappeared")) {
-// me->info_msg("ClientDisappeared");
-// return DBUS_HANDLER_RESULT_HANDLED;
-// }
-
- if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "PortAppeared")) {
- if (!dbus_message_get_args( message, &me->_dbus_error,
- DBUS_TYPE_UINT64, &new_graph_version,
- DBUS_TYPE_UINT64, &client_id,
- DBUS_TYPE_STRING, &client_name,
- DBUS_TYPE_UINT64, &port_id,
- DBUS_TYPE_STRING, &port_name,
- DBUS_TYPE_UINT32, &port_flags,
- DBUS_TYPE_UINT32, &port_type,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "PortAppeared signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- //me->info_msg(str(boost::format("PortAppeared, %s(%llu):%s(%llu), %lu, %lu") % client_name % client_id % port_name % port_id % port_flags % port_type));
-
- if (!me->_server_started) {
- me->_server_started = true;
- me->signal_attached.emit();
- }
-
- me->add_port(client_id, client_name, port_id, port_name, port_flags, port_type);
-
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "PortDisappeared")) {
- if (!dbus_message_get_args( message, &me->_dbus_error,
- DBUS_TYPE_UINT64, &new_graph_version,
- DBUS_TYPE_UINT64, &client_id,
- DBUS_TYPE_STRING, &client_name,
- DBUS_TYPE_UINT64, &port_id,
- DBUS_TYPE_STRING, &port_name,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "PortDisappeared signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- //me->info_msg(str(boost::format("PortDisappeared, %s(%llu):%s(%llu)") % client_name % client_id % port_name % port_id));
-
- if (!me->_server_started) {
- me->_server_started = true;
- me->signal_attached.emit();
- }
-
- me->remove_port(client_id, client_name, port_id, port_name);
-
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "PortsConnected")) {
- if (!dbus_message_get_args(message, &me->_dbus_error,
- DBUS_TYPE_UINT64, &new_graph_version,
- DBUS_TYPE_UINT64, &client_id,
- DBUS_TYPE_STRING, &client_name,
- DBUS_TYPE_UINT64, &port_id,
- DBUS_TYPE_STRING, &port_name,
- DBUS_TYPE_UINT64, &client2_id,
- DBUS_TYPE_STRING, &client2_name,
- DBUS_TYPE_UINT64, &port2_id,
- DBUS_TYPE_STRING, &port2_name,
- DBUS_TYPE_UINT64, &connection_id,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "PortsConnected signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (!me->_server_started) {
- me->_server_started = true;
- me->signal_attached.emit();
- }
-
- me->connect_ports(
- connection_id,
- client_id, client_name,
- port_id, port_name,
- client2_id, client2_name,
- port2_id, port2_name);
-
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (dbus_message_is_signal(message, JACKDBUS_IFACE_PATCHBAY, "PortsDisconnected")) {
- if (!dbus_message_get_args(message, &me->_dbus_error,
- DBUS_TYPE_UINT64, &new_graph_version,
- DBUS_TYPE_UINT64, &client_id,
- DBUS_TYPE_STRING, &client_name,
- DBUS_TYPE_UINT64, &port_id,
- DBUS_TYPE_STRING, &port_name,
- DBUS_TYPE_UINT64, &client2_id,
- DBUS_TYPE_STRING, &client2_name,
- DBUS_TYPE_UINT64, &port2_id,
- DBUS_TYPE_STRING, &port2_name,
- DBUS_TYPE_UINT64, &connection_id,
- DBUS_TYPE_INVALID)) {
- me->error_msg(str(boost::format("dbus_message_get_args() failed to extract "
- "PortsConnected signal arguments (%s)") % me->_dbus_error.message));
- dbus_error_free(&me->_dbus_error);
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-
- if (!me->_server_started) {
- me->_server_started = true;
- me->signal_attached.emit();
- }
-
- me->disconnect_ports(
- connection_id,
- client_id, client_name,
- port_id, port_name,
- client2_id, client2_name,
- port2_id, port2_name);
-
- return DBUS_HANDLER_RESULT_HANDLED;
- }
-#endif
-
- return DBUS_HANDLER_RESULT_NOT_YET_HANDLED;
+ }
+
+ if (dbus_message_is_signal(
+ message, JACKDBUS_IFACE_PATCHBAY, "PortDisappeared")) {
+ if (!dbus_message_get_args(message,
+ &me->_dbus_error,
+ DBUS_TYPE_UINT64,
+ &new_graph_version,
+ DBUS_TYPE_UINT64,
+ &client_id,
+ DBUS_TYPE_STRING,
+ &client_name,
+ DBUS_TYPE_UINT64,
+ &port_id,
+ DBUS_TYPE_STRING,
+ &port_name,
+ DBUS_TYPE_INVALID)) {
+ me->error_msg(fmt::format("dbus_message_get_args() failed to extract "
+ "PortDisappeared signal arguments ({})",
+ me->_dbus_error.message));
+ dbus_error_free(&me->_dbus_error);
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (!me->_server_started) {
+ me->_server_started = true;
+ me->_emit_event(event::DriverAttached{ClientType::jack});
+ }
+
+ me->_emit_event(event::PortDestroyed{PortID::jack(client_name, port_name)});
+
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (dbus_message_is_signal(
+ message, JACKDBUS_IFACE_PATCHBAY, "PortsConnected")) {
+ if (!dbus_message_get_args(message,
+ &me->_dbus_error,
+ DBUS_TYPE_UINT64,
+ &new_graph_version,
+ DBUS_TYPE_UINT64,
+ &client_id,
+ DBUS_TYPE_STRING,
+ &client_name,
+ DBUS_TYPE_UINT64,
+ &port_id,
+ DBUS_TYPE_STRING,
+ &port_name,
+ DBUS_TYPE_UINT64,
+ &client2_id,
+ DBUS_TYPE_STRING,
+ &client2_name,
+ DBUS_TYPE_UINT64,
+ &port2_id,
+ DBUS_TYPE_STRING,
+ &port2_name,
+ DBUS_TYPE_UINT64,
+ &connection_id,
+ DBUS_TYPE_INVALID)) {
+ me->error_msg(fmt::format("dbus_message_get_args() failed to extract "
+ "PortsConnected signal arguments ({})",
+ me->_dbus_error.message));
+ dbus_error_free(&me->_dbus_error);
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (!me->_server_started) {
+ me->_server_started = true;
+ me->_emit_event(event::DriverAttached{ClientType::jack});
+ }
+
+ me->_emit_event(
+ event::PortsConnected{PortID::jack(client_name, port_name),
+ PortID::jack(client2_name, port2_name)});
+
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (dbus_message_is_signal(
+ message, JACKDBUS_IFACE_PATCHBAY, "PortsDisconnected")) {
+ if (!dbus_message_get_args(message,
+ &me->_dbus_error,
+ DBUS_TYPE_UINT64,
+ &new_graph_version,
+ DBUS_TYPE_UINT64,
+ &client_id,
+ DBUS_TYPE_STRING,
+ &client_name,
+ DBUS_TYPE_UINT64,
+ &port_id,
+ DBUS_TYPE_STRING,
+ &port_name,
+ DBUS_TYPE_UINT64,
+ &client2_id,
+ DBUS_TYPE_STRING,
+ &client2_name,
+ DBUS_TYPE_UINT64,
+ &port2_id,
+ DBUS_TYPE_STRING,
+ &port2_name,
+ DBUS_TYPE_UINT64,
+ &connection_id,
+ DBUS_TYPE_INVALID)) {
+ me->error_msg(fmt::format("dbus_message_get_args() failed to extract "
+ "PortsDisconnected signal arguments ({})",
+ me->_dbus_error.message));
+ dbus_error_free(&me->_dbus_error);
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ if (!me->_server_started) {
+ me->_server_started = true;
+ me->_emit_event(event::DriverAttached{ClientType::jack});
+ }
+
+ me->_emit_event(
+ event::PortsDisconnected{PortID::jack(client_name, port_name),
+ PortID::jack(client2_name, port2_name)});
+
+ return DBUS_HANDLER_RESULT_HANDLED;
+ }
+
+ return DBUS_HANDLER_RESULT_NOT_YET_HANDLED;
}
bool
-JackDriver::call(
- bool response_expected,
- const char* iface,
- const char* method,
- DBusMessage** reply_ptr_ptr,
- int in_type, ...)
+JackDriver::call(bool response_expected,
+ const char* iface,
+ const char* method,
+ DBusMessage** reply_ptr_ptr,
+ int in_type,
+ ...)
{
- DBusMessage* request_ptr;
- DBusMessage* reply_ptr;
- va_list ap;
-
- request_ptr = dbus_message_new_method_call(
- JACKDBUS_SERVICE,
- JACKDBUS_OBJECT,
- iface,
- method);
- if (!request_ptr) {
- throw std::runtime_error("dbus_message_new_method_call() returned 0");
- }
-
- va_start(ap, in_type);
-
- dbus_message_append_args_valist(request_ptr, in_type, ap);
-
- va_end(ap);
-
- // send message and get a handle for a reply
- reply_ptr = dbus_connection_send_with_reply_and_block(_dbus_connection, request_ptr,
- JACKDBUS_CALL_DEFAULT_TIMEOUT, &_dbus_error);
-
- dbus_message_unref(request_ptr);
-
- if (!reply_ptr) {
- if (response_expected) {
- error_msg(str(boost::format("no reply from server when calling method '%s'"
- ", error is '%s'") % method % _dbus_error.message));
- }
- _server_responding = false;
- dbus_error_free(&_dbus_error);
- } else {
- _server_responding = true;
- *reply_ptr_ptr = reply_ptr;
- }
-
- return reply_ptr;
+ DBusMessage* request_ptr = nullptr;
+ DBusMessage* reply_ptr = nullptr;
+ va_list ap;
+
+ request_ptr = dbus_message_new_method_call(
+ JACKDBUS_SERVICE, JACKDBUS_OBJECT, iface, method);
+ if (!request_ptr) {
+ throw std::runtime_error("dbus_message_new_method_call() returned 0");
+ }
+
+ va_start(ap, in_type);
+
+ dbus_message_append_args_valist(request_ptr, in_type, ap);
+
+ va_end(ap);
+
+ // send message and get a handle for a reply
+ reply_ptr = dbus_connection_send_with_reply_and_block(
+ _dbus_connection, request_ptr, JACKDBUS_CALL_DEFAULT_TIMEOUT, &_dbus_error);
+
+ dbus_message_unref(request_ptr);
+
+ if (!reply_ptr) {
+ if (response_expected) {
+ error_msg(fmt::format("No reply from server when calling method {} ({})",
+ method,
+ _dbus_error.message));
+ }
+ _server_responding = false;
+ dbus_error_free(&_dbus_error);
+ } else {
+ _server_responding = true;
+ *reply_ptr_ptr = reply_ptr;
+ }
+
+ return reply_ptr;
}
bool
JackDriver::is_started()
{
- DBusMessage* reply_ptr;
- dbus_bool_t started;
+ DBusMessage* reply_ptr = nullptr;
+ dbus_bool_t started = false;
- if (!call(false, JACKDBUS_IFACE_CONTROL, "IsStarted", &reply_ptr, DBUS_TYPE_INVALID)) {
- return false;
- }
+ if (!call(false,
+ JACKDBUS_IFACE_CONTROL,
+ "IsStarted",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return false;
+ }
- if (!dbus_message_get_args(reply_ptr, &_dbus_error,
- DBUS_TYPE_BOOLEAN, &started,
- DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&_dbus_error);
- error_msg("decoding reply of IsStarted failed.");
- return false;
- }
+ if (!dbus_message_get_args(reply_ptr,
+ &_dbus_error,
+ DBUS_TYPE_BOOLEAN,
+ &started,
+ DBUS_TYPE_INVALID)) {
+ dbus_message_unref(reply_ptr);
+ dbus_error_free(&_dbus_error);
+ error_msg("Decoding reply of IsStarted failed");
+ return false;
+ }
- dbus_message_unref(reply_ptr);
+ dbus_message_unref(reply_ptr);
- return started;
+ return started;
}
void
JackDriver::start_server()
{
- DBusMessage* reply_ptr;
+ DBusMessage* reply_ptr = nullptr;
- if (!call(false, JACKDBUS_IFACE_CONTROL, "StartServer", &reply_ptr, DBUS_TYPE_INVALID)) {
- return;
- }
+ if (!call(false,
+ JACKDBUS_IFACE_CONTROL,
+ "StartServer",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return;
+ }
- dbus_message_unref(reply_ptr);
+ dbus_message_unref(reply_ptr);
- update_attached();
+ update_attached();
}
void
JackDriver::stop_server()
{
- DBusMessage* reply_ptr;
+ DBusMessage* reply_ptr = nullptr;
- if (!call(false, JACKDBUS_IFACE_CONTROL, "StopServer", &reply_ptr, DBUS_TYPE_INVALID)) {
- return;
- }
+ if (!call(false,
+ JACKDBUS_IFACE_CONTROL,
+ "StopServer",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ error_msg("Error stopping JACK server");
+ }
- dbus_message_unref(reply_ptr);
-
- if (!_server_started) {
- _server_started = false;
- signal_detached.emit();
- }
+ dbus_message_unref(reply_ptr);
+ _emit_event(event::DriverDetached{ClientType::jack});
}
void
JackDriver::attach(bool launch_daemon)
{
- // Connect to the bus
- _dbus_connection = dbus_bus_get(DBUS_BUS_SESSION, &_dbus_error);
- if (dbus_error_is_set(&_dbus_error)) {
- error_msg("dbus_bus_get() failed");
- error_msg(_dbus_error.message);
- dbus_error_free(&_dbus_error);
- return;
- }
-
- dbus_connection_setup_with_g_main(_dbus_connection, NULL);
-
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" DBUS_INTERFACE_DBUS "',member=NameOwnerChanged,arg0='org.jackaudio.service'", NULL);
-#if defined(USE_FULL_REFRESH)
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=GraphChanged", NULL);
-#else
- // dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=ClientAppeared", NULL);
- // dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=ClientDisappeared", NULL);
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=PortAppeared", NULL);
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=PortDisappeared", NULL);
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=PortsConnected", NULL);
- dbus_bus_add_match(_dbus_connection, "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY "',member=PortsDisconnected", NULL);
-#endif
- dbus_connection_add_filter(_dbus_connection, dbus_message_hook, this, NULL);
-
- update_attached();
-
- if (!_server_responding) {
- return;
- }
-
- if (launch_daemon) {
- start_server();
- }
+ // Connect to the bus
+ _dbus_connection = dbus_bus_get(DBUS_BUS_SESSION, &_dbus_error);
+ if (dbus_error_is_set(&_dbus_error)) {
+ error_msg(fmt::format("dbus_bus_get() failed ({})", _dbus_error.message));
+ dbus_error_free(&_dbus_error);
+ return;
+ }
+
+ dbus_connection_setup_with_g_main(_dbus_connection, nullptr);
+
+ dbus_bus_add_match(_dbus_connection,
+ "type='signal',interface='" DBUS_INTERFACE_DBUS
+ "',member=NameOwnerChanged,arg0='org.jackaudio.service'",
+ nullptr);
+ dbus_bus_add_match(_dbus_connection,
+ "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY
+ "',member=PortAppeared",
+ nullptr);
+ dbus_bus_add_match(_dbus_connection,
+ "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY
+ "',member=PortDisappeared",
+ nullptr);
+ dbus_bus_add_match(_dbus_connection,
+ "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY
+ "',member=PortsConnected",
+ nullptr);
+ dbus_bus_add_match(_dbus_connection,
+ "type='signal',interface='" JACKDBUS_IFACE_PATCHBAY
+ "',member=PortsDisconnected",
+ nullptr);
+
+ dbus_connection_add_filter(
+ _dbus_connection, dbus_message_hook, this, nullptr);
+
+ update_attached();
+
+ if (!_server_responding) {
+ return;
+ }
+
+ if (launch_daemon) {
+ start_server();
+ }
+
+ _log.info("[JACK] Attached to bus");
}
void
JackDriver::detach()
{
- stop_server();
+ stop_server();
}
bool
JackDriver::is_attached() const
{
- return _dbus_connection && _server_responding;
+ return _dbus_connection && _server_responding;
}
void
-JackDriver::add_port(PatchageModule* module,
- PortType type,
- const std::string& name,
- bool is_input)
-{
- if (module->get_port(name)) {
- return;
- }
-
- new PatchagePort(
- *module,
- type,
- name,
- "", // TODO: pretty name
- is_input,
- _app->conf()->get_port_color(type),
- _app->show_human_names());
-}
+JackDriver::refresh(const EventSink& sink)
+{
+ DBusMessage* reply_ptr = nullptr;
+ DBusMessageIter iter = {};
+ dbus_uint64_t version = 0u;
+ const char* reply_signature = nullptr;
+ DBusMessageIter clients_array_iter = {};
+ DBusMessageIter client_struct_iter = {};
+ DBusMessageIter ports_array_iter = {};
+ DBusMessageIter port_struct_iter = {};
+ DBusMessageIter connections_array_iter = {};
+ DBusMessageIter connection_struct_iter = {};
+ dbus_uint64_t client_id = 0u;
+ const char* client_name = nullptr;
+ dbus_uint64_t port_id = 0u;
+ const char* port_name = nullptr;
+ dbus_uint32_t port_flags = 0u;
+ dbus_uint32_t port_type = 0u;
+ dbus_uint64_t client2_id = 0u;
+ const char* client2_name = nullptr;
+ dbus_uint64_t port2_id = 0u;
+ const char* port2_name = nullptr;
+ dbus_uint64_t connection_id = 0u;
+
+ if (!call(true,
+ JACKDBUS_IFACE_PATCHBAY,
+ "GetGraph",
+ &reply_ptr,
+ DBUS_TYPE_UINT64,
+ &version,
+ DBUS_TYPE_INVALID)) {
+ error_msg("GetGraph() failed");
+ return;
+ }
-void
-JackDriver::add_port(dbus_uint64_t client_id,
- const char* client_name,
- dbus_uint64_t port_id,
- const char* port_name,
- dbus_uint32_t port_flags,
- dbus_uint32_t port_type)
-{
- PortType local_port_type;
-
- switch (port_type) {
- case JACKDBUS_PORT_TYPE_AUDIO:
- local_port_type = JACK_AUDIO;
- break;
- case JACKDBUS_PORT_TYPE_MIDI:
- local_port_type = JACK_MIDI;
- break;
- default:
- error_msg("Unknown JACK D-Bus port type");
- return;
- }
-
- ModuleType type = InputOutput;
- if (_app->conf()->get_module_split(client_name, port_flags & JACKDBUS_PORT_FLAG_TERMINAL)) {
- if (port_flags & JACKDBUS_PORT_FLAG_INPUT) {
- type = Input;
- } else {
- type = Output;
- }
- }
-
- PatchageModule* module = find_or_create_module(type, client_name);
-
- add_port(module, local_port_type, port_name, port_flags & JACKDBUS_PORT_FLAG_INPUT);
-}
+ reply_signature = dbus_message_get_signature(reply_ptr);
-void
-JackDriver::remove_port(dbus_uint64_t client_id,
- const char* client_name,
- dbus_uint64_t port_id,
- const char* port_name)
-{
- PatchagePort* port = _app->canvas()->find_port_by_name(client_name, port_name);
- if (!port) {
- error_msg("Unable to remove unknown port");
- return;
- }
+ if (strcmp(reply_signature, "ta(tsa(tsuu))a(tstststst)") != 0) {
+ error_msg(std::string{"GetGraph() reply signature mismatch. "} +
+ reply_signature);
+ dbus_message_unref(reply_ptr);
+ return;
+ }
- PatchageModule* module = dynamic_cast<PatchageModule*>(port->get_module());
+ dbus_message_iter_init(reply_ptr, &iter);
- delete port;
+ dbus_message_iter_get_basic(&iter, &version);
+ dbus_message_iter_next(&iter);
- // No empty modules (for now)
- if (module->num_ports() == 0) {
- delete module;
- }
+ _graph_version = version;
- if (_app->canvas()->empty()) {
- if (_server_started) {
- signal_detached.emit();
- }
+ // Emit all clients and ports
+ for (dbus_message_iter_recurse(&iter, &clients_array_iter);
+ dbus_message_iter_get_arg_type(&clients_array_iter) != DBUS_TYPE_INVALID;
+ dbus_message_iter_next(&clients_array_iter)) {
+ dbus_message_iter_recurse(&clients_array_iter, &client_struct_iter);
- _server_started = false;
- }
-}
+ dbus_message_iter_get_basic(&client_struct_iter, &client_id);
+ dbus_message_iter_next(&client_struct_iter);
-PatchageModule*
-JackDriver::find_or_create_module(
- ModuleType type,
- const std::string& name)
-{
- PatchageModule* module = _app->canvas()->find_module(name, type);
+ dbus_message_iter_get_basic(&client_struct_iter, &client_name);
+ dbus_message_iter_next(&client_struct_iter);
- if (!module) {
- module = new PatchageModule(_app, name, type);
- module->load_location();
- _app->canvas()->add_module(name, module);
- }
+ // TODO: Pretty name?
+ sink({event::ClientCreated{ClientID::jack(client_name), {client_name}}});
- return module;
-}
+ for (dbus_message_iter_recurse(&client_struct_iter, &ports_array_iter);
+ dbus_message_iter_get_arg_type(&ports_array_iter) != DBUS_TYPE_INVALID;
+ dbus_message_iter_next(&ports_array_iter)) {
+ dbus_message_iter_recurse(&ports_array_iter, &port_struct_iter);
-void
-JackDriver::connect_ports(dbus_uint64_t connection_id,
- dbus_uint64_t client1_id,
- const char* client1_name,
- dbus_uint64_t port1_id,
- const char* port1_name,
- dbus_uint64_t client2_id,
- const char* client2_name,
- dbus_uint64_t port2_id,
- const char* port2_name)
-{
- PatchagePort* port1 = _app->canvas()->find_port_by_name(client1_name, port1_name);
- if (!port1) {
- error_msg((std::string)"Unable to connect unknown port '" + port1_name + "' of client '" + client1_name + "'");
- return;
- }
-
- PatchagePort* port2 = _app->canvas()->find_port_by_name(client2_name, port2_name);
- if (!port2) {
- error_msg((std::string)"Unable to connect unknown port '" + port2_name + "' of client '" + client2_name + "'");
- return;
- }
-
- _app->canvas()->connect(port1, port2);
-}
+ dbus_message_iter_get_basic(&port_struct_iter, &port_id);
+ dbus_message_iter_next(&port_struct_iter);
-void
-JackDriver::disconnect_ports(dbus_uint64_t connection_id,
- dbus_uint64_t client1_id,
- const char* client1_name,
- dbus_uint64_t port1_id,
- const char* port1_name,
- dbus_uint64_t client2_id,
- const char* client2_name,
- dbus_uint64_t port2_id,
- const char* port2_name)
-{
- PatchagePort* port1 = _app->canvas()->find_port_by_name(client1_name, port1_name);
- if (!port1) {
- error_msg((std::string)"Unable to disconnect unknown port '" + port1_name + "' of client '" + client1_name + "'");
- return;
- }
-
- PatchagePort* port2 = _app->canvas()->find_port_by_name(client2_name, port2_name);
- if (!port2) {
- error_msg((std::string)"Unable to disconnect unknown port '" + port2_name + "' of client '" + client2_name + "'");
- return;
- }
-
- _app->canvas()->disconnect(port1, port2);
-}
+ dbus_message_iter_get_basic(&port_struct_iter, &port_name);
+ dbus_message_iter_next(&port_struct_iter);
-void
-JackDriver::refresh_internal(bool force)
-{
- DBusMessage* reply_ptr;
- DBusMessageIter iter;
- dbus_uint64_t version;
- const char* reply_signature;
- DBusMessageIter clients_array_iter;
- DBusMessageIter client_struct_iter;
- DBusMessageIter ports_array_iter;
- DBusMessageIter port_struct_iter;
- DBusMessageIter connections_array_iter;
- DBusMessageIter connection_struct_iter;
- dbus_uint64_t client_id;
- const char* client_name;
- dbus_uint64_t port_id;
- const char* port_name;
- dbus_uint32_t port_flags;
- dbus_uint32_t port_type;
- dbus_uint64_t client2_id;
- const char* client2_name;
- dbus_uint64_t port2_id;
- const char* port2_name;
- dbus_uint64_t connection_id;
+ dbus_message_iter_get_basic(&port_struct_iter, &port_flags);
+ dbus_message_iter_next(&port_struct_iter);
- if (force) {
- version = 0; // workaround module split/join stupidity
- } else {
- version = _graph_version;
- }
+ dbus_message_iter_get_basic(&port_struct_iter, &port_type);
+ dbus_message_iter_next(&port_struct_iter);
- if (!call(true, JACKDBUS_IFACE_PATCHBAY, "GetGraph", &reply_ptr, DBUS_TYPE_UINT64, &version, DBUS_TYPE_INVALID)) {
- error_msg("GetGraph() failed.");
- return;
- }
+ sink({event::PortCreated{PortID::jack(client_name, port_name),
+ port_info(port_name, port_type, port_flags)}});
+ }
- reply_signature = dbus_message_get_signature(reply_ptr);
+ dbus_message_iter_next(&client_struct_iter);
+ }
- if (strcmp(reply_signature, "ta(tsa(tsuu))a(tstststst)") != 0) {
- error_msg((std::string)"GetGraph() reply signature mismatch. " + reply_signature);
- goto unref;
- }
-
- dbus_message_iter_init(reply_ptr, &iter);
-
- //info_msg((string)"version " + (char)dbus_message_iter_get_arg_type(&iter));
- dbus_message_iter_get_basic(&iter, &version);
- dbus_message_iter_next(&iter);
-
- if (!force && version <= _graph_version) {
- goto unref;
- }
-
- destroy_all();
-
- //info_msg(str(boost::format("got new graph version %llu") % version));
- _graph_version = version;
-
- //info_msg((string)"clients " + (char)dbus_message_iter_get_arg_type(&iter));
-
- for (dbus_message_iter_recurse(&iter, &clients_array_iter);
- dbus_message_iter_get_arg_type(&clients_array_iter) != DBUS_TYPE_INVALID;
- dbus_message_iter_next(&clients_array_iter)) {
- //info_msg((string)"a client " + (char)dbus_message_iter_get_arg_type(&clients_array_iter));
- dbus_message_iter_recurse(&clients_array_iter, &client_struct_iter);
-
- dbus_message_iter_get_basic(&client_struct_iter, &client_id);
- dbus_message_iter_next(&client_struct_iter);
-
- dbus_message_iter_get_basic(&client_struct_iter, &client_name);
- dbus_message_iter_next(&client_struct_iter);
-
- //info_msg((string)"client '" + client_name + "'");
-
- for (dbus_message_iter_recurse(&client_struct_iter, &ports_array_iter);
- dbus_message_iter_get_arg_type(&ports_array_iter) != DBUS_TYPE_INVALID;
- dbus_message_iter_next(&ports_array_iter)) {
- //info_msg((string)"a port " + (char)dbus_message_iter_get_arg_type(&ports_array_iter));
- dbus_message_iter_recurse(&ports_array_iter, &port_struct_iter);
-
- dbus_message_iter_get_basic(&port_struct_iter, &port_id);
- dbus_message_iter_next(&port_struct_iter);
-
- dbus_message_iter_get_basic(&port_struct_iter, &port_name);
- dbus_message_iter_next(&port_struct_iter);
-
- dbus_message_iter_get_basic(&port_struct_iter, &port_flags);
- dbus_message_iter_next(&port_struct_iter);
-
- dbus_message_iter_get_basic(&port_struct_iter, &port_type);
- dbus_message_iter_next(&port_struct_iter);
-
- //info_msg((string)"port: " + port_name);
-
- add_port(client_id, client_name, port_id, port_name, port_flags, port_type);
- }
-
- dbus_message_iter_next(&client_struct_iter);
- }
-
- dbus_message_iter_next(&iter);
-
- for (dbus_message_iter_recurse(&iter, &connections_array_iter);
- dbus_message_iter_get_arg_type(&connections_array_iter) != DBUS_TYPE_INVALID;
- dbus_message_iter_next(&connections_array_iter)) {
- //info_msg((string)"a connection " + (char)dbus_message_iter_get_arg_type(&connections_array_iter));
- dbus_message_iter_recurse(&connections_array_iter, &connection_struct_iter);
-
- dbus_message_iter_get_basic(&connection_struct_iter, &client_id);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_next(&iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &client_name);
- dbus_message_iter_next(&connection_struct_iter);
+ // Emit all connections
+ for (dbus_message_iter_recurse(&iter, &connections_array_iter);
+ dbus_message_iter_get_arg_type(&connections_array_iter) !=
+ DBUS_TYPE_INVALID;
+ dbus_message_iter_next(&connections_array_iter)) {
+ dbus_message_iter_recurse(&connections_array_iter, &connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &port_id);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &client_id);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &port_name);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &client_name);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &client2_id);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &port_id);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &client2_name);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &port_name);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &port2_id);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &client2_id);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &port2_name);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &client2_name);
+ dbus_message_iter_next(&connection_struct_iter);
- dbus_message_iter_get_basic(&connection_struct_iter, &connection_id);
- dbus_message_iter_next(&connection_struct_iter);
+ dbus_message_iter_get_basic(&connection_struct_iter, &port2_id);
+ dbus_message_iter_next(&connection_struct_iter);
- //info_msg(str(boost::format("connection(%llu) %s(%llu):%s(%llu) <-> %s(%llu):%s(%llu)") %
- // connection_id %
- // client_name %
- // client_id %
- // port_name %
- // port_id %
- // client2_name %
- // client2_id %
- // port2_name %
- // port2_id));
+ dbus_message_iter_get_basic(&connection_struct_iter, &port2_name);
+ dbus_message_iter_next(&connection_struct_iter);
- connect_ports(
- connection_id,
- client_id, client_name,
- port_id, port_name,
- client2_id, client2_name,
- port2_id, port2_name);
- }
+ dbus_message_iter_get_basic(&connection_struct_iter, &connection_id);
+ dbus_message_iter_next(&connection_struct_iter);
-unref:
- dbus_message_unref(reply_ptr);
+ sink({event::PortsConnected{PortID::jack(client_name, port_name),
+ PortID::jack(client2_name, port2_name)}});
+ }
}
-void
-JackDriver::refresh()
-{
- refresh_internal(true);
+bool
+JackDriver::connect(const PortID& tail_id, const PortID& head_id)
+{
+ const auto tail_names = PortNames(tail_id);
+ const auto head_names = PortNames(head_id);
+ const char* const tail_client_name = tail_names.client().c_str();
+ const char* const tail_port_name = tail_names.port().c_str();
+ const char* const head_client_name = head_names.client().c_str();
+ const char* const head_port_name = head_names.port().c_str();
+
+ DBusMessage* reply_ptr = nullptr;
+
+ if (!call(true,
+ JACKDBUS_IFACE_PATCHBAY,
+ "ConnectPortsByName",
+ &reply_ptr,
+ DBUS_TYPE_STRING,
+ &tail_client_name,
+ DBUS_TYPE_STRING,
+ &tail_port_name,
+ DBUS_TYPE_STRING,
+ &head_client_name,
+ DBUS_TYPE_STRING,
+ &head_port_name,
+ DBUS_TYPE_INVALID)) {
+ error_msg("ConnectPortsByName() failed");
+ return false;
+ }
+
+ return true;
}
bool
-JackDriver::connect(PatchagePort* src,
- PatchagePort* dst)
-{
- const char* client1_name = src->get_module()->get_label();
- const char* port1_name = src->get_label();
- const char* client2_name = dst->get_module()->get_label();
- const char* port2_name = dst->get_label();
-
- DBusMessage* reply_ptr;
- if (!call(true, JACKDBUS_IFACE_PATCHBAY, "ConnectPortsByName", &reply_ptr,
- DBUS_TYPE_STRING, &client1_name,
- DBUS_TYPE_STRING, &port1_name,
- DBUS_TYPE_STRING, &client2_name,
- DBUS_TYPE_STRING, &port2_name,
- DBUS_TYPE_INVALID)) {
- error_msg("ConnectPortsByName() failed.");
- return false;
- }
-
- return true;
+JackDriver::disconnect(const PortID& tail_id, const PortID& head_id)
+{
+ const auto tail_names = PortNames(tail_id);
+ const auto head_names = PortNames(head_id);
+ const char* const tail_client_name = tail_names.client().c_str();
+ const char* const tail_port_name = tail_names.port().c_str();
+ const char* const head_client_name = head_names.client().c_str();
+ const char* const head_port_name = head_names.port().c_str();
+
+ DBusMessage* reply_ptr = nullptr;
+
+ if (!call(true,
+ JACKDBUS_IFACE_PATCHBAY,
+ "DisconnectPortsByName",
+ &reply_ptr,
+ DBUS_TYPE_STRING,
+ &tail_client_name,
+ DBUS_TYPE_STRING,
+ &tail_port_name,
+ DBUS_TYPE_STRING,
+ &head_client_name,
+ DBUS_TYPE_STRING,
+ &head_port_name,
+ DBUS_TYPE_INVALID)) {
+ error_msg("DisconnectPortsByName() failed");
+ return false;
+ }
+
+ return true;
+}
+
+uint32_t
+JackDriver::xruns()
+{
+ DBusMessage* reply_ptr = nullptr;
+ dbus_uint32_t xruns = 0u;
+
+ if (_server_responding && !_server_started) {
+ return 0;
+ }
+
+ if (!call(true,
+ JACKDBUS_IFACE_CONTROL,
+ "GetXruns",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return 0;
+ }
+
+ if (!dbus_message_get_args(
+ reply_ptr, &_dbus_error, DBUS_TYPE_UINT32, &xruns, DBUS_TYPE_INVALID)) {
+ dbus_message_unref(reply_ptr);
+ dbus_error_free(&_dbus_error);
+ error_msg("Decoding reply of GetXruns failed");
+ return 0;
+ }
+
+ dbus_message_unref(reply_ptr);
+
+ return xruns;
}
-bool
-JackDriver::disconnect(PatchagePort* src,
- PatchagePort* dst)
+void
+JackDriver::reset_xruns()
{
- const char* client1_name = src->get_module()->get_label();
- const char* port1_name = src->get_label();
- const char* client2_name = dst->get_module()->get_label();
- const char* port2_name = dst->get_label();
-
- DBusMessage* reply_ptr;
- if (!call(true, JACKDBUS_IFACE_PATCHBAY, "DisconnectPortsByName", &reply_ptr,
- DBUS_TYPE_STRING, &client1_name,
- DBUS_TYPE_STRING, &port1_name,
- DBUS_TYPE_STRING, &client2_name,
- DBUS_TYPE_STRING, &port2_name,
- DBUS_TYPE_INVALID)) {
- error_msg("DisconnectPortsByName() failed.");
- return false;
- }
-
- return true;
+ DBusMessage* reply_ptr = nullptr;
+
+ if (!call(true,
+ JACKDBUS_IFACE_CONTROL,
+ "ResetXruns",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return;
+ }
+
+ dbus_message_unref(reply_ptr);
}
-jack_nframes_t
+uint32_t
JackDriver::buffer_size()
{
- DBusMessage* reply_ptr;
- dbus_uint32_t buffer_size;
-
- if (_server_responding && !_server_started) {
- goto fail;
- }
+ DBusMessage* reply_ptr = nullptr;
+ dbus_uint32_t buffer_size = 0u;
- if (!call(true, JACKDBUS_IFACE_CONTROL, "GetBufferSize", &reply_ptr, DBUS_TYPE_INVALID)) {
- goto fail;
- }
+ if (_server_responding && !_server_started) {
+ return 4096;
+ }
- if (!dbus_message_get_args(reply_ptr, &_dbus_error, DBUS_TYPE_UINT32, &buffer_size, DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&_dbus_error);
- error_msg("decoding reply of GetBufferSize failed.");
- goto fail;
- }
+ if (!call(true,
+ JACKDBUS_IFACE_CONTROL,
+ "GetBufferSize",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return 4096;
+ }
- dbus_message_unref(reply_ptr);
+ if (!dbus_message_get_args(reply_ptr,
+ &_dbus_error,
+ DBUS_TYPE_UINT32,
+ &buffer_size,
+ DBUS_TYPE_INVALID)) {
+ dbus_message_unref(reply_ptr);
+ dbus_error_free(&_dbus_error);
+ error_msg("Decoding reply of GetBufferSize failed");
+ return 4096;
+ }
- return buffer_size;
+ dbus_message_unref(reply_ptr);
-fail:
- return 4096; // something fake, patchage needs it to match combobox value
+ return buffer_size;
}
bool
-JackDriver::set_buffer_size(jack_nframes_t size)
+JackDriver::set_buffer_size(const uint32_t frames)
{
- DBusMessage* reply_ptr;
- dbus_uint32_t buffer_size;
-
- buffer_size = size;
+ DBusMessage* reply_ptr = nullptr;
+ dbus_uint32_t buffer_size = frames;
- if (!call(true, JACKDBUS_IFACE_CONTROL, "SetBufferSize", &reply_ptr, DBUS_TYPE_UINT32, &buffer_size, DBUS_TYPE_INVALID)) {
- return false;
- }
+ if (!call(true,
+ JACKDBUS_IFACE_CONTROL,
+ "SetBufferSize",
+ &reply_ptr,
+ DBUS_TYPE_UINT32,
+ &buffer_size,
+ DBUS_TYPE_INVALID)) {
+ return false;
+ }
- dbus_message_unref(reply_ptr);
+ dbus_message_unref(reply_ptr);
- return true;
+ return true;
}
-float
+uint32_t
JackDriver::sample_rate()
{
- DBusMessage* reply_ptr;
- double sample_rate;
+ DBusMessage* reply_ptr = nullptr;
+ dbus_uint32_t sample_rate = 0u;
- if (!call(true, JACKDBUS_IFACE_CONTROL, "GetSampleRate", &reply_ptr, DBUS_TYPE_INVALID)) {
- return false;
- }
+ if (!call(true,
+ JACKDBUS_IFACE_CONTROL,
+ "GetSampleRate",
+ &reply_ptr,
+ DBUS_TYPE_INVALID)) {
+ return false;
+ }
- if (!dbus_message_get_args(reply_ptr, &_dbus_error, DBUS_TYPE_DOUBLE, &sample_rate, DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&_dbus_error);
- error_msg("decoding reply of GetSampleRate failed.");
- return false;
- }
+ if (!dbus_message_get_args(reply_ptr,
+ &_dbus_error,
+ DBUS_TYPE_UINT32,
+ &sample_rate,
+ DBUS_TYPE_INVALID)) {
+ dbus_message_unref(reply_ptr);
+ dbus_error_free(&_dbus_error);
+ error_msg("Decoding reply of GetSampleRate failed");
+ return false;
+ }
- dbus_message_unref(reply_ptr);
+ dbus_message_unref(reply_ptr);
- return sample_rate;
+ return sample_rate;
}
-bool
-JackDriver::is_realtime() const
-{
- DBusMessage* reply_ptr;
- dbus_bool_t realtime;
-
- JackDriver* me = const_cast<JackDriver*>(this);
- if (!me->call(true, JACKDBUS_IFACE_CONTROL, "IsRealtime",
- &reply_ptr, DBUS_TYPE_INVALID)) {
- return false;
- }
-
- if (!dbus_message_get_args(reply_ptr, &me->_dbus_error, DBUS_TYPE_BOOLEAN,
- &realtime, DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&me->_dbus_error);
- error_msg("decoding reply of IsRealtime failed.");
- return false;
- }
-
- dbus_message_unref(reply_ptr);
-
- return realtime;
-}
-
-size_t
-JackDriver::get_xruns()
+PortType
+JackDriver::patchage_port_type(const dbus_uint32_t dbus_port_type) const
{
- DBusMessage* reply_ptr;
- dbus_uint32_t xruns;
-
- if (_server_responding && !_server_started) {
- return 0;
- }
-
- if (!call(true, JACKDBUS_IFACE_CONTROL, "GetXruns", &reply_ptr, DBUS_TYPE_INVALID)) {
- return 0;
- }
-
- if (!dbus_message_get_args(reply_ptr, &_dbus_error, DBUS_TYPE_UINT32, &xruns, DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&_dbus_error);
- error_msg("decoding reply of GetXruns failed.");
- return 0;
- }
-
- dbus_message_unref(reply_ptr);
+ switch (dbus_port_type) {
+ case JACKDBUS_PORT_TYPE_AUDIO:
+ return PortType::jack_audio;
+ case JACKDBUS_PORT_TYPE_MIDI:
+ return PortType::jack_midi;
+ default:
+ break;
+ }
- return xruns;
+ error_msg(fmt::format("Unknown JACK D-Bus port type {}", dbus_port_type));
+ return PortType::jack_audio;
}
-void
-JackDriver::reset_xruns()
+PortInfo
+JackDriver::port_info(const std::string& port_name,
+ const dbus_uint32_t port_type,
+ const dbus_uint32_t port_flags) const
{
- DBusMessage* reply_ptr;
+ const SignalDirection direction =
+ ((port_flags & JACKDBUS_PORT_FLAG_INPUT) ? SignalDirection::input
+ : SignalDirection::output);
- if (!call(true, JACKDBUS_IFACE_CONTROL, "ResetXruns", &reply_ptr, DBUS_TYPE_INVALID)) {
- return;
- }
-
- dbus_message_unref(reply_ptr);
+ // TODO: Metadata?
+ return {port_name,
+ patchage_port_type(port_type),
+ direction,
+ {},
+ bool(port_flags & JACKDBUS_PORT_FLAG_TERMINAL)};
}
-float
-JackDriver::get_max_dsp_load()
+void
+JackDriver::error_msg(const std::string& msg) const
{
- DBusMessage* reply_ptr;
- double load;
-
- if (_server_responding && !_server_started) {
- return 0.0;
- }
-
- if (!call(true, JACKDBUS_IFACE_CONTROL, "GetLoad", &reply_ptr, DBUS_TYPE_INVALID)) {
- return 0.0;
- }
-
- if (!dbus_message_get_args(reply_ptr, &_dbus_error, DBUS_TYPE_DOUBLE, &load, DBUS_TYPE_INVALID)) {
- dbus_message_unref(reply_ptr);
- dbus_error_free(&_dbus_error);
- error_msg("decoding reply of GetLoad failed.");
- return 0.0;
- }
-
- dbus_message_unref(reply_ptr);
-
- load /= 100.0; // convert from percent to [0..1]
-
- if (load > _max_dsp_load) {
- _max_dsp_load = load;
- }
-
- return _max_dsp_load;
+ _log.error(std::string{"[JACK] "} + msg);
}
-
void
-JackDriver::reset_max_dsp_load()
+JackDriver::info_msg(const std::string& msg) const
{
- _max_dsp_load = 0.0;
+ _log.info(std::string{"[JACK] "} + msg);
}
-PatchagePort*
-JackDriver::create_port_view(Patchage* patchage,
- const PortID& id)
-{
- assert(false); // we dont use events at all
- return NULL;
-}
+} // namespace
-void
-JackDriver::error_msg(const std::string& msg) const
+std::unique_ptr<AudioDriver>
+make_jack_driver(ILog& log, Driver::EventSink emit_event)
{
- _app->error_msg((std::string)"Jack: " + msg);
+ return std::unique_ptr<AudioDriver>{
+ new JackDriver{log, std::move(emit_event)}};
}
-void
-JackDriver::info_msg(const std::string& msg) const
-{
- _app->info_msg((std::string)"Jack: " + msg);
-}
+} // namespace patchage
diff --git a/src/JackDbusDriver.hpp b/src/JackDbusDriver.hpp
deleted file mode 100644
index 69cc0a5..0000000
--- a/src/JackDbusDriver.hpp
+++ /dev/null
@@ -1,161 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2008 Nedko Arnaudov <nedko@arnaudov.name>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_JACKDBUSDRIVER_HPP
-#define PATCHAGE_JACKDBUSDRIVER_HPP
-
-#include <string>
-#include <boost/shared_ptr.hpp>
-#include <jack/jack.h>
-#include <jack/statistics.h>
-#include <glibmm/thread.h>
-#include <dbus/dbus.h>
-#include "Driver.hpp"
-#include "Patchage.hpp"
-#include "PatchageModule.hpp"
-
-class PatchageEvent;
-class PatchageCanvas;
-class PatchagePort;
-
-class JackDriver : public Driver
-{
-public:
- explicit JackDriver(Patchage* app);
- ~JackDriver();
-
- void attach(bool launch_daemon);
- void detach();
-
- bool is_attached() const;
- bool is_realtime() const;
-
- void refresh();
- void destroy_all();
-
- bool connect(
- PatchagePort* src,
- PatchagePort* dst);
-
- bool disconnect(
- PatchagePort* src,
- PatchagePort* dst);
-
- size_t get_xruns();
- void reset_xruns();
- float get_max_dsp_load();
- void reset_max_dsp_load();
-
- float sample_rate();
- jack_nframes_t buffer_size();
- bool set_buffer_size(jack_nframes_t size);
-
- void process_events(Patchage* app) {}
-
- PatchagePort* create_port_view(
- Patchage* patchage,
- const PortID& ref);
-
-private:
- void error_msg(const std::string& msg) const;
- void info_msg(const std::string& msg) const;
-
- PatchageModule* find_or_create_module(
- ModuleType type,
- const std::string& name);
-
- void add_port(
- PatchageModule* module,
- PortType type,
- const std::string& name,
- bool is_input);
-
- void add_port(
- dbus_uint64_t client_id,
- const char* client_name,
- dbus_uint64_t port_id,
- const char* port_name,
- dbus_uint32_t port_flags,
- dbus_uint32_t port_type);
-
- void remove_port(
- dbus_uint64_t client_id,
- const char* client_name,
- dbus_uint64_t port_id,
- const char* port_name);
-
- void connect_ports(
- dbus_uint64_t connection_id,
- dbus_uint64_t client1_id,
- const char* client1_name,
- dbus_uint64_t port1_id,
- const char* port1_name,
- dbus_uint64_t client2_id,
- const char* client2_name,
- dbus_uint64_t port2_id,
- const char* port2_name);
-
- void disconnect_ports(
- dbus_uint64_t connection_id,
- dbus_uint64_t client1_id,
- const char* client1_name,
- dbus_uint64_t port1_id,
- const char* port1_name,
- dbus_uint64_t client2_id,
- const char* client2_name,
- dbus_uint64_t port2_id,
- const char* port2_name);
-
- bool call(
- bool response_expected,
- const char* iface,
- const char* method,
- DBusMessage** reply_ptr_ptr,
- int in_type,
- ...);
-
- void update_attached();
-
- bool is_started();
-
- void start_server();
-
- void stop_server();
-
- void refresh_internal(bool force);
-
- static DBusHandlerResult dbus_message_hook(
- DBusConnection *connection,
- DBusMessage *message,
- void *me);
-
- void on_jack_appeared();
-
- void on_jack_disappeared();
-
-private:
- Patchage* _app;
- DBusError _dbus_error;
- DBusConnection* _dbus_connection;
- float _max_dsp_load;
-
- bool _server_responding;
- bool _server_started;
-
- dbus_uint64_t _graph_version;
-};
-
-#endif // PATCHAGE_JACKDBUSDRIVER_HPP
diff --git a/src/JackDriver.cpp b/src/JackDriver.cpp
deleted file mode 100644
index 5daedae..0000000
--- a/src/JackDriver.cpp
+++ /dev/null
@@ -1,588 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <cassert>
-#include <cstring>
-#include <set>
-#include <string>
-
-#include <boost/format.hpp>
-
-#include <jack/jack.h>
-#include <jack/statistics.h>
-
-#include "JackDriver.hpp"
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageEvent.hpp"
-#include "PatchageModule.hpp"
-#include "Queue.hpp"
-#include "patchage_config.h"
-#ifdef HAVE_JACK_METADATA
-#include <jack/metadata.h>
-#include "jackey.h"
-#endif
-
-using std::endl;
-using std::string;
-using boost::format;
-
-JackDriver::JackDriver(Patchage* app)
- : _app(app)
- , _client(NULL)
- , _events(128)
- , _xruns(0)
- , _xrun_delay(0)
- , _is_activated(false)
-{
- _last_pos.frame = 0;
- _last_pos.valid = (jack_position_bits_t)0;
-}
-
-JackDriver::~JackDriver()
-{
- detach();
-}
-
-/** Connect to Jack.
- */
-void
-JackDriver::attach(bool launch_daemon)
-{
- // Already connected
- if (_client)
- return;
-
- jack_options_t options = (!launch_daemon) ? JackNoStartServer : JackNullOption;
- _client = jack_client_open("Patchage", options, NULL);
- if (_client == NULL) {
- _app->error_msg("Jack: Unable to create client.");
- _is_activated = false;
- } else {
- jack_client_t* const client = _client;
-
- jack_on_shutdown(client, jack_shutdown_cb, this);
- jack_set_client_registration_callback(client, jack_client_registration_cb, this);
- jack_set_port_registration_callback(client, jack_port_registration_cb, this);
- jack_set_port_connect_callback(client, jack_port_connect_cb, this);
- jack_set_xrun_callback(client, jack_xrun_cb, this);
-
- _buffer_size = jack_get_buffer_size(client);
-
- if (!jack_activate(client)) {
- _is_activated = true;
- signal_attached.emit();
- std::stringstream ss;
- _app->info_msg("Jack: Attached.");
- } else {
- _app->error_msg("Jack: Client activation failed.");
- _is_activated = false;
- }
- }
-}
-
-void
-JackDriver::detach()
-{
- Glib::Mutex::Lock lock(_shutdown_mutex);
- if (_client) {
- jack_deactivate(_client);
- jack_client_close(_client);
- _client = NULL;
- }
- _is_activated = false;
- signal_detached.emit();
- _app->info_msg("Jack: Detached.");
-}
-
-static bool
-is_jack_port(const PatchagePort* port)
-{
- return (port->type() == JACK_AUDIO ||
- port->type() == JACK_MIDI ||
- port->type() == JACK_OSC ||
- port->type() == JACK_CV);
-}
-
-/** Destroy all JACK (canvas) ports.
- */
-void
-JackDriver::destroy_all()
-{
- if (_app->canvas()) {
- _app->canvas()->remove_ports(is_jack_port);
- }
-}
-
-PatchagePort*
-JackDriver::create_port_view(Patchage* patchage,
- const PortID& id)
-{
- assert(id.type == PortID::JACK_ID);
-
- jack_port_t* jack_port = jack_port_by_id(_client, id.id.jack_id);
- if (!jack_port) {
- _app->error_msg((format("Jack: Failed to find port with ID `%1%'.")
- % id).str());;
- return NULL;
- }
-
- const int jack_flags = jack_port_flags(jack_port);
-
- string module_name, port_name;
- port_names(id, module_name, port_name);
-
- ModuleType type = InputOutput;
- if (_app->conf()->get_module_split(
- module_name, (jack_flags & JackPortIsTerminal))) {
- if (jack_flags & JackPortIsInput) {
- type = Input;
- } else {
- type = Output;
- }
- }
-
- PatchageModule* parent = _app->canvas()->find_module(module_name, type);
- if (!parent) {
- parent = new PatchageModule(patchage, module_name, type);
- parent->load_location();
- patchage->canvas()->add_module(module_name, parent);
- }
-
- if (parent->get_port(port_name)) {
- _app->error_msg((format("Jack: Module `%1%' already has port `%2%'.")
- % module_name % port_name).str());
- return NULL;
- }
-
- PatchagePort* port = create_port(*parent, jack_port, id);
- port->show();
- if (port->is_input()) {
- parent->set_is_source(false);
- }
-
- return port;
-}
-
-#ifdef HAVE_JACK_METADATA
-static std::string
-get_property(jack_uuid_t subject, const char* key)
-{
- std::string result;
-
- char* value = NULL;
- char* datatype = NULL;
- if (!jack_get_property(subject, key, &value, &datatype)) {
- result = value;
- }
- jack_free(datatype);
- jack_free(value);
-
- return result;
-}
-#endif
-
-PatchagePort*
-JackDriver::create_port(PatchageModule& parent, jack_port_t* port, PortID id)
-{
- if (!port) {
- return NULL;
- }
-
- std::string label;
- boost::optional<int> order;
-
-#ifdef HAVE_JACK_METADATA
- const jack_uuid_t uuid = jack_port_uuid(port);
- if (_app->conf()->get_sort_ports()) {
- const std::string order_str = get_property(uuid, JACKEY_ORDER);
- label = get_property(uuid, JACK_METADATA_PRETTY_NAME);
- if (!order_str.empty()) {
- order = atoi(order_str.c_str());
- }
- }
-#endif
-
- const char* const type_str = jack_port_type(port);
- PortType port_type;
- if (!strcmp(type_str, JACK_DEFAULT_AUDIO_TYPE)) {
- port_type = JACK_AUDIO;
-#ifdef HAVE_JACK_METADATA
- if (get_property(uuid, JACKEY_SIGNAL_TYPE) == "CV") {
- port_type = JACK_CV;
- }
-#endif
- } else if (!strcmp(type_str, JACK_DEFAULT_MIDI_TYPE)) {
- port_type = JACK_MIDI;
-#ifdef HAVE_JACK_METADATA
- if (get_property(uuid, JACKEY_EVENT_TYPES) == "OSC") {
- port_type = JACK_OSC;
- }
-#endif
- } else {
- _app->warning_msg((format("Jack: Port `%1%' has unknown type `%2%'.")
- % jack_port_name(port) % type_str).str());
- return NULL;
- }
-
- PatchagePort* ret(
- new PatchagePort(parent, port_type, jack_port_short_name(port),
- label,
- (jack_port_flags(port) & JackPortIsInput),
- _app->conf()->get_port_color(port_type),
- _app->show_human_names(),
- order));
-
- if (id.type != PortID::NULL_PORT_ID) {
- dynamic_cast<PatchageCanvas*>(parent.canvas())->index_port(id, ret);
- }
-
- return ret;
-}
-
-void
-JackDriver::shutdown()
-{
- signal_detached.emit();
-}
-
-/** Refresh all Jack audio ports/connections.
- * To be called from GTK thread only.
- */
-void
-JackDriver::refresh()
-{
- const char** ports;
- jack_port_t* port;
-
- // Jack can take _client away from us at any time throughout here :/
- // Shortest locks possible is the best solution I can figure out
-
- Glib::Mutex::Lock lock(_shutdown_mutex);
-
- if (_client == NULL) {
- shutdown();
- return;
- }
-
- ports = jack_get_ports(_client, NULL, NULL, 0); // get all existing ports
-
- if (!ports) {
- return;
- }
-
- string client1_name;
- string port1_name;
- string client2_name;
- string port2_name;
- size_t colon;
-
- // Add all ports
- for (int i = 0; ports[i]; ++i) {
- port = jack_port_by_name(_client, ports[i]);
-
- client1_name = ports[i];
- client1_name = client1_name.substr(0, client1_name.find(":"));
-
- ModuleType type = InputOutput;
- if (_app->conf()->get_module_split(
- client1_name,
- (jack_port_flags(port) & JackPortIsTerminal))) {
- if (jack_port_flags(port) & JackPortIsInput) {
- type = Input;
- } else {
- type = Output;
- }
- }
-
- PatchageModule* m = _app->canvas()->find_module(client1_name, type);
-
- if (!m) {
- m = new PatchageModule(_app, client1_name, type);
- m->load_location();
- _app->canvas()->add_module(client1_name, m);
- }
-
- if (!m->get_port(jack_port_short_name(port)))
- create_port(*m, port, PortID());
- }
-
- // Add all connections
- for (int i = 0; ports[i]; ++i) {
- port = jack_port_by_name(_client, ports[i]);
- const char** connected_ports = jack_port_get_all_connections(_client, port);
-
- client1_name = ports[i];
- colon = client1_name.find(':');
- port1_name = client1_name.substr(colon + 1);
- client1_name = client1_name.substr(0, colon);
-
- const ModuleType port1_type = (jack_port_flags(port) & JackPortIsInput)
- ? Input : Output;
-
- PatchageModule* client1_module
- = _app->canvas()->find_module(client1_name, port1_type);
-
- if (connected_ports) {
- for (int j = 0; connected_ports[j]; ++j) {
-
- client2_name = connected_ports[j];
- colon = client2_name.find(':');
- port2_name = client2_name.substr(colon+1);
- client2_name = client2_name.substr(0, colon);
-
- const ModuleType port2_type = (port1_type == Input) ? Output : Input;
-
- PatchageModule* client2_module
- = _app->canvas()->find_module(client2_name, port2_type);
-
- Ganv::Port* port1 = client1_module->get_port(port1_name);
- Ganv::Port* port2 = client2_module->get_port(port2_name);
-
- if (!port1 || !port2)
- continue;
-
- Ganv::Port* src = NULL;
- Ganv::Port* dst = NULL;
-
- if (port1->is_output() && port2->is_input()) {
- src = port1;
- dst = port2;
- } else {
- src = port2;
- dst = port1;
- }
-
- if (src && dst && !_app->canvas()->get_edge(src, dst))
- _app->canvas()->make_connection(src, dst);
- }
-
- jack_free(connected_ports);
- }
- }
-
- jack_free(ports);
-}
-
-bool
-JackDriver::port_names(const PortID& id,
- string& module_name,
- string& port_name)
-{
- jack_port_t* jack_port = NULL;
-
- if (id.type == PortID::JACK_ID)
- jack_port = jack_port_by_id(_client, id.id.jack_id);
-
- if (!jack_port) {
- module_name.clear();
- port_name.clear();
- return false;
- }
-
- const string full_name = jack_port_name(jack_port);
-
- module_name = full_name.substr(0, full_name.find(":"));
- port_name = full_name.substr(full_name.find(":")+1);
-
- return true;
-}
-
-/** Connects two Jack audio ports.
- * To be called from GTK thread only.
- * \return Whether connection succeeded.
- */
-bool
-JackDriver::connect(PatchagePort* src_port,
- PatchagePort* dst_port)
-{
- if (_client == NULL)
- return false;
-
- int result = jack_connect(_client, src_port->full_name().c_str(), dst_port->full_name().c_str());
-
- if (result == 0)
- _app->info_msg(string("Jack: Connected ")
- + src_port->full_name() + " => " + dst_port->full_name());
- else
- _app->error_msg(string("Jack: Unable to connect ")
- + src_port->full_name() + " => " + dst_port->full_name());
-
- return (!result);
-}
-
-/** Disconnects two Jack audio ports.
- * To be called from GTK thread only.
- * \return Whether disconnection succeeded.
- */
-bool
-JackDriver::disconnect(PatchagePort* const src_port,
- PatchagePort* const dst_port)
-{
- if (_client == NULL)
- return false;
-
- int result = jack_disconnect(_client, src_port->full_name().c_str(), dst_port->full_name().c_str());
-
- if (result == 0)
- _app->info_msg(string("Jack: Disconnected ")
- + src_port->full_name() + " => " + dst_port->full_name());
- else
- _app->error_msg(string("Jack: Unable to disconnect ")
- + src_port->full_name() + " => " + dst_port->full_name());
-
- return (!result);
-}
-
-void
-JackDriver::jack_client_registration_cb(const char* name, int registered, void* jack_driver)
-{
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- assert(me->_client);
-
- if (registered) {
- me->_events.push(PatchageEvent(PatchageEvent::CLIENT_CREATION, name));
- } else {
- me->_events.push(PatchageEvent(PatchageEvent::CLIENT_DESTRUCTION, name));
- }
-}
-
-void
-JackDriver::jack_port_registration_cb(jack_port_id_t port_id, int registered, void* jack_driver)
-{
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- assert(me->_client);
-
- if (registered) {
- me->_events.push(PatchageEvent(PatchageEvent::PORT_CREATION, port_id));
- } else {
- me->_events.push(PatchageEvent(PatchageEvent::PORT_DESTRUCTION, port_id));
- }
-}
-
-void
-JackDriver::jack_port_connect_cb(jack_port_id_t src, jack_port_id_t dst, int connect, void* jack_driver)
-{
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- assert(me->_client);
-
- if (connect) {
- me->_events.push(PatchageEvent(PatchageEvent::CONNECTION, src, dst));
- } else {
- me->_events.push(PatchageEvent(PatchageEvent::DISCONNECTION, src, dst));
- }
-}
-
-int
-JackDriver::jack_xrun_cb(void* jack_driver)
-{
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- assert(me->_client);
-
- ++me->_xruns;
- me->_xrun_delay = jack_get_xrun_delayed_usecs(me->_client);
-
- jack_reset_max_delayed_usecs(me->_client);
-
- return 0;
-}
-
-void
-JackDriver::jack_shutdown_cb(void* jack_driver)
-{
- assert(jack_driver);
- JackDriver* me = reinterpret_cast<JackDriver*>(jack_driver);
- me->_app->info_msg("Jack: Shutdown.");
- Glib::Mutex::Lock lock(me->_shutdown_mutex);
- me->_client = NULL;
- me->_is_activated = false;
- me->signal_detached.emit();
-}
-
-jack_nframes_t
-JackDriver::buffer_size()
-{
- if (_is_activated)
- return _buffer_size;
- else
- return jack_get_buffer_size(_client);
-}
-
-void
-JackDriver::reset_xruns()
-{
- _xruns = 0;
- _xrun_delay = 0;
-}
-
-float
-JackDriver::get_max_dsp_load()
-{
- float max_load = 0.0f;
- if (_client) {
- const float max_delay = jack_get_max_delayed_usecs(_client);
- const float rate = sample_rate();
- const float size = buffer_size();
- const float period = size / rate * 1000000; // usec
-
- if (max_delay > period) {
- max_load = 1.0;
- jack_reset_max_delayed_usecs(_client);
- } else {
- max_load = max_delay / period;
- }
- }
- return max_load;
-}
-
-void
-JackDriver::reset_max_dsp_load()
-{
- if (_client) {
- jack_reset_max_delayed_usecs(_client);
- }
-}
-
-bool
-JackDriver::set_buffer_size(jack_nframes_t size)
-{
- if (buffer_size() == size) {
- return true;
- }
-
- if (!_client) {
- _buffer_size = size;
- return true;
- }
-
- if (jack_set_buffer_size(_client, size)) {
- _app->error_msg("[JACK] Unable to set buffer size");
- return false;
- } else {
- _buffer_size = size;
- return true;
- }
-}
-
-void
-JackDriver::process_events(Patchage* app)
-{
- while (!_events.empty()) {
- PatchageEvent& ev = _events.front();
- ev.execute(app);
- _events.pop();
- }
-}
diff --git a/src/JackDriver.hpp b/src/JackDriver.hpp
deleted file mode 100644
index 875bd61..0000000
--- a/src/JackDriver.hpp
+++ /dev/null
@@ -1,109 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_JACKDRIVER_HPP
-#define PATCHAGE_JACKDRIVER_HPP
-
-#include <string>
-
-#include <jack/jack.h>
-
-#include <glibmm/thread.h>
-
-#include "Driver.hpp"
-#include "Queue.hpp"
-
-class Patchage;
-class PatchageEvent;
-class PatchageCanvas;
-class PatchagePort;
-class PatchageModule;
-
-/** Handles all externally driven functionality, registering ports etc.
- *
- * Jack callbacks and connect methods and things like that live here.
- * Right now just for jack ports, but that will change...
- */
-class JackDriver : public Driver
-{
-public:
- explicit JackDriver(Patchage* app);
- ~JackDriver();
-
- void attach(bool launch_daemon);
- void detach();
-
- bool is_attached() const { return (_client != NULL); }
- bool is_realtime() const { return _client && jack_is_realtime(_client); }
-
- void refresh();
- void destroy_all();
-
- bool port_names(const PortID& id,
- std::string& module_name,
- std::string& port_name);
-
- PatchagePort* create_port_view(Patchage* patchage,
- const PortID& id);
-
- bool connect(PatchagePort* src,
- PatchagePort* dst);
-
- bool disconnect(PatchagePort* src,
- PatchagePort* dst);
-
- uint32_t get_xruns() { return _xruns; }
- void reset_xruns();
- float get_max_dsp_load();
- void reset_max_dsp_load();
-
- jack_client_t* client() { return _client; }
-
- jack_nframes_t sample_rate() { return jack_get_sample_rate(_client); }
- jack_nframes_t buffer_size();
- bool set_buffer_size(jack_nframes_t size);
-
- void process_events(Patchage* app);
-
-private:
- PatchagePort* create_port(
- PatchageModule& parent,
- jack_port_t* port,
- PortID id);
-
- void shutdown();
-
- static void jack_client_registration_cb(const char* name, int registered, void* me);
- static void jack_port_registration_cb(jack_port_id_t port_id, int registered, void* me);
- static void jack_port_connect_cb(jack_port_id_t src, jack_port_id_t dst, int connect, void* me);
- static int jack_xrun_cb(void* me);
- static void jack_shutdown_cb(void* me);
-
- Patchage* _app;
- jack_client_t* _client;
-
- Queue<PatchageEvent> _events;
-
- Glib::Mutex _shutdown_mutex;
-
- jack_position_t _last_pos;
- jack_nframes_t _buffer_size;
- uint32_t _xruns;
- float _xrun_delay;
- bool _is_activated :1;
-};
-
-#endif // PATCHAGE_JACKDRIVER_HPP
diff --git a/src/JackLibDriver.cpp b/src/JackLibDriver.cpp
new file mode 100644
index 0000000..5133bc6
--- /dev/null
+++ b/src/JackLibDriver.cpp
@@ -0,0 +1,490 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "AudioDriver.hpp"
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "ClientType.hpp"
+#include "Driver.hpp"
+#include "Event.hpp"
+#include "ILog.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+#include "PortNames.hpp"
+#include "PortType.hpp"
+#include "SignalDirection.hpp"
+#include "jackey.h"
+#include "make_jack_driver.hpp"
+#include "patchage_config.h"
+#include "warnings.hpp"
+
+#if USE_JACK_METADATA
+# include <jack/metadata.h>
+#endif
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <jack/jack.h>
+#include <jack/types.h>
+
+#include <cstdint>
+#include <cstring>
+#include <functional>
+#include <memory>
+#include <mutex>
+#include <optional>
+#include <set>
+#include <string>
+#include <unordered_set>
+#include <utility>
+
+namespace patchage {
+namespace {
+
+/// Driver for JACK audio and midi ports that uses libjack
+class JackLibDriver : public AudioDriver
+{
+public:
+ explicit JackLibDriver(ILog& log, EventSink emit_event);
+
+ JackLibDriver(const JackLibDriver&) = delete;
+ JackLibDriver& operator=(const JackLibDriver&) = delete;
+
+ JackLibDriver(JackLibDriver&&) = delete;
+ JackLibDriver& operator=(JackLibDriver&&) = delete;
+
+ ~JackLibDriver() override;
+
+ // Driver interface
+ void attach(bool launch_daemon) override;
+ void detach() override;
+ bool is_attached() const override;
+ void refresh(const EventSink& sink) override;
+ bool connect(const PortID& tail_id, const PortID& head_id) override;
+ bool disconnect(const PortID& tail_id, const PortID& head_id) override;
+
+ // AudioDriver interface
+ uint32_t xruns() override;
+ void reset_xruns() override;
+ uint32_t buffer_size() override;
+ bool set_buffer_size(uint32_t frames) override;
+ uint32_t sample_rate() override;
+
+private:
+ ClientInfo get_client_info(const char* name);
+ PortInfo get_port_info(const jack_port_t* port);
+
+ static void on_client(const char* name, int registered, void* driver);
+
+ static void on_port(jack_port_id_t port_id, int registered, void* driver);
+
+ static void on_connection(jack_port_id_t src,
+ jack_port_id_t dst,
+ int connect,
+ void* driver);
+
+ static int on_xrun(void* driver);
+
+ static void on_shutdown(void* driver);
+
+ ILog& _log;
+ std::mutex _shutdown_mutex;
+
+ jack_client_t* _client = nullptr;
+ jack_nframes_t _buffer_size = 0u;
+ uint32_t _xruns = 0u;
+ bool _is_activated = false;
+};
+
+JackLibDriver::JackLibDriver(ILog& log, EventSink emit_event)
+ : AudioDriver{std::move(emit_event)}
+ , _log{log}
+{}
+
+JackLibDriver::~JackLibDriver()
+{
+ detach();
+}
+
+void
+JackLibDriver::attach(const bool launch_daemon)
+{
+ if (_client) {
+ return; // Already connected
+ }
+
+ const jack_options_t options =
+ (!launch_daemon) ? JackNoStartServer : JackNullOption;
+
+ if (!(_client = jack_client_open("Patchage", options, nullptr))) {
+ _log.error("[JACK] Unable to create client");
+ _is_activated = false;
+ return;
+ }
+
+ jack_on_shutdown(_client, on_shutdown, this);
+ jack_set_client_registration_callback(_client, on_client, this);
+ jack_set_port_registration_callback(_client, on_port, this);
+ jack_set_port_connect_callback(_client, on_connection, this);
+ jack_set_xrun_callback(_client, on_xrun, this);
+
+ if (jack_activate(_client)) {
+ _log.error("[JACK] Client activation failed");
+ _is_activated = false;
+ _buffer_size = 0;
+ return;
+ }
+
+ _is_activated = true;
+ _buffer_size = jack_get_buffer_size(_client);
+
+ _emit_event(event::DriverAttached{ClientType::jack});
+}
+
+void
+JackLibDriver::detach()
+{
+ const std::lock_guard<std::mutex> lock{_shutdown_mutex};
+
+ if (_client) {
+ jack_deactivate(_client);
+ jack_client_close(_client);
+ _client = nullptr;
+ }
+
+ _is_activated = false;
+ _emit_event(event::DriverDetached{ClientType::jack});
+}
+
+bool
+JackLibDriver::is_attached() const
+{
+ return _client != nullptr;
+}
+
+std::string
+get_property(const jack_uuid_t subject, const char* const key)
+{
+ std::string result;
+
+#if USE_JACK_METADATA
+ char* value = nullptr;
+ char* datatype = nullptr;
+ if (!jack_get_property(subject, key, &value, &datatype)) {
+ result = value;
+ }
+ jack_free(datatype);
+ jack_free(value);
+#else
+ (void)subject;
+ (void)key;
+#endif
+
+ return result;
+}
+
+ClientInfo
+JackLibDriver::get_client_info(const char* const name)
+{
+ return {name}; // TODO: Pretty name?
+}
+
+PortInfo
+JackLibDriver::get_port_info(const jack_port_t* const port)
+{
+ const auto uuid = jack_port_uuid(port);
+ const auto flags = jack_port_flags(port);
+ const std::string name = jack_port_name(port);
+ auto label = PortNames{name}.port();
+
+ // Get pretty name to use as a label, if present
+#if USE_JACK_METADATA
+ const auto pretty_name = get_property(uuid, JACK_METADATA_PRETTY_NAME);
+ if (!pretty_name.empty()) {
+ label = pretty_name;
+ }
+#endif
+
+ // Determine detailed type, using metadata for fancy types if possible
+ const char* const type_str = jack_port_type(port);
+ PortType type = PortType::jack_audio;
+ if (!strcmp(type_str, JACK_DEFAULT_AUDIO_TYPE)) {
+ if (get_property(uuid, JACKEY_SIGNAL_TYPE) == "CV") {
+ type = PortType::jack_cv;
+ }
+ } else if (!strcmp(type_str, JACK_DEFAULT_MIDI_TYPE)) {
+ type = PortType::jack_midi;
+ if (get_property(uuid, JACKEY_EVENT_TYPES) == "OSC") {
+ type = PortType::jack_osc;
+ }
+ } else {
+ _log.warning(
+ fmt::format(R"([JACK] Port "{}" has unknown type "{}")", name, type_str));
+ }
+
+ // Get direction from port flags
+ const SignalDirection direction =
+ ((flags & JackPortIsInput) ? SignalDirection::input
+ : SignalDirection::output);
+
+ // Get port order from metadata if possible
+ std::optional<int> order;
+ const std::string order_str = get_property(uuid, JACKEY_ORDER);
+ if (!order_str.empty()) {
+ order = std::stoi(order_str);
+ }
+
+ return {label, type, direction, order, bool(flags & JackPortIsTerminal)};
+}
+
+void
+JackLibDriver::refresh(const EventSink& sink)
+{
+ const std::lock_guard<std::mutex> lock{_shutdown_mutex};
+
+ if (!_client) {
+ return;
+ }
+
+ // Get all existing ports
+ const char** const ports = jack_get_ports(_client, nullptr, nullptr, 0);
+ if (!ports) {
+ return;
+ }
+
+ // Get all client names (to only send a creation event once for each)
+ std::unordered_set<std::string> client_names;
+ for (auto i = 0u; ports[i]; ++i) {
+ client_names.insert(PortID::jack(ports[i]).client().jack_name());
+ }
+
+ // Emit all clients
+ for (const auto& client_name : client_names) {
+ sink({event::ClientCreated{ClientID::jack(client_name),
+ get_client_info(client_name.c_str())}});
+ }
+
+ // Emit all ports
+ for (auto i = 0u; ports[i]; ++i) {
+ const jack_port_t* const port = jack_port_by_name(_client, ports[i]);
+
+ sink({event::PortCreated{PortID::jack(ports[i]), get_port_info(port)}});
+ }
+
+ // Get all connections (again to only create them once)
+ std::set<std::pair<std::string, std::string>> connections;
+ for (auto i = 0u; ports[i]; ++i) {
+ const jack_port_t* const port = jack_port_by_name(_client, ports[i]);
+ const char** const peers = jack_port_get_all_connections(_client, port);
+
+ if (peers) {
+ if (jack_port_flags(port) & JackPortIsInput) {
+ for (auto j = 0u; peers[j]; ++j) {
+ connections.emplace(peers[j], ports[i]);
+ }
+ } else {
+ for (auto j = 0u; peers[j]; ++j) {
+ connections.emplace(ports[i], peers[j]);
+ }
+ }
+
+ jack_free(peers);
+ }
+ }
+
+ // Emit all connections
+ for (const auto& connection : connections) {
+ sink({event::PortsConnected{PortID::jack(connection.first),
+ PortID::jack(connection.second)}});
+ }
+
+ jack_free(ports);
+}
+
+bool
+JackLibDriver::connect(const PortID& tail_id, const PortID& head_id)
+{
+ if (!_client) {
+ return false;
+ }
+
+ const auto& tail_name = tail_id.jack_name();
+ const auto& head_name = head_id.jack_name();
+
+ const int result =
+ jack_connect(_client, tail_name.c_str(), head_name.c_str());
+
+ if (result) {
+ _log.error(
+ fmt::format("[JACK] Failed to connect {} => {}", tail_name, head_name));
+ return false;
+ }
+
+ return true;
+}
+
+bool
+JackLibDriver::disconnect(const PortID& tail_id, const PortID& head_id)
+{
+ if (!_client) {
+ return false;
+ }
+
+ const auto& tail_name = tail_id.jack_name();
+ const auto& head_name = head_id.jack_name();
+
+ const int result =
+ jack_disconnect(_client, tail_name.c_str(), head_name.c_str());
+
+ if (result) {
+ _log.error(fmt::format(
+ "[JACK] Failed to disconnect {} => {}", tail_name, head_name));
+ return false;
+ }
+
+ return true;
+}
+
+uint32_t
+JackLibDriver::xruns()
+{
+ return _xruns;
+}
+
+void
+JackLibDriver::reset_xruns()
+{
+ _xruns = 0;
+}
+
+uint32_t
+JackLibDriver::buffer_size()
+{
+ return _is_activated ? _buffer_size : jack_get_buffer_size(_client);
+}
+
+bool
+JackLibDriver::set_buffer_size(const uint32_t frames)
+{
+ if (!_client) {
+ _buffer_size = frames;
+ return true;
+ }
+
+ if (buffer_size() == frames) {
+ return true;
+ }
+
+ if (jack_set_buffer_size(_client, frames)) {
+ _log.error("[JACK] Unable to set buffer size");
+ return false;
+ }
+
+ _buffer_size = frames;
+ return true;
+}
+
+uint32_t
+JackLibDriver::sample_rate()
+{
+ return jack_get_sample_rate(_client);
+}
+
+void
+JackLibDriver::on_client(const char* const name,
+ const int registered,
+ void* const driver)
+{
+ auto* const me = static_cast<JackLibDriver*>(driver);
+
+ if (registered) {
+ me->_emit_event(event::ClientCreated{ClientID::jack(name), {name}});
+ } else {
+ me->_emit_event(event::ClientDestroyed{ClientID::jack(name)});
+ }
+}
+
+void
+JackLibDriver::on_port(const jack_port_id_t port_id,
+ const int registered,
+ void* const driver)
+{
+ auto* const me = static_cast<JackLibDriver*>(driver);
+
+ jack_port_t* const port = jack_port_by_id(me->_client, port_id);
+ const char* const name = jack_port_name(port);
+ const auto id = PortID::jack(name);
+
+ if (registered) {
+ me->_emit_event(event::PortCreated{id, me->get_port_info(port)});
+ } else {
+ me->_emit_event(event::PortDestroyed{id});
+ }
+}
+
+void
+JackLibDriver::on_connection(const jack_port_id_t src,
+ const jack_port_id_t dst,
+ const int connect,
+ void* const driver)
+{
+ auto* const me = static_cast<JackLibDriver*>(driver);
+
+ jack_port_t* const src_port = jack_port_by_id(me->_client, src);
+ jack_port_t* const dst_port = jack_port_by_id(me->_client, dst);
+ const char* const src_name = jack_port_name(src_port);
+ const char* const dst_name = jack_port_name(dst_port);
+
+ if (connect) {
+ me->_emit_event(
+ event::PortsConnected{PortID::jack(src_name), PortID::jack(dst_name)});
+ } else {
+ me->_emit_event(
+ event::PortsDisconnected{PortID::jack(src_name), PortID::jack(dst_name)});
+ }
+}
+
+int
+JackLibDriver::on_xrun(void* const driver)
+{
+ auto* const me = static_cast<JackLibDriver*>(driver);
+
+ ++me->_xruns;
+
+ return 0;
+}
+
+void
+JackLibDriver::on_shutdown(void* const driver)
+{
+ auto* const me = static_cast<JackLibDriver*>(driver);
+
+ /* Note that the JACK documentation lies about this situation. It says the
+ client must not call jack_client_close() here... except that is exactly
+ what libjack does if a shutdown callback isn't registered. Despite
+ that, doing so here hangs forever. Handling it "properly" like a signal
+ handler and calling jack_client_close() in another thread also hangs.
+
+ So, since JACK is a hot mess and it's impossible to gracefully handle
+ this situation, just leak the client. */
+
+ const std::lock_guard<std::mutex> lock{me->_shutdown_mutex};
+
+ me->_client = nullptr;
+ me->_is_activated = false;
+
+ me->_emit_event(event::DriverDetached{ClientType::jack});
+}
+
+} // namespace
+
+std::unique_ptr<AudioDriver>
+make_jack_driver(ILog& log, Driver::EventSink emit_event)
+{
+ return std::unique_ptr<AudioDriver>{
+ new JackLibDriver{log, std::move(emit_event)}};
+}
+
+} // namespace patchage
diff --git a/src/JackStubDriver.cpp b/src/JackStubDriver.cpp
new file mode 100644
index 0000000..a062df1
--- /dev/null
+++ b/src/JackStubDriver.cpp
@@ -0,0 +1,18 @@
+// Copyright 2020-2022 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "AudioDriver.hpp"
+#include "Driver.hpp"
+#include "make_jack_driver.hpp"
+
+#include <memory>
+
+namespace patchage {
+
+std::unique_ptr<AudioDriver>
+make_jack_driver(ILog&, Driver::EventSink)
+{
+ return nullptr;
+}
+
+} // namespace patchage
diff --git a/src/Legend.cpp b/src/Legend.cpp
new file mode 100644
index 0000000..1ef833e
--- /dev/null
+++ b/src/Legend.cpp
@@ -0,0 +1,82 @@
+// Copyright 2014-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "Legend.hpp"
+
+#include "Configuration.hpp"
+#include "PortType.hpp"
+#include "i18n.hpp"
+#include "patchage_config.h"
+
+#include <gdkmm/color.h>
+#include <glibmm/signalproxy.h>
+#include <gtkmm/box.h>
+#include <gtkmm/colorbutton.h>
+#include <gtkmm/label.h>
+#include <gtkmm/object.h>
+#include <sigc++/adaptors/bind.h>
+#include <sigc++/functors/mem_fun.h>
+
+#include <string>
+
+namespace patchage {
+
+Legend::Legend(const Configuration& configuration)
+{
+ add_button(PortType::jack_audio,
+ T("Audio"),
+ configuration.get_port_color(PortType::jack_audio));
+
+#if USE_JACK_METADATA
+ add_button(
+ PortType::jack_cv, "CV", configuration.get_port_color(PortType::jack_cv));
+ add_button(PortType::jack_osc,
+ "OSC",
+ configuration.get_port_color(PortType::jack_osc));
+#endif
+
+ add_button(PortType::jack_midi,
+ "MIDI",
+ configuration.get_port_color(PortType::jack_midi));
+
+ add_button(PortType::alsa_midi,
+ "ALSA MIDI",
+ configuration.get_port_color(PortType::alsa_midi));
+
+ show_all_children();
+}
+
+void
+Legend::add_button(const PortType id, const std::string& label, uint32_t rgba)
+{
+ Gdk::Color col;
+ col.set_rgb(((rgba >> 24) & 0xFF) * 0x100,
+ ((rgba >> 16) & 0xFF) * 0x100,
+ ((rgba >> 8) & 0xFF) * 0x100);
+
+ auto* box = new Gtk::HBox();
+ auto* but = new Gtk::ColorButton(col);
+ but->set_use_alpha(false);
+ but->signal_color_set().connect(
+ sigc::bind(sigc::mem_fun(this, &Legend::on_color_set), id, label, but));
+
+ box->pack_end(*Gtk::manage(but));
+ box->pack_end(*Gtk::manage(new Gtk::Label(label)), false, false, 2);
+
+ this->pack_start(*Gtk::manage(box), false, false, 6);
+}
+
+void
+Legend::on_color_set(const PortType id,
+ const std::string& label,
+ const Gtk::ColorButton* but)
+{
+ const Gdk::Color col = but->get_color();
+ const uint32_t rgba =
+ (((col.get_red() / 0x100) << 24) | ((col.get_green() / 0x100) << 16) |
+ ((col.get_blue() / 0x100) << 8) | 0xFF);
+
+ signal_color_changed.emit(id, label, rgba);
+}
+
+} // namespace patchage
diff --git a/src/Legend.hpp b/src/Legend.hpp
index b95d30c..c73a74e 100644
--- a/src/Legend.hpp
+++ b/src/Legend.hpp
@@ -1,71 +1,40 @@
-/* This file is part of Patchage.
- * Copyright 2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2014-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_LEGEND_HPP
#define PATCHAGE_LEGEND_HPP
-#include <gtkmm/colorbutton.h>
#include <gtkmm/box.h>
+#include <sigc++/signal.h>
-#include "Configuration.hpp"
+#include <cstdint>
+#include <string>
-class Legend : public Gtk::HBox {
-public:
- Legend(const Configuration& configuration) {
- add_button(JACK_AUDIO, "Audio", configuration.get_port_color(JACK_AUDIO));
-#ifdef HAVE_JACK_METADATA
- add_button(JACK_CV, "CV", configuration.get_port_color(JACK_CV));
- add_button(JACK_OSC, "OSC", configuration.get_port_color(JACK_OSC));
-#endif
- add_button(JACK_MIDI, "MIDI", configuration.get_port_color(JACK_MIDI));
- add_button(ALSA_MIDI, "ALSA MIDI", configuration.get_port_color(ALSA_MIDI));
- show_all_children();
- }
+namespace Gtk {
+class ColorButton;
+} // namespace Gtk
+
+namespace patchage {
- void add_button(int id, const std::string& label, uint32_t rgba) {
- Gdk::Color col;
- col.set_rgb(((rgba >> 24) & 0xFF) * 0x100,
- ((rgba>> 16) & 0xFF) * 0x100,
- ((rgba >> 8) & 0xFF) * 0x100);
- Gtk::HBox* box = new Gtk::HBox();
- Gtk::ColorButton* but = new Gtk::ColorButton(col);
- but->set_use_alpha(false);
- but->signal_color_set().connect(
- sigc::bind(sigc::mem_fun(this, &Legend::on_color_set),
- id, label, but));
+enum class PortType;
- box->pack_end(*Gtk::manage(but));
- box->pack_end(*Gtk::manage(new Gtk::Label(label)), false, false, 2);
+class Configuration;
- this->pack_start(*Gtk::manage(box), false, false, 6);
- }
+class Legend : public Gtk::HBox
+{
+public:
+ explicit Legend(const Configuration& configuration);
- void on_color_set(const int id,
- const std::string& label,
- const Gtk::ColorButton* but) {
- const Gdk::Color col = but->get_color();
- const uint32_t rgba = (((col.get_red() / 0x100) << 24) |
- ((col.get_green() / 0x100) << 16) |
- ((col.get_blue() / 0x100) << 8) |
- 0xFF);
+ sigc::signal<void, PortType, std::string, uint32_t> signal_color_changed;
- signal_color_changed.emit(id, label, rgba);
- }
+private:
+ void add_button(PortType id, const std::string& label, uint32_t rgba);
- sigc::signal<void, int, std::string, uint32_t> signal_color_changed;
+ void on_color_set(PortType id,
+ const std::string& label,
+ const Gtk::ColorButton* but);
};
+} // namespace patchage
+
#endif // PATCHAGE_LEGEND_HPP
diff --git a/src/Metadata.cpp b/src/Metadata.cpp
new file mode 100644
index 0000000..929b090
--- /dev/null
+++ b/src/Metadata.cpp
@@ -0,0 +1,72 @@
+// Copyright 2014-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "Metadata.hpp"
+
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+
+#include <optional>
+#include <utility>
+
+namespace patchage {
+
+std::optional<ClientInfo>
+Metadata::client(const ClientID& id) const
+{
+ const auto i = _client_data.find(id);
+ if (i == _client_data.end()) {
+ return {};
+ }
+
+ return i->second;
+}
+
+std::optional<PortInfo>
+Metadata::port(const PortID& id) const
+{
+ const auto i = _port_data.find(id);
+ if (i == _port_data.end()) {
+ return {};
+ }
+
+ return i->second;
+}
+
+void
+Metadata::set_client(const ClientID& id, const ClientInfo& info)
+{
+ const auto i = _client_data.find(id);
+ if (i == _client_data.end()) {
+ _client_data.emplace(id, info);
+ } else {
+ i->second = info;
+ }
+}
+
+void
+Metadata::set_port(const PortID& id, const PortInfo& info)
+{
+ const auto i = _port_data.find(id);
+ if (i == _port_data.end()) {
+ _port_data.emplace(id, info);
+ } else {
+ i->second = info;
+ }
+}
+
+void
+Metadata::erase_client(const ClientID& id)
+{
+ _client_data.erase(id);
+}
+
+void
+Metadata::erase_port(const PortID& id)
+{
+ _port_data.erase(id);
+}
+
+} // namespace patchage
diff --git a/src/Metadata.hpp b/src/Metadata.hpp
new file mode 100644
index 0000000..520b0ce
--- /dev/null
+++ b/src/Metadata.hpp
@@ -0,0 +1,42 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_METADATA_HPP
+#define PATCHAGE_METADATA_HPP
+
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+
+#include <map>
+#include <optional>
+
+namespace patchage {
+
+/// Cache of metadata about clients and ports beyond their IDs
+class Metadata
+{
+public:
+ Metadata() = default;
+
+ std::optional<ClientInfo> client(const ClientID& id) const;
+ std::optional<PortInfo> port(const PortID& id) const;
+
+ void set_client(const ClientID& id, const ClientInfo& info);
+ void set_port(const PortID& id, const PortInfo& info);
+
+ void erase_client(const ClientID& id);
+ void erase_port(const PortID& id);
+
+private:
+ using ClientData = std::map<ClientID, ClientInfo>;
+ using PortData = std::map<PortID, PortInfo>;
+
+ ClientData _client_data;
+ PortData _port_data;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_METADATA_HPP
diff --git a/src/Options.hpp b/src/Options.hpp
new file mode 100644
index 0000000..76846aa
--- /dev/null
+++ b/src/Options.hpp
@@ -0,0 +1,16 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_OPTIONS_HPP
+#define PATCHAGE_OPTIONS_HPP
+
+namespace patchage {
+
+struct Options {
+ bool alsa_driver_autoattach = true;
+ bool jack_driver_autoattach = true;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_OPTIONS_HPP
diff --git a/src/Patchage.cpp b/src/Patchage.cpp
index eae2ef9..785cd2d 100644
--- a/src/Patchage.cpp
+++ b/src/Patchage.cpp
@@ -1,1078 +1,915 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <stdlib.h>
-#include <pthread.h>
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
-#include <cmath>
-#include <fstream>
+#include "Patchage.hpp"
-#include <glib.h>
-#include <glib/gstdio.h>
-#include <gtk/gtkwindow.h>
+#include "Action.hpp"
+#include "AudioDriver.hpp"
+#include "Canvas.hpp"
+#include "CanvasModule.hpp"
+#include "CanvasPort.hpp"
+#include "Configuration.hpp"
+#include "Coord.hpp"
+#include "Driver.hpp"
+#include "Drivers.hpp"
+#include "Event.hpp"
+#include "Legend.hpp"
+#include "Options.hpp"
+#include "PortType.hpp"
+#include "Reactor.hpp"
+#include "Setting.hpp"
+#include "TextViewLog.hpp"
+#include "UIFile.hpp"
+#include "Widget.hpp"
+#include "event_to_string.hpp"
+#include "handle_event.hpp"
+#include "i18n.hpp"
+#include "patchage_config.h" // IWYU pragma: keep
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_GANV_WARNINGS
+#include "ganv/Edge.hpp"
+#include "ganv/Module.hpp"
+#include "ganv/Node.hpp"
+#include "ganv/Port.hpp"
+#include "ganv/module.h"
+#include "ganv/types.h"
+PATCHAGE_RESTORE_WARNINGS
-#include <boost/format.hpp>
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
-#include <gtkmm/button.h>
+#include <glib-object.h>
+#include <glib.h>
+#include <glibmm/fileutils.h>
+#include <glibmm/main.h>
+#include <glibmm/miscutils.h>
+#include <glibmm/propertyproxy.h>
+#include <glibmm/signalproxy.h>
+#include <glibmm/ustring.h>
+#include <gobject/gclosure.h>
+#include <gtk/gtk.h>
+#include <gtkmm/aboutdialog.h>
+#include <gtkmm/adjustment.h>
+#include <gtkmm/alignment.h>
+#include <gtkmm/box.h>
+#include <gtkmm/builder.h>
+#include <gtkmm/checkbutton.h>
+#include <gtkmm/checkmenuitem.h>
+#include <gtkmm/combobox.h>
+#include <gtkmm/dialog.h>
+#include <gtkmm/enums.h>
+#include <gtkmm/filechooser.h>
#include <gtkmm/filechooserdialog.h>
+#include <gtkmm/filefilter.h>
+#include <gtkmm/imagemenuitem.h>
+#include <gtkmm/label.h>
+#include <gtkmm/layout.h>
#include <gtkmm/liststore.h>
+#include <gtkmm/menubar.h>
#include <gtkmm/menuitem.h>
#include <gtkmm/messagedialog.h>
+#include <gtkmm/object.h>
+#include <gtkmm/paned.h>
+#include <gtkmm/scrolledwindow.h>
#include <gtkmm/stock.h>
+#include <gtkmm/textbuffer.h>
+#include <gtkmm/texttag.h>
+#include <gtkmm/textview.h>
+#include <gtkmm/toolbar.h>
+#include <gtkmm/toolbutton.h>
+#include <gtkmm/treeiter.h>
#include <gtkmm/treemodel.h>
+#include <gtkmm/window.h>
+#include <sigc++/adaptors/bind.h>
+#include <sigc++/functors/mem_fun.h>
+#include <sigc++/functors/ptr_fun.h>
+#include <sigc++/signal.h>
-#include "ganv/Module.hpp"
-#include "ganv/Edge.hpp"
-
-#include "Configuration.hpp"
-#include "Legend.hpp"
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageEvent.hpp"
-#include "UIFile.hpp"
-#include "patchage_config.h"
-
-#if defined(HAVE_JACK_DBUS)
- #include "JackDbusDriver.hpp"
-#elif defined(PATCHAGE_LIBJACK)
- #include "JackDriver.hpp"
- #include <jack/statistics.h>
-#endif
+#include <algorithm>
+#include <cmath>
+#include <cstdint>
+#include <cstdlib>
+#include <functional>
+#include <map>
+#include <optional>
+#include <utility>
+#include <variant>
-#ifdef PATCHAGE_JACK_SESSION
- #include <jack/session.h>
-#endif
+#ifdef PATCHAGE_GTK_OSX
-#ifdef HAVE_ALSA
- #include "AlsaDriver.hpp"
-#endif
+# include <gtkmm/main.h>
+# include <gtkosxapplication.h>
-#ifdef PATCHAGE_GTK_OSX
- #include <gtkosxapplication.h>
+namespace {
-static gboolean
-can_activate_cb(GtkWidget* widget, guint signal_id, gpointer data)
+gboolean
+can_activate_cb(GtkWidget* widget, guint, gpointer)
{
return gtk_widget_is_sensitive(widget);
}
-static void
-terminate_cb(GtkosxApplication* app, gpointer data)
+void
+terminate_cb(GtkosxApplication*, gpointer data)
{
- Patchage* patchage = (Patchage*)data;
- patchage->save();
- Gtk::Main::quit();
+ auto* patchage = static_cast<patchage::Patchage*>(data);
+ patchage->save();
+ Gtk::Main::quit();
}
+} // namespace
+
#endif
-static bool
-configure_cb(GtkWindow* parentWindow, GdkEvent* event, gpointer data)
+namespace patchage {
+
+namespace {
+
+bool
+configure_cb(GtkWindow*, GdkEvent*, gpointer data)
{
- ((Patchage*)data)->store_window_location();
- return FALSE;
+ static_cast<Patchage*>(data)->store_window_location();
+ return FALSE;
}
-static int
-port_order(const GanvPort* a, const GanvPort* b, void* data)
+int
+port_order(const GanvPort* a, const GanvPort* b, void*)
{
- const PatchagePort* pa = dynamic_cast<const PatchagePort*>(Glib::wrap(a));
- const PatchagePort* pb = dynamic_cast<const PatchagePort*>(Glib::wrap(b));
- if (pa && pb) {
- if (pa->order() && pb->order()) {
- return *pa->order() - *pb->order();
- } else if (pa->order()) {
- return -1;
- } else if (pb->order()) {
- return 1;
- }
- return pa->name().compare(pb->name());
- }
- return 0;
-}
+ const auto* pa = dynamic_cast<const CanvasPort*>(Glib::wrap(a));
+ const auto* pb = dynamic_cast<const CanvasPort*>(Glib::wrap(b));
+ if (pa && pb) {
+ const auto oa = pa->order();
+ const auto ob = pb->order();
+ if (oa && ob) {
+ return *oa - *ob;
+ }
-struct ProjectList_column_record : public Gtk::TreeModel::ColumnRecord {
- Gtk::TreeModelColumn<Glib::ustring> label;
-};
+ if (pa->order()) {
+ return -1;
+ }
-using std::cout;
-using std::endl;
-using std::string;
+ if (pb->order()) {
+ return 1;
+ }
-#define INIT_WIDGET(x) x(_xml, ((const char*)#x) + 1)
+ return pa->name().compare(pb->name());
+ }
+ return 0;
+}
-Patchage::Patchage(int argc, char** argv)
- : _xml(UIFile::open("patchage"))
-#ifdef HAVE_ALSA
- , _alsa_driver(NULL)
-#endif
- , _jack_driver(NULL)
- , _conf(NULL)
- , INIT_WIDGET(_about_win)
- , INIT_WIDGET(_main_scrolledwin)
- , INIT_WIDGET(_main_win)
- , INIT_WIDGET(_main_vbox)
- , INIT_WIDGET(_menubar)
- , INIT_WIDGET(_menu_alsa_connect)
- , INIT_WIDGET(_menu_alsa_disconnect)
- , INIT_WIDGET(_menu_file_quit)
- , INIT_WIDGET(_menu_export_image)
- , INIT_WIDGET(_menu_help_about)
- , INIT_WIDGET(_menu_jack_connect)
- , INIT_WIDGET(_menu_jack_disconnect)
- , INIT_WIDGET(_menu_open_session)
- , INIT_WIDGET(_menu_save_session)
- , INIT_WIDGET(_menu_save_close_session)
- , INIT_WIDGET(_menu_view_arrange)
- , INIT_WIDGET(_menu_view_sprung_layout)
- , INIT_WIDGET(_menu_view_messages)
- , INIT_WIDGET(_menu_view_toolbar)
- , INIT_WIDGET(_menu_view_refresh)
- , INIT_WIDGET(_menu_view_human_names)
- , INIT_WIDGET(_menu_view_sort_ports)
- , INIT_WIDGET(_menu_zoom_in)
- , INIT_WIDGET(_menu_zoom_out)
- , INIT_WIDGET(_menu_zoom_normal)
- , INIT_WIDGET(_menu_zoom_full)
- , INIT_WIDGET(_menu_increase_font_size)
- , INIT_WIDGET(_menu_decrease_font_size)
- , INIT_WIDGET(_menu_normal_font_size)
- , INIT_WIDGET(_toolbar)
- , INIT_WIDGET(_clear_load_but)
- , INIT_WIDGET(_xrun_progress)
- , INIT_WIDGET(_buf_size_combo)
- , INIT_WIDGET(_latency_label)
- , INIT_WIDGET(_legend_alignment)
- , INIT_WIDGET(_main_paned)
- , INIT_WIDGET(_log_scrolledwindow)
- , INIT_WIDGET(_status_text)
- , _legend(NULL)
- , _pane_initialized(false)
- , _attach(true)
- , _driver_detached(false)
- , _refresh(false)
- , _enable_refresh(true)
- , _jack_driver_autoattach(true)
-#ifdef HAVE_ALSA
- , _alsa_driver_autoattach(true)
-#endif
+template<class S>
+void
+on_setting_toggled(Reactor* const reactor, const Gtk::CheckMenuItem* const item)
{
- _conf = new Configuration();
- _canvas = boost::shared_ptr<PatchageCanvas>(new PatchageCanvas(this, 1600*2, 1200*2));
-
- while (argc > 0) {
- if (!strcmp(*argv, "-h") || !strcmp(*argv, "--help")) {
- cout << "Usage: patchage [OPTION]..." << endl;
- cout << "Visually connect JACK and ALSA Audio/MIDI ports." << endl << endl;
- cout << "Options:" << endl;
- cout << "\t-h --help Show this help" << endl;
- cout << "\t-A --no-alsa Do not automatically attach to ALSA" << endl;
- cout << "\t-J --no-jack Do not automatically attack to JACK" << endl;
- exit(0);
-#ifdef HAVE_ALSA
- } else if (!strcmp(*argv, "-A") || !strcmp(*argv, "--no-alsa")) {
- _alsa_driver_autoattach = false;
-#endif
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- } else if (!strcmp(*argv, "-J") || !strcmp(*argv, "--no-jack")) {
- _jack_driver_autoattach = false;
-#endif
- }
-
- argv++;
- argc--;
- }
-
- Glib::set_application_name("Patchage");
- _about_win->property_program_name() = "Patchage";
- _about_win->property_logo_icon_name() = "patchage";
- gtk_window_set_default_icon_name("patchage");
-
- // Create list model for buffer size selector
- Glib::RefPtr<Gtk::ListStore> buf_size_store = Gtk::ListStore::create(_buf_size_columns);
- for (size_t i = 32; i <= 4096; i *= 2) {
- Gtk::TreeModel::Row row = *(buf_size_store->append());
- row[_buf_size_columns.label] = std::to_string(i);
- }
-
- _buf_size_combo->set_model(buf_size_store);
- _buf_size_combo->pack_start(_buf_size_columns.label);
-
- _main_scrolledwin->add(_canvas->widget());
-
- _main_scrolledwin->property_hadjustment().get_value()->set_step_increment(10);
- _main_scrolledwin->property_vadjustment().get_value()->set_step_increment(10);
-
- _main_scrolledwin->signal_scroll_event().connect(
- sigc::mem_fun(this, &Patchage::on_scroll));
- _clear_load_but->signal_clicked().connect(
- sigc::mem_fun(this, &Patchage::clear_load));
- _buf_size_combo->signal_changed().connect(
- sigc::mem_fun(this, &Patchage::buffer_size_changed));
- _status_text->signal_size_allocate().connect(
- sigc::mem_fun(this, &Patchage::on_messages_resized));
-
-#ifdef PATCHAGE_JACK_SESSION
- _menu_open_session->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::show_open_session_dialog));
- _menu_save_session->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::show_save_session_dialog));
- _menu_save_close_session->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::show_save_close_session_dialog));
-#else
- _menu_open_session->hide();
- _menu_save_session->hide();
- _menu_save_close_session->hide();
-#endif
-
-#ifdef HAVE_ALSA
- _menu_alsa_connect->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::menu_alsa_connect));
- _menu_alsa_disconnect->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::menu_alsa_disconnect));
-#else
- _menu_alsa_connect->set_sensitive(false);
- _menu_alsa_disconnect->set_sensitive(false);
-#endif
-
- _menu_file_quit->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_quit));
- _menu_export_image->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_export_image));
- _menu_view_refresh->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::refresh));
- _menu_view_human_names->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_view_human_names));
- _menu_view_sort_ports->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_view_sort_ports));
- _menu_view_arrange->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_arrange));
- _menu_view_sprung_layout->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_sprung_layout_toggled));
- _menu_view_messages->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_view_messages));
- _menu_view_toolbar->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_view_toolbar));
- _menu_help_about->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_help_about));
- _menu_zoom_in->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_zoom_in));
- _menu_zoom_out->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_zoom_out));
- _menu_zoom_normal->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_zoom_normal));
- _menu_zoom_full->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_zoom_full));
- _menu_increase_font_size->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_increase_font_size));
- _menu_decrease_font_size->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_decrease_font_size));
- _menu_normal_font_size->signal_activate().connect(
- sigc::mem_fun(this, &Patchage::on_normal_font_size));
-
- if (_canvas->supports_sprung_layout()) {
- _menu_view_sprung_layout->set_active(true);
- } else {
- _menu_view_sprung_layout->set_active(false);
- _menu_view_sprung_layout->set_sensitive(false);
- }
-
- for (int s = Gtk::STATE_NORMAL; s <= Gtk::STATE_INSENSITIVE; ++s) {
- _status_text->modify_base((Gtk::StateType)s, Gdk::Color("#000000"));
- _status_text->modify_text((Gtk::StateType)s, Gdk::Color("#FFFFFF"));
- }
-
- _error_tag = Gtk::TextTag::create();
- _error_tag->property_foreground() = "#CC0000";
- _status_text->get_buffer()->get_tag_table()->add(_error_tag);
-
- _warning_tag = Gtk::TextTag::create();
- _warning_tag->property_foreground() = "#C4A000";
- _status_text->get_buffer()->get_tag_table()->add(_warning_tag);
-
- _canvas->widget().show();
- _main_win->present();
-
- _conf->set_font_size(_canvas->get_default_font_size());
- _conf->load();
- _canvas->set_zoom(_conf->get_zoom());
- _canvas->set_font_size(_conf->get_font_size());
- if (_conf->get_sort_ports()) {
- _canvas->set_port_order(port_order, NULL);
- }
-
- _main_win->resize(
- static_cast<int>(_conf->get_window_size().x),
- static_cast<int>(_conf->get_window_size().y));
-
- _main_win->move(
- static_cast<int>(_conf->get_window_location().x),
- static_cast<int>(_conf->get_window_location().y));
-
- _legend = new Legend(*_conf);
- _legend->signal_color_changed.connect(
- sigc::mem_fun(this, &Patchage::on_legend_color_change));
- _legend_alignment->add(*Gtk::manage(_legend));
- _legend->show_all();
-
- _about_win->set_transient_for(*_main_win);
-#ifdef __APPLE__
- try {
- _about_win->set_logo(
- Gdk::Pixbuf::create_from_file(
- bundle_location() + "/Resources/Patchage.icns"));
- } catch (const Glib::Exception& e) {
- error_msg((boost::format("failed to set logo (%s)") % e.what()).str());
- }
-#endif
+ (*reactor)(action::ChangeSetting{{S{item->get_active()}}});
+}
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _jack_driver = new JackDriver(this);
- _jack_driver->signal_detached.connect(sigc::mem_fun(this, &Patchage::driver_detached));
+void
+update_labels(GanvNode* node, void* data)
+{
+ const bool human_names = *static_cast<const bool*>(data);
+ if (GANV_IS_MODULE(node)) {
+ Ganv::Module* gmod = Glib::wrap(GANV_MODULE(node));
+ auto* pmod = dynamic_cast<CanvasModule*>(gmod);
+ if (pmod) {
+ for (Ganv::Port* gport : *gmod) {
+ auto* pport = dynamic_cast<CanvasPort*>(gport);
+ if (pport) {
+ pport->show_human_name(human_names);
+ }
+ }
+ }
+ }
+}
+
+inline guint
+highlight_color(guint c, guint delta)
+{
+ const guint max_char = 255;
+ const guint r = MIN((c >> 24) + delta, max_char);
+ const guint g = MIN(((c >> 16) & 0xFF) + delta, max_char);
+ const guint b = MIN(((c >> 8) & 0xFF) + delta, max_char);
+ const guint a = c & 0xFF;
- _menu_jack_connect->signal_activate().connect(sigc::bind(
- sigc::mem_fun(_jack_driver, &JackDriver::attach), true));
- _menu_jack_disconnect->signal_activate().connect(
- sigc::mem_fun(_jack_driver, &JackDriver::detach));
-#endif
+ return ((r << 24u) | (g << 16u) | (b << 8u) | a);
+}
-#ifdef HAVE_ALSA
- _alsa_driver = new AlsaDriver(this);
-#endif
+void
+update_port_colors(GanvNode* node, void* data)
+{
+ auto* patchage = static_cast<Patchage*>(data);
+ if (!GANV_IS_MODULE(node)) {
+ return;
+ }
- connect_widgets();
- update_state();
- _menu_view_toolbar->set_active(_conf->get_show_toolbar());
- _menu_view_sprung_layout->set_active(_conf->get_sprung_layout());
- _menu_view_sort_ports->set_active(_conf->get_sort_ports());
- _status_text->set_pixels_inside_wrap(2);
- _status_text->set_left_margin(4);
- _status_text->set_right_margin(4);
- _status_text->set_pixels_below_lines(2);
+ Ganv::Module* gmod = Glib::wrap(GANV_MODULE(node));
+ auto* pmod = dynamic_cast<CanvasModule*>(gmod);
+ if (!pmod) {
+ return;
+ }
- g_signal_connect(_main_win->gobj(), "configure-event",
- G_CALLBACK(configure_cb), this);
+ for (Ganv::Port* p : *pmod) {
+ auto* port = dynamic_cast<CanvasPort*>(p);
+ if (port) {
+ const uint32_t rgba = patchage->conf().get_port_color(port->type());
+ port->set_fill_color(rgba);
+ port->set_border_color(highlight_color(rgba, 0x20));
+ }
+ }
+}
- _canvas->widget().grab_focus();
+void
+update_edge_color(GanvEdge* edge, void* data)
+{
+ auto* patchage = static_cast<Patchage*>(data);
+ Ganv::Edge* edgemm = Glib::wrap(edge);
+
+ if (edgemm) {
+ auto* tail = dynamic_cast<CanvasPort*>((edgemm)->get_tail());
+ if (tail) {
+ edgemm->set_color(patchage->conf().get_port_color(tail->type()));
+ }
+ }
+}
+
+} // namespace
+
+#define INIT_WIDGET(x) x(_xml, (#x) + 1)
+
+Patchage::Patchage(Options options)
+ : _xml(UIFile::open("patchage"))
+ , INIT_WIDGET(_about_win)
+ , INIT_WIDGET(_main_scrolledwin)
+ , INIT_WIDGET(_main_win)
+ , INIT_WIDGET(_main_vbox)
+ , INIT_WIDGET(_menubar)
+ , INIT_WIDGET(_menu_alsa_connect)
+ , INIT_WIDGET(_menu_alsa_disconnect)
+ , INIT_WIDGET(_menu_file_quit)
+ , INIT_WIDGET(_menu_export_image)
+ , INIT_WIDGET(_menu_help_about)
+ , INIT_WIDGET(_menu_jack_connect)
+ , INIT_WIDGET(_menu_jack_disconnect)
+ , INIT_WIDGET(_menu_view_arrange)
+ , INIT_WIDGET(_menu_view_sprung_layout)
+ , INIT_WIDGET(_menu_view_messages)
+ , INIT_WIDGET(_menu_view_toolbar)
+ , INIT_WIDGET(_menu_view_refresh)
+ , INIT_WIDGET(_menu_view_human_names)
+ , INIT_WIDGET(_menu_view_sort_ports)
+ , INIT_WIDGET(_menu_zoom_in)
+ , INIT_WIDGET(_menu_zoom_out)
+ , INIT_WIDGET(_menu_zoom_normal)
+ , INIT_WIDGET(_menu_zoom_full)
+ , INIT_WIDGET(_menu_increase_font_size)
+ , INIT_WIDGET(_menu_decrease_font_size)
+ , INIT_WIDGET(_menu_normal_font_size)
+ , INIT_WIDGET(_toolbar)
+ , INIT_WIDGET(_clear_load_but)
+ , INIT_WIDGET(_dropouts_label)
+ , INIT_WIDGET(_buf_size_combo)
+ , INIT_WIDGET(_latency_label)
+ , INIT_WIDGET(_legend_alignment)
+ , INIT_WIDGET(_main_paned)
+ , INIT_WIDGET(_log_scrolledwindow)
+ , INIT_WIDGET(_status_text)
+ , _conf([this](const Setting& setting) { on_conf_change(setting); })
+ , _log(_status_text)
+ , _canvas(new Canvas{_log, _action_sink, 1600 * 2, 1200 * 2})
+ , _drivers(_log, [this](const Event& event) { on_driver_event(event); })
+ , _reactor(_conf, _drivers, *_canvas, _log)
+ , _action_sink([this](const Action& action) { _reactor(action); })
+ , _options{options}
+{
+ Glib::set_application_name("Patchage");
+ _about_win->property_program_name() = "Patchage";
+ _about_win->property_logo_icon_name() = "patchage";
+ gtk_window_set_default_icon_name("patchage");
+
+ // Create list model for buffer size selector
+ const Glib::RefPtr<Gtk::ListStore> buf_size_store =
+ Gtk::ListStore::create(_buf_size_columns);
+ for (size_t i = 32; i <= 4096; i *= 2) {
+ const Gtk::TreeModel::Row row = *(buf_size_store->append());
+ row[_buf_size_columns.label] = std::to_string(i);
+ }
+
+ _buf_size_combo->set_model(buf_size_store);
+ _buf_size_combo->pack_start(_buf_size_columns.label);
+
+ _main_scrolledwin->add(_canvas->widget());
+
+ _main_scrolledwin->property_hadjustment().get_value()->set_step_increment(10);
+ _main_scrolledwin->property_vadjustment().get_value()->set_step_increment(10);
+
+ _main_scrolledwin->signal_scroll_event().connect(
+ sigc::mem_fun(this, &Patchage::on_scroll));
+ _clear_load_but->signal_clicked().connect(
+ sigc::mem_fun(this, &Patchage::clear_load));
+ _buf_size_combo->signal_changed().connect(
+ sigc::mem_fun(this, &Patchage::buffer_size_changed));
+ _status_text->signal_size_allocate().connect(
+ sigc::mem_fun(this, &Patchage::on_messages_resized));
+
+ _menu_file_quit->signal_activate().connect(
+ sigc::mem_fun(this, &Patchage::on_quit));
+ _menu_export_image->signal_activate().connect(
+ sigc::mem_fun(this, &Patchage::on_export_image));
+ _menu_view_refresh->signal_activate().connect(sigc::bind(
+ sigc::mem_fun(this, &Patchage::on_menu_action), Action{action::Refresh{}}));
+
+ _menu_view_human_names->signal_activate().connect(
+ sigc::bind(sigc::ptr_fun(&on_setting_toggled<setting::HumanNames>),
+ &_reactor,
+ _menu_view_human_names.get()));
+
+ _menu_view_sort_ports->signal_activate().connect(
+ sigc::bind(sigc::ptr_fun(&on_setting_toggled<setting::SortedPorts>),
+ &_reactor,
+ _menu_view_sort_ports.get()));
+
+ _menu_view_arrange->signal_activate().connect(
+ sigc::mem_fun(this, &Patchage::on_arrange));
+
+ _menu_view_sprung_layout->signal_activate().connect(
+ sigc::bind(sigc::ptr_fun(&on_setting_toggled<setting::SprungLayout>),
+ &_reactor,
+ _menu_view_sprung_layout.get()));
+
+ _menu_view_messages->signal_activate().connect(
+ sigc::bind(sigc::ptr_fun(&on_setting_toggled<setting::MessagesVisible>),
+ &_reactor,
+ _menu_view_messages.get()));
+
+ _menu_view_toolbar->signal_activate().connect(
+ sigc::bind(sigc::ptr_fun(&on_setting_toggled<setting::ToolbarVisible>),
+ &_reactor,
+ _menu_view_toolbar.get()));
+
+ _menu_help_about->signal_activate().connect(
+ sigc::mem_fun(this, &Patchage::on_help_about));
+
+ _menu_zoom_in->signal_activate().connect(sigc::bind(
+ sigc::mem_fun(this, &Patchage::on_menu_action), Action{action::ZoomIn{}}));
+ _menu_zoom_out->signal_activate().connect(sigc::bind(
+ sigc::mem_fun(this, &Patchage::on_menu_action), Action{action::ZoomOut{}}));
+ _menu_zoom_normal->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(this, &Patchage::on_menu_action),
+ Action{action::ZoomNormal{}}));
+ _menu_zoom_full->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(this, &Patchage::on_menu_action),
+ Action{action::ZoomFull{}}));
+ _menu_increase_font_size->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(this, &Patchage::on_menu_action),
+ Action{action::IncreaseFontSize{}}));
+ _menu_decrease_font_size->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(this, &Patchage::on_menu_action),
+ Action{action::DecreaseFontSize{}}));
+ _menu_normal_font_size->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(this, &Patchage::on_menu_action),
+ Action{action::ResetFontSize{}}));
+
+ if (_canvas->supports_sprung_layout()) {
+ _menu_view_sprung_layout->set_active(true);
+ } else {
+ _menu_view_sprung_layout->set_active(false);
+ _menu_view_sprung_layout->set_sensitive(false);
+ }
+
+ // Present window so that display attributes like font size are available
+ _canvas->widget().show();
+ _main_win->present();
+
+ // Set the default font size based on the current GUI environment
+ _conf.set<setting::FontSize>(_canvas->get_default_font_size());
+
+ // Load configuration file (but do not apply it yet, see below)
+ _conf.load();
+
+ _legend = new Legend(_conf);
+ _legend->signal_color_changed.connect(
+ sigc::mem_fun(this, &Patchage::on_legend_color_change));
+ _legend_alignment->add(*Gtk::manage(_legend));
+ _legend->show_all();
+
+ _about_win->set_transient_for(*_main_win);
- // Idle callback, check if we need to refresh
- Glib::signal_timeout().connect(
- sigc::mem_fun(this, &Patchage::idle_callback), 100);
+#ifdef __APPLE__
+ try {
+ _about_win->set_logo(Gdk::Pixbuf::create_from_file(
+ bundle_location() + "/Resources/Patchage.icns"));
+ } catch (const Glib::Exception& e) {
+ _log.error(fmt::format("Failed to set logo ({})", std::string(e.what())));
+ }
+#endif
+
+ // Enable JACK menu items if driver is present
+ if (_drivers.jack()) {
+ _menu_jack_connect->signal_activate().connect(sigc::bind(
+ sigc::mem_fun(_drivers.jack().get(), &AudioDriver::attach), true));
+ _menu_jack_disconnect->signal_activate().connect(
+ sigc::mem_fun(_drivers.jack().get(), &AudioDriver::detach));
+ } else {
+ _menu_jack_connect->set_sensitive(false);
+ _menu_jack_disconnect->set_sensitive(false);
+ }
+
+ // Enable ALSA menu items if driver is present
+ if (_drivers.alsa()) {
+ _menu_alsa_connect->signal_activate().connect(
+ sigc::bind(sigc::mem_fun(_drivers.alsa().get(), &Driver::attach), false));
+ _menu_alsa_disconnect->signal_activate().connect(
+ sigc::mem_fun(_drivers.alsa().get(), &Driver::detach));
+ } else {
+ _menu_alsa_connect->set_sensitive(false);
+ _menu_alsa_disconnect->set_sensitive(false);
+ }
+
+ g_signal_connect(
+ _main_win->gobj(), "configure-event", G_CALLBACK(configure_cb), this);
+
+ _canvas->widget().grab_focus();
#ifdef PATCHAGE_GTK_OSX
- // Set up Mac menu bar
- GtkosxApplication* osxapp = (GtkosxApplication*)g_object_new(
- GTKOSX_TYPE_APPLICATION, NULL);
- _menubar->hide();
- _menu_file_quit->hide();
- gtkosx_application_set_menu_bar(osxapp, GTK_MENU_SHELL(_menubar->gobj()));
- gtkosx_application_insert_app_menu_item(
- osxapp, GTK_WIDGET(_menu_help_about->gobj()), 0);
- g_signal_connect(_menubar->gobj(), "can-activate-accel",
- G_CALLBACK(can_activate_cb), NULL);
- g_signal_connect(osxapp, "NSApplicationWillTerminate",
- G_CALLBACK(terminate_cb), this);
- gtkosx_application_ready(osxapp);
+ // Set up Mac menu bar
+ GtkosxApplication* osxapp = static_cast<GtkosxApplication*>(
+ g_object_new(GTKOSX_TYPE_APPLICATION, nullptr));
+
+ _menubar->hide();
+ _menu_file_quit->hide();
+ gtkosx_application_set_menu_bar(osxapp, GTK_MENU_SHELL(_menubar->gobj()));
+ gtkosx_application_insert_app_menu_item(
+ osxapp, GTK_WIDGET(_menu_help_about->gobj()), 0);
+ g_signal_connect(_menubar->gobj(),
+ "can-activate-accel",
+ G_CALLBACK(can_activate_cb),
+ nullptr);
+ g_signal_connect(
+ osxapp, "NSApplicationWillTerminate", G_CALLBACK(terminate_cb), this);
+ gtkosx_application_ready(osxapp);
#endif
+
+ // Apply all configuration settings to ensure the GUI is synced
+ _conf.each([this](const Setting& setting) { on_conf_change(setting); });
+
+ // Set up an idle callback to process events and update the GUI if necessary
+ Glib::signal_timeout().connect(sigc::mem_fun(this, &Patchage::idle_callback),
+ 100);
}
Patchage::~Patchage()
{
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- delete _jack_driver;
-#endif
-#ifdef HAVE_ALSA
- delete _alsa_driver;
-#endif
-
- delete _conf;
-
- _about_win.destroy();
- _xml.reset();
+ _about_win.destroy();
+ _xml.reset();
}
void
Patchage::attach()
{
- _enable_refresh = false;
+ if (_drivers.jack() && _options.jack_driver_autoattach) {
+ _drivers.jack()->attach(true);
+ }
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver_autoattach)
- _jack_driver->attach(true);
-#endif
-
-#ifdef HAVE_ALSA
- if (_alsa_driver_autoattach)
- _alsa_driver->attach();
-#endif
+ if (_drivers.alsa() && _options.alsa_driver_autoattach) {
+ _drivers.alsa()->attach(false);
+ }
- _enable_refresh = true;
-
- refresh();
- update_toolbar();
+ process_events();
+ update_toolbar();
}
bool
Patchage::idle_callback()
{
- // Initial run, attach
- if (_attach) {
- attach();
- _menu_view_messages->set_active(_conf->get_show_messages());
- _attach = false;
- }
-
- // Process any JACK events
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver) {
- _jack_driver->process_events(this);
- }
-#endif
+ // Initial run, attach
+ if (_attach) {
+ attach();
+ _menu_view_messages->set_active(_conf.get<setting::MessagesVisible>());
+ _attach = false;
+ }
- // Process any ALSA events
-#ifdef HAVE_ALSA
- if (_alsa_driver) {
- _alsa_driver->process_events(this);
- }
-#endif
-
- // Do a full refresh
- if (_refresh) {
- refresh();
- } else if (_driver_detached) {
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver && !_jack_driver->is_attached())
- _jack_driver->destroy_all();
-#endif
-#ifdef HAVE_ALSA
- if (_alsa_driver && !_alsa_driver->is_attached())
- _alsa_driver->destroy_all();
-#endif
- }
-
- _refresh = false;
- _driver_detached = false;
+ // Process any events from drivers
+ process_events();
- // Update load every 5 idle callbacks
- static int count = 0;
- if (++count == 5) {
- update_load();
- count = 0;
- }
+ // Update load every 5 idle callbacks
+ static int count = 0;
+ if (++count == 5) {
+ update_load();
+ count = 0;
+ }
- return true;
+ return true;
}
void
Patchage::update_toolbar()
{
- static bool updating = false;
- if (updating) {
- return;
- } else {
- updating = true;
- }
-
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver->is_attached()) {
- const jack_nframes_t buffer_size = _jack_driver->buffer_size();
- const jack_nframes_t sample_rate = _jack_driver->sample_rate();
- if (sample_rate != 0) {
- const int latency_ms = lrintf(buffer_size * 1000 / (float)sample_rate);
- std::stringstream ss;
- ss << " frames @ " << (sample_rate / 1000)
- << "kHz (" << latency_ms << "ms)";
- _latency_label->set_label(ss.str());
- _latency_label->set_visible(true);
- _buf_size_combo->set_active((int)log2f(_jack_driver->buffer_size()) - 5);
- updating = false;
- return;
- }
- }
-#endif
- _latency_label->set_visible(false);
- updating = false;
+ static bool updating = false;
+ if (updating) {
+ return;
+ }
+
+ updating = true;
+
+ if (_drivers.jack() && _drivers.jack()->is_attached()) {
+ const auto buffer_size = _drivers.jack()->buffer_size();
+ const auto sample_rate = _drivers.jack()->sample_rate();
+ if (sample_rate != 0) {
+ const auto sample_rate_khz = sample_rate / 1000.0;
+ const auto latency_ms = buffer_size / sample_rate_khz;
+
+ _latency_label->set_label(" " +
+ fmt::format(T("frames at {} kHz ({:0.2f} ms)"),
+ sample_rate_khz,
+ latency_ms));
+
+ _latency_label->set_visible(true);
+ _buf_size_combo->set_active(
+ static_cast<int>(log2f(_drivers.jack()->buffer_size()) - 5));
+ updating = false;
+ return;
+ }
+ }
+
+ _latency_label->set_visible(false);
+ updating = false;
}
bool
Patchage::update_load()
{
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver->is_attached()) {
- char buf[8];
- snprintf(buf, sizeof(buf), "%u", _jack_driver->get_xruns());
- _xrun_progress->set_text(std::string(buf) + " Dropouts");
- _xrun_progress->set_fraction(_jack_driver->get_max_dsp_load());
- }
-#endif
+ if (_drivers.jack() && _drivers.jack()->is_attached()) {
+ const auto xruns = _drivers.jack()->xruns();
- return true;
-}
+ _dropouts_label->set_text(" " + fmt::format(T("Dropouts: {}"), xruns));
-void
-Patchage::zoom(double z)
-{
- _conf->set_zoom(z);
- _canvas->set_zoom(z);
+ if (xruns > 0u) {
+ _dropouts_label->show();
+ _clear_load_but->show();
+ } else {
+ _dropouts_label->hide();
+ _clear_load_but->hide();
+ }
+ }
+
+ return true;
}
void
-Patchage::refresh()
+Patchage::store_window_location()
{
- if (_canvas && _enable_refresh) {
- _canvas->clear();
+ int loc_x = 0;
+ int loc_y = 0;
+ _main_win->get_position(loc_x, loc_y);
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- if (_jack_driver)
- _jack_driver->refresh();
-#endif
+ int size_x = 0;
+ int size_y = 0;
+ _main_win->get_size(size_x, size_y);
-#ifdef HAVE_ALSA
- if (_alsa_driver)
- _alsa_driver->refresh();
-#endif
- }
-}
+ _conf.set<setting::WindowLocation>(
+ {static_cast<double>(loc_x), static_cast<double>(loc_y)});
-void
-Patchage::store_window_location()
-{
- int loc_x, loc_y, size_x, size_y;
- _main_win->get_position(loc_x, loc_y);
- _main_win->get_size(size_x, size_y);
- Coord window_location;
- window_location.x = loc_x;
- window_location.y = loc_y;
- Coord window_size;
- window_size.x = size_x;
- window_size.y = size_y;
- _conf->set_window_location(window_location);
- _conf->set_window_size(window_size);
+ _conf.set<setting::WindowSize>(
+ {static_cast<double>(size_x), static_cast<double>(size_y)});
}
void
Patchage::clear_load()
{
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _xrun_progress->set_fraction(0.0);
- _jack_driver->reset_xruns();
- _jack_driver->reset_max_dsp_load();
-#endif
+ _dropouts_label->set_text(" " + fmt::format(T("Dropouts: {}"), 0U));
+ _dropouts_label->hide();
+ _clear_load_but->hide();
+ if (_drivers.jack()) {
+ _drivers.jack()->reset_xruns();
+ }
}
void
-Patchage::error_msg(const std::string& msg)
+Patchage::operator()(const setting::AlsaAttached& setting)
{
- Glib::RefPtr<Gtk::TextBuffer> buffer = _status_text->get_buffer();
- buffer->insert_with_tag(buffer->end(), std::string("\n") + msg, _error_tag);
- _status_text->scroll_to_mark(buffer->get_insert(), 0);
- _menu_view_messages->set_active(true);
-}
+ if (setting.value) {
+ _menu_alsa_connect->set_sensitive(false);
+ _menu_alsa_disconnect->set_sensitive(true);
-void
-Patchage::info_msg(const std::string& msg)
-{
- Glib::RefPtr<Gtk::TextBuffer> buffer = _status_text->get_buffer();
- buffer->insert(buffer->end(), std::string("\n") + msg);
- _status_text->scroll_to_mark(buffer->get_insert(), 0);
+ if (_drivers.alsa()) {
+ _drivers.alsa()->refresh([this](const Event& event) {
+ handle_event(_conf, _metadata, *_canvas, _log, event);
+ });
+ }
+ } else {
+ _menu_alsa_connect->set_sensitive(true);
+ _menu_alsa_disconnect->set_sensitive(false);
+
+ _canvas->remove_ports([](const CanvasPort* port) {
+ return port->type() == PortType::alsa_midi;
+ });
+ }
}
void
-Patchage::warning_msg(const std::string& msg)
+Patchage::operator()(const setting::JackAttached& setting)
{
- Glib::RefPtr<Gtk::TextBuffer> buffer = _status_text->get_buffer();
- buffer->insert_with_tag(buffer->end(), std::string("\n") + msg, _warning_tag);
- _status_text->scroll_to_mark(buffer->get_insert(), 0);
-}
+ if (setting.value) {
+ _menu_jack_connect->set_sensitive(false);
+ _menu_jack_disconnect->set_sensitive(true);
-static void
-load_module_location(GanvNode* node, void* data)
-{
- if (GANV_IS_MODULE(node)) {
- Ganv::Module* gmod = Glib::wrap(GANV_MODULE(node));
- PatchageModule* pmod = dynamic_cast<PatchageModule*>(gmod);
- if (pmod) {
- pmod->load_location();
- }
- }
+ if (_drivers.jack()) {
+ _drivers.jack()->refresh([this](const Event& event) {
+ handle_event(_conf, _metadata, *_canvas, _log, event);
+ });
+ }
+ } else {
+ _menu_jack_connect->set_sensitive(true);
+ _menu_jack_disconnect->set_sensitive(false);
+
+ _canvas->remove_ports([](const CanvasPort* port) {
+ return (port->type() == PortType::jack_audio ||
+ port->type() == PortType::jack_midi ||
+ port->type() == PortType::jack_osc ||
+ port->type() == PortType::jack_cv);
+ });
+ }
}
void
-Patchage::update_state()
+Patchage::operator()(const setting::FontSize& setting)
{
- _canvas->for_each_node(load_module_location, NULL);
+ if (static_cast<float>(_canvas->get_font_size()) != setting.value) {
+ _canvas->set_font_size(setting.value);
+ }
}
-/** Update the sensitivity status of menus to reflect the present.
- *
- * (eg. disable "Connect to Jack" when Patchage is already connected to Jack)
- */
void
-Patchage::connect_widgets()
+Patchage::operator()(const setting::HumanNames& setting)
{
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _jack_driver->signal_attached.connect(sigc::bind(
- sigc::mem_fun(*_menu_jack_connect, &Gtk::MenuItem::set_sensitive), false));
- _jack_driver->signal_attached.connect(
- sigc::mem_fun(this, &Patchage::refresh));
- _jack_driver->signal_attached.connect(sigc::bind(
- sigc::mem_fun(*_menu_jack_disconnect, &Gtk::MenuItem::set_sensitive), true));
-
- _jack_driver->signal_detached.connect(sigc::bind(
- sigc::mem_fun(*_menu_jack_connect, &Gtk::MenuItem::set_sensitive), true));
- _jack_driver->signal_detached.connect(sigc::bind(
- sigc::mem_fun(*_menu_jack_disconnect, &Gtk::MenuItem::set_sensitive), false));
-#endif
-
-#ifdef HAVE_ALSA
- _alsa_driver->signal_attached.connect(sigc::bind(
- sigc::mem_fun(*_menu_alsa_connect, &Gtk::MenuItem::set_sensitive), false));
- _alsa_driver->signal_attached.connect(sigc::bind(
- sigc::mem_fun(*_menu_alsa_disconnect, &Gtk::MenuItem::set_sensitive), true));
+ bool human_names = setting.value;
- _alsa_driver->signal_detached.connect(sigc::bind(
- sigc::mem_fun(*_menu_alsa_connect, &Gtk::MenuItem::set_sensitive), true));
- _alsa_driver->signal_detached.connect(sigc::bind(
- sigc::mem_fun(*_menu_alsa_disconnect, &Gtk::MenuItem::set_sensitive), false));
-#endif
+ _menu_view_human_names->set_active(human_names);
+ _canvas->for_each_node(update_labels, &human_names);
}
-#ifdef PATCHAGE_JACK_SESSION
void
-Patchage::show_open_session_dialog()
-{
- Gtk::FileChooserDialog dialog(*_main_win, "Open Session",
- Gtk::FILE_CHOOSER_ACTION_SELECT_FOLDER);
-
- dialog.add_button(Gtk::Stock::CANCEL, Gtk::RESPONSE_CANCEL);
- Gtk::Button* open_but = dialog.add_button(Gtk::Stock::OPEN, Gtk::RESPONSE_OK);
- open_but->property_has_default() = true;
-
- if (dialog.run() != Gtk::RESPONSE_OK) {
- return;
- }
-
- const std::string dir = dialog.get_filename();
- if (g_chdir(dir.c_str())) {
- error_msg("Failed to switch to session directory " + dir);
- return;
- }
-
- if (system("./jack-session") < 0) {
- error_msg("Error executing `./jack-session' in " + dir);
- } else {
- info_msg("Loaded session " + dir);
- }
-}
-
-static void
-print_edge(GanvEdge* edge, void* data)
+Patchage::operator()(const setting::MessagesHeight& setting)
{
- std::ofstream* script = (std::ofstream*)data;
- Ganv::Edge* edgemm = Glib::wrap(edge);
-
- PatchagePort* src = dynamic_cast<PatchagePort*>((edgemm)->get_tail());
- PatchagePort* dst = dynamic_cast<PatchagePort*>((edgemm)->get_head());
+ if (_log_scrolledwindow->is_visible()) {
+ const int min_height = _log.min_height();
+ const int max_pos = _main_paned->get_allocation().get_height();
+ const int conf_height = setting.value;
- if (!src || !dst || src->type() == ALSA_MIDI || dst->type() == ALSA_MIDI) {
- return;
- }
-
- (*script) << "jack_connect '" << src->full_name()
- << "' '" << dst->full_name() << "' &" << endl;
+ _main_paned->set_position(max_pos - std::max(conf_height, min_height));
+ }
}
void
-Patchage::save_session(bool close)
+Patchage::operator()(const setting::MessagesVisible& setting)
{
- Gtk::FileChooserDialog dialog(*_main_win, "Save Session",
- Gtk::FILE_CHOOSER_ACTION_SAVE);
-
- dialog.add_button(Gtk::Stock::CANCEL, Gtk::RESPONSE_CANCEL);
- Gtk::Button* save_but = dialog.add_button(Gtk::Stock::SAVE, Gtk::RESPONSE_OK);
- save_but->property_has_default() = true;
-
- if (dialog.run() != Gtk::RESPONSE_OK) {
- return;
- }
-
- std::string path = dialog.get_filename();
- if (g_mkdir_with_parents(path.c_str(), 0740)) {
- error_msg("Failed to create session directory " + path);
- return;
- }
-
- path += '/';
- jack_session_command_t* cmd = jack_session_notify(
- _jack_driver->client(),
- NULL,
- close ? JackSessionSaveAndQuit : JackSessionSave,
- path.c_str());
-
- const std::string script_path = path + "jack-session";
- std::ofstream script(script_path.c_str());
- script << "#!/bin/sh" << endl << endl;
-
- const std::string var("${SESSION_DIR}");
- for (int c = 0; cmd[c].uuid; ++c) {
- std::string command = cmd[c].command;
- const size_t index = command.find(var);
- if (index != string::npos) {
- command.replace(index, var.length(), cmd[c].client_name);
- }
-
- script << command << " &" << endl;
- }
-
- script << endl;
- script << "sleep 3" << endl;
- script << endl;
-
- _canvas->for_each_edge(print_edge, &script);
-
- script.close();
- g_chmod(script_path.c_str(), 0740);
-}
+ if (setting.value) {
+ _log_scrolledwindow->show();
+ _status_text->scroll_to_mark(_status_text->get_buffer()->get_insert(), 0);
+ } else {
+ _log_scrolledwindow->hide();
+ }
-void
-Patchage::show_save_session_dialog()
-{
- save_session(false);
+ _menu_view_messages->set_active(setting.value);
}
void
-Patchage::show_save_close_session_dialog()
+Patchage::operator()(const setting::PortColor&)
{
- save_session(true);
+ _canvas->for_each_node(update_port_colors, this);
+ _canvas->for_each_edge(update_edge_color, this);
}
-#endif
-
-#ifdef HAVE_ALSA
void
-Patchage::menu_alsa_connect()
+Patchage::operator()(const setting::SortedPorts& setting)
{
- _alsa_driver->attach(false);
- _alsa_driver->refresh();
+ _menu_view_sort_ports->set_active(setting.value);
+ if (setting.value) {
+ _canvas->set_port_order(port_order, nullptr);
+ } else {
+ _canvas->set_port_order(nullptr, nullptr);
+ }
}
void
-Patchage::menu_alsa_disconnect()
+Patchage::operator()(const setting::SprungLayout& setting)
{
- _alsa_driver->detach();
- refresh();
+ _canvas->set_sprung_layout(setting.value);
+ _menu_view_sprung_layout->set_active(setting.value);
}
-#endif
void
-Patchage::on_arrange()
+Patchage::operator()(const setting::ToolbarVisible& setting)
{
- if (_canvas) {
- _canvas->arrange();
- }
+ if (setting.value) {
+ _toolbar->show();
+ _menu_view_toolbar->set_active(true);
+ } else {
+ _toolbar->hide();
+ _menu_view_toolbar->set_active(false);
+ }
}
void
-Patchage::on_sprung_layout_toggled()
+Patchage::operator()(const setting::WindowLocation& setting)
{
- const bool sprung = _menu_view_sprung_layout->get_active();
+ const int new_x = static_cast<int>(setting.value.x);
+ const int new_y = static_cast<int>(setting.value.y);
+
+ int current_x = 0;
+ int current_y = 0;
+ _main_win->get_position(current_x, current_y);
- _canvas->set_sprung_layout(sprung);
- _conf->set_sprung_layout(sprung);
+ if (new_x != current_x || new_y != current_y) {
+ _main_win->move(new_x, new_y);
+ }
}
void
-Patchage::on_help_about()
+Patchage::operator()(const setting::WindowSize& setting)
{
- _about_win->run();
- _about_win->hide();
-}
+ const int new_w = static_cast<int>(setting.value.x);
+ const int new_h = static_cast<int>(setting.value.y);
-static void
-update_labels(GanvNode* node, void* data)
-{
- const bool human_names = *(const bool*)data;
- if (GANV_IS_MODULE(node)) {
- Ganv::Module* gmod = Glib::wrap(GANV_MODULE(node));
- PatchageModule* pmod = dynamic_cast<PatchageModule*>(gmod);
- if (pmod) {
- for (Ganv::Port* gport : *gmod) {
- PatchagePort* pport = dynamic_cast<PatchagePort*>(gport);
- if (pport) {
- pport->show_human_name(human_names);
- }
- }
- }
- }
-}
+ int current_w = 0;
+ int current_h = 0;
+ _main_win->get_size(current_w, current_h);
-void
-Patchage::on_view_human_names()
-{
- bool human_names = show_human_names();
- _canvas->for_each_node(update_labels, &human_names);
+ if (new_w != current_w || new_h != current_h) {
+ _main_win->resize(new_w, new_h);
+ }
}
void
-Patchage::on_view_sort_ports()
+Patchage::operator()(const setting::Zoom& setting)
{
- const bool sort_ports = this->sort_ports();
- _canvas->set_port_order(sort_ports ? port_order : NULL, NULL);
- _conf->set_sort_ports(sort_ports);
- refresh();
+ if (static_cast<float>(_canvas->get_zoom()) != setting.value) {
+ _canvas->set_zoom(setting.value);
+ }
}
void
-Patchage::on_zoom_in()
+Patchage::on_driver_event(const Event& event)
{
- const float zoom = _canvas->get_zoom() * 1.25;
- _canvas->set_zoom(zoom);
- _conf->set_zoom(zoom);
-}
+ const std::lock_guard<std::mutex> lock{_events_mutex};
-void
-Patchage::on_zoom_out()
-{
- const float zoom = _canvas->get_zoom() * 0.75;
- _canvas->set_zoom(zoom);
- _conf->set_zoom(zoom);
+ _driver_events.emplace(event);
}
void
-Patchage::on_zoom_normal()
+Patchage::process_events()
{
- _canvas->set_zoom(1.0);
- _conf->set_zoom(1.0);
-}
+ const std::lock_guard<std::mutex> lock{_events_mutex};
-void
-Patchage::on_zoom_full()
-{
- _canvas->zoom_full();
- _conf->set_zoom(_canvas->get_zoom());
-}
+ while (!_driver_events.empty()) {
+ const Event& event = _driver_events.front();
-void
-Patchage::on_increase_font_size()
-{
- const float points = _canvas->get_font_size() + 1.0;
- _canvas->set_font_size(points);
- _conf->set_font_size(points);
-}
+ _log.info(event_to_string(event));
+ handle_event(_conf, _metadata, *_canvas, _log, event);
-void
-Patchage::on_decrease_font_size()
-{
- const float points = _canvas->get_font_size() - 1.0;
- _canvas->set_font_size(points);
- _conf->set_font_size(points);
+ _driver_events.pop();
+ }
}
void
-Patchage::on_normal_font_size()
+Patchage::on_conf_change(const Setting& setting)
{
- _canvas->set_font_size(_canvas->get_default_font_size());
- _conf->set_font_size(_canvas->get_default_font_size());
+ std::visit(*this, setting);
}
-static inline guint
-highlight_color(guint c, guint delta)
+void
+Patchage::on_arrange()
{
- const guint max_char = 255;
- const guint r = MIN((c >> 24) + delta, max_char);
- const guint g = MIN(((c >> 16) & 0xFF) + delta, max_char);
- const guint b = MIN(((c >> 8) & 0xFF) + delta, max_char);
- const guint a = c & 0xFF;
-
- return ((((guint)(r)) << 24) |
- (((guint)(g)) << 16) |
- (((guint)(b)) << 8) |
- (((guint)(a))));
+ if (_canvas) {
+ _canvas->arrange();
+ }
}
-static void
-update_port_colors(GanvNode* node, void* data)
+void
+Patchage::on_help_about()
{
- Patchage* patchage = (Patchage*)data;
- if (!GANV_IS_MODULE(node)) {
- return;
- }
-
- Ganv::Module* gmod = Glib::wrap(GANV_MODULE(node));
- PatchageModule* pmod = dynamic_cast<PatchageModule*>(gmod);
- if (!pmod) {
- return;
- }
-
- for (PatchageModule::iterator i = pmod->begin(); i != pmod->end(); ++i) {
- PatchagePort* port = dynamic_cast<PatchagePort*>(*i);
- if (port) {
- const uint32_t rgba = patchage->conf()->get_port_color(port->type());
- port->set_fill_color(rgba);
- port->set_border_color(highlight_color(rgba, 0x20));
- }
- }
+ _about_win->run();
+ _about_win->hide();
}
-static void
-update_edge_color(GanvEdge* edge, void* data)
+void
+Patchage::on_legend_color_change(PortType id, const std::string&, uint32_t rgba)
{
- Patchage* patchage = (Patchage*)data;
- Ganv::Edge* edgemm = Glib::wrap(edge);
-
- PatchagePort* tail = dynamic_cast<PatchagePort*>((edgemm)->get_tail());
- if (tail) {
- edgemm->set_color(patchage->conf()->get_port_color(tail->type()));
- }
+ _reactor(action::ChangeSetting{{setting::PortColor{id, rgba}}});
}
void
-Patchage::on_legend_color_change(int id, const std::string& label, uint32_t rgba)
+Patchage::on_messages_resized(Gtk::Allocation&)
{
- _conf->set_port_color((PortType)id, rgba);
- _canvas->for_each_node(update_port_colors, this);
- _canvas->for_each_edge(update_edge_color, this);
+ const int max_pos = _main_paned->get_allocation().get_height();
+
+ _conf.set<setting::MessagesHeight>(max_pos - _main_paned->get_position());
}
void
-Patchage::on_messages_resized(Gtk::Allocation& alloc)
+Patchage::save()
{
- const int max_pos = _main_paned->get_allocation().get_height();
- _conf->set_messages_height(max_pos - _main_paned->get_position());
+ _conf.set<setting::Zoom>(_canvas->get_zoom()); // Can be changed by ganv
+ _conf.save();
}
void
-Patchage::save()
+Patchage::quit()
{
- _conf->set_zoom(_canvas->get_zoom()); // Can be changed by ganv
- _conf->save();
+ _main_win->hide();
}
void
Patchage::on_quit()
{
-#ifdef HAVE_ALSA
- _alsa_driver->detach();
-#endif
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _jack_driver->detach();
-#endif
- _main_win->hide();
+ if (_drivers.alsa()) {
+ _drivers.alsa()->detach();
+ }
+
+ if (_drivers.jack()) {
+ _drivers.jack()->detach();
+ }
+
+ _main_win->hide();
}
void
Patchage::on_export_image()
{
- Gtk::FileChooserDialog dialog("Export Image", Gtk::FILE_CHOOSER_ACTION_SAVE);
- dialog.add_button(Gtk::Stock::CANCEL, Gtk::RESPONSE_CANCEL);
- dialog.add_button(Gtk::Stock::SAVE, Gtk::RESPONSE_OK);
- dialog.set_default_response(Gtk::RESPONSE_OK);
- dialog.set_transient_for(*_main_win);
-
- typedef std::map<std::string, std::string> Types;
- Types types;
- types["*.dot"] = "Graphviz DOT";
- types["*.pdf"] = "Portable Document Format";
- types["*.ps"] = "PostScript";
- types["*.svg"] = "Scalable Vector Graphics";
- for (Types::const_iterator t = types.begin(); t != types.end(); ++t) {
- Gtk::FileFilter filt;
- filt.add_pattern(t->first);
- filt.set_name(t->second);
- dialog.add_filter(filt);
- }
-
- Gtk::CheckButton* bg_but = new Gtk::CheckButton("Draw _Background", true);
- Gtk::Alignment* extra = new Gtk::Alignment(1.0, 0.5, 0.0, 0.0);
- bg_but->set_active(true);
- extra->add(*Gtk::manage(bg_but));
- extra->show_all();
- dialog.set_extra_widget(*Gtk::manage(extra));
-
- if (dialog.run() == Gtk::RESPONSE_OK) {
- const std::string filename = dialog.get_filename();
- if (Glib::file_test(filename, Glib::FILE_TEST_EXISTS)) {
- Gtk::MessageDialog confirm(
- std::string("File exists! Overwrite ") + filename + "?",
- true, Gtk::MESSAGE_WARNING, Gtk::BUTTONS_YES_NO, true);
- confirm.set_transient_for(dialog);
- if (confirm.run() != Gtk::RESPONSE_YES) {
- return;
- }
- }
- _canvas->export_image(filename.c_str(), bg_but->get_active());
- }
+ Gtk::FileChooserDialog dialog(T("Export Image"),
+ Gtk::FILE_CHOOSER_ACTION_SAVE);
+
+ dialog.add_button(Gtk::Stock::CANCEL, Gtk::RESPONSE_CANCEL);
+ dialog.add_button(Gtk::Stock::SAVE, Gtk::RESPONSE_OK);
+ dialog.set_default_response(Gtk::RESPONSE_OK);
+ dialog.set_transient_for(*_main_win);
+
+ using Types = std::map<std::string, std::string>;
+
+ Types types;
+ types["*.dot"] = "Graphviz DOT";
+ types["*.pdf"] = "Portable Document Format";
+ types["*.ps"] = "PostScript";
+ types["*.svg"] = "Scalable Vector Graphics";
+ for (const auto& t : types) {
+ Gtk::FileFilter filt;
+ filt.add_pattern(t.first);
+ filt.set_name(t.second);
+ dialog.add_filter(filt);
+ }
+
+ auto* bg_but = new Gtk::CheckButton(T("Draw _Background"), true);
+ auto* extra = new Gtk::Alignment(1.0, 0.5, 0.0, 0.0);
+ bg_but->set_active(true);
+ extra->add(*Gtk::manage(bg_but));
+ extra->show_all();
+ dialog.set_extra_widget(*Gtk::manage(extra));
+
+ if (dialog.run() == Gtk::RESPONSE_OK) {
+ const std::string filename = dialog.get_filename();
+ if (Glib::file_test(filename, Glib::FILE_TEST_EXISTS)) {
+ Gtk::MessageDialog confirm(
+ fmt::format(T("File exists! Overwrite {}?"), filename),
+ true,
+ Gtk::MESSAGE_WARNING,
+ Gtk::BUTTONS_YES_NO,
+ true);
+ confirm.set_transient_for(dialog);
+ if (confirm.run() != Gtk::RESPONSE_YES) {
+ return;
+ }
+ }
+ _canvas->export_image(filename.c_str(), bg_but->get_active());
+ }
}
-void
-Patchage::on_view_messages()
+bool
+Patchage::on_scroll(GdkEventScroll*)
{
- if (_menu_view_messages->get_active()) {
- Glib::RefPtr<Gtk::TextBuffer> buffer = _status_text->get_buffer();
- if (!_pane_initialized) {
- int y, line_height;
- _status_text->get_line_yrange(buffer->begin(), y, line_height);
- const int pad = _status_text->get_pixels_inside_wrap();
- const int max_pos = _main_paned->get_allocation().get_height();
- const int min_height = (line_height + 2 * pad);
- const int conf_height = _conf->get_messages_height();
- _main_paned->set_position(max_pos - std::max(conf_height, min_height));
- _pane_initialized = true;
- }
-
- _log_scrolledwindow->show();
- _status_text->scroll_to_mark(
- _status_text->get_buffer()->get_insert(), 0);
- _conf->set_show_messages(true);
- } else {
- _log_scrolledwindow->hide();
- _conf->set_show_messages(false);
- }
+ return false;
}
void
-Patchage::on_view_toolbar()
-{
- if (_menu_view_toolbar->get_active()) {
- _toolbar->show();
- } else {
- _toolbar->hide();
- }
- _conf->set_show_toolbar(_menu_view_toolbar->get_active());
-}
-
-bool
-Patchage::on_scroll(GdkEventScroll* ev)
+Patchage::on_menu_action(const Action& action)
{
- return false;
+ _reactor(action);
}
void
Patchage::buffer_size_changed()
{
-#if defined(HAVE_JACK) || defined(HAVE_JACK_DBUS)
- const int selected = _buf_size_combo->get_active_row_number();
-
- if (selected == -1) {
- update_toolbar();
- } else {
- const jack_nframes_t buffer_size = 1 << (selected + 5);
- _jack_driver->set_buffer_size(buffer_size);
- update_toolbar();
- }
-#endif
+ if (_drivers.jack()) {
+ const int selected = _buf_size_combo->get_active_row_number();
+
+ if (selected == -1) {
+ update_toolbar();
+ } else {
+ const uint32_t buffer_size = 1u << (selected + 5);
+ _drivers.jack()->set_buffer_size(buffer_size);
+ update_toolbar();
+ }
+ }
}
+} // namespace patchage
diff --git a/src/Patchage.hpp b/src/Patchage.hpp
index cf550f0..8f3cca0 100644
--- a/src/Patchage.hpp
+++ b/src/Patchage.hpp
@@ -1,212 +1,199 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_PATCHAGE_HPP
#define PATCHAGE_PATCHAGE_HPP
-#include <set>
-#include <string>
-
-#include <boost/shared_ptr.hpp>
-
-#include <gtkmm/aboutdialog.h>
-#include <gtkmm/alignment.h>
-#include <gtkmm/builder.h>
-#include <gtkmm/button.h>
-#include <gtkmm/checkmenuitem.h>
-#include <gtkmm/combobox.h>
-#include <gtkmm/dialog.h>
-#include <gtkmm/imagemenuitem.h>
-#include <gtkmm/label.h>
-#include <gtkmm/main.h>
-#include <gtkmm/menubar.h>
-#include <gtkmm/menuitem.h>
-#include <gtkmm/paned.h>
-#include <gtkmm/progressbar.h>
-#include <gtkmm/scrolledwindow.h>
-#include <gtkmm/textview.h>
-#include <gtkmm/toolbar.h>
-#include <gtkmm/toolbutton.h>
-#include <gtkmm/viewport.h>
-#include <gtkmm/window.h>
-
-#include "patchage_config.h"
+#include <gdk/gdk.h>
+#include <glibmm/refptr.h>
+#include <gtkmm/treemodel.h>
+#include <gtkmm/treemodelcolumn.h>
+#include <gtkmm/widget.h>
+
+#include "Action.hpp"
+#include "ActionSink.hpp"
+#include "Configuration.hpp"
+#include "Drivers.hpp"
+#include "Event.hpp"
+#include "Metadata.hpp"
+#include "Options.hpp"
+#include "Reactor.hpp"
+#include "Setting.hpp"
+#include "TextViewLog.hpp"
#include "Widget.hpp"
-#include "Legend.hpp"
-class AlsaDriver;
-class JackDriver;
-class PatchageCanvas;
-class Configuration;
-
-namespace Ganv { class Module; }
+#include <cstdint>
+#include <memory>
+#include <mutex>
+#include <queue>
+#include <string>
-class Patchage {
+namespace Glib {
+class ustring;
+} // namespace Glib
+
+namespace Gtk {
+class AboutDialog;
+class Alignment;
+class Builder;
+class CheckMenuItem;
+class ComboBox;
+class ImageMenuItem;
+class Label;
+class MenuBar;
+class MenuItem;
+class Paned;
+class ScrolledWindow;
+class TextTag;
+class TextView;
+class ToolButton;
+class Toolbar;
+class VBox;
+class Window;
+} // namespace Gtk
+
+namespace patchage {
+
+enum class PortType;
+
+class Canvas;
+class ILog;
+class Legend;
+
+/// Main application class
+class Patchage
+{
public:
- Patchage(int argc, char** argv);
- ~Patchage();
-
- const boost::shared_ptr<PatchageCanvas>& canvas() const { return _canvas; }
-
- Gtk::Window* window() { return _main_win.get(); }
-
- Configuration* conf() const { return _conf; }
- JackDriver* jack_driver() const { return _jack_driver; }
-#ifdef HAVE_ALSA
- AlsaDriver* alsa_driver() const { return _alsa_driver; }
-#endif
-#ifdef PATCHAGE_JACK_SESSION
- void show_open_session_dialog();
- void show_save_session_dialog();
- void show_save_close_session_dialog();
-#endif
-
- Glib::RefPtr<Gtk::Builder> xml() { return _xml; }
-
- void attach();
- void save();
- void quit() { _main_win->hide(); }
-
- void refresh();
- inline void queue_refresh() { _refresh = true; }
- inline void driver_detached() { _driver_detached = true; }
-
- void info_msg(const std::string& msg);
- void error_msg(const std::string& msg);
- void warning_msg(const std::string& msg);
-
- void update_state();
- void store_window_location();
-
- bool show_human_names() const { return _menu_view_human_names->get_active(); }
- bool sort_ports() const { return _menu_view_sort_ports->get_active(); }
+ explicit Patchage(Options options);
+ ~Patchage();
+
+ Patchage(const Patchage&) = delete;
+ Patchage& operator=(const Patchage&) = delete;
+
+ Patchage(Patchage&&) = delete;
+ Patchage& operator=(Patchage&&) = delete;
+
+ void operator()(const setting::AlsaAttached& setting);
+ void operator()(const setting::FontSize& setting);
+ void operator()(const setting::HumanNames& setting);
+ void operator()(const setting::JackAttached& setting);
+ void operator()(const setting::MessagesHeight& setting);
+ void operator()(const setting::MessagesVisible& setting);
+ void operator()(const setting::PortColor& setting);
+ void operator()(const setting::SortedPorts& setting);
+ void operator()(const setting::SprungLayout& setting);
+ void operator()(const setting::ToolbarVisible& setting);
+ void operator()(const setting::WindowLocation& setting);
+ void operator()(const setting::WindowSize& setting);
+ void operator()(const setting::Zoom& setting);
+
+ void attach();
+ void save();
+ void quit();
+
+ void store_window_location();
+
+ Canvas& canvas() const { return *_canvas; }
+ Gtk::Window* window() { return _main_win.get(); }
+ ILog& log() { return _log; }
+ Metadata& metadata() { return _metadata; }
+ const Configuration& conf() const { return _conf; }
+ Configuration& conf() { return _conf; }
protected:
- class BufferSizeColumns : public Gtk::TreeModel::ColumnRecord {
- public:
- BufferSizeColumns() { add(label); }
-
- Gtk::TreeModelColumn<Glib::ustring> label;
- };
-
- void connect_widgets();
-
- void on_arrange();
- void on_sprung_layout_toggled();
- void on_help_about();
- void on_quit();
- void on_export_image();
- void on_view_messages();
- void on_view_toolbar();
- void on_store_positions();
- void on_view_human_names();
- void on_view_sort_ports();
- void on_zoom_in();
- void on_zoom_out();
- void on_zoom_normal();
- void on_zoom_full();
- void on_increase_font_size();
- void on_decrease_font_size();
- void on_normal_font_size();
- void on_legend_color_change(int id, const std::string& label, uint32_t rgba);
- void on_messages_resized(Gtk::Allocation& alloc);
-
- bool on_scroll(GdkEventScroll* ev);
-
- void zoom(double z);
- bool idle_callback();
- void clear_load();
- bool update_load();
- void update_toolbar();
-
- void buffer_size_changed();
-
- Glib::RefPtr<Gtk::Builder> _xml;
-
-#ifdef HAVE_ALSA
- AlsaDriver* _alsa_driver;
- void menu_alsa_connect();
- void menu_alsa_disconnect();
-#endif
-
-#ifdef PATCHAGE_JACK_SESSION
- void save_session(bool close);
-#endif
-
- boost::shared_ptr<PatchageCanvas> _canvas;
-
- JackDriver* _jack_driver;
- Configuration* _conf;
-
- Gtk::Main* _gtk_main;
-
- BufferSizeColumns _buf_size_columns;
-
- Widget<Gtk::AboutDialog> _about_win;
- Widget<Gtk::ScrolledWindow> _main_scrolledwin;
- Widget<Gtk::Window> _main_win;
- Widget<Gtk::VBox> _main_vbox;
- Widget<Gtk::MenuBar> _menubar;
- Widget<Gtk::MenuItem> _menu_alsa_connect;
- Widget<Gtk::MenuItem> _menu_alsa_disconnect;
- Widget<Gtk::MenuItem> _menu_file_quit;
- Widget<Gtk::MenuItem> _menu_export_image;
- Widget<Gtk::MenuItem> _menu_help_about;
- Widget<Gtk::MenuItem> _menu_jack_connect;
- Widget<Gtk::MenuItem> _menu_jack_disconnect;
- Widget<Gtk::MenuItem> _menu_open_session;
- Widget<Gtk::MenuItem> _menu_save_session;
- Widget<Gtk::MenuItem> _menu_save_close_session;
- Widget<Gtk::MenuItem> _menu_view_arrange;
- Widget<Gtk::CheckMenuItem> _menu_view_sprung_layout;
- Widget<Gtk::CheckMenuItem> _menu_view_messages;
- Widget<Gtk::CheckMenuItem> _menu_view_toolbar;
- Widget<Gtk::MenuItem> _menu_view_refresh;
- Widget<Gtk::CheckMenuItem> _menu_view_human_names;
- Widget<Gtk::CheckMenuItem> _menu_view_sort_ports;
- Widget<Gtk::ImageMenuItem> _menu_zoom_in;
- Widget<Gtk::ImageMenuItem> _menu_zoom_out;
- Widget<Gtk::ImageMenuItem> _menu_zoom_normal;
- Widget<Gtk::ImageMenuItem> _menu_zoom_full;
- Widget<Gtk::MenuItem> _menu_increase_font_size;
- Widget<Gtk::MenuItem> _menu_decrease_font_size;
- Widget<Gtk::MenuItem> _menu_normal_font_size;
- Widget<Gtk::Toolbar> _toolbar;
- Widget<Gtk::ToolButton> _clear_load_but;
- Widget<Gtk::ProgressBar> _xrun_progress;
- Widget<Gtk::ComboBox> _buf_size_combo;
- Widget<Gtk::Label> _latency_label;
- Widget<Gtk::Alignment> _legend_alignment;
- Widget<Gtk::Paned> _main_paned;
- Widget<Gtk::ScrolledWindow> _log_scrolledwindow;
- Widget<Gtk::TextView> _status_text;
- Legend* _legend;
-
- Glib::RefPtr<Gtk::TextTag> _error_tag;
- Glib::RefPtr<Gtk::TextTag> _warning_tag;
-
- bool _pane_initialized;
- bool _attach;
- bool _driver_detached;
- bool _refresh;
- bool _enable_refresh;
- bool _jack_driver_autoattach;
-#ifdef HAVE_ALSA
- bool _alsa_driver_autoattach;
-#endif
+ class BufferSizeColumns : public Gtk::TreeModel::ColumnRecord
+ {
+ public:
+ BufferSizeColumns() { add(label); }
+
+ Gtk::TreeModelColumn<Glib::ustring> label;
+ };
+
+ void on_driver_event(const Event& event);
+ void process_events();
+
+ void on_conf_change(const Setting& setting);
+
+ void on_arrange();
+ void on_help_about();
+ void on_quit();
+ void on_export_image();
+ void on_store_positions();
+
+ void on_legend_color_change(PortType id,
+ const std::string& label,
+ uint32_t rgba);
+
+ void on_messages_resized(Gtk::Allocation& alloc);
+
+ bool on_scroll(GdkEventScroll* ev);
+
+ void on_menu_action(const Action& action);
+
+ bool idle_callback();
+ void clear_load();
+ bool update_load();
+ void update_toolbar();
+
+ void buffer_size_changed();
+
+ Glib::RefPtr<Gtk::Builder> _xml;
+
+ Widget<Gtk::AboutDialog> _about_win;
+ Widget<Gtk::ScrolledWindow> _main_scrolledwin;
+ Widget<Gtk::Window> _main_win;
+ Widget<Gtk::VBox> _main_vbox;
+ Widget<Gtk::MenuBar> _menubar;
+ Widget<Gtk::MenuItem> _menu_alsa_connect;
+ Widget<Gtk::MenuItem> _menu_alsa_disconnect;
+ Widget<Gtk::MenuItem> _menu_file_quit;
+ Widget<Gtk::MenuItem> _menu_export_image;
+ Widget<Gtk::MenuItem> _menu_help_about;
+ Widget<Gtk::MenuItem> _menu_jack_connect;
+ Widget<Gtk::MenuItem> _menu_jack_disconnect;
+ Widget<Gtk::MenuItem> _menu_view_arrange;
+ Widget<Gtk::CheckMenuItem> _menu_view_sprung_layout;
+ Widget<Gtk::CheckMenuItem> _menu_view_messages;
+ Widget<Gtk::CheckMenuItem> _menu_view_toolbar;
+ Widget<Gtk::MenuItem> _menu_view_refresh;
+ Widget<Gtk::CheckMenuItem> _menu_view_human_names;
+ Widget<Gtk::CheckMenuItem> _menu_view_sort_ports;
+ Widget<Gtk::ImageMenuItem> _menu_zoom_in;
+ Widget<Gtk::ImageMenuItem> _menu_zoom_out;
+ Widget<Gtk::ImageMenuItem> _menu_zoom_normal;
+ Widget<Gtk::ImageMenuItem> _menu_zoom_full;
+ Widget<Gtk::MenuItem> _menu_increase_font_size;
+ Widget<Gtk::MenuItem> _menu_decrease_font_size;
+ Widget<Gtk::MenuItem> _menu_normal_font_size;
+ Widget<Gtk::Toolbar> _toolbar;
+ Widget<Gtk::ToolButton> _clear_load_but;
+ Widget<Gtk::Label> _dropouts_label;
+ Widget<Gtk::ComboBox> _buf_size_combo;
+ Widget<Gtk::Label> _latency_label;
+ Widget<Gtk::Alignment> _legend_alignment;
+ Widget<Gtk::Paned> _main_paned;
+ Widget<Gtk::ScrolledWindow> _log_scrolledwindow;
+ Widget<Gtk::TextView> _status_text;
+
+ Configuration _conf;
+ TextViewLog _log;
+ std::unique_ptr<Canvas> _canvas;
+ std::mutex _events_mutex;
+ std::queue<Event> _driver_events;
+ BufferSizeColumns _buf_size_columns;
+ Legend* _legend{nullptr};
+ Metadata _metadata;
+ Drivers _drivers;
+ Reactor _reactor;
+ ActionSink _action_sink;
+
+ Glib::RefPtr<Gtk::TextTag> _error_tag;
+ Glib::RefPtr<Gtk::TextTag> _warning_tag;
+
+ Options _options;
+ bool _attach{true};
};
+} // namespace patchage
+
#endif // PATCHAGE_PATCHAGE_HPP
diff --git a/src/PatchageCanvas.cpp b/src/PatchageCanvas.cpp
deleted file mode 100644
index 4d63a4b..0000000
--- a/src/PatchageCanvas.cpp
+++ /dev/null
@@ -1,338 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <boost/format.hpp>
-
-#include "patchage_config.h"
-
-#if defined(HAVE_JACK_DBUS)
- #include "JackDbusDriver.hpp"
-#elif defined(PATCHAGE_LIBJACK)
- #include "JackDriver.hpp"
-#endif
-#ifdef HAVE_ALSA
- #include "AlsaDriver.hpp"
-#endif
-
-#include "ganv/Edge.hpp"
-
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageModule.hpp"
-#include "PatchagePort.hpp"
-
-using std::string;
-using boost::format;
-
-PatchageCanvas::PatchageCanvas(Patchage* app, int width, int height)
- : Ganv::Canvas(width, height)
- , _app(app)
-{
- signal_event.connect(
- sigc::mem_fun(this, &PatchageCanvas::on_event));
- signal_connect.connect(
- sigc::mem_fun(this, &PatchageCanvas::connect));
- signal_disconnect.connect(
- sigc::mem_fun(this, &PatchageCanvas::disconnect));
-}
-
-PatchageModule*
-PatchageCanvas::find_module(const string& name, ModuleType type)
-{
- const ModuleIndex::const_iterator i = _module_index.find(name);
- if (i == _module_index.end())
- return NULL;
-
- PatchageModule* io_module = NULL;
- for (ModuleIndex::const_iterator j = i; j != _module_index.end() && j->first == name; ++j) {
- if (j->second->type() == type) {
- return j->second;
- } else if (j->second->type() == InputOutput) {
- io_module = j->second;
- }
- }
-
- // Return InputOutput module for Input or Output (or NULL if not found at all)
- return io_module;
-}
-
-void
-PatchageCanvas::remove_module(const string& name)
-{
- ModuleIndex::iterator i = _module_index.find(name);
- while (i != _module_index.end()) {
- PatchageModule* mod = i->second;
- _module_index.erase(i);
- i = _module_index.find(name);
- delete mod;
- }
-}
-
-PatchagePort*
-PatchageCanvas::find_port(const PortID& id)
-{
- PatchagePort* pp = NULL;
-
- PortIndex::iterator i = _port_index.find(id);
- if (i != _port_index.end()) {
- assert(i->second->get_module());
- return i->second;
- }
-
-#ifdef PATCHAGE_LIBJACK
- // Alsa ports are always indexed (or don't exist at all)
- if (id.type == PortID::JACK_ID) {
- jack_port_t* jack_port = jack_port_by_id(_app->jack_driver()->client(), id.id.jack_id);
- if (!jack_port)
- return NULL;
-
- string module_name;
- string port_name;
- _app->jack_driver()->port_names(id, module_name, port_name);
-
- PatchageModule* module = find_module(
- module_name, (jack_port_flags(jack_port) & JackPortIsInput) ? Input : Output);
-
- if (module)
- pp = dynamic_cast<PatchagePort*>(module->get_port(port_name));
-
- if (pp)
- index_port(id, pp);
- }
-#endif // PATCHAGE_LIBJACK
-
- return pp;
-}
-
-void
-PatchageCanvas::remove_port(const PortID& id)
-{
- PatchagePort* const port = find_port(id);
- _port_index.erase(id);
- delete port;
-}
-
-struct RemovePortsData {
- typedef bool (*Predicate)(const PatchagePort*);
-
- RemovePortsData(Predicate p) : pred(p) {}
-
- Predicate pred;
- std::set<PatchageModule*> empty;
-};
-
-static void
-delete_port_if_matches(GanvPort* port, void* cdata)
-{
- RemovePortsData* data = (RemovePortsData*)cdata;
- PatchagePort* pport = dynamic_cast<PatchagePort*>(Glib::wrap(port));
- if (pport && data->pred(pport)) {
- delete pport;
- }
-}
-
-static void
-remove_ports_matching(GanvNode* node, void* cdata)
-{
- if (!GANV_IS_MODULE(node)) {
- return;
- }
-
- Ganv::Module* cmodule = Glib::wrap(GANV_MODULE(node));
- PatchageModule* pmodule = dynamic_cast<PatchageModule*>(cmodule);
- if (!pmodule) {
- return;
- }
-
- RemovePortsData* data = (RemovePortsData*)cdata;
-
- pmodule->for_each_port(delete_port_if_matches, data);
-
- if (pmodule->num_ports() == 0) {
- data->empty.insert(pmodule);
- }
-}
-
-void
-PatchageCanvas::remove_ports(bool (*pred)(const PatchagePort*))
-{
- RemovePortsData data(pred);
-
- for_each_node(remove_ports_matching, &data);
-
- for (PortIndex::iterator i = _port_index.begin();
- i != _port_index.end();) {
- PortIndex::iterator next = i;
- ++next;
- if (pred(i->second)) {
- _port_index.erase(i);
- }
- i = next;
- }
-
- for (std::set<PatchageModule*>::iterator i = data.empty.begin();
- i != data.empty.end(); ++i) {
- delete *i;
- }
-}
-
-PatchagePort*
-PatchageCanvas::find_port_by_name(const std::string& client_name,
- const std::string& port_name)
-{
- const ModuleIndex::const_iterator i = _module_index.find(client_name);
- if (i == _module_index.end())
- return NULL;
-
- for (ModuleIndex::const_iterator j = i; j != _module_index.end() && j->first == client_name; ++j) {
- PatchagePort* port = dynamic_cast<PatchagePort*>(j->second->get_port(port_name));
- if (port)
- return port;
- }
-
- return NULL;
-}
-
-void
-PatchageCanvas::connect(Ganv::Node* port1,
- Ganv::Node* port2)
-{
- PatchagePort* p1 = dynamic_cast<PatchagePort*>(port1);
- PatchagePort* p2 = dynamic_cast<PatchagePort*>(port2);
- if (!p1 || !p2)
- return;
-
- if ((p1->type() == JACK_AUDIO && p2->type() == JACK_AUDIO) ||
- (p1->type() == JACK_MIDI && p2->type() == JACK_MIDI) ||
- (p1->type() == JACK_AUDIO && p2->type() == JACK_CV) ||
- (p1->type() == JACK_CV && p2->type() == JACK_CV) ||
- (p1->type() == JACK_OSC && p2->type() == JACK_OSC)) {
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _app->jack_driver()->connect(p1, p2);
-#endif
-#ifdef HAVE_ALSA
- } else if (p1->type() == ALSA_MIDI && p2->type() == ALSA_MIDI) {
- _app->alsa_driver()->connect(p1, p2);
-#endif
- } else {
- _app->warning_msg("Cannot make connection, incompatible port types.");
- }
-}
-
-void
-PatchageCanvas::disconnect(Ganv::Node* port1,
- Ganv::Node* port2)
-{
- PatchagePort* input = dynamic_cast<PatchagePort*>(port1);
- PatchagePort* output = dynamic_cast<PatchagePort*>(port2);
- if (!input || !output)
- return;
-
- if (input->is_output() && output->is_input()) {
- // Damn, guessed wrong
- PatchagePort* swap = input;
- input = output;
- output = swap;
- }
-
- if (!input || !output || input->is_output() || output->is_input()) {
- _app->error_msg("Attempt to disconnect mismatched/unknown ports.");
- return;
- }
-
- if (input->type() == JACK_AUDIO ||
- input->type() == JACK_MIDI ||
- input->type() == JACK_CV ||
- input->type() == JACK_OSC) {
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- _app->jack_driver()->disconnect(output, input);
-#endif
-#ifdef HAVE_ALSA
- } else if (input->type() == ALSA_MIDI) {
- _app->alsa_driver()->disconnect(output, input);
-#endif
- } else {
- _app->error_msg("Attempt to disconnect ports with strange types.");
- }
-}
-
-void
-PatchageCanvas::add_module(const std::string& name, PatchageModule* module)
-{
- _module_index.insert(std::make_pair(name, module));
-
- // Join partners, if applicable
- PatchageModule* in_module = NULL;
- PatchageModule* out_module = NULL;
- if (module->type() == Input) {
- in_module = module;
- out_module = find_module(name, Output);
- } else if (module->type() == Output) {
- in_module = find_module(name, Output);
- out_module = module;
- }
-
- if (in_module && out_module)
- out_module->set_partner(in_module);
-}
-
-static void
-disconnect_edge(GanvEdge* edge, void* data)
-{
- PatchageCanvas* canvas = (PatchageCanvas*)data;
- Ganv::Edge* edgemm = Glib::wrap(edge);
- canvas->disconnect(edgemm->get_tail(), edgemm->get_head());
-}
-
-bool
-PatchageCanvas::on_event(GdkEvent* ev)
-{
- if (ev->type == GDK_KEY_PRESS && ev->key.keyval == GDK_Delete) {
- for_each_selected_edge(disconnect_edge, this);
- clear_selection();
- return true;
- }
-
- return false;
-}
-
-bool
-PatchageCanvas::make_connection(Ganv::Node* tail, Ganv::Node* head)
-{
- new Ganv::Edge(*this, tail, head);
- return true;
-}
-
-void
-PatchageCanvas::remove_module(PatchageModule* module)
-{
- // Remove module from cache
- for (ModuleIndex::iterator i = _module_index.find(module->get_label());
- i != _module_index.end() && i->first == module->get_label(); ++i) {
- if (i->second == module) {
- _module_index.erase(i);
- return;
- }
- }
-}
-
-void
-PatchageCanvas::clear()
-{
- _port_index.clear();
- _module_index.clear();
- Ganv::Canvas::clear();
-}
diff --git a/src/PatchageCanvas.hpp b/src/PatchageCanvas.hpp
deleted file mode 100644
index 4b5fac1..0000000
--- a/src/PatchageCanvas.hpp
+++ /dev/null
@@ -1,85 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_PATCHAGECANVAS_HPP
-#define PATCHAGE_PATCHAGECANVAS_HPP
-
-#include <map>
-#include <string>
-
-#include "patchage_config.h"
-
-#ifdef HAVE_ALSA
- #include <alsa/asoundlib.h>
-#endif
-
-#include "ganv/Canvas.hpp"
-
-#include "PatchageEvent.hpp"
-#include "PatchageModule.hpp"
-#include "PortID.hpp"
-
-class Patchage;
-class PatchageModule;
-class PatchagePort;
-
-class PatchageCanvas : public Ganv::Canvas {
-public:
- PatchageCanvas(Patchage* _app, int width, int height);
-
- PatchageModule* find_module(const std::string& name, ModuleType type);
- PatchagePort* find_port(const PortID& id);
-
- void remove_module(const std::string& name);
- void remove_module(PatchageModule* module);
-
- PatchagePort* find_port_by_name(const std::string& client_name,
- const std::string& port_name);
-
- void connect(Ganv::Node* port1,
- Ganv::Node* port2);
-
- void disconnect(Ganv::Node* port1,
- Ganv::Node* port2);
-
- void index_port(const PortID& id, PatchagePort* port) {
- _port_index.insert(std::make_pair(id, port));
- }
-
- void remove_ports(bool (*pred)(const PatchagePort*));
-
- void add_module(const std::string& name, PatchageModule* module);
-
- bool make_connection(Ganv::Node* tail, Ganv::Node* head);
-
- void remove_port(const PortID& id);
-
- void clear();
-
-private:
- Patchage* _app;
-
- bool on_event(GdkEvent* ev);
- bool on_connection_event(Ganv::Edge* c, GdkEvent* ev);
-
- typedef std::map<const PortID, PatchagePort*> PortIndex;
- PortIndex _port_index;
-
- typedef std::multimap<const std::string, PatchageModule*> ModuleIndex;
- ModuleIndex _module_index;
-};
-
-#endif // PATCHAGE_PATCHAGECANVAS_HPP
diff --git a/src/PatchageEvent.cpp b/src/PatchageEvent.cpp
deleted file mode 100644
index ea9a758..0000000
--- a/src/PatchageEvent.cpp
+++ /dev/null
@@ -1,110 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include <boost/format.hpp>
-
-#include "patchage_config.h"
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageModule.hpp"
-#include "PatchageEvent.hpp"
-#include "Driver.hpp"
-#if defined(HAVE_JACK_DBUS)
-# include "JackDbusDriver.hpp"
-#elif defined(PATCHAGE_LIBJACK)
-# include "JackDriver.hpp"
-#endif
-#ifdef HAVE_ALSA
-# include "AlsaDriver.hpp"
-#endif
-
-using std::endl;
-using boost::format;
-
-void
-PatchageEvent::execute(Patchage* patchage)
-{
- if (_type == REFRESH) {
- patchage->refresh();
-
- } else if (_type == CLIENT_CREATION) {
- // No empty modules (for now)
- g_free(_str);
- _str = NULL;
-
- } else if (_type == CLIENT_DESTRUCTION) {
- patchage->canvas()->remove_module(_str);
- g_free(_str);
- _str = NULL;
-
- } else if (_type == PORT_CREATION) {
-
- Driver* driver = NULL;
- if (_port_1.type == PortID::JACK_ID) {
-#if defined(PATCHAGE_LIBJACK) || defined(HAVE_JACK_DBUS)
- driver = patchage->jack_driver();
-#endif
-#ifdef HAVE_ALSA
- } else if (_port_1.type == PortID::ALSA_ADDR) {
- driver = patchage->alsa_driver();
-#endif
- }
-
- if (driver) {
- PatchagePort* port = driver->create_port_view(patchage, _port_1);
- if (!port) {
- patchage->error_msg(
- (format("Unable to create view for port `%1%'")
- % _port_1).str());
- }
- } else {
- patchage->error_msg(
- (format("Unknown type for port `%1%'") % _port_1).str());
- }
-
- } else if (_type == PORT_DESTRUCTION) {
-
- patchage->canvas()->remove_port(_port_1);
-
- } else if (_type == CONNECTION) {
-
- PatchagePort* port_1 = patchage->canvas()->find_port(_port_1);
- PatchagePort* port_2 = patchage->canvas()->find_port(_port_2);
-
- if (!port_1)
- patchage->error_msg((format("Unable to find port `%1%' to connect")
- % _port_1).str());
- else if (!port_2)
- patchage->error_msg((format("Unable to find port `%1%' to connect")
- % _port_2).str());
- else
- patchage->canvas()->make_connection(port_1, port_2);
-
- } else if (_type == DISCONNECTION) {
-
- PatchagePort* port_1 = patchage->canvas()->find_port(_port_1);
- PatchagePort* port_2 = patchage->canvas()->find_port(_port_2);
-
- if (!port_1)
- patchage->error_msg((format("Unable to find port `%1%' to disconnect")
- % _port_1).str());
- else if (!port_2)
- patchage->error_msg((format("Unable to find port `%1%' to disconnect")
- % _port_2).str());
- else
- patchage->canvas()->remove_edge_between(port_1, port_2);
- }
-}
diff --git a/src/PatchageEvent.hpp b/src/PatchageEvent.hpp
deleted file mode 100644
index 899f77f..0000000
--- a/src/PatchageEvent.hpp
+++ /dev/null
@@ -1,87 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_PATCHAGEEVENT_HPP
-#define PATCHAGE_PATCHAGEEVENT_HPP
-
-#include <cstring>
-
-#include "patchage_config.h"
-
-#ifdef PATCHAGE_LIBJACK
- #include <jack/jack.h>
-#endif
-#ifdef HAVE_ALSA
- #include <alsa/asoundlib.h>
-#endif
-
-#include "PatchagePort.hpp"
-#include "PortID.hpp"
-
-class Patchage;
-
-/** A Driver event to be processed by the GUI thread.
- */
-class PatchageEvent {
-public:
- enum Type {
- NULL_EVENT = 0,
- REFRESH,
- CLIENT_CREATION,
- CLIENT_DESTRUCTION,
- PORT_CREATION,
- PORT_DESTRUCTION,
- CONNECTION,
- DISCONNECTION
- };
-
- explicit PatchageEvent(Type type=NULL_EVENT)
- : _str(NULL)
- , _type(type)
- {}
-
- PatchageEvent(Type type, const char* str)
- : _str(g_strdup(str))
- , _type(type)
- {}
-
- template <typename P>
- PatchageEvent(Type type, P port)
- : _str(NULL)
- , _port_1(port)
- , _type(type)
- {}
-
- template <typename P>
- PatchageEvent(Type type, P port_1, P port_2)
- : _str(NULL)
- , _port_1(port_1, false)
- , _port_2(port_2, true)
- , _type(type)
- {}
-
- void execute(Patchage* patchage);
-
- inline Type type() const { return (Type)_type; }
-
-private:
- char* _str;
- PortID _port_1;
- PortID _port_2;
- uint8_t _type;
-};
-
-#endif // PATCHAGE_PATCHAGEEVENT_HPP
diff --git a/src/PatchageModule.cpp b/src/PatchageModule.cpp
deleted file mode 100644
index 8ba5296..0000000
--- a/src/PatchageModule.cpp
+++ /dev/null
@@ -1,157 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2010-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#include "Patchage.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageModule.hpp"
-#include "PatchagePort.hpp"
-
-PatchageModule::PatchageModule(
- Patchage* app, const std::string& name, ModuleType type, double x, double y)
- : Module(*app->canvas().get(), name, x, y)
- , _app(app)
- , _menu(NULL)
- , _name(name)
- , _type(type)
-{
- signal_event().connect(
- sigc::mem_fun(this, &PatchageModule::on_event));
-
- signal_moved().connect(
- sigc::mem_fun(this, &PatchageModule::store_location));
-
- // Set as source by default, turned off if input ports added
- set_is_source(true);
-}
-
-PatchageModule::~PatchageModule()
-{
- _app->canvas()->remove_module(this);
- delete _menu;
- _menu = NULL;
-}
-
-void
-PatchageModule::update_menu()
-{
- if (!_menu)
- return;
-
- if (_type == InputOutput) {
- bool has_in = false;
- bool has_out = false;
- for (const_iterator p = begin(); p != end(); ++p) {
- if ((*p)->is_input()) {
- has_in = true;
- } else {
- has_out = true;
- }
- if (has_in && has_out) {
- _menu->items()[0].show(); // Show "Split" menu item
- return;
- }
- }
- _menu->items()[0].hide(); // Hide "Split" menu item
- }
-}
-
-bool
-PatchageModule::show_menu(GdkEventButton* ev)
-{
- _menu = new Gtk::Menu();
- Gtk::Menu::MenuList& items = _menu->items();
- if (_type == InputOutput) {
- items.push_back(
- Gtk::Menu_Helpers::MenuElem(
- "_Split", sigc::mem_fun(this, &PatchageModule::split)));
- update_menu();
- } else {
- items.push_back(
- Gtk::Menu_Helpers::MenuElem(
- "_Join", sigc::mem_fun(this, &PatchageModule::join)));
- }
- items.push_back(
- Gtk::Menu_Helpers::MenuElem(
- "_Disconnect All",
- sigc::mem_fun(this, &PatchageModule::menu_disconnect_all)));
-
- _menu->popup(ev->button, ev->time);
- return true;
-}
-
-bool
-PatchageModule::on_event(GdkEvent* ev)
-{
- if (ev->type == GDK_BUTTON_PRESS && ev->button.button == 3) {
- return show_menu(&ev->button);
- }
- return false;
-}
-
-void
-PatchageModule::load_location()
-{
- Coord loc;
-
- if (_app->conf()->get_module_location(_name, _type, loc))
- move_to(loc.x, loc.y);
- else
- move_to(20 + rand() % 640,
- 20 + rand() % 480);
-}
-
-void
-PatchageModule::store_location(double x, double y)
-{
- Coord loc(get_x(), get_y());
- _app->conf()->set_module_location(_name, _type, loc);
-}
-
-void
-PatchageModule::split()
-{
- assert(_type == InputOutput);
- _app->conf()->set_module_split(_name, true);
- _app->refresh();
-}
-
-void
-PatchageModule::join()
-{
- assert(_type != InputOutput);
- _app->conf()->set_module_split(_name, false);
- _app->refresh();
-}
-
-void
-PatchageModule::menu_disconnect_all()
-{
- for (iterator p = begin(); p != end(); ++p)
- (*p)->disconnect();
-}
-
-PatchagePort*
-PatchageModule::get_port(const std::string& name)
-{
- for (iterator p = begin(); p != end(); ++p) {
- PatchagePort* pport = dynamic_cast<PatchagePort*>(*p);
- if (pport && pport->name() == name) {
- return pport;
- }
- }
-
- return NULL;
-}
diff --git a/src/PatchageModule.hpp b/src/PatchageModule.hpp
deleted file mode 100644
index 99527ac..0000000
--- a/src/PatchageModule.hpp
+++ /dev/null
@@ -1,67 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_PATCHAGEMODULE_HPP
-#define PATCHAGE_PATCHAGEMODULE_HPP
-
-#include <string>
-
-#include <gtkmm/menu_elems.h>
-
-#include "ganv/Module.hpp"
-#include "ganv/Port.hpp"
-
-#include "Configuration.hpp"
-
-class Patchage;
-class PatchagePort;
-
-class PatchageModule : public Ganv::Module
-{
-public:
- PatchageModule(Patchage* app,
- const std::string& name,
- ModuleType type,
- double x = 0,
- double y = 0);
- ~PatchageModule();
-
- void split();
- void join();
-
- bool show_menu(GdkEventButton* ev);
- void update_menu();
-
- PatchagePort* get_port(const std::string& name);
-
- void load_location();
- void menu_disconnect_all();
- void show_dialog() {}
- void store_location(double x, double y);
-
- ModuleType type() const { return _type; }
- const std::string& name() const { return _name; }
-
-protected:
- bool on_event(GdkEvent* ev);
-
- Patchage* _app;
- Gtk::Menu* _menu;
- std::string _name;
- ModuleType _type;
-};
-
-#endif // PATCHAGE_PATCHAGEMODULE_HPP
diff --git a/src/PatchagePort.hpp b/src/PatchagePort.hpp
deleted file mode 100644
index d5d6cb3..0000000
--- a/src/PatchagePort.hpp
+++ /dev/null
@@ -1,104 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef PATCHAGE_PATCHAGEPORT_HPP
-#define PATCHAGE_PATCHAGEPORT_HPP
-
-#include <string>
-
-#include <boost/shared_ptr.hpp>
-
-#include <gtkmm/menu.h>
-#include <gtkmm/menushell.h>
-
-#include "ganv/Port.hpp"
-#include "ganv/Module.hpp"
-
-#include "Configuration.hpp"
-#include "PatchageCanvas.hpp"
-#include "PatchageModule.hpp"
-#include "PortID.hpp"
-#include "patchage_config.h"
-
-/** A Port on a PatchageModule
- */
-class PatchagePort : public Ganv::Port
-{
-public:
- PatchagePort(Ganv::Module& module,
- PortType type,
- const std::string& name,
- const std::string& human_name,
- bool is_input,
- uint32_t color,
- bool show_human_name,
- boost::optional<int> order=boost::optional<int>())
- : Port(module,
- (show_human_name && !human_name.empty()) ? human_name : name,
- is_input,
- color)
- , _type(type)
- , _name(name)
- , _human_name(human_name)
- , _order(order)
- {
- signal_event().connect(
- sigc::mem_fun(this, &PatchagePort::on_event));
- }
-
- virtual ~PatchagePort() {}
-
- /** Returns the full name of this port, as "modulename:portname" */
- std::string full_name() const {
- PatchageModule* pmod = dynamic_cast<PatchageModule*>(get_module());
- return std::string(pmod->name()) + ":" + _name;
- }
-
- void show_human_name(bool human) {
- if (human && !_human_name.empty()) {
- set_label(_human_name.c_str());
- } else {
- set_label(_name.c_str());
- }
- }
-
- bool on_event(GdkEvent* ev) {
- if (ev->type != GDK_BUTTON_PRESS || ev->button.button != 3) {
- return false;
- }
-
- Gtk::Menu* menu = Gtk::manage(new Gtk::Menu());
- menu->items().push_back(
- Gtk::Menu_Helpers::MenuElem(
- "Disconnect", sigc::mem_fun(this, &Port::disconnect)));
-
- menu->popup(ev->button.button, ev->button.time);
- return true;
- }
-
- PortType type() const { return _type; }
- const std::string& name() const { return _name; }
- const std::string& human_name() const { return _human_name; }
- const boost::optional<int>& order() const { return _order; }
-
-private:
- PortType _type;
- std::string _name;
- std::string _human_name;
- boost::optional<int> _order;
-};
-
-#endif // PATCHAGE_PATCHAGEPORT_HPP
diff --git a/src/PortID.hpp b/src/PortID.hpp
index 3f916c0..54e4c5d 100644
--- a/src/PortID.hpp
+++ b/src/PortID.hpp
@@ -1,120 +1,178 @@
-/* This file is part of Patchage.
- * Copyright 2008-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2008-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_PORTID_HPP
#define PATCHAGE_PORTID_HPP
-#include <cstring>
+#include "ClientID.hpp"
+#include "ClientType.hpp"
+#include "warnings.hpp"
+
+#include <cassert>
+#include <cstddef>
+#include <cstdint>
+#include <functional>
#include <iostream>
+#include <string>
+#include <tuple>
+#include <utility>
+
+namespace patchage {
-#include "patchage_config.h"
+/// An ID for some port on a client (program)
+struct PortID {
+ using Type = ClientType;
-#ifdef PATCHAGE_LIBJACK
- #include <jack/jack.h>
-#endif
-#ifdef HAVE_ALSA
- #include <alsa/asoundlib.h>
-#endif
+ PortID(const PortID& copy) = default;
+ PortID& operator=(const PortID& copy) = default;
-#include "PatchagePort.hpp"
+ PortID(PortID&& id) = default;
+ PortID& operator=(PortID&& id) = default;
-struct PortID {
- PortID() : type(NULL_PORT_ID) { memset(&id, 0, sizeof(id)); }
- PortID(const PortID& copy) : type(copy.type) {
- memcpy(&id, &copy.id, sizeof(id));
- }
-
- enum { NULL_PORT_ID, JACK_ID, ALSA_ADDR } type;
-
-#ifdef PATCHAGE_LIBJACK
- PortID(jack_port_id_t jack_id, bool ign=false)
- : type(JACK_ID) { id.jack_id = jack_id; }
-#endif
-
-#ifdef HAVE_ALSA
- PortID(snd_seq_addr_t addr, bool in)
- : type(ALSA_ADDR) { id.alsa_addr = addr; id.is_input = in; }
-#endif
-
- union {
-#ifdef PATCHAGE_LIBJACK
- jack_port_id_t jack_id;
-#endif
-#ifdef HAVE_ALSA
- struct {
- snd_seq_addr_t alsa_addr;
- bool is_input : 1;
- };
-#endif
- } id;
+ ~PortID() = default;
+
+ /// Return an ID for a JACK port by full name (like "client:port")
+ static PortID jack(std::string name)
+ {
+ return PortID{Type::jack, std::move(name)};
+ }
+
+ /// Return an ID for a JACK port by separate client and port name
+ static PortID jack(const std::string& client_name,
+ const std::string& port_name)
+ {
+ return PortID{Type::jack, client_name + ":" + port_name};
+ }
+
+ /// Return an ID for an ALSA Sequencer port by ID
+ static PortID alsa(const uint8_t client_id,
+ const uint8_t port,
+ const bool is_input)
+ {
+ return PortID{Type::alsa, client_id, port, is_input};
+ }
+
+ /// Return the ID of the client that hosts this port
+ ClientID client() const
+ {
+ switch (_type) {
+ case Type::jack:
+ return ClientID::jack(_jack_name.substr(0, _jack_name.find(':')));
+ case Type::alsa:
+ return ClientID::alsa(_alsa_client);
+ }
+
+ PATCHAGE_UNREACHABLE();
+ }
+
+ Type type() const { return _type; }
+ const std::string& jack_name() const { return _jack_name; }
+ uint8_t alsa_client() const { return _alsa_client; }
+ uint8_t alsa_port() const { return _alsa_port; }
+ bool alsa_is_input() const { return _alsa_is_input; }
+
+private:
+ PortID(const Type type, std::string jack_name)
+ : _type{type}
+ , _jack_name{std::move(jack_name)}
+ {
+ assert(_type == Type::jack);
+ assert(_jack_name.find(':') != std::string::npos);
+ assert(_jack_name.find(':') > 0);
+ assert(_jack_name.find(':') < _jack_name.length() - 1);
+ }
+
+ PortID(const Type type,
+ const uint8_t alsa_client,
+ const uint8_t alsa_port,
+ const bool is_input)
+ : _type{type}
+ , _alsa_client{alsa_client}
+ , _alsa_port{alsa_port}
+ , _alsa_is_input{is_input}
+ {
+ assert(_type == Type::alsa);
+ }
+
+ Type _type; ///< Determines which field is active
+ std::string _jack_name; ///< Full port name for Type::jack
+ uint8_t _alsa_client{}; ///< Client ID for Type::alsa
+ uint8_t _alsa_port{}; ///< Port ID for Type::alsa
+ bool _alsa_is_input{}; ///< Input flag for Type::alsa
};
-static inline std::ostream&
+inline std::ostream&
operator<<(std::ostream& os, const PortID& id)
{
- switch (id.type) {
- case PortID::NULL_PORT_ID:
- return os << "(null)";
- case PortID::JACK_ID:
-#ifdef PATCHAGE_LIBJACK
- return os << "jack:" << id.id.jack_id;
-#endif
- break;
- case PortID::ALSA_ADDR:
-#ifdef HAVE_ALSA
- return os << "alsa:" << (int)id.id.alsa_addr.client << ":" << (int)id.id.alsa_addr.port
- << ":" << (id.id.is_input ? "in" : "out");
-#endif
- break;
- }
- assert(false);
- return os;
+ switch (id.type()) {
+ case PortID::Type::jack:
+ return os << "jack:" << id.jack_name();
+ case PortID::Type::alsa:
+ return os << "alsa:" << int(id.alsa_client()) << ":" << int(id.alsa_port())
+ << ":" << (id.alsa_is_input() ? "in" : "out");
+ }
+
+ assert(false);
+ return os;
}
-static inline bool
-operator<(const PortID& a, const PortID& b)
+inline bool
+operator==(const PortID& lhs, const PortID& rhs)
{
- if (a.type != b.type)
- return a.type < b.type;
-
- switch (a.type) {
- case PortID::NULL_PORT_ID:
- return true;
- case PortID::JACK_ID:
-#ifdef PATCHAGE_LIBJACK
- return a.id.jack_id < b.id.jack_id;
-#endif
- break;
- case PortID::ALSA_ADDR:
-#ifdef HAVE_ALSA
- if ((a.id.alsa_addr.client < b.id.alsa_addr.client)
- || ((a.id.alsa_addr.client == b.id.alsa_addr.client)
- && a.id.alsa_addr.port < b.id.alsa_addr.port)) {
- return true;
- } else if (a.id.alsa_addr.client == b.id.alsa_addr.client
- && a.id.alsa_addr.port == b.id.alsa_addr.port) {
- return (a.id.is_input < b.id.is_input);
- } else {
- return false;
- }
-#endif
- break;
- }
- assert(false);
- return false;
+ if (lhs.type() != rhs.type()) {
+ return false;
+ }
+
+ switch (lhs.type()) {
+ case PortID::Type::jack:
+ return lhs.jack_name() == rhs.jack_name();
+ case PortID::Type::alsa:
+ return std::make_tuple(
+ lhs.alsa_client(), lhs.alsa_port(), lhs.alsa_is_input()) ==
+ std::make_tuple(
+ rhs.alsa_client(), rhs.alsa_port(), rhs.alsa_is_input());
+ }
+
+ assert(false);
+ return false;
+}
+
+inline bool
+operator<(const PortID& lhs, const PortID& rhs)
+{
+ if (lhs.type() != rhs.type()) {
+ return lhs.type() < rhs.type();
+ }
+
+ switch (lhs.type()) {
+ case PortID::Type::jack:
+ return lhs.jack_name() < rhs.jack_name();
+ case PortID::Type::alsa:
+ return std::make_tuple(
+ lhs.alsa_client(), lhs.alsa_port(), lhs.alsa_is_input()) <
+ std::make_tuple(
+ rhs.alsa_client(), rhs.alsa_port(), rhs.alsa_is_input());
+ }
+
+ assert(false);
+ return false;
}
+} // namespace patchage
+
+namespace std {
+
+template<>
+struct hash<patchage::PortID::Type> {
+ size_t operator()(const patchage::PortID::Type& v) const noexcept
+ {
+ return hash<unsigned>()(static_cast<unsigned>(v));
+ }
+};
+
+} // namespace std
+
+template<>
+struct fmt::formatter<patchage::PortID> : fmt::ostream_formatter {};
+
#endif // PATCHAGE_PORTID_HPP
diff --git a/src/PortInfo.hpp b/src/PortInfo.hpp
new file mode 100644
index 0000000..b6d88b5
--- /dev/null
+++ b/src/PortInfo.hpp
@@ -0,0 +1,26 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_PORTINFO_HPP
+#define PATCHAGE_PORTINFO_HPP
+
+#include "PortType.hpp"
+#include "SignalDirection.hpp"
+
+#include <optional>
+#include <string>
+
+namespace patchage {
+
+/// Extra information about a port not expressed in its ID
+struct PortInfo {
+ std::string label; ///< Human-friendly label
+ PortType type; ///< Detailed port type
+ SignalDirection direction; ///< Signal direction
+ std::optional<int> order; ///< Order key on client
+ bool is_terminal; ///< True if this is a system port
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_PORTINFO_HPP
diff --git a/src/PortNames.hpp b/src/PortNames.hpp
new file mode 100644
index 0000000..4dce7c6
--- /dev/null
+++ b/src/PortNames.hpp
@@ -0,0 +1,44 @@
+// Copyright 2008-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_PORTNAMES_HPP
+#define PATCHAGE_PORTNAMES_HPP
+
+#include "PortID.hpp"
+
+#include <cassert>
+#include <string>
+
+namespace patchage {
+
+/// Utility class that splits a Jack port ID into client and port names
+class PortNames
+{
+public:
+ explicit PortNames(const std::string& jack_name)
+ {
+ const auto colon = jack_name.find(':');
+
+ if (colon != std::string::npos) {
+ _client_name = jack_name.substr(0, colon);
+ _port_name = jack_name.substr(colon + 1);
+ }
+ }
+
+ explicit PortNames(const PortID& id)
+ : PortNames(id.jack_name())
+ {
+ assert(id.type() == PortID::Type::jack);
+ }
+
+ const std::string& client() const { return _client_name; }
+ const std::string& port() const { return _port_name; }
+
+private:
+ std::string _client_name;
+ std::string _port_name;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_PORTNAMES_HPP
diff --git a/src/PortType.hpp b/src/PortType.hpp
new file mode 100644
index 0000000..97ecaab
--- /dev/null
+++ b/src/PortType.hpp
@@ -0,0 +1,50 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_PORTTYPE_HPP
+#define PATCHAGE_PORTTYPE_HPP
+
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+#include <fmt/ostream.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <ostream>
+
+namespace patchage {
+
+enum class PortType {
+ jack_audio,
+ jack_midi,
+ alsa_midi,
+ jack_osc,
+ jack_cv,
+};
+
+inline std::ostream&
+operator<<(std::ostream& os, const PortType port_type)
+{
+ switch (port_type) {
+ case PortType::jack_audio:
+ return os << "JACK audio";
+ case PortType::jack_midi:
+ return os << "JACK MIDI";
+ case PortType::alsa_midi:
+ return os << "ALSA MIDI";
+ case PortType::jack_osc:
+ return os << "JACK OSC";
+ case PortType::jack_cv:
+ return os << "JACK CV";
+ }
+
+ PATCHAGE_UNREACHABLE();
+}
+
+} // namespace patchage
+
+template<>
+struct fmt::formatter<patchage::PortType> : fmt::ostream_formatter {};
+
+#endif // PATCHAGE_PORTTYPE_HPP
diff --git a/src/Queue.hpp b/src/Queue.hpp
deleted file mode 100644
index ab47aed..0000000
--- a/src/Queue.hpp
+++ /dev/null
@@ -1,131 +0,0 @@
-/* This file is part of Patchage.
- * Copyright 2007-2017 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License along with
- * Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef QUEUE_HPP_INCLUDED
-#define QUEUE_HPP_INCLUDED
-
-#include <atomic>
-#include <cassert>
-
-/** Realtime-safe single-reader single-writer queue */
-template <typename T>
-class Queue
-{
-public:
- /** @param size Size in number of elements */
- explicit Queue(size_t size);
- ~Queue();
-
- // Any thread:
-
- inline size_t capacity() const { return _size - 1; }
-
- // Write thread(s):
-
- inline bool full() const;
- inline bool push(const T& obj);
-
- // Read thread:
-
- inline bool empty() const;
- inline T& front() const;
- inline void pop();
-
-private:
- std::atomic<size_t> _front; ///< Index to front of queue
- std::atomic<size_t> _back; ///< Index to back of queue (one past end)
- const size_t _size; ///< Size of `_objects` (at most _size-1)
- T* const _objects; ///< Fixed array containing queued elements
-};
-
-template<typename T>
-Queue<T>::Queue(size_t size)
- : _front(0)
- , _back(0)
- , _size(size + 1)
- , _objects(new T[_size])
-{
- assert(size > 1);
-}
-
-template <typename T>
-Queue<T>::~Queue()
-{
- delete[] _objects;
-}
-
-/** Return whether or not the queue is empty.
- */
-template <typename T>
-inline bool
-Queue<T>::empty() const
-{
- return (_back.load() == _front.load());
-}
-
-/** Return whether or not the queue is full.
- */
-template <typename T>
-inline bool
-Queue<T>::full() const
-{
- return (((_front.load() - _back.load() + _size) % _size) == 1);
-}
-
-/** Return the element at the front of the queue without removing it
- */
-template <typename T>
-inline T&
-Queue<T>::front() const
-{
- return _objects[_front.load()];
-}
-
-/** Push an item onto the back of the Queue - realtime-safe, not thread-safe.
- *
- * @returns true if `elem` was successfully pushed onto the queue,
- * false otherwise (queue is full).
- */
-template <typename T>
-inline bool
-Queue<T>::push(const T& elem)
-{
- if (full()) {
- return false;
- } else {
- unsigned back = _back.load();
- _objects[back] = elem;
- _back = (back + 1) % _size;
- return true;
- }
-}
-
-/** Pop an item off the front of the queue - realtime-safe, not thread-safe.
- *
- * It is a fatal error to call pop() when the queue is empty.
- *
- * @returns the element popped.
- */
-template <typename T>
-inline void
-Queue<T>::pop()
-{
- assert(!empty());
- assert(_size > 0);
-
- _front = (_front.load() + 1) % (_size);
-}
-
-#endif // QUEUE_HPP_INCLUDED
diff --git a/src/Reactor.cpp b/src/Reactor.cpp
new file mode 100644
index 0000000..49cbe5a
--- /dev/null
+++ b/src/Reactor.cpp
@@ -0,0 +1,216 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "Reactor.hpp"
+
+#include "Action.hpp"
+#include "Canvas.hpp"
+#include "CanvasModule.hpp"
+#include "CanvasPort.hpp"
+#include "ClientType.hpp"
+#include "Configuration.hpp"
+#include "Driver.hpp"
+#include "Drivers.hpp"
+#include "ILog.hpp"
+#include "PortID.hpp"
+#include "Setting.hpp"
+#include "SignalDirection.hpp"
+#include "warnings.hpp"
+
+#include "ganv/Port.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <variant>
+
+namespace patchage {
+
+class SettingVisitor
+{
+public:
+ explicit SettingVisitor(Configuration& conf)
+ : _conf{conf}
+ {}
+
+ template<class S>
+ void operator()(const S& setting) const
+ {
+ _conf.set_setting(setting);
+ }
+
+private:
+ Configuration& _conf;
+};
+
+Reactor::Reactor(Configuration& conf,
+ Drivers& drivers,
+ Canvas& canvas,
+ ILog& log)
+ : _conf{conf}
+ , _drivers{drivers}
+ , _canvas{canvas}
+ , _log{log}
+{}
+
+void
+Reactor::operator()(const action::ChangeSetting& action)
+{
+ const SettingVisitor visitor{_conf};
+ std::visit(visitor, action.setting);
+}
+
+void
+Reactor::operator()(const action::ConnectPorts& action)
+{
+ if (action.tail.type() == action.head.type()) {
+ if (auto* d = _drivers.driver(action.tail.type())) {
+ d->connect(action.tail, action.head);
+ } else {
+ _log.error(fmt::format("No driver for {}", action.tail.type()));
+ }
+ } else {
+ _log.warning("Unable to connect incompatible port");
+ }
+}
+
+void
+Reactor::operator()(const action::DecreaseFontSize&)
+{
+ _conf.set<setting::FontSize>(_conf.get<setting::FontSize>() - 1.0f);
+}
+
+void
+Reactor::operator()(const action::DisconnectClient& action)
+{
+ if (CanvasModule* mod = find_module(action.client, action.direction)) {
+ for (Ganv::Port* p : *mod) {
+ if (p) {
+ p->disconnect();
+ }
+ }
+ }
+}
+
+void
+Reactor::operator()(const action::DisconnectPort& action)
+{
+ if (CanvasPort* port = find_port(action.port)) {
+ port->disconnect();
+ }
+}
+
+void
+Reactor::operator()(const action::DisconnectPorts& action)
+{
+ if (action.tail.type() == action.head.type()) {
+ if (auto* d = _drivers.driver(action.tail.type())) {
+ d->disconnect(action.tail, action.head);
+ } else {
+ _log.error(fmt::format("No driver available to disconnect ports"));
+ }
+ } else {
+ _log.error("Unable to disconnect incompatible ports");
+ }
+}
+
+void
+Reactor::operator()(const action::IncreaseFontSize&)
+{
+ _conf.set<setting::FontSize>(_conf.get<setting::FontSize>() + 1.0f);
+}
+
+void
+Reactor::operator()(const action::MoveModule& action)
+{
+ _conf.set_module_location(
+ module_name(action.client), action.direction, {action.x, action.y});
+}
+
+void
+Reactor::operator()(const action::Refresh&)
+{
+ _drivers.refresh();
+}
+
+void
+Reactor::operator()(const action::ResetFontSize&)
+{
+ _conf.set<setting::FontSize>(_canvas.get_default_font_size());
+}
+
+void
+Reactor::operator()(const action::SplitModule& action)
+{
+ _conf.set_module_split(module_name(action.client), true);
+ _drivers.refresh();
+}
+
+void
+Reactor::operator()(const action::UnsplitModule& action)
+{
+ _conf.set_module_split(module_name(action.client), false);
+ _drivers.refresh();
+}
+
+void
+Reactor::operator()(const action::ZoomFull&)
+{
+ _canvas.zoom_full();
+ _conf.set<setting::Zoom>(_canvas.get_zoom());
+}
+
+void
+Reactor::operator()(const action::ZoomIn&)
+{
+ _conf.set<setting::Zoom>(_conf.get<setting::Zoom>() * 1.25f);
+}
+
+void
+Reactor::operator()(const action::ZoomNormal&)
+{
+ _conf.set<setting::Zoom>(1.0);
+}
+
+void
+Reactor::operator()(const action::ZoomOut&)
+{
+ _conf.set<setting::Zoom>(_conf.get<setting::Zoom>() * 0.75f);
+}
+
+void
+Reactor::operator()(const Action& action)
+{
+ std::visit(*this, action);
+}
+
+std::string
+Reactor::module_name(const ClientID& client)
+{
+ // Note that split modules always have the same name
+
+ if (CanvasModule* mod = find_module(client, SignalDirection::input)) {
+ return mod->name();
+ }
+
+ if (CanvasModule* mod = find_module(client, SignalDirection::output)) {
+ return mod->name();
+ }
+
+ return std::string{};
+}
+
+CanvasModule*
+Reactor::find_module(const ClientID& client, const SignalDirection type)
+{
+ return _canvas.find_module(client, type);
+}
+
+CanvasPort*
+Reactor::find_port(const PortID& port)
+{
+ return _canvas.find_port(port);
+}
+
+} // namespace patchage
diff --git a/src/Reactor.hpp b/src/Reactor.hpp
new file mode 100644
index 0000000..a5bc9e9
--- /dev/null
+++ b/src/Reactor.hpp
@@ -0,0 +1,75 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_REACTOR_HPP
+#define PATCHAGE_REACTOR_HPP
+
+#include "Action.hpp"
+
+#include <string>
+
+namespace patchage {
+
+enum class SignalDirection;
+
+struct ClientID;
+struct PortID;
+
+class Canvas;
+class CanvasModule;
+class CanvasPort;
+class Configuration;
+class Drivers;
+class ILog;
+
+/// Reacts to actions from the user
+class Reactor
+{
+public:
+ explicit Reactor(Configuration& conf,
+ Drivers& drivers,
+ Canvas& canvas,
+ ILog& log);
+
+ Reactor(const Reactor&) = delete;
+ Reactor& operator=(const Reactor&) = delete;
+
+ Reactor(Reactor&&) = delete;
+ Reactor& operator=(Reactor&&) = delete;
+
+ ~Reactor() = default;
+
+ void operator()(const action::ChangeSetting& action);
+ void operator()(const action::ConnectPorts& action);
+ void operator()(const action::DecreaseFontSize& action);
+ void operator()(const action::DisconnectClient& action);
+ void operator()(const action::DisconnectPort& action);
+ void operator()(const action::DisconnectPorts& action);
+ void operator()(const action::IncreaseFontSize& action);
+ void operator()(const action::MoveModule& action);
+ void operator()(const action::Refresh& action);
+ void operator()(const action::ResetFontSize& action);
+ void operator()(const action::SplitModule& action);
+ void operator()(const action::UnsplitModule& action);
+ void operator()(const action::ZoomFull& action);
+ void operator()(const action::ZoomIn& action);
+ void operator()(const action::ZoomNormal& action);
+ void operator()(const action::ZoomOut& action);
+
+ void operator()(const Action& action);
+
+private:
+ std::string module_name(const ClientID& client);
+
+ CanvasModule* find_module(const ClientID& client, SignalDirection type);
+ CanvasPort* find_port(const PortID& port);
+
+ Configuration& _conf;
+ Drivers& _drivers;
+ Canvas& _canvas;
+ ILog& _log;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_REACTOR_HPP
diff --git a/src/Setting.hpp b/src/Setting.hpp
new file mode 100644
index 0000000..4bcfc81
--- /dev/null
+++ b/src/Setting.hpp
@@ -0,0 +1,88 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_SETTING_HPP
+#define PATCHAGE_SETTING_HPP
+
+#include "Coord.hpp"
+#include "PortType.hpp"
+
+#include <cstdint>
+#include <variant>
+
+namespace patchage {
+namespace setting {
+
+struct AlsaAttached {
+ bool value{};
+};
+
+struct FontSize {
+ float value{};
+};
+
+struct HumanNames {
+ bool value{};
+};
+
+struct JackAttached {
+ bool value{};
+};
+
+struct MessagesHeight {
+ int value{};
+};
+
+struct MessagesVisible {
+ bool value{};
+};
+
+struct PortColor {
+ PortType type{};
+ uint32_t color{};
+};
+
+struct SortedPorts {
+ bool value{};
+};
+
+struct SprungLayout {
+ bool value{};
+};
+
+struct ToolbarVisible {
+ bool value{};
+};
+
+struct WindowLocation {
+ Coord value{};
+};
+
+struct WindowSize {
+ Coord value{};
+};
+
+struct Zoom {
+ float value{};
+};
+
+} // namespace setting
+
+/// A configuration setting
+using Setting = std::variant<setting::AlsaAttached,
+ setting::FontSize,
+ setting::HumanNames,
+ setting::JackAttached,
+ setting::MessagesHeight,
+ setting::MessagesVisible,
+ setting::PortColor,
+ setting::SortedPorts,
+ setting::SprungLayout,
+ setting::ToolbarVisible,
+ setting::WindowLocation,
+ setting::WindowSize,
+ setting::Zoom>;
+
+} // namespace patchage
+
+#endif // PATCHAGE_SETTING_HPP
diff --git a/src/SignalDirection.hpp b/src/SignalDirection.hpp
new file mode 100644
index 0000000..84c3cc2
--- /dev/null
+++ b/src/SignalDirection.hpp
@@ -0,0 +1,44 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_SIGNALDIRECTION_HPP
+#define PATCHAGE_SIGNALDIRECTION_HPP
+
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+#include <fmt/ostream.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <ostream>
+
+namespace patchage {
+
+enum class SignalDirection {
+ input,
+ output,
+ duplex,
+};
+
+inline std::ostream&
+operator<<(std::ostream& os, const SignalDirection direction)
+{
+ switch (direction) {
+ case SignalDirection::input:
+ return os << "input";
+ case SignalDirection::output:
+ return os << "output";
+ case SignalDirection::duplex:
+ return os << "duplex";
+ }
+
+ PATCHAGE_UNREACHABLE();
+}
+
+} // namespace patchage
+
+template<>
+struct fmt::formatter<patchage::SignalDirection> : fmt::ostream_formatter {};
+
+#endif // PATCHAGE_SIGNALDIRECTION_HPP
diff --git a/src/TextViewLog.cpp b/src/TextViewLog.cpp
new file mode 100644
index 0000000..712b760
--- /dev/null
+++ b/src/TextViewLog.cpp
@@ -0,0 +1,81 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "TextViewLog.hpp"
+
+#include "Widget.hpp"
+
+#include <gdkmm/color.h>
+#include <glibmm/propertyproxy.h>
+#include <glibmm/refptr.h>
+#include <gtkmm/enums.h>
+#include <gtkmm/textbuffer.h>
+#include <gtkmm/texttag.h>
+#include <gtkmm/texttagtable.h>
+#include <gtkmm/textview.h>
+
+#include <string>
+
+namespace patchage {
+
+TextViewLog::TextViewLog(Widget<Gtk::TextView>& text_view)
+ : _error_tag{Gtk::TextTag::create()}
+ , _warning_tag{Gtk::TextTag::create()}
+ , _text_view{text_view}
+{
+ for (int s = Gtk::STATE_NORMAL; s <= Gtk::STATE_INSENSITIVE; ++s) {
+ _text_view->modify_base(static_cast<Gtk::StateType>(s),
+ Gdk::Color("#000000"));
+ _text_view->modify_text(static_cast<Gtk::StateType>(s),
+ Gdk::Color("#FFFFFF"));
+ }
+
+ _error_tag->property_foreground() = "#CC0000";
+ _text_view->get_buffer()->get_tag_table()->add(_error_tag);
+
+ _warning_tag->property_foreground() = "#C4A000";
+ _text_view->get_buffer()->get_tag_table()->add(_warning_tag);
+
+ _text_view->set_pixels_inside_wrap(2);
+ _text_view->set_left_margin(4);
+ _text_view->set_right_margin(4);
+ _text_view->set_pixels_below_lines(2);
+}
+
+void
+TextViewLog::info(const std::string& msg)
+{
+ const Glib::RefPtr<Gtk::TextBuffer> buffer = _text_view->get_buffer();
+ buffer->insert(buffer->end(), std::string("\n") + msg);
+ _text_view->scroll_to_mark(buffer->get_insert(), 0);
+}
+
+void
+TextViewLog::warning(const std::string& msg)
+{
+ const Glib::RefPtr<Gtk::TextBuffer> buffer = _text_view->get_buffer();
+ buffer->insert_with_tag(buffer->end(), std::string("\n") + msg, _warning_tag);
+ _text_view->scroll_to_mark(buffer->get_insert(), 0);
+}
+
+void
+TextViewLog::error(const std::string& msg)
+{
+ const Glib::RefPtr<Gtk::TextBuffer> buffer = _text_view->get_buffer();
+ buffer->insert_with_tag(buffer->end(), std::string("\n") + msg, _error_tag);
+ _text_view->scroll_to_mark(buffer->get_insert(), 0);
+}
+
+int
+TextViewLog::min_height() const
+{
+ const Glib::RefPtr<Gtk::TextBuffer> buffer = _text_view->get_buffer();
+
+ int y = 0;
+ int line_height = 0;
+ _text_view->get_line_yrange(buffer->begin(), y, line_height);
+
+ return line_height + 2 * _text_view->get_pixels_inside_wrap();
+}
+
+} // namespace patchage
diff --git a/src/TextViewLog.hpp b/src/TextViewLog.hpp
new file mode 100644
index 0000000..40d0782
--- /dev/null
+++ b/src/TextViewLog.hpp
@@ -0,0 +1,54 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_TEXTVIEWLOG_HPP
+#define PATCHAGE_TEXTVIEWLOG_HPP
+
+#include "ILog.hpp"
+
+#include <glibmm/refptr.h>
+#include <gtkmm/texttag.h>
+
+#include <string>
+
+namespace Gtk {
+class TextView;
+} // namespace Gtk
+
+namespace patchage {
+
+template<typename W>
+class Widget;
+
+/// Log that writes colored messages to a Gtk TextView
+class TextViewLog : public ILog
+{
+public:
+ explicit TextViewLog(Widget<Gtk::TextView>& text_view);
+
+ TextViewLog(const TextViewLog&) = delete;
+ TextViewLog& operator=(const TextViewLog&) = delete;
+
+ TextViewLog(TextViewLog&&) = delete;
+ TextViewLog& operator=(TextViewLog&&) = delete;
+
+ ~TextViewLog() override = default;
+
+ void info(const std::string& msg) override;
+ void error(const std::string& msg) override;
+ void warning(const std::string& msg) override;
+
+ int min_height() const;
+
+ const Widget<Gtk::TextView>& text_view() const { return _text_view; }
+ Widget<Gtk::TextView>& text_view() { return _text_view; }
+
+private:
+ Glib::RefPtr<Gtk::TextTag> _error_tag;
+ Glib::RefPtr<Gtk::TextTag> _warning_tag;
+ Widget<Gtk::TextView>& _text_view;
+};
+
+} // namespace patchage
+
+#endif // PATCHAGE_TEXTVIEWLOG_HPP
diff --git a/src/UIFile.hpp b/src/UIFile.hpp
index f1ab5f8..36be144 100644
--- a/src/UIFile.hpp
+++ b/src/UIFile.hpp
@@ -1,66 +1,65 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
-#ifndef PATCHAGE_GLADEFILE_HPP
-#define PATCHAGE_GLADEFILE_HPP
+#ifndef PATCHAGE_UIFILE_HPP
+#define PATCHAGE_UIFILE_HPP
+
+#include "patchage_config.h"
+
+#if PATCHAGE_BUNDLED
+# include "binary_location.h"
+#endif
+
+#include <glibmm/refptr.h>
+#include <gtkmm/builder.h>
#include <fstream>
#include <iostream>
#include <sstream>
+#include <stdexcept>
#include <string>
-#include <gtkmm/builder.h>
+namespace patchage {
-#include "patchage_config.h"
-#ifdef PATCHAGE_BINLOC
-#include "binary_location.h"
-#endif
-
-class UIFile {
+class UIFile
+{
public:
- inline static bool is_readable(const std::string& filename) {
- std::ifstream fs(filename.c_str());
- const bool fail = fs.fail();
- fs.close();
- return !fail;
- }
+ inline static bool is_readable(const std::string& filename)
+ {
+ std::ifstream fs(filename.c_str());
+ const bool fail = fs.fail();
+ fs.close();
+ return !fail;
+ }
- static Glib::RefPtr<Gtk::Builder> open(const std::string& base_name) {
- std::string ui_filename;
-#ifdef PATCHAGE_BINLOC
- const std::string bundle = bundle_location();
- if (!bundle.empty()) {
- ui_filename = bundle + "/" + base_name + ".ui";
- if (is_readable(ui_filename)) {
- std::cout << "Loading UI file " << ui_filename << std::endl;
- return Gtk::Builder::create_from_file(ui_filename);
- }
- }
+ static Glib::RefPtr<Gtk::Builder> open(const std::string& base_name)
+ {
+ std::string ui_filename = base_name + ".ui";
+
+#if PATCHAGE_BUNDLED
+ const std::string bundle = bundle_location();
+ if (!bundle.empty()) {
+ const std::string bundle_ui_filename = bundle + "/" + ui_filename;
+ if (is_readable(bundle_ui_filename)) {
+ std::cout << "Loading UI file " << bundle_ui_filename << std::endl;
+ return Gtk::Builder::create_from_file(bundle_ui_filename);
+ }
+ }
#endif
- ui_filename = std::string(PATCHAGE_DATA_DIR) + "/" + base_name + ".ui";
- if (is_readable(ui_filename)) {
- std::cout << "Loading UI file " << ui_filename << std::endl;
- return Gtk::Builder::create_from_file(ui_filename);
- }
- std::stringstream ss;
- ss << "Unable to find " << base_name << std::endl;
- throw std::runtime_error(ss.str());
- return Glib::RefPtr<Gtk::Builder>();
- }
+ ui_filename = std::string(PATCHAGE_DATA_DIR) + "/" + ui_filename;
+ if (is_readable(ui_filename)) {
+ std::cout << "Loading UI file " << ui_filename << std::endl;
+ return Gtk::Builder::create_from_file(ui_filename);
+ }
+
+ std::stringstream ss;
+ ss << "Unable to find " << ui_filename << std::endl;
+ throw std::runtime_error(ss.str());
+ return {};
+ }
};
-#endif // PATCHAGE_GLADEFILE_HPP
+} // namespace patchage
+
+#endif // PATCHAGE_UIFILE_HPP
diff --git a/src/Widget.hpp b/src/Widget.hpp
index 038f880..1c3b6ee 100644
--- a/src/Widget.hpp
+++ b/src/Widget.hpp
@@ -1,46 +1,46 @@
-/* This file is part of Patchage
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifndef PATCHAGE_WIDGET_HPP
#define PATCHAGE_WIDGET_HPP
-#include <string>
+#include <glibmm/refptr.h>
+#include <gtkmm/builder.h> // IWYU pragma: keep
-#include <boost/utility.hpp>
+#include <string>
-#include <gtkmm/builder.h>
+namespace patchage {
-template <typename W>
-class Widget : public boost::noncopyable {
+template<typename W>
+class Widget
+{
public:
- Widget(Glib::RefPtr<Gtk::Builder> xml, const std::string& name) {
- xml->get_widget(name, _me);
- }
+ Widget(const Glib::RefPtr<Gtk::Builder>& xml, const std::string& name)
+ {
+ xml->get_widget(name, _me);
+ }
+
+ Widget(const Widget&) = delete;
+ Widget& operator=(const Widget&) = delete;
- void destroy() { delete _me; }
+ Widget(Widget&&) = delete;
+ Widget& operator=(Widget&&) = delete;
- W* get() { return _me; }
- const W* get() const { return _me; }
- W* operator->() { return _me; }
- const W* operator->() const { return _me; }
- W& operator*() { return *_me; }
- const W& operator*() const { return *_me; }
+ ~Widget() = default;
+
+ void destroy() { delete _me; }
+
+ W* get() { return _me; }
+ const W* get() const { return _me; }
+ W* operator->() { return _me; }
+ const W* operator->() const { return _me; }
+ W& operator*() { return *_me; }
+ const W& operator*() const { return *_me; }
private:
- W* _me;
+ W* _me;
};
+} // namespace patchage
+
#endif // PATCHAGE_WIDGET_HPP
diff --git a/src/binary_location.h b/src/binary_location.h
index 303a3bd..12f726e 100644
--- a/src/binary_location.h
+++ b/src/binary_location.h
@@ -1,54 +1,43 @@
-/* This file is part of Patchage.
- * Copyright 2008-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
-
-#ifndef _GNU_SOURCE
- #define _GNU_SOURCE
-#endif
-
-#include <assert.h>
-#include <limits.h>
-#include <stdlib.h>
+// Copyright 2008-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
#include <dlfcn.h>
+#include <climits>
+#include <cstdlib>
#include <string>
+namespace patchage {
+
/** Return the absolute path of the binary. */
-static std::string
+inline std::string
binary_location()
{
- Dl_info dli;
- std::string loc;
- const int ret = dladdr((void*)&binary_location, &dli);
- if (ret) {
- char* const bin_loc = (char*)calloc(PATH_MAX, 1);
- if (realpath(dli.dli_fname, bin_loc)) {
- loc = bin_loc;
- }
- free(bin_loc);
- }
- return loc;
+ Dl_info dli = {};
+ const int ret = dladdr(reinterpret_cast<void*>(&binary_location), &dli);
+ if (!ret) {
+ return "";
+ }
+
+ char* const bin_loc = realpath(dli.dli_fname, nullptr);
+ if (!bin_loc) {
+ return "";
+ }
+
+ std::string loc{bin_loc};
+ free(bin_loc);
+ return loc;
}
/** Return the absolute path of the bundle (binary parent directory). */
-static std::string
+inline std::string
bundle_location()
{
- const std::string binary = binary_location();
- if (binary.empty()) {
- return "";
- }
- return binary.substr(0, binary.find_last_of('/'));
+ const std::string binary = binary_location();
+ if (binary.empty()) {
+ return "";
+ }
+ return binary.substr(0, binary.find_last_of('/'));
}
+
+} // namespace patchage
diff --git a/src/event_to_string.cpp b/src/event_to_string.cpp
new file mode 100644
index 0000000..499966a
--- /dev/null
+++ b/src/event_to_string.cpp
@@ -0,0 +1,110 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "event_to_string.hpp"
+
+#include "ClientID.hpp"
+#include "ClientInfo.hpp"
+#include "ClientType.hpp"
+#include "Event.hpp"
+#include "PortID.hpp"
+#include "PortInfo.hpp"
+#include "PortType.hpp"
+#include "SignalDirection.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <optional>
+#include <string>
+#include <variant>
+
+namespace patchage {
+
+namespace {
+
+struct EventPrinter {
+ std::string operator()(const ClientType type)
+ {
+ switch (type) {
+ case ClientType::jack:
+ return "JACK";
+ case ClientType::alsa:
+ return "ALSA";
+ }
+
+ PATCHAGE_UNREACHABLE();
+ }
+
+ std::string operator()(const event::Cleared&) { return "Cleared"; }
+
+ std::string operator()(const event::DriverAttached& event)
+ {
+ return fmt::format("Attached to {}", (*this)(event.type));
+ }
+
+ std::string operator()(const event::DriverDetached& event)
+ {
+ return fmt::format("Detached from {}", (*this)(event.type));
+ }
+
+ std::string operator()(const event::ClientCreated& event)
+ {
+ return fmt::format(R"(Add client "{}" ("{}"))", event.id, event.info.label);
+ }
+
+ std::string operator()(const event::ClientDestroyed& event)
+ {
+ return fmt::format(R"(Remove client "{}")", event.id);
+ }
+
+ std::string operator()(const event::PortCreated& event)
+ {
+ auto result = fmt::format(R"(Add {}{} {} "{}" ("{}"))",
+ event.info.type,
+ event.info.is_terminal ? " terminal" : "",
+ event.info.direction,
+ event.id,
+ event.info.label);
+
+ if (event.info.order) {
+ result += fmt::format(" order: {}", *event.info.order);
+ }
+
+ return result;
+ }
+
+ std::string operator()(const event::PortDestroyed& event)
+ {
+ return fmt::format(R"(Remove port "{}")", event.id);
+ }
+
+ std::string operator()(const event::PortsConnected& event)
+ {
+ return fmt::format(R"(Connect "{}" to "{}")", event.tail, event.head);
+ }
+
+ std::string operator()(const event::PortsDisconnected& event)
+ {
+ return fmt::format(R"(Disconnect "{}" from "{}")", event.tail, event.head);
+ }
+};
+
+} // namespace
+
+std::string
+event_to_string(const Event& event)
+{
+ EventPrinter printer;
+ return std::visit(printer, event);
+}
+
+std::ostream&
+operator<<(std::ostream& os, const Event& event)
+{
+ return os << event_to_string(event);
+}
+
+} // namespace patchage
diff --git a/src/event_to_string.hpp b/src/event_to_string.hpp
new file mode 100644
index 0000000..926065c
--- /dev/null
+++ b/src/event_to_string.hpp
@@ -0,0 +1,22 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_EVENT_TO_STRING_HPP
+#define PATCHAGE_EVENT_TO_STRING_HPP
+
+#include "Event.hpp"
+
+#include <iosfwd>
+#include <string>
+
+namespace patchage {
+
+std::string
+event_to_string(const Event& event);
+
+std::ostream&
+operator<<(std::ostream& os, const Event& event);
+
+} // namespace patchage
+
+#endif // PATCHAGE_EVENT_TO_STRING_HPP
diff --git a/src/handle_event.cpp b/src/handle_event.cpp
new file mode 100644
index 0000000..d75c42d
--- /dev/null
+++ b/src/handle_event.cpp
@@ -0,0 +1,149 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#include "handle_event.hpp"
+
+#include "Canvas.hpp"
+#include "CanvasPort.hpp"
+#include "ClientType.hpp"
+#include "Configuration.hpp"
+#include "Event.hpp"
+#include "ILog.hpp"
+#include "Metadata.hpp"
+#include "PortID.hpp"
+#include "Setting.hpp"
+#include "warnings.hpp"
+
+PATCHAGE_DISABLE_FMT_WARNINGS
+#include <fmt/core.h>
+PATCHAGE_RESTORE_WARNINGS
+
+#include <variant>
+
+namespace patchage {
+
+namespace {
+
+class EventHandler
+{
+public:
+ explicit EventHandler(Configuration& conf,
+ Metadata& metadata,
+ Canvas& canvas,
+ ILog& log)
+ : _conf{conf}
+ , _metadata{metadata}
+ , _canvas{canvas}
+ , _log{log}
+ {}
+
+ void operator()(const event::Cleared&) { _canvas.clear(); }
+
+ void operator()(const event::DriverAttached& event)
+ {
+ switch (event.type) {
+ case ClientType::alsa:
+ _conf.set<setting::AlsaAttached>(true);
+ break;
+ case ClientType::jack:
+ _conf.set<setting::JackAttached>(true);
+ break;
+ }
+ }
+
+ void operator()(const event::DriverDetached& event)
+ {
+ switch (event.type) {
+ case ClientType::alsa:
+ _conf.set<setting::AlsaAttached>(false);
+ break;
+ case ClientType::jack:
+ _conf.set<setting::JackAttached>(false);
+ break;
+ }
+ }
+
+ void operator()(const event::ClientCreated& event)
+ {
+ // Don't create empty modules, they will be created when ports are added
+ _metadata.set_client(event.id, event.info);
+ }
+
+ void operator()(const event::ClientDestroyed& event)
+ {
+ _canvas.remove_module(event.id);
+ _metadata.erase_client(event.id);
+ }
+
+ void operator()(const event::PortCreated& event)
+ {
+ _metadata.set_port(event.id, event.info);
+
+ auto* const port =
+ _canvas.create_port(_conf, _metadata, event.id, event.info);
+
+ if (!port) {
+ _log.error(
+ fmt::format("Unable to create view for port \"{}\"", event.id));
+ }
+ }
+
+ void operator()(const event::PortDestroyed& event)
+ {
+ _canvas.remove_port(event.id);
+ _metadata.erase_port(event.id);
+ }
+
+ void operator()(const event::PortsConnected& event)
+ {
+ CanvasPort* port_1 = _canvas.find_port(event.tail);
+ CanvasPort* port_2 = _canvas.find_port(event.head);
+
+ if (!port_1) {
+ _log.error(
+ fmt::format("Unable to find port \"{}\" to connect", event.tail));
+ } else if (!port_2) {
+ _log.error(
+ fmt::format("Unable to find port \"{}\" to connect", event.head));
+ } else {
+ _canvas.make_connection(port_1, port_2);
+ }
+ }
+
+ void operator()(const event::PortsDisconnected& event)
+ {
+ CanvasPort* port_1 = _canvas.find_port(event.tail);
+ CanvasPort* port_2 = _canvas.find_port(event.head);
+
+ if (!port_1) {
+ _log.error(
+ fmt::format("Unable to find port \"{}\" to disconnect", event.tail));
+ } else if (!port_2) {
+ _log.error(
+ fmt::format("Unable to find port \"{}\" to disconnect", event.head));
+ } else {
+ _canvas.remove_edge_between(port_1, port_2);
+ }
+ }
+
+private:
+ Configuration& _conf;
+ Metadata& _metadata;
+ Canvas& _canvas;
+ ILog& _log;
+};
+
+} // namespace
+
+void
+handle_event(Configuration& conf,
+ Metadata& metadata,
+ Canvas& canvas,
+ ILog& log,
+ const Event& event)
+{
+ EventHandler handler{conf, metadata, canvas, log};
+ std::visit(handler, event);
+}
+
+} // namespace patchage
diff --git a/src/handle_event.hpp b/src/handle_event.hpp
new file mode 100644
index 0000000..fae6d78
--- /dev/null
+++ b/src/handle_event.hpp
@@ -0,0 +1,26 @@
+// Copyright 2007-2021 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_HANDLE_EVENT_HPP
+#define PATCHAGE_HANDLE_EVENT_HPP
+
+#include "Event.hpp"
+
+namespace patchage {
+
+class Configuration;
+class Metadata;
+class Canvas;
+class ILog;
+
+/// Handle an event from the system by updating the GUI as necessary
+void
+handle_event(Configuration& conf,
+ Metadata& metadata,
+ Canvas& canvas,
+ ILog& log,
+ const Event& event);
+
+} // namespace patchage
+
+#endif // PATCHAGE_HANDLE_EVENT_HPP
diff --git a/src/i18n.hpp b/src/i18n.hpp
new file mode 100644
index 0000000..ebf8fd1
--- /dev/null
+++ b/src/i18n.hpp
@@ -0,0 +1,12 @@
+// Copyright 2022 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_I18N_HPP
+#define PATCHAGE_I18N_HPP
+
+#include <libintl.h>
+
+/// Mark a string literal as translatable
+#define T(msgid) gettext(msgid)
+
+#endif // PATCHAGE_I18N_HPP
diff --git a/src/jackey.h b/src/jackey.h
index 02a7735..607eadb 100644
--- a/src/jackey.h
+++ b/src/jackey.h
@@ -1,18 +1,8 @@
-/*
- Copyright 2014 David Robillard <http://drobilla.net>
+// Copyright 2014-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
- Permission to use, copy, modify, and/or distribute this software for any
- purpose with or without fee is hereby granted, provided that the above
- copyright notice and this permission notice appear in all copies.
-
- THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
- WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
- MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
- ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
- WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
- ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
- OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
-*/
+#ifndef JACKEY_H
+#define JACKEY_H
/**
The supported event types of an event port.
@@ -70,3 +60,5 @@
other relevance to order values.
*/
#define JACKEY_ORDER "http://jackaudio.org/metadata/order"
+
+#endif // JACKEY_H
diff --git a/src/main.cpp b/src/main.cpp
index 4822d3d..6d09fe4 100644
--- a/src/main.cpp
+++ b/src/main.cpp
@@ -1,93 +1,159 @@
-/* This file is part of Patchage.
- * Copyright 2007-2014 David Robillard <http://drobilla.net>
- *
- * Patchage is free software: you can redistribute it and/or modify it under
- * the terms of the GNU General Public License as published by the Free
- * Software Foundation, either version 3 of the License, or (at your option)
- * any later version.
- *
- * Patchage is distributed in the hope that it will be useful, but WITHOUT ANY
- * WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
- * FOR A PARTICULAR PURPOSE. See the GNU General Public License for details.
- *
- * You should have received a copy of the GNU General Public License
- * along with Patchage. If not, see <http://www.gnu.org/licenses/>.
- */
+// Copyright 2007-2022 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
#ifdef __APPLE__
-#include <stdlib.h>
-#include <unistd.h>
-#include <string>
-#include <gtk/gtkrc.h>
-#include "binary_location.h"
+# include "binary_location.h"
+
+# include <gtk/gtkrc.h>
+
+# include <unistd.h>
+
+# include <cstdlib>
+# include <string>
#endif
-#include <iostream>
+#include "Options.hpp"
+#include "Patchage.hpp"
+#include "patchage_config.h"
#include <glibmm/exception.h>
+#include <glibmm/thread.h>
+#include <glibmm/ustring.h>
+#include <gtkmm/main.h>
-#include "Patchage.hpp"
+#if USE_GETTEXT
+# include <libintl.h>
+
+# include <clocale>
+#endif
+
+#include <cstring>
+#include <exception>
+#include <iostream>
+
+namespace {
#ifdef __APPLE__
void
set_bundle_environment()
{
- const std::string bundle = bundle_location();
- const std::string lib_path = bundle + "/lib";
- if (!Glib::file_test(lib_path, Glib::FILE_TEST_EXISTS)) {
- // If lib does not exist, we have not been bundleified, do nothing
- return;
- }
-
- setenv("GTK_PATH", lib_path.c_str(), 1);
- setenv("DYLD_LIBRARY_PATH", lib_path.c_str(), 1);
-
- const std::string pangorc_path(bundle + "/Resources/pangorc");
- if (Glib::file_test(pangorc_path, Glib::FILE_TEST_EXISTS)) {
- setenv("PANGO_RC_FILE", pangorc_path.c_str(), 1);
- }
-
- const std::string fonts_conf_path(bundle + "/Resources/fonts.conf");
- if (Glib::file_test(fonts_conf_path, Glib::FILE_TEST_EXISTS)) {
- setenv("FONTCONFIG_FILE", fonts_conf_path.c_str(), 1);
- }
-
- const std::string loaders_cache_path(bundle + "/Resources/loaders.cache");
- if (Glib::file_test(loaders_cache_path, Glib::FILE_TEST_EXISTS)) {
- setenv("GDK_PIXBUF_MODULE_FILE", loaders_cache_path.c_str(), 1);
- }
-
- const std::string gtkrc_path(bundle + "/Resources/gtkrc");
- if (Glib::file_test(gtkrc_path, Glib::FILE_TEST_EXISTS)) {
- gtk_rc_parse(gtkrc_path.c_str());
- }
+ const std::string bundle = patchage::bundle_location();
+ const std::string lib_path = bundle + "/lib";
+ if (!Glib::file_test(lib_path, Glib::FILE_TEST_EXISTS)) {
+ // If lib does not exist, we have not been bundleified, do nothing
+ return;
+ }
+
+ setenv("GTK_PATH", lib_path.c_str(), 1);
+ setenv("DYLD_LIBRARY_PATH", lib_path.c_str(), 1);
+
+ const std::string pangorc_path(bundle + "/Resources/pangorc");
+ if (Glib::file_test(pangorc_path, Glib::FILE_TEST_EXISTS)) {
+ setenv("PANGO_RC_FILE", pangorc_path.c_str(), 1);
+ }
+
+ const std::string fonts_conf_path(bundle + "/Resources/fonts.conf");
+ if (Glib::file_test(fonts_conf_path, Glib::FILE_TEST_EXISTS)) {
+ setenv("FONTCONFIG_FILE", fonts_conf_path.c_str(), 1);
+ }
+
+ const std::string loaders_cache_path(bundle + "/Resources/loaders.cache");
+ if (Glib::file_test(loaders_cache_path, Glib::FILE_TEST_EXISTS)) {
+ setenv("GDK_PIXBUF_MODULE_FILE", loaders_cache_path.c_str(), 1);
+ }
+
+ const std::string gtkrc_path(bundle + "/Resources/gtkrc");
+ if (Glib::file_test(gtkrc_path, Glib::FILE_TEST_EXISTS)) {
+ gtk_rc_parse(gtkrc_path.c_str());
+ }
}
#endif
+void
+print_usage()
+{
+ std::cout << "Usage: patchage [OPTION]...\n";
+ std::cout << "Visually connect JACK and ALSA Audio and MIDI ports.\n\n";
+ std::cout << "Options:\n";
+ std::cout << " -h, --help Display this help and exit.\n";
+ std::cout << " -A, --no-alsa Do not automatically attach to ALSA.\n";
+  std::cout << "  -J, --no-jack   Do not automatically attach to JACK.\n";
+}
+
+void
+print_version()
+{
+ std::cout << "Patchage " PATCHAGE_VERSION << R"(
+Copyright 2007-2022 David Robillard <d@drobilla.net>.
+License GPLv3+: <http://gnu.org/licenses/gpl.html>.
+This is free software: you are free to change and redistribute it.
+There is NO WARRANTY, to the extent permitted by law.
+)";
+}
+
+} // namespace
+
int
main(int argc, char** argv)
{
#ifdef __APPLE__
- set_bundle_environment();
+ set_bundle_environment();
#endif
- try {
-
- Glib::thread_init();
+#if USE_GETTEXT
+ if (!setlocale(LC_ALL, "")) {
+ std::cerr << "patchage: failed to set locale\n";
+ }
- Gtk::Main app(argc, argv);
-
- Patchage patchage(argc, argv);
- app.run(*patchage.window());
- patchage.save();
-
- } catch (std::exception& e) {
- std::cerr << "patchage: error: " << e.what() << std::endl;
- return 1;
- } catch (Glib::Exception& e) {
- std::cerr << "patchage: error: " << e.what() << std::endl;
- return 1;
- }
+ bindtextdomain("patchage", PATCHAGE_LOCALE_DIR);
+ bind_textdomain_codeset("patchage", "UTF-8");
+ textdomain("patchage");
+#endif
- return 0;
+ try {
+ Glib::thread_init();
+
+ const Gtk::Main app(argc, argv);
+ ++argv;
+ --argc;
+
+ // Parse command line options
+ patchage::Options options;
+ while (argc > 0) {
+ if (!strcmp(*argv, "-h") || !strcmp(*argv, "--help")) {
+ print_usage();
+ return 0;
+ }
+
+ if (!strcmp(*argv, "-A") || !strcmp(*argv, "--no-alsa")) {
+ options.alsa_driver_autoattach = false;
+ } else if (!strcmp(*argv, "-J") || !strcmp(*argv, "--no-jack")) {
+ options.jack_driver_autoattach = false;
+ } else if (!strcmp(*argv, "-V") || !strcmp(*argv, "--version")) {
+ print_version();
+ return 0;
+ } else {
+ std::cerr << "patchage: invalid option -- '" << *argv << "'\n";
+ print_usage();
+ return 1;
+ }
+
+ ++argv;
+ --argc;
+ }
+
+ // Run until main loop is finished
+ patchage::Patchage patchage(options);
+ Gtk::Main::run(*patchage.window());
+ patchage.save();
+
+ } catch (std::exception& e) {
+ std::cerr << "patchage: error: " << e.what() << std::endl;
+ return 1;
+ } catch (Glib::Exception& e) {
+ std::cerr << "patchage: error: " << e.what() << std::endl;
+ return 1;
+ }
+
+ return 0;
}
diff --git a/src/make_alsa_driver.hpp b/src/make_alsa_driver.hpp
new file mode 100644
index 0000000..7f3b594
--- /dev/null
+++ b/src/make_alsa_driver.hpp
@@ -0,0 +1,20 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_MAKE_ALSA_DRIVER_HPP
+#define PATCHAGE_MAKE_ALSA_DRIVER_HPP
+
+#include "Driver.hpp"
+
+#include <memory>
+
+namespace patchage {
+
+class ILog;
+
+std::unique_ptr<Driver>
+make_alsa_driver(ILog& log, Driver::EventSink emit_event);
+
+} // namespace patchage
+
+#endif // PATCHAGE_MAKE_ALSA_DRIVER_HPP
diff --git a/src/make_jack_driver.hpp b/src/make_jack_driver.hpp
new file mode 100644
index 0000000..79c6eb5
--- /dev/null
+++ b/src/make_jack_driver.hpp
@@ -0,0 +1,21 @@
+// Copyright 2007-2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_MAKE_JACK_DRIVER_HPP
+#define PATCHAGE_MAKE_JACK_DRIVER_HPP
+
+#include "Driver.hpp"
+
+#include <memory>
+
+namespace patchage {
+
+class AudioDriver;
+class ILog;
+
+std::unique_ptr<AudioDriver>
+make_jack_driver(ILog& log, Driver::EventSink emit_event);
+
+} // namespace patchage
+
+#endif // PATCHAGE_MAKE_JACK_DRIVER_HPP
diff --git a/src/patchage.ui b/src/patchage.ui.in
index 355d4dd..a7078d5 100644
--- a/src/patchage.ui
+++ b/src/patchage.ui.in
@@ -5,7 +5,7 @@
<object class="GtkWindow" id="main_win">
<property name="can_focus">False</property>
<property name="border_width">1</property>
- <property name="title" translatable="yes">Patchage</property>
+ <property name="title" translatable="no">Patchage</property>
<child>
<object class="GtkVBox" id="main_vbox">
<property name="visible">True</property>
@@ -24,39 +24,10 @@
<object class="GtkMenu" id="file_menu_menu">
<property name="can_focus">False</property>
<child>
- <object class="GtkImageMenuItem" id="menu_open_session">
- <property name="label">gtk-open</property>
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="use_underline">True</property>
- <property name="use_stock">True</property>
- <accelerator key="O" signal="activate" modifiers="GDK_CONTROL_MASK"/>
- <signal name="activate" handler="on_open_session_menuitem_activate" swapped="no"/>
- </object>
- </child>
- <child>
- <object class="GtkImageMenuItem" id="menu_save_session">
- <property name="label">gtk-save</property>
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="use_underline">True</property>
- <property name="use_stock">True</property>
- <accelerator key="s" signal="activate" modifiers="GDK_CONTROL_MASK"/>
- </object>
- </child>
- <child>
- <object class="GtkMenuItem" id="menu_save_close_session">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="label" translatable="yes">Save and _Close</property>
- <property name="use_underline">True</property>
- </object>
- </child>
- <child>
<object class="GtkMenuItem" id="menu_export_image">
<property name="visible">True</property>
<property name="can_focus">False</property>
- <property name="label" translatable="yes">_Export Image...</property>
+ <property name="label" translatable="yes">_Export Image…</property>
<property name="use_underline">True</property>
<accelerator key="e" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
@@ -93,7 +64,7 @@
<property name="can_focus">False</property>
<child>
<object class="GtkImageMenuItem" id="menu_jack_connect">
- <property name="label">Connect to _Jack</property>
+ <property name="label" translatable="yes">Connect to _JACK</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="use_underline">True</property>
@@ -104,7 +75,7 @@
</child>
<child>
<object class="GtkImageMenuItem" id="menu_jack_disconnect">
- <property name="label">Disconnect from Jack</property>
+ <property name="label" translatable="yes">Disconnect from JACK</property>
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">False</property>
@@ -121,7 +92,7 @@
</child>
<child>
<object class="GtkImageMenuItem" id="menu_alsa_connect">
- <property name="label">Connect to _Alsa</property>
+ <property name="label" translatable="yes">Connect to _ALSA</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="use_underline">True</property>
@@ -132,7 +103,7 @@
</child>
<child>
<object class="GtkImageMenuItem" id="menu_alsa_disconnect">
- <property name="label">Disconnect from ALSA</property>
+ <property name="label" translatable="yes">Disconnect from ALSA</property>
<property name="visible">True</property>
<property name="sensitive">False</property>
<property name="can_focus">False</property>
@@ -298,7 +269,7 @@
</child>
<child>
<object class="GtkImageMenuItem" id="menu_view_arrange">
- <property name="label">_Arrange</property>
+ <property name="label" translatable="yes">_Arrange</property>
<property name="visible">True</property>
<property name="can_focus">False</property>
<property name="use_underline">True</property>
@@ -313,6 +284,7 @@
<property name="can_focus">False</property>
<property name="label" translatable="yes">Sprung Layou_t</property>
<property name="use_underline">True</property>
+ <property name="active">False</property>
<accelerator key="t" signal="activate" modifiers="GDK_CONTROL_MASK"/>
</object>
</child>
@@ -359,51 +331,6 @@
<property name="icon_size">1</property>
<property name="icon_size_set">True</property>
<child>
- <object class="GtkToolButton" id="clear_load_but">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="has_tooltip">True</property>
- <property name="tooltip_markup">Clear the dropout indicator</property>
- <property name="tooltip_text" translatable="yes">Clear dropout indicator.</property>
- <property name="stock_id">gtk-clear</property>
- </object>
- <packing>
- <property name="expand">False</property>
- <property name="homogeneous">True</property>
- </packing>
- </child>
- <child>
- <object class="GtkToolItem" id="toolitem30">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <child>
- <object class="GtkAlignment" id="alignment3">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="yscale">0</property>
- <child>
- <object class="GtkProgressBar" id="xrun_progress">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="has_tooltip">True</property>
- <property name="tooltip_markup">Drouput (XRun) Indicator
-
-The bar represents the percentage of available time used for audio processing (i.e. the DSP load). If the bar reaches 100%, a dropout will occur.</property>
- <property name="tooltip_text" translatable="yes">Load and dropout gauge. The bar shows the percentage of available time used for audio processing. If it reaches 100%, a dropout will occur, and the bar is reset. Click to reset.</property>
- <property name="show_text">True</property>
- <property name="pulse_step">0.10000000149</property>
- <property name="text" translatable="yes">0 Dropouts</property>
- <property name="discrete_blocks">100</property>
- </object>
- </child>
- </object>
- </child>
- </object>
- <packing>
- <property name="expand">False</property>
- </packing>
- </child>
- <child>
<object class="GtkToolItem" id="toolitem28">
<property name="visible">True</property>
<property name="can_focus">False</property>
@@ -412,31 +339,19 @@ The bar represents the percentage of available time used for audio processing (i
<object class="GtkAlignment" id="alignment2">
<property name="visible">True</property>
<property name="can_focus">False</property>
- <property name="tooltip_text" translatable="yes">Jack buffer size and sample rate.</property>
+ <property name="tooltip_text" translatable="yes">JACK buffer size and sample rate.</property>
<property name="yscale">0</property>
<child>
<object class="GtkHBox" id="hbox4">
<property name="visible">True</property>
<property name="can_focus">False</property>
<child>
- <object class="GtkLabel" id="label10">
- <property name="visible">True</property>
- <property name="can_focus">False</property>
- <property name="label" translatable="yes"> / </property>
- </object>
- <packing>
- <property name="expand">False</property>
- <property name="fill">False</property>
- <property name="position">0</property>
- </packing>
- </child>
- <child>
<object class="GtkComboBox" id="buf_size_combo">
<property name="visible">True</property>
<property name="can_focus">True</property>
<property name="has_tooltip">True</property>
- <property name="tooltip_markup">Jack buffer length in frames</property>
- <property name="tooltip_text" translatable="yes">Jack buffer length in frames</property>
+ <property name="tooltip_markup">JACK buffer length in frames</property>
+ <property name="tooltip_text" translatable="yes">JACK buffer length in frames.</property>
<property name="border_width">1</property>
</object>
<packing>
@@ -448,7 +363,7 @@ The bar represents the percentage of available time used for audio processing (i
<child>
<object class="GtkLabel" id="latency_label">
<property name="can_focus">False</property>
- <property name="label" translatable="yes">frames @ ? kHz (? ms)</property>
+ <property name="label" translatable="yes">frames at ? kHz (? ms)</property>
</object>
<packing>
<property name="expand">False</property>
@@ -467,6 +382,36 @@ The bar represents the percentage of available time used for audio processing (i
</packing>
</child>
<child>
+ <object class="GtkToolItem" id="toolitem30">
+ <property name="visible">True</property>
+ <property name="can_focus">False</property>
+ <child>
+ <object class="GtkLabel" id="dropouts_label">
+ <property name="can_focus">False</property>
+ <property name="visible">False</property>
+ <property name="label" translatable="yes">Dropouts: {}</property>
+ </object>
+ </child>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ </packing>
+ </child>
+ <child>
+ <object class="GtkToolButton" id="clear_load_but">
+ <property name="visible">False</property>
+ <property name="can_focus">False</property>
+ <property name="has_tooltip">True</property>
+ <property name="tooltip_markup">Clear the dropout indicator</property>
+ <property name="tooltip_text" translatable="yes">Clear dropout indicator.</property>
+ <property name="stock_id">gtk-clear</property>
+ </object>
+ <packing>
+ <property name="expand">False</property>
+ <property name="homogeneous">True</property>
+ </packing>
+ </child>
+ <child>
<object class="GtkToolItem" id="toolitem1">
<property name="visible">True</property>
<property name="can_focus">False</property>
@@ -556,11 +501,11 @@ The bar represents the percentage of available time used for audio processing (i
<property name="transient_for">main_win</property>
<property name="program_name">Patchage</property>
<property name="version">@PATCHAGE_VERSION@</property>
- <property name="copyright" translatable="yes">© 2005-2017 David Robillard
+ <property name="copyright" translatable="no">© 2005-2022 David Robillard
© 2008 Nedko Arnaudov</property>
- <property name="comments" translatable="yes">A JACK and ALSA front-end.</property>
+ <property name="comments" translatable="yes">A modular patchbay for JACK and ALSA applications.</property>
<property name="website">http://drobilla.net/software/patchage</property>
- <property name="license" translatable="yes"> GNU GENERAL PUBLIC LICENSE
+ <property name="license" translatable="no"> GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. &lt;http://fsf.org/&gt;
diff --git a/src/patchage_config.h b/src/patchage_config.h
new file mode 100644
index 0000000..6c4f09f
--- /dev/null
+++ b/src/patchage_config.h
@@ -0,0 +1,112 @@
+// Copyright 2021-2023 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+/*
+ Configuration header that defines reasonable defaults at compile-time.
+
+ This allows configuration from the command-line (usually by the build system)
+ while still allowing the code to compile "as-is" with reasonable default
+ features on supported platforms.
+
+ This system is designed so that, ideally, no command-line or build-system
+ configuration is needed, but automatic feature detection can be disabled or
+ overridden for maximum control. It should never be necessary to edit the
+ source code to achieve a given configuration.
+
+ Usage:
+
+ - By default, features are enabled if they can be detected or assumed to be
+ available from the build environment, unless `PATCHAGE_NO_DEFAULT_CONFIG`
+ is defined, which disables everything by default.
+
+ - If a symbol like `HAVE_SOMETHING` is defined to non-zero, then the
+ "something" feature is assumed to be available.
+
+ Code rules:
+
+ - To check for a feature, this header must be included, and the symbol
+ `USE_SOMETHING` used as a boolean in an `#if` expression.
+
+ - None of the other configuration symbols described here may be used
+ directly. In particular, this header should be the only place in the
+ source code that touches `HAVE` symbols.
+*/
+
+#ifndef PATCHAGE_CONFIG_H
+#define PATCHAGE_CONFIG_H
+
+// Define version unconditionally so a warning will catch a mismatch
+#define PATCHAGE_VERSION "1.0.11"
+
+#if !defined(PATCHAGE_NO_DEFAULT_CONFIG)
+
+// Classic UNIX: dladdr()
+# ifndef HAVE_DLADDR
+# ifdef __has_include
+# if __has_include(<dlfcn.h>)
+# define HAVE_DLADDR 1
+# endif
+# elif defined(__unix__) || defined(__APPLE__)
+# define HAVE_DLADDR 1
+# endif
+# endif
+
+// GNU gettext()
+# ifndef HAVE_GETTEXT
+# ifdef __has_include
+# if __has_include(<libintl.h>)
+# define HAVE_GETTEXT 1
+# endif
+# endif
+# endif
+
+// JACK metadata API
+# ifndef HAVE_JACK_METADATA
+# ifdef __has_include
+# if __has_include(<jack/metadata.h>)
+# define HAVE_JACK_METADATA 1
+# endif
+# endif
+# endif
+
+#endif // !defined(PATCHAGE_NO_DEFAULT_CONFIG)
+
+/*
+ Make corresponding USE_FEATURE defines based on the HAVE_FEATURE defines from
+ above or the command line. The code checks for these using #if (not #ifdef),
+ so there will be an undefined warning if it checks for an unknown feature,
+ and this header is always required by any code that checks for features, even
+ if the build system defines them all.
+*/
+
+#if defined(HAVE_DLADDR) && HAVE_DLADDR
+# define USE_DLADDR 1
+#else
+# define USE_DLADDR 0
+#endif
+
+#if defined(HAVE_GETTEXT) && HAVE_GETTEXT
+# define USE_GETTEXT 1
+#else
+# define USE_GETTEXT 0
+#endif
+
+#if defined(HAVE_JACK_METADATA) && HAVE_JACK_METADATA
+# define USE_JACK_METADATA 1
+#else
+# define USE_JACK_METADATA 0
+#endif
+
+#if !defined(PATCHAGE_USE_LIGHT_THEME)
+# define PATCHAGE_USE_LIGHT_THEME 0
+#endif
+
+#ifndef PATCHAGE_BUNDLED
+# ifdef __APPLE__
+# define PATCHAGE_BUNDLED 1
+# else
+# define PATCHAGE_BUNDLED 0
+# endif
+#endif
+
+#endif // PATCHAGE_CONFIG_H
diff --git a/src/warnings.hpp b/src/warnings.hpp
new file mode 100644
index 0000000..ee9b3d8
--- /dev/null
+++ b/src/warnings.hpp
@@ -0,0 +1,45 @@
+// Copyright 2020 David Robillard <d@drobilla.net>
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+#ifndef PATCHAGE_WARNINGS_HPP
+#define PATCHAGE_WARNINGS_HPP
+
+#if defined(__clang__)
+
+# define PATCHAGE_DISABLE_FMT_WARNINGS \
+ _Pragma("clang diagnostic push") \
+ _Pragma("clang diagnostic ignored \"-Wdocumentation-unknown-command\"") \
+ _Pragma("clang diagnostic ignored \"-Wglobal-constructors\"") \
+ _Pragma("clang diagnostic ignored \"-Wsigned-enum-bitfield\"")
+
+// clang-format off
+# define PATCHAGE_DISABLE_GANV_WARNINGS \
+ _Pragma("clang diagnostic push") \
+ _Pragma( \
+ "clang diagnostic ignored \"-Wdocumentation-unknown-command\"")
+// clang-format on
+
+# define PATCHAGE_RESTORE_WARNINGS _Pragma("clang diagnostic pop")
+
+#elif defined(__GNUC__)
+
+# define PATCHAGE_DISABLE_FMT_WARNINGS _Pragma("GCC diagnostic push")
+
+# define PATCHAGE_DISABLE_GANV_WARNINGS
+
+# define PATCHAGE_RESTORE_WARNINGS _Pragma("GCC diagnostic pop")
+
+#else
+
+# define PATCHAGE_DISABLE_GANV_WARNINGS
+# define PATCHAGE_RESTORE_WARNINGS
+
+#endif
+
+#if defined(__GNUC__)
+# define PATCHAGE_UNREACHABLE() __builtin_unreachable()
+#else
+# define PATCHAGE_UNREACHABLE()
+#endif
+
+#endif // PATCHAGE_WARNINGS_HPP
diff --git a/subprojects/fmt/include/fmt/core.h b/subprojects/fmt/include/fmt/core.h
new file mode 100644
index 0000000..a3096d5
--- /dev/null
+++ b/subprojects/fmt/include/fmt/core.h
@@ -0,0 +1,3277 @@
+// Formatting library for C++ - the core API for char/UTF-8
+//
+// Copyright (c) 2012 - present, Victor Zverovich
+// All rights reserved.
+//
+// For the license information refer to format.h.
+
+#ifndef FMT_CORE_H_
+#define FMT_CORE_H_
+
+#include <cstddef> // std::byte
+#include <cstdio> // std::FILE
+#include <cstring> // std::strlen
+#include <iterator>
+#include <limits>
+#include <string>
+#include <type_traits>
+
+// The fmt library version in the form major * 10000 + minor * 100 + patch.
+#define FMT_VERSION 90000
+
+#if defined(__clang__) && !defined(__ibmxl__)
+# define FMT_CLANG_VERSION (__clang_major__ * 100 + __clang_minor__)
+#else
+# define FMT_CLANG_VERSION 0
+#endif
+
+#if defined(__GNUC__) && !defined(__clang__) && !defined(__INTEL_COMPILER) && \
+ !defined(__NVCOMPILER)
+# define FMT_GCC_VERSION (__GNUC__ * 100 + __GNUC_MINOR__)
+#else
+# define FMT_GCC_VERSION 0
+#endif
+
+#ifndef FMT_GCC_PRAGMA
+// Workaround _Pragma bug https://gcc.gnu.org/bugzilla/show_bug.cgi?id=59884.
+# if FMT_GCC_VERSION >= 504
+# define FMT_GCC_PRAGMA(arg) _Pragma(arg)
+# else
+# define FMT_GCC_PRAGMA(arg)
+# endif
+#endif
+
+#ifdef __ICL
+# define FMT_ICC_VERSION __ICL
+#elif defined(__INTEL_COMPILER)
+# define FMT_ICC_VERSION __INTEL_COMPILER
+#else
+# define FMT_ICC_VERSION 0
+#endif
+
+#ifdef _MSC_VER
+# define FMT_MSC_VERSION _MSC_VER
+# define FMT_MSC_WARNING(...) __pragma(warning(__VA_ARGS__))
+#else
+# define FMT_MSC_VERSION 0
+# define FMT_MSC_WARNING(...)
+#endif
+
+#ifdef _MSVC_LANG
+# define FMT_CPLUSPLUS _MSVC_LANG
+#else
+# define FMT_CPLUSPLUS __cplusplus
+#endif
+
+#ifdef __has_feature
+# define FMT_HAS_FEATURE(x) __has_feature(x)
+#else
+# define FMT_HAS_FEATURE(x) 0
+#endif
+
+#if (defined(__has_include) || FMT_ICC_VERSION >= 1600 || \
+ FMT_MSC_VERSION > 1900) && \
+ !defined(__INTELLISENSE__)
+# define FMT_HAS_INCLUDE(x) __has_include(x)
+#else
+# define FMT_HAS_INCLUDE(x) 0
+#endif
+
+#ifdef __has_cpp_attribute
+# define FMT_HAS_CPP_ATTRIBUTE(x) __has_cpp_attribute(x)
+#else
+# define FMT_HAS_CPP_ATTRIBUTE(x) 0
+#endif
+
+#define FMT_HAS_CPP14_ATTRIBUTE(attribute) \
+ (FMT_CPLUSPLUS >= 201402L && FMT_HAS_CPP_ATTRIBUTE(attribute))
+
+#define FMT_HAS_CPP17_ATTRIBUTE(attribute) \
+ (FMT_CPLUSPLUS >= 201703L && FMT_HAS_CPP_ATTRIBUTE(attribute))
+
+// Check if relaxed C++14 constexpr is supported.
+// GCC doesn't allow throw in constexpr until version 6 (bug 67371).
+#ifndef FMT_USE_CONSTEXPR
+# if (FMT_HAS_FEATURE(cxx_relaxed_constexpr) || FMT_MSC_VERSION >= 1912 || \
+ (FMT_GCC_VERSION >= 600 && FMT_CPLUSPLUS >= 201402L)) && \
+ !FMT_ICC_VERSION && !defined(__NVCC__)
+# define FMT_USE_CONSTEXPR 1
+# else
+# define FMT_USE_CONSTEXPR 0
+# endif
+#endif
+#if FMT_USE_CONSTEXPR
+# define FMT_CONSTEXPR constexpr
+#else
+# define FMT_CONSTEXPR
+#endif
+
+#if ((FMT_CPLUSPLUS >= 202002L) && \
+ (!defined(_GLIBCXX_RELEASE) || _GLIBCXX_RELEASE > 9)) || \
+ (FMT_CPLUSPLUS >= 201709L && FMT_GCC_VERSION >= 1002)
+# define FMT_CONSTEXPR20 constexpr
+#else
+# define FMT_CONSTEXPR20
+#endif
+
+// Check if constexpr std::char_traits<>::{compare,length} are supported.
+#if defined(__GLIBCXX__)
+# if FMT_CPLUSPLUS >= 201703L && defined(_GLIBCXX_RELEASE) && \
+ _GLIBCXX_RELEASE >= 7 // GCC 7+ libstdc++ has _GLIBCXX_RELEASE.
+# define FMT_CONSTEXPR_CHAR_TRAITS constexpr
+# endif
+#elif defined(_LIBCPP_VERSION) && FMT_CPLUSPLUS >= 201703L && \
+ _LIBCPP_VERSION >= 4000
+# define FMT_CONSTEXPR_CHAR_TRAITS constexpr
+#elif FMT_MSC_VERSION >= 1914 && FMT_CPLUSPLUS >= 201703L
+# define FMT_CONSTEXPR_CHAR_TRAITS constexpr
+#endif
+#ifndef FMT_CONSTEXPR_CHAR_TRAITS
+# define FMT_CONSTEXPR_CHAR_TRAITS
+#endif
+
+// Check if exceptions are disabled.
+#ifndef FMT_EXCEPTIONS
+# if (defined(__GNUC__) && !defined(__EXCEPTIONS)) || \
+ (FMT_MSC_VERSION && !_HAS_EXCEPTIONS)
+# define FMT_EXCEPTIONS 0
+# else
+# define FMT_EXCEPTIONS 1
+# endif
+#endif
+
+#ifndef FMT_DEPRECATED
+# if FMT_HAS_CPP14_ATTRIBUTE(deprecated) || FMT_MSC_VERSION >= 1900
+# define FMT_DEPRECATED [[deprecated]]
+# else
+# if (defined(__GNUC__) && !defined(__LCC__)) || defined(__clang__)
+# define FMT_DEPRECATED __attribute__((deprecated))
+# elif FMT_MSC_VERSION
+# define FMT_DEPRECATED __declspec(deprecated)
+# else
+# define FMT_DEPRECATED /* deprecated */
+# endif
+# endif
+#endif
+
+// [[noreturn]] is disabled on MSVC and NVCC because of bogus unreachable code
+// warnings.
+#if FMT_EXCEPTIONS && FMT_HAS_CPP_ATTRIBUTE(noreturn) && !FMT_MSC_VERSION && \
+ !defined(__NVCC__)
+# define FMT_NORETURN [[noreturn]]
+#else
+# define FMT_NORETURN
+#endif
+
+#if FMT_HAS_CPP17_ATTRIBUTE(fallthrough)
+# define FMT_FALLTHROUGH [[fallthrough]]
+#elif defined(__clang__)
+# define FMT_FALLTHROUGH [[clang::fallthrough]]
+#elif FMT_GCC_VERSION >= 700 && \
+ (!defined(__EDG_VERSION__) || __EDG_VERSION__ >= 520)
+# define FMT_FALLTHROUGH [[gnu::fallthrough]]
+#else
+# define FMT_FALLTHROUGH
+#endif
+
+#ifndef FMT_NODISCARD
+# if FMT_HAS_CPP17_ATTRIBUTE(nodiscard)
+# define FMT_NODISCARD [[nodiscard]]
+# else
+# define FMT_NODISCARD
+# endif
+#endif
+
+#ifndef FMT_USE_FLOAT
+# define FMT_USE_FLOAT 1
+#endif
+#ifndef FMT_USE_DOUBLE
+# define FMT_USE_DOUBLE 1
+#endif
+#ifndef FMT_USE_LONG_DOUBLE
+# define FMT_USE_LONG_DOUBLE 1
+#endif
+
+#ifndef FMT_INLINE
+# if FMT_GCC_VERSION || FMT_CLANG_VERSION
+# define FMT_INLINE inline __attribute__((always_inline))
+# else
+# define FMT_INLINE inline
+# endif
+#endif
+
+#ifdef _MSC_VER
+# define FMT_UNCHECKED_ITERATOR(It) \
+ using _Unchecked_type = It // Mark iterator as checked.
+#else
+# define FMT_UNCHECKED_ITERATOR(It) using unchecked_type = It
+#endif
+
+#ifndef FMT_BEGIN_NAMESPACE
+# define FMT_BEGIN_NAMESPACE \
+ namespace fmt { \
+ inline namespace v9 {
+# define FMT_END_NAMESPACE \
+ } \
+ }
+#endif
+
+#ifndef FMT_MODULE_EXPORT
+# define FMT_MODULE_EXPORT
+# define FMT_MODULE_EXPORT_BEGIN
+# define FMT_MODULE_EXPORT_END
+# define FMT_BEGIN_DETAIL_NAMESPACE namespace detail {
+# define FMT_END_DETAIL_NAMESPACE }
+#endif
+
+#if !defined(FMT_HEADER_ONLY) && defined(_WIN32)
+# define FMT_CLASS_API FMT_MSC_WARNING(suppress : 4275)
+# ifdef FMT_EXPORT
+# define FMT_API __declspec(dllexport)
+# elif defined(FMT_SHARED)
+# define FMT_API __declspec(dllimport)
+# endif
+#else
+# define FMT_CLASS_API
+# if defined(FMT_EXPORT) || defined(FMT_SHARED)
+# if defined(__GNUC__) || defined(__clang__)
+# define FMT_API __attribute__((visibility("default")))
+# endif
+# endif
+#endif
+#ifndef FMT_API
+# define FMT_API
+#endif
+
+// libc++ supports string_view in pre-c++17.
+#if FMT_HAS_INCLUDE(<string_view>) && \
+ (FMT_CPLUSPLUS >= 201703L || defined(_LIBCPP_VERSION))
+# include <string_view>
+# define FMT_USE_STRING_VIEW
+#elif FMT_HAS_INCLUDE("experimental/string_view") && FMT_CPLUSPLUS >= 201402L
+# include <experimental/string_view>
+# define FMT_USE_EXPERIMENTAL_STRING_VIEW
+#endif
+
+#ifndef FMT_UNICODE
+# define FMT_UNICODE !FMT_MSC_VERSION
+#endif
+
+#ifndef FMT_CONSTEVAL
+# if ((FMT_GCC_VERSION >= 1000 || FMT_CLANG_VERSION >= 1101) && \
+ FMT_CPLUSPLUS >= 202002L && !defined(__apple_build_version__)) || \
+ (defined(__cpp_consteval) && \
+ (!FMT_MSC_VERSION || _MSC_FULL_VER >= 193030704))
+// consteval is broken in MSVC before VS2022 and Apple clang 13.
+# define FMT_CONSTEVAL consteval
+# define FMT_HAS_CONSTEVAL
+# else
+# define FMT_CONSTEVAL
+# endif
+#endif
+
+#ifndef FMT_USE_NONTYPE_TEMPLATE_ARGS
+# if defined(__cpp_nontype_template_args) && \
+ ((FMT_GCC_VERSION >= 903 && FMT_CPLUSPLUS >= 201709L) || \
+ __cpp_nontype_template_args >= 201911L)
+# define FMT_USE_NONTYPE_TEMPLATE_ARGS 1
+# else
+# define FMT_USE_NONTYPE_TEMPLATE_ARGS 0
+# endif
+#endif
+
+// Enable minimal optimizations for more compact code in debug mode.
+FMT_GCC_PRAGMA("GCC push_options")
+
+FMT_BEGIN_NAMESPACE
+FMT_MODULE_EXPORT_BEGIN
+
+// Implementations of enable_if_t and other metafunctions for older systems.
+template <bool B, typename T = void>
+using enable_if_t = typename std::enable_if<B, T>::type;
+template <bool B, typename T, typename F>
+using conditional_t = typename std::conditional<B, T, F>::type;
+template <bool B> using bool_constant = std::integral_constant<bool, B>;
+template <typename T>
+using remove_reference_t = typename std::remove_reference<T>::type;
+template <typename T>
+using remove_const_t = typename std::remove_const<T>::type;
+template <typename T>
+using remove_cvref_t = typename std::remove_cv<remove_reference_t<T>>::type;
+template <typename T> struct type_identity { using type = T; };
+template <typename T> using type_identity_t = typename type_identity<T>::type;
+template <typename T>
+using underlying_t = typename std::underlying_type<T>::type;
+
+template <typename...> struct disjunction : std::false_type {};
+template <typename P> struct disjunction<P> : P {};
+template <typename P1, typename... Pn>
+struct disjunction<P1, Pn...>
+ : conditional_t<bool(P1::value), P1, disjunction<Pn...>> {};
+
+template <typename...> struct conjunction : std::true_type {};
+template <typename P> struct conjunction<P> : P {};
+template <typename P1, typename... Pn>
+struct conjunction<P1, Pn...>
+ : conditional_t<bool(P1::value), conjunction<Pn...>, P1> {};
+
+struct monostate {
+ constexpr monostate() {}
+};
+
+// An enable_if helper to be used in template parameters which results in much
+// shorter symbols: https://godbolt.org/z/sWw4vP. Extra parentheses are needed
+// to workaround a bug in MSVC 2019 (see #1140 and #1186).
+#ifdef FMT_DOC
+# define FMT_ENABLE_IF(...)
+#else
+# define FMT_ENABLE_IF(...) enable_if_t<(__VA_ARGS__), int> = 0
+#endif
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+// Suppresses "unused variable" warnings with the method described in
+// https://herbsutter.com/2009/10/18/mailbag-shutting-up-compiler-warnings/.
+// (void)var does not work on many Intel compilers.
+template <typename... T> FMT_CONSTEXPR void ignore_unused(const T&...) {}
+
+constexpr FMT_INLINE auto is_constant_evaluated(
+ bool default_value = false) noexcept -> bool {
+#ifdef __cpp_lib_is_constant_evaluated
+ ignore_unused(default_value);
+ return std::is_constant_evaluated();
+#else
+ return default_value;
+#endif
+}
+
+// Suppresses "conditional expression is constant" warnings.
+template <typename T> constexpr FMT_INLINE auto const_check(T value) -> T {
+ return value;
+}
+
+FMT_NORETURN FMT_API void assert_fail(const char* file, int line,
+ const char* message);
+
+#ifndef FMT_ASSERT
+# ifdef NDEBUG
+// FMT_ASSERT is not empty to avoid -Wempty-body.
+# define FMT_ASSERT(condition, message) \
+ ::fmt::detail::ignore_unused((condition), (message))
+# else
+# define FMT_ASSERT(condition, message) \
+ ((condition) /* void() fails with -Winvalid-constexpr on clang 4.0.1 */ \
+ ? (void)0 \
+ : ::fmt::detail::assert_fail(__FILE__, __LINE__, (message)))
+# endif
+#endif
+
+#if defined(FMT_USE_STRING_VIEW)
+template <typename Char> using std_string_view = std::basic_string_view<Char>;
+#elif defined(FMT_USE_EXPERIMENTAL_STRING_VIEW)
+template <typename Char>
+using std_string_view = std::experimental::basic_string_view<Char>;
+#else
+template <typename T> struct std_string_view {};
+#endif
+
+#ifdef FMT_USE_INT128
+// Do nothing.
+#elif defined(__SIZEOF_INT128__) && !defined(__NVCC__) && \
+ !(FMT_CLANG_VERSION && FMT_MSC_VERSION)
+# define FMT_USE_INT128 1
+using int128_opt = __int128_t; // An optional native 128-bit integer.
+using uint128_opt = __uint128_t;
+template <typename T> inline auto convert_for_visit(T value) -> T {
+ return value;
+}
+#else
+# define FMT_USE_INT128 0
+#endif
+#if !FMT_USE_INT128
+enum class int128_opt {};
+enum class uint128_opt {};
+// Reduce template instantiations.
+template <typename T> auto convert_for_visit(T) -> monostate { return {}; }
+#endif
+
+// Casts a nonnegative integer to unsigned.
+template <typename Int>
+FMT_CONSTEXPR auto to_unsigned(Int value) ->
+ typename std::make_unsigned<Int>::type {
+ FMT_ASSERT(value >= 0, "negative value");
+ return static_cast<typename std::make_unsigned<Int>::type>(value);
+}
+
+FMT_MSC_WARNING(suppress : 4566) constexpr unsigned char micro[] = "\u00B5";
+
+constexpr auto is_utf8() -> bool {
+ // Avoid buggy sign extensions in MSVC's constant evaluation mode (#2297).
+ using uchar = unsigned char;
+ return FMT_UNICODE || (sizeof(micro) == 3 && uchar(micro[0]) == 0xC2 &&
+ uchar(micro[1]) == 0xB5);
+}
+FMT_END_DETAIL_NAMESPACE
+
+/**
+ An implementation of ``std::basic_string_view`` for pre-C++17. It provides a
+ subset of the API. ``fmt::basic_string_view`` is used for format strings even
+ if ``std::string_view`` is available to prevent issues when a library is
+ compiled with a different ``-std`` option than the client code (which is not
+ recommended).
+ */
+template <typename Char> class basic_string_view {
+ private:
+ const Char* data_;
+ size_t size_;
+
+ public:
+ using value_type = Char;
+ using iterator = const Char*;
+
+ constexpr basic_string_view() noexcept : data_(nullptr), size_(0) {}
+
+ /** Constructs a string reference object from a C string and a size. */
+ constexpr basic_string_view(const Char* s, size_t count) noexcept
+ : data_(s), size_(count) {}
+
+ /**
+ \rst
+ Constructs a string reference object from a C string computing
+ the size with ``std::char_traits<Char>::length``.
+ \endrst
+ */
+ FMT_CONSTEXPR_CHAR_TRAITS
+ FMT_INLINE
+ basic_string_view(const Char* s)
+ : data_(s),
+ size_(detail::const_check(std::is_same<Char, char>::value &&
+ !detail::is_constant_evaluated(true))
+ ? std::strlen(reinterpret_cast<const char*>(s))
+ : std::char_traits<Char>::length(s)) {}
+
+ /** Constructs a string reference from a ``std::basic_string`` object. */
+ template <typename Traits, typename Alloc>
+ FMT_CONSTEXPR basic_string_view(
+ const std::basic_string<Char, Traits, Alloc>& s) noexcept
+ : data_(s.data()), size_(s.size()) {}
+
+ template <typename S, FMT_ENABLE_IF(std::is_same<
+ S, detail::std_string_view<Char>>::value)>
+ FMT_CONSTEXPR basic_string_view(S s) noexcept
+ : data_(s.data()), size_(s.size()) {}
+
+ /** Returns a pointer to the string data. */
+ constexpr auto data() const noexcept -> const Char* { return data_; }
+
+ /** Returns the string size. */
+ constexpr auto size() const noexcept -> size_t { return size_; }
+
+ constexpr auto begin() const noexcept -> iterator { return data_; }
+ constexpr auto end() const noexcept -> iterator { return data_ + size_; }
+
+ constexpr auto operator[](size_t pos) const noexcept -> const Char& {
+ return data_[pos];
+ }
+
+ FMT_CONSTEXPR void remove_prefix(size_t n) noexcept {
+ data_ += n;
+ size_ -= n;
+ }
+
+ // Lexicographically compare this string reference to other.
+ FMT_CONSTEXPR_CHAR_TRAITS auto compare(basic_string_view other) const -> int {
+ size_t str_size = size_ < other.size_ ? size_ : other.size_;
+ int result = std::char_traits<Char>::compare(data_, other.data_, str_size);
+ if (result == 0)
+ result = size_ == other.size_ ? 0 : (size_ < other.size_ ? -1 : 1);
+ return result;
+ }
+
+ FMT_CONSTEXPR_CHAR_TRAITS friend auto operator==(basic_string_view lhs,
+ basic_string_view rhs)
+ -> bool {
+ return lhs.compare(rhs) == 0;
+ }
+ friend auto operator!=(basic_string_view lhs, basic_string_view rhs) -> bool {
+ return lhs.compare(rhs) != 0;
+ }
+ friend auto operator<(basic_string_view lhs, basic_string_view rhs) -> bool {
+ return lhs.compare(rhs) < 0;
+ }
+ friend auto operator<=(basic_string_view lhs, basic_string_view rhs) -> bool {
+ return lhs.compare(rhs) <= 0;
+ }
+ friend auto operator>(basic_string_view lhs, basic_string_view rhs) -> bool {
+ return lhs.compare(rhs) > 0;
+ }
+ friend auto operator>=(basic_string_view lhs, basic_string_view rhs) -> bool {
+ return lhs.compare(rhs) >= 0;
+ }
+};
+
+using string_view = basic_string_view<char>;
+
+/** Specifies if ``T`` is a character type. Can be specialized by users. */
+template <typename T> struct is_char : std::false_type {};
+template <> struct is_char<char> : std::true_type {};
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+// A base class for compile-time strings.
+struct compile_string {};
+
+template <typename S>
+struct is_compile_string : std::is_base_of<compile_string, S> {};
+
+// Returns a string view of `s`.
+template <typename Char, FMT_ENABLE_IF(is_char<Char>::value)>
+FMT_INLINE auto to_string_view(const Char* s) -> basic_string_view<Char> {
+ return s;
+}
+template <typename Char, typename Traits, typename Alloc>
+inline auto to_string_view(const std::basic_string<Char, Traits, Alloc>& s)
+ -> basic_string_view<Char> {
+ return s;
+}
+template <typename Char>
+constexpr auto to_string_view(basic_string_view<Char> s)
+ -> basic_string_view<Char> {
+ return s;
+}
+template <typename Char,
+ FMT_ENABLE_IF(!std::is_empty<std_string_view<Char>>::value)>
+inline auto to_string_view(std_string_view<Char> s) -> basic_string_view<Char> {
+ return s;
+}
+template <typename S, FMT_ENABLE_IF(is_compile_string<S>::value)>
+constexpr auto to_string_view(const S& s)
+ -> basic_string_view<typename S::char_type> {
+ return basic_string_view<typename S::char_type>(s);
+}
+void to_string_view(...);
+
+// Specifies whether S is a string type convertible to fmt::basic_string_view.
+// It should be a constexpr function but MSVC 2017 fails to compile it in
+// enable_if and MSVC 2015 fails to compile it as an alias template.
+// ADL invocation of to_string_view is DEPRECATED!
+template <typename S>
+struct is_string : std::is_class<decltype(to_string_view(std::declval<S>()))> {
+};
+
+template <typename S, typename = void> struct char_t_impl {};
+template <typename S> struct char_t_impl<S, enable_if_t<is_string<S>::value>> {
+ using result = decltype(to_string_view(std::declval<S>()));
+ using type = typename result::value_type;
+};
+
+enum class type {
+ none_type,
+ // Integer types should go first,
+ int_type,
+ uint_type,
+ long_long_type,
+ ulong_long_type,
+ int128_type,
+ uint128_type,
+ bool_type,
+ char_type,
+ last_integer_type = char_type,
+ // followed by floating-point types.
+ float_type,
+ double_type,
+ long_double_type,
+ last_numeric_type = long_double_type,
+ cstring_type,
+ string_type,
+ pointer_type,
+ custom_type
+};
+
+// Maps core type T to the corresponding type enum constant.
+template <typename T, typename Char>
+struct type_constant : std::integral_constant<type, type::custom_type> {};
+
+#define FMT_TYPE_CONSTANT(Type, constant) \
+ template <typename Char> \
+ struct type_constant<Type, Char> \
+ : std::integral_constant<type, type::constant> {}
+
+FMT_TYPE_CONSTANT(int, int_type);
+FMT_TYPE_CONSTANT(unsigned, uint_type);
+FMT_TYPE_CONSTANT(long long, long_long_type);
+FMT_TYPE_CONSTANT(unsigned long long, ulong_long_type);
+FMT_TYPE_CONSTANT(int128_opt, int128_type);
+FMT_TYPE_CONSTANT(uint128_opt, uint128_type);
+FMT_TYPE_CONSTANT(bool, bool_type);
+FMT_TYPE_CONSTANT(Char, char_type);
+FMT_TYPE_CONSTANT(float, float_type);
+FMT_TYPE_CONSTANT(double, double_type);
+FMT_TYPE_CONSTANT(long double, long_double_type);
+FMT_TYPE_CONSTANT(const Char*, cstring_type);
+FMT_TYPE_CONSTANT(basic_string_view<Char>, string_type);
+FMT_TYPE_CONSTANT(const void*, pointer_type);
+
+constexpr bool is_integral_type(type t) {
+ return t > type::none_type && t <= type::last_integer_type;
+}
+
+constexpr bool is_arithmetic_type(type t) {
+ return t > type::none_type && t <= type::last_numeric_type;
+}
+
+FMT_NORETURN FMT_API void throw_format_error(const char* message);
+
+struct error_handler {
+ constexpr error_handler() = default;
+ constexpr error_handler(const error_handler&) = default;
+
+ // This function is intentionally not constexpr to give a compile-time error.
+ FMT_NORETURN void on_error(const char* message) {
+ throw_format_error(message);
+ }
+};
+FMT_END_DETAIL_NAMESPACE
+
+/** String's character type. */
+template <typename S> using char_t = typename detail::char_t_impl<S>::type;
+
+/**
+ \rst
+ Parsing context consisting of a format string range being parsed and an
+ argument counter for automatic indexing.
+ You can use the ``format_parse_context`` type alias for ``char`` instead.
+ \endrst
+ */
+template <typename Char, typename ErrorHandler = detail::error_handler>
+class basic_format_parse_context : private ErrorHandler {
+ private:
+ basic_string_view<Char> format_str_;
+ int next_arg_id_;
+
+ FMT_CONSTEXPR void do_check_arg_id(int id);
+
+ public:
+ using char_type = Char;
+ using iterator = typename basic_string_view<Char>::iterator;
+
+ explicit constexpr basic_format_parse_context(
+ basic_string_view<Char> format_str, ErrorHandler eh = {},
+ int next_arg_id = 0)
+ : ErrorHandler(eh), format_str_(format_str), next_arg_id_(next_arg_id) {}
+
+ /**
+ Returns an iterator to the beginning of the format string range being
+ parsed.
+ */
+ constexpr auto begin() const noexcept -> iterator {
+ return format_str_.begin();
+ }
+
+ /**
+ Returns an iterator past the end of the format string range being parsed.
+ */
+ constexpr auto end() const noexcept -> iterator { return format_str_.end(); }
+
+ /** Advances the begin iterator to ``it``. */
+ FMT_CONSTEXPR void advance_to(iterator it) {
+ format_str_.remove_prefix(detail::to_unsigned(it - begin()));
+ }
+
+ /**
+ Reports an error if using the manual argument indexing; otherwise returns
+ the next argument index and switches to the automatic indexing.
+ */
+ FMT_CONSTEXPR auto next_arg_id() -> int {
+ if (next_arg_id_ < 0) {
+ on_error("cannot switch from manual to automatic argument indexing");
+ return 0;
+ }
+ int id = next_arg_id_++;
+ do_check_arg_id(id);
+ return id;
+ }
+
+ /**
+ Reports an error if using the automatic argument indexing; otherwise
+ switches to the manual indexing.
+ */
+ FMT_CONSTEXPR void check_arg_id(int id) {
+ if (next_arg_id_ > 0) {
+ on_error("cannot switch from automatic to manual argument indexing");
+ return;
+ }
+ next_arg_id_ = -1;
+ do_check_arg_id(id);
+ }
+
+ FMT_CONSTEXPR void check_arg_id(basic_string_view<Char>) {}
+
+ FMT_CONSTEXPR void on_error(const char* message) {
+ ErrorHandler::on_error(message);
+ }
+
+ constexpr auto error_handler() const -> ErrorHandler { return *this; }
+};
+
+using format_parse_context = basic_format_parse_context<char>;
+
+FMT_BEGIN_DETAIL_NAMESPACE
+// A parse context with extra data used only in compile-time checks.
+template <typename Char, typename ErrorHandler = detail::error_handler>
+class compile_parse_context
+ : public basic_format_parse_context<Char, ErrorHandler> {
+ private:
+ int num_args_;
+ const type* types_;
+ using base = basic_format_parse_context<Char, ErrorHandler>;
+
+ public:
+ explicit FMT_CONSTEXPR compile_parse_context(
+ basic_string_view<Char> format_str, int num_args, const type* types,
+ ErrorHandler eh = {}, int next_arg_id = 0)
+ : base(format_str, eh, next_arg_id), num_args_(num_args), types_(types) {}
+
+ constexpr int num_args() const { return num_args_; }
+
+ FMT_CONSTEXPR auto next_arg_id() -> int {
+ int id = base::next_arg_id();
+ if (id >= num_args_) this->on_error("argument not found");
+ return id;
+ }
+
+ FMT_CONSTEXPR void check_arg_id(int id) {
+ base::check_arg_id(id);
+ if (id >= num_args_) this->on_error("argument not found");
+ }
+ using base::check_arg_id;
+};
+FMT_END_DETAIL_NAMESPACE
+
+template <typename Char, typename ErrorHandler>
+FMT_CONSTEXPR void
+basic_format_parse_context<Char, ErrorHandler>::do_check_arg_id(int id) {
+ // Argument id is only checked at compile-time during parsing because
+ // formatting has its own validation.
+ if (detail::is_constant_evaluated() && FMT_GCC_VERSION >= 1200) {
+ using context = detail::compile_parse_context<Char, ErrorHandler>;
+ if (id >= static_cast<context*>(this)->num_args())
+ on_error("argument not found");
+ }
+}
+
+template <typename Context> class basic_format_arg;
+template <typename Context> class basic_format_args;
+template <typename Context> class dynamic_format_arg_store;
+
+// A formatter for objects of type T.
+template <typename T, typename Char = char, typename Enable = void>
+struct formatter {
+ // A deleted default constructor indicates a disabled formatter.
+ formatter() = delete;
+};
+
+// Specifies if T has an enabled formatter specialization. A type can be
+// formattable even if it doesn't have a formatter e.g. via a conversion.
+template <typename T, typename Context>
+using has_formatter =
+ std::is_constructible<typename Context::template formatter_type<T>>;
+
+// Checks whether T is a container with contiguous storage.
+template <typename T> struct is_contiguous : std::false_type {};
+template <typename Char>
+struct is_contiguous<std::basic_string<Char>> : std::true_type {};
+
+class appender;
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+template <typename Context, typename T>
+constexpr auto has_const_formatter_impl(T*)
+ -> decltype(typename Context::template formatter_type<T>().format(
+ std::declval<const T&>(), std::declval<Context&>()),
+ true) {
+ return true;
+}
+template <typename Context>
+constexpr auto has_const_formatter_impl(...) -> bool {
+ return false;
+}
+template <typename T, typename Context>
+constexpr auto has_const_formatter() -> bool {
+ return has_const_formatter_impl<Context>(static_cast<T*>(nullptr));
+}
+
+// Extracts a reference to the container from back_insert_iterator.
+template <typename Container>
+inline auto get_container(std::back_insert_iterator<Container> it)
+ -> Container& {
+ using base = std::back_insert_iterator<Container>;
+ struct accessor : base {
+ accessor(base b) : base(b) {}
+ using base::container;
+ };
+ return *accessor(it).container;
+}
+
+template <typename Char, typename InputIt, typename OutputIt>
+FMT_CONSTEXPR auto copy_str(InputIt begin, InputIt end, OutputIt out)
+ -> OutputIt {
+ while (begin != end) *out++ = static_cast<Char>(*begin++);
+ return out;
+}
+
+template <typename Char, typename T, typename U,
+ FMT_ENABLE_IF(
+ std::is_same<remove_const_t<T>, U>::value&& is_char<U>::value)>
+FMT_CONSTEXPR auto copy_str(T* begin, T* end, U* out) -> U* {
+ if (is_constant_evaluated()) return copy_str<Char, T*, U*>(begin, end, out);
+ auto size = to_unsigned(end - begin);
+ memcpy(out, begin, size * sizeof(U));
+ return out + size;
+}
+
+/**
+ \rst
+ A contiguous memory buffer with an optional growing ability. It is an internal
+ class and shouldn't be used directly, only via `~fmt::basic_memory_buffer`.
+ \endrst
+ */
+template <typename T> class buffer {
+ private:
+ T* ptr_;
+ size_t size_;
+ size_t capacity_;
+
+ protected:
+ // Don't initialize ptr_ since it is not accessed to save a few cycles.
+ FMT_MSC_WARNING(suppress : 26495)
+ buffer(size_t sz) noexcept : size_(sz), capacity_(sz) {}
+
+ FMT_CONSTEXPR20 buffer(T* p = nullptr, size_t sz = 0, size_t cap = 0) noexcept
+ : ptr_(p), size_(sz), capacity_(cap) {}
+
+ // Non-virtual: buffers are only destroyed through the concrete type.
+ FMT_CONSTEXPR20 ~buffer() = default;
+ buffer(buffer&&) = default;
+
+ /** Sets the buffer data and capacity. */
+ FMT_CONSTEXPR void set(T* buf_data, size_t buf_capacity) noexcept {
+ ptr_ = buf_data;
+ capacity_ = buf_capacity;
+ }
+
+ /** Increases the buffer capacity to hold at least *capacity* elements. */
+ virtual FMT_CONSTEXPR20 void grow(size_t capacity) = 0;
+
+ public:
+ using value_type = T;
+ using const_reference = const T&;
+
+ buffer(const buffer&) = delete;
+ void operator=(const buffer&) = delete;
+
+ auto begin() noexcept -> T* { return ptr_; }
+ auto end() noexcept -> T* { return ptr_ + size_; }
+
+ auto begin() const noexcept -> const T* { return ptr_; }
+ auto end() const noexcept -> const T* { return ptr_ + size_; }
+
+ /** Returns the size of this buffer. */
+ constexpr auto size() const noexcept -> size_t { return size_; }
+
+ /** Returns the capacity of this buffer. */
+ constexpr auto capacity() const noexcept -> size_t { return capacity_; }
+
+ /** Returns a pointer to the buffer data. */
+ FMT_CONSTEXPR auto data() noexcept -> T* { return ptr_; }
+
+ /** Returns a pointer to the buffer data. */
+ FMT_CONSTEXPR auto data() const noexcept -> const T* { return ptr_; }
+
+ /** Clears this buffer. Capacity and storage are left untouched. */
+ void clear() { size_ = 0; }
+
+ // Tries resizing the buffer to contain *count* elements. If T is a POD type
+ // the new elements may not be initialized.
+ // The size is clamped to the capacity grow() actually provided.
+ FMT_CONSTEXPR20 void try_resize(size_t count) {
+ try_reserve(count);
+ size_ = count <= capacity_ ? count : capacity_;
+ }
+
+ // Tries increasing the buffer capacity to *new_capacity*. It can increase the
+ // capacity by a smaller amount than requested but guarantees there is space
+ // for at least one additional element either by increasing the capacity or by
+ // flushing the buffer if it is full.
+ FMT_CONSTEXPR20 void try_reserve(size_t new_capacity) {
+ if (new_capacity > capacity_) grow(new_capacity);
+ }
+
+ FMT_CONSTEXPR20 void push_back(const T& value) {
+ // try_reserve guarantees room for at least one more element (see above).
+ try_reserve(size_ + 1);
+ ptr_[size_++] = value;
+ }
+
+ /** Appends data to the end of the buffer. */
+ template <typename U> void append(const U* begin, const U* end);
+
+ // No bounds checking is performed.
+ template <typename I> FMT_CONSTEXPR auto operator[](I index) -> T& {
+ return ptr_[index];
+ }
+ template <typename I>
+ FMT_CONSTEXPR auto operator[](I index) const -> const T& {
+ return ptr_[index];
+ }
+};
+
+// Traits for an unbounded buffer: nothing is counted and no limit is
+// applied to the number of elements written.
+struct buffer_traits {
+ explicit buffer_traits(size_t) {}
+ auto count() const -> size_t { return 0; }
+ auto limit(size_t size) -> size_t { return size; }
+};
+
+// Traits for a buffer with a fixed output limit: tracks the total number
+// of elements requested and truncates each write to the remaining budget.
+class fixed_buffer_traits {
+ private:
+ size_t count_ = 0;  // total elements requested so far (may exceed limit_)
+ size_t limit_;      // maximum elements that may actually be written
+
+ public:
+ explicit fixed_buffer_traits(size_t limit) : limit_(limit) {}
+ auto count() const -> size_t { return count_; }
+ // Returns how many of *size* elements may be written; also accumulates
+ // *size* into the running count.
+ auto limit(size_t size) -> size_t {
+ size_t n = limit_ > count_ ? limit_ - count_ : 0;
+ count_ += size;
+ return size < n ? size : n;
+ }
+};
+
+// A buffer that writes to an output iterator when flushed.
+// Output accumulates in the fixed-size internal array and is copied to
+// out_ whenever the array fills up or out() is called.
+template <typename OutputIt, typename T, typename Traits = buffer_traits>
+class iterator_buffer final : public Traits, public buffer<T> {
+ private:
+ OutputIt out_;
+ enum { buffer_size = 256 };
+ T data_[buffer_size];
+
+ protected:
+ // Never grows the storage; flushes instead to free the internal array.
+ FMT_CONSTEXPR20 void grow(size_t) override {
+ if (this->size() == buffer_size) flush();
+ }
+
+ // Copies at most Traits::limit(size) buffered elements to out_.
+ void flush() {
+ auto size = this->size();
+ this->clear();
+ out_ = copy_str<T>(data_, data_ + this->limit(size), out_);
+ }
+
+ public:
+ explicit iterator_buffer(OutputIt out, size_t n = buffer_size)
+ : Traits(n), buffer<T>(data_, 0, buffer_size), out_(out) {}
+ // The moved-from buffer's data_ cannot be adopted, so start empty over
+ // this object's own array.
+ iterator_buffer(iterator_buffer&& other)
+ : Traits(other), buffer<T>(data_, 0, buffer_size), out_(other.out_) {}
+ ~iterator_buffer() { flush(); }
+
+ auto out() -> OutputIt {
+ flush();
+ return out_;
+ }
+ auto count() const -> size_t { return Traits::count() + this->size(); }
+};
+
+// Specialization for writing into a raw T* with a fixed limit: output is
+// written directly into the destination array until the limit is reached,
+// after which excess output is absorbed (and counted) in data_.
+template <typename T>
+class iterator_buffer<T*, T, fixed_buffer_traits> final
+ : public fixed_buffer_traits,
+ public buffer<T> {
+ private:
+ T* out_;
+ enum { buffer_size = 256 };
+ T data_[buffer_size];
+
+ protected:
+ FMT_CONSTEXPR20 void grow(size_t) override {
+ if (this->size() == this->capacity()) flush();
+ }
+
+ void flush() {
+ size_t n = this->limit(this->size());
+ if (this->data() == out_) {
+ // Still writing directly into the destination: advance it and
+ // switch subsequent output to the internal overflow array.
+ out_ += n;
+ this->set(data_, buffer_size);
+ }
+ this->clear();
+ }
+
+ public:
+ explicit iterator_buffer(T* out, size_t n = buffer_size)
+ : fixed_buffer_traits(n), buffer<T>(out, 0, n), out_(out) {}
+ iterator_buffer(iterator_buffer&& other)
+ : fixed_buffer_traits(other),
+ buffer<T>(std::move(other)),
+ out_(other.out_) {
+ if (this->data() != out_) {
+ // The source had already switched to its internal array; rebind to
+ // this object's own array since other.data_ cannot be adopted.
+ this->set(data_, buffer_size);
+ this->clear();
+ }
+ }
+ ~iterator_buffer() { flush(); }
+
+ auto out() -> T* {
+ flush();
+ return out_;
+ }
+ auto count() const -> size_t {
+ return fixed_buffer_traits::count() + this->size();
+ }
+};
+
+// Specialization for an unbounded raw T* destination: output goes straight
+// into the destination, so grow() is a no-op and the capacity is "infinite"
+// (~size_t()). The caller is responsible for providing enough space.
+template <typename T> class iterator_buffer<T*, T> final : public buffer<T> {
+ protected:
+ FMT_CONSTEXPR20 void grow(size_t) override {}
+
+ public:
+ explicit iterator_buffer(T* out, size_t = 0) : buffer<T>(out, 0, ~size_t()) {}
+
+ auto out() -> T* { return &*this->end(); }
+};
+
+// A buffer that writes to a container with the contiguous storage.
+// grow() resizes the container itself, so the buffer writes in place with
+// no intermediate copy.
+template <typename Container>
+class iterator_buffer<std::back_insert_iterator<Container>,
+ enable_if_t<is_contiguous<Container>::value,
+ typename Container::value_type>>
+ final : public buffer<typename Container::value_type> {
+ private:
+ Container& container_;
+
+ protected:
+ FMT_CONSTEXPR20 void grow(size_t capacity) override {
+ container_.resize(capacity);
+ this->set(&container_[0], capacity);
+ }
+
+ public:
+ explicit iterator_buffer(Container& c)
+ : buffer<typename Container::value_type>(c.size()), container_(c) {}
+ explicit iterator_buffer(std::back_insert_iterator<Container> out, size_t = 0)
+ : iterator_buffer(get_container(out)) {}
+
+ auto out() -> std::back_insert_iterator<Container> {
+ return std::back_inserter(container_);
+ }
+};
+
+// A buffer that counts the number of code units written discarding the output.
+// Used to implement formatted_size: data_ is repeatedly overwritten while
+// count_ accumulates the number of elements "flushed" away.
+template <typename T = char> class counting_buffer final : public buffer<T> {
+ private:
+ enum { buffer_size = 256 };
+ T data_[buffer_size];
+ size_t count_ = 0;
+
+ protected:
+ FMT_CONSTEXPR20 void grow(size_t) override {
+ if (this->size() != buffer_size) return;
+ count_ += this->size();
+ this->clear();
+ }
+
+ public:
+ counting_buffer() : buffer<T>(data_, 0, buffer_size) {}
+
+ auto count() -> size_t { return count_ + this->size(); }
+};
+
+// Output iterator over buffer<T>. For char the dedicated appender class is
+// used to keep symbol names short in the common case.
+template <typename T>
+using buffer_appender = conditional_t<std::is_same<T, char>::value, appender,
+ std::back_insert_iterator<buffer<T>>>;
+
+// Maps an output iterator to a buffer.
+template <typename T, typename OutputIt>
+auto get_buffer(OutputIt out) -> iterator_buffer<OutputIt, T> {
+ return iterator_buffer<OutputIt, T>(out);
+}
+
+// Returns the iterator to resume writing with: the buffer's own out() if it
+// has one, otherwise an appender over the buffer.
+template <typename Buffer>
+auto get_iterator(Buffer& buf) -> decltype(buf.out()) {
+ return buf.out();
+}
+template <typename T> auto get_iterator(buffer<T>& buf) -> buffer_appender<T> {
+ return buffer_appender<T>(buf);
+}
+
+// Formatter of last resort (e.g. the deprecated ostream operator<< path).
+// The primary template is disabled; enabled specializations are provided
+// elsewhere when FMT_DEPRECATED_OSTREAM is defined.
+template <typename T, typename Char = char, typename Enable = void>
+struct fallback_formatter {
+ fallback_formatter() = delete;
+};
+
+// Specifies if T has an enabled fallback_formatter specialization.
+template <typename T, typename Char>
+using has_fallback_formatter =
+#ifdef FMT_DEPRECATED_OSTREAM
+ std::is_constructible<fallback_formatter<T, Char>>;
+#else
+ std::false_type;
+#endif
+
+// Base tag for non-owning argument views such as named_arg.
+struct view {};
+
+// A reference to an argument paired with its name (fmt::arg("x", v)).
+// Holds references only; the name and value must outlive the view.
+template <typename Char, typename T> struct named_arg : view {
+ const Char* name;
+ const T& value;
+ named_arg(const Char* n, const T& v) : name(n), value(v) {}
+};
+
+// Maps an argument name to its positional index in the argument array.
+template <typename Char> struct named_arg_info {
+ const Char* name;
+ int id;
+};
+
+// Storage for packed format arguments plus the named-argument index table.
+template <typename T, typename Char, size_t NUM_ARGS, size_t NUM_NAMED_ARGS>
+struct arg_data {
+ // args_[0].named_args points to named_args_ to avoid bloating format_args.
+ // +1 to workaround a bug in gcc 7.5 that causes duplicated-branches warning.
+ T args_[1 + (NUM_ARGS != 0 ? NUM_ARGS : +1)];
+ named_arg_info<Char> named_args_[NUM_NAMED_ARGS];
+
+ template <typename... U>
+ arg_data(const U&... init) : args_{T(named_args_, NUM_NAMED_ARGS), init...} {}
+ arg_data(const arg_data& other) = delete;
+ // Skips the leading named_args sentinel element.
+ auto args() const -> const T* { return args_ + 1; }
+ auto named_args() -> named_arg_info<Char>* { return named_args_; }
+};
+
+// Specialization without named arguments: no sentinel element is needed.
+template <typename T, typename Char, size_t NUM_ARGS>
+struct arg_data<T, Char, NUM_ARGS, 0> {
+ // +1 to workaround a bug in gcc 7.5 that causes duplicated-branches warning.
+ T args_[NUM_ARGS != 0 ? NUM_ARGS : +1];
+
+ template <typename... U>
+ FMT_CONSTEXPR FMT_INLINE arg_data(const U&... init) : args_{init...} {}
+ FMT_CONSTEXPR FMT_INLINE auto args() const -> const T* { return args_; }
+ FMT_CONSTEXPR FMT_INLINE auto named_args() -> std::nullptr_t {
+ return nullptr;
+ }
+};
+
+// Recursion terminator: no arguments left to record.
+template <typename Char>
+inline void init_named_args(named_arg_info<Char>*, int, int) {}
+
+template <typename T> struct is_named_arg : std::false_type {};
+template <typename T> struct is_statically_named_arg : std::false_type {};
+
+template <typename T, typename Char>
+struct is_named_arg<named_arg<Char, T>> : std::true_type {};
+
+// Positional argument: advance the argument index without recording a name.
+template <typename Char, typename T, typename... Tail,
+ FMT_ENABLE_IF(!is_named_arg<T>::value)>
+void init_named_args(named_arg_info<Char>* named_args, int arg_count,
+ int named_arg_count, const T&, const Tail&... args) {
+ init_named_args(named_args, arg_count + 1, named_arg_count, args...);
+}
+
+// Named argument: record {name, position} and advance both counters.
+template <typename Char, typename T, typename... Tail,
+ FMT_ENABLE_IF(is_named_arg<T>::value)>
+void init_named_args(named_arg_info<Char>* named_args, int arg_count,
+ int named_arg_count, const T& arg, const Tail&... args) {
+ named_args[named_arg_count++] = {arg.name, arg_count};
+ init_named_args(named_args, arg_count + 1, named_arg_count, args...);
+}
+
+// No-op overload used when there is no named-argument table (nullptr_t).
+template <typename... Args>
+FMT_CONSTEXPR FMT_INLINE void init_named_args(std::nullptr_t, int, int,
+ const Args&...) {}
+
+// Compile-time popcount over a pack of bools.
+template <bool B = false> constexpr auto count() -> size_t { return B ? 1 : 0; }
+template <bool B1, bool B2, bool... Tail> constexpr auto count() -> size_t {
+ return (B1 ? 1 : 0) + count<B2, Tail...>();
+}
+
+template <typename... Args> constexpr auto count_named_args() -> size_t {
+ return count<is_named_arg<Args>::value...>();
+}
+
+template <typename... Args>
+constexpr auto count_statically_named_args() -> size_t {
+ return count<is_statically_named_arg<Args>::value...>();
+}
+
+// Tag types returned by arg_mapper for values that cannot be formatted;
+// make_value static_asserts on each to produce a targeted error message.
+struct unformattable {};
+struct unformattable_char : unformattable {};
+struct unformattable_const : unformattable {};
+struct unformattable_pointer : unformattable {};
+
+// A non-owning (pointer, length) pair for string arguments.
+template <typename Char> struct string_value {
+ const Char* data;
+ size_t size;
+};
+
+// A non-owning view of the named-argument index table.
+template <typename Char> struct named_arg_value {
+ const named_arg_info<Char>* data;
+ size_t size;
+};
+
+// Type-erased user-defined argument: an opaque value pointer plus the
+// function that parses the spec and formats it.
+template <typename Context> struct custom_value {
+ using parse_context = typename Context::parse_context_type;
+ void* value;
+ void (*format)(void* arg, parse_context& parse_ctx, Context& ctx);
+};
+
+// A formatting argument value.
+// The union is not self-describing: the active member is recorded by the
+// type_ field of the enclosing basic_format_arg.
+template <typename Context> class value {
+ public:
+ using char_type = typename Context::char_type;
+
+ union {
+ monostate no_value;
+ int int_value;
+ unsigned uint_value;
+ long long long_long_value;
+ unsigned long long ulong_long_value;
+ int128_opt int128_value;
+ uint128_opt uint128_value;
+ bool bool_value;
+ char_type char_value;
+ float float_value;
+ double double_value;
+ long double long_double_value;
+ const void* pointer;
+ string_value<char_type> string;
+ custom_value<Context> custom;
+ named_arg_value<char_type> named_args;
+ };
+
+ constexpr FMT_INLINE value() : no_value() {}
+ constexpr FMT_INLINE value(int val) : int_value(val) {}
+ constexpr FMT_INLINE value(unsigned val) : uint_value(val) {}
+ constexpr FMT_INLINE value(long long val) : long_long_value(val) {}
+ constexpr FMT_INLINE value(unsigned long long val) : ulong_long_value(val) {}
+ FMT_INLINE value(int128_opt val) : int128_value(val) {}
+ FMT_INLINE value(uint128_opt val) : uint128_value(val) {}
+ constexpr FMT_INLINE value(float val) : float_value(val) {}
+ constexpr FMT_INLINE value(double val) : double_value(val) {}
+ FMT_INLINE value(long double val) : long_double_value(val) {}
+ constexpr FMT_INLINE value(bool val) : bool_value(val) {}
+ constexpr FMT_INLINE value(char_type val) : char_value(val) {}
+ FMT_CONSTEXPR FMT_INLINE value(const char_type* val) {
+ string.data = val;
+ // The length is computed lazily at runtime; zero it only in constexpr
+ // evaluation where the member must be initialized before being read.
+ if (is_constant_evaluated()) string.size = {};
+ }
+ FMT_CONSTEXPR FMT_INLINE value(basic_string_view<char_type> val) {
+ string.data = val.data();
+ string.size = val.size();
+ }
+ FMT_INLINE value(const void* val) : pointer(val) {}
+ FMT_INLINE value(const named_arg_info<char_type>* args, size_t size)
+ : named_args{args, size} {}
+
+ template <typename T> FMT_CONSTEXPR FMT_INLINE value(T& val) {
+ using value_type = remove_cvref_t<T>;
+ custom.value = const_cast<value_type*>(&val);
+ // Get the formatter type through the context to allow different contexts
+ // have different extension points, e.g. `formatter<T>` for `format` and
+ // `printf_formatter<T>` for `printf`.
+ custom.format = format_custom_arg<
+ value_type,
+ conditional_t<has_formatter<value_type, Context>::value,
+ typename Context::template formatter_type<value_type>,
+ fallback_formatter<value_type, char_type>>>;
+ }
+ // Deliberately declared but not defined: selecting one of these is a
+ // link-time trap; make_value rejects the cases earlier via static_assert.
+ value(unformattable);
+ value(unformattable_char);
+ value(unformattable_const);
+ value(unformattable_pointer);
+
+ private:
+ // Formats an argument of a custom type, such as a user-defined class.
+ template <typename T, typename Formatter>
+ static void format_custom_arg(void* arg,
+ typename Context::parse_context_type& parse_ctx,
+ Context& ctx) {
+ auto f = Formatter();
+ parse_ctx.advance_to(f.parse(parse_ctx));
+ using qualified_type =
+ conditional_t<has_const_formatter<T, Context>(), const T, T>;
+ ctx.advance_to(f.format(*static_cast<qualified_type*>(arg), ctx));
+ }
+};
+
+// Forward declaration; defined after basic_format_arg below.
+template <typename Context, typename T>
+FMT_CONSTEXPR auto make_arg(T&& value) -> basic_format_arg<Context>;
+
+// To minimize the number of types we need to deal with, long is translated
+// either to int or to long long depending on its size.
+enum { long_short = sizeof(long) == sizeof(int) };
+using long_type = conditional_t<long_short, int, long long>;
+using ulong_type = conditional_t<long_short, unsigned, unsigned long long>;
+
+#ifdef __cpp_lib_byte
+// Lets std::byte be formatted as its integral value via the format_as
+// extension point.
+inline auto format_as(std::byte b) -> unsigned char {
+ return static_cast<unsigned char>(b);
+}
+#endif
+
+// Detects whether an ADL-found format_as(T) returning an integral type
+// exists for the enum type T.
+template <typename T> struct has_format_as {
+ template <typename U, typename V = decltype(format_as(U())),
+ FMT_ENABLE_IF(std::is_enum<U>::value&& std::is_integral<V>::value)>
+ static auto check(U*) -> std::true_type;
+ static auto check(...) -> std::false_type;
+
+ enum { value = decltype(check(static_cast<T*>(nullptr)))::value };
+};
+
+// Maps formatting arguments to core types.
+// arg_mapper reports errors by returning unformattable instead of using
+// static_assert because it's used in the is_formattable trait.
+// Each map() overload narrows an input type to one of the small set of
+// storable core types; overload resolution picks the mapping.
+template <typename Context> struct arg_mapper {
+ using char_type = typename Context::char_type;
+
+ // Integral types: widened to int/unsigned or kept as (u)long long.
+ FMT_CONSTEXPR FMT_INLINE auto map(signed char val) -> int { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(unsigned char val) -> unsigned {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(short val) -> int { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(unsigned short val) -> unsigned {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(int val) -> int { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(unsigned val) -> unsigned { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(long val) -> long_type { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(unsigned long val) -> ulong_type {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(long long val) -> long long { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(unsigned long long val)
+ -> unsigned long long {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(int128_opt val) -> int128_opt {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(uint128_opt val) -> uint128_opt {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(bool val) -> bool { return val; }
+
+ // Character types: only char and the context's char type are accepted;
+ // any other character type is rejected to disallow mixing char types.
+ template <typename T, FMT_ENABLE_IF(std::is_same<T, char>::value ||
+ std::is_same<T, char_type>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(T val) -> char_type {
+ return val;
+ }
+ template <typename T, enable_if_t<(std::is_same<T, wchar_t>::value ||
+#ifdef __cpp_char8_t
+ std::is_same<T, char8_t>::value ||
+#endif
+ std::is_same<T, char16_t>::value ||
+ std::is_same<T, char32_t>::value) &&
+ !std::is_same<T, char_type>::value,
+ int> = 0>
+ FMT_CONSTEXPR FMT_INLINE auto map(T) -> unformattable_char {
+ return {};
+ }
+
+ FMT_CONSTEXPR FMT_INLINE auto map(float val) -> float { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(double val) -> double { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(long double val) -> long double {
+ return val;
+ }
+
+ // String types: C strings pass through; string-like objects become views.
+ FMT_CONSTEXPR FMT_INLINE auto map(char_type* val) -> const char_type* {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(const char_type* val) -> const char_type* {
+ return val;
+ }
+ template <typename T,
+ FMT_ENABLE_IF(is_string<T>::value && !std::is_pointer<T>::value &&
+ std::is_same<char_type, char_t<T>>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T& val)
+ -> basic_string_view<char_type> {
+ return to_string_view(val);
+ }
+ // A string of a different character type: reject (mixed char types).
+ template <typename T,
+ FMT_ENABLE_IF(is_string<T>::value && !std::is_pointer<T>::value &&
+ !std::is_same<char_type, char_t<T>>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T&) -> unformattable_char {
+ return {};
+ }
+ // Types convertible to a string view, unless they have their own formatter.
+ template <typename T,
+ FMT_ENABLE_IF(
+ std::is_convertible<T, basic_string_view<char_type>>::value &&
+ !is_string<T>::value && !has_formatter<T, Context>::value &&
+ !has_fallback_formatter<T, char_type>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T& val)
+ -> basic_string_view<char_type> {
+ return basic_string_view<char_type>(val);
+ }
+ template <typename T,
+ FMT_ENABLE_IF(
+ std::is_convertible<T, std_string_view<char_type>>::value &&
+ !std::is_convertible<T, basic_string_view<char_type>>::value &&
+ !is_string<T>::value && !has_formatter<T, Context>::value &&
+ !has_fallback_formatter<T, char_type>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T& val)
+ -> basic_string_view<char_type> {
+ return std_string_view<char_type>(val);
+ }
+
+ // Pointers: only void pointers (and nullptr) are formattable.
+ FMT_CONSTEXPR FMT_INLINE auto map(void* val) -> const void* { return val; }
+ FMT_CONSTEXPR FMT_INLINE auto map(const void* val) -> const void* {
+ return val;
+ }
+ FMT_CONSTEXPR FMT_INLINE auto map(std::nullptr_t val) -> const void* {
+ return val;
+ }
+
+ // We use SFINAE instead of a const T* parameter to avoid conflicting with
+ // the C array overload.
+ template <
+ typename T,
+ FMT_ENABLE_IF(
+ std::is_pointer<T>::value || std::is_member_pointer<T>::value ||
+ std::is_function<typename std::remove_pointer<T>::type>::value ||
+ (std::is_convertible<const T&, const void*>::value &&
+ !std::is_convertible<const T&, const char_type*>::value &&
+ !has_formatter<T, Context>::value))>
+ FMT_CONSTEXPR auto map(const T&) -> unformattable_pointer {
+ return {};
+ }
+
+ // C arrays (e.g. string literals of the right char type) pass by reference.
+ template <typename T, std::size_t N,
+ FMT_ENABLE_IF(!std::is_same<T, wchar_t>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T (&values)[N]) -> const T (&)[N] {
+ return values;
+ }
+
+ // Deprecated implicit formatting of enums via their underlying type;
+ // prefer providing format_as or a formatter specialization.
+ template <typename T,
+ FMT_ENABLE_IF(
+ std::is_enum<T>::value&& std::is_convertible<T, int>::value &&
+ !has_format_as<T>::value && !has_formatter<T, Context>::value &&
+ !has_fallback_formatter<T, char_type>::value)>
+ FMT_DEPRECATED FMT_CONSTEXPR FMT_INLINE auto map(const T& val)
+ -> decltype(std::declval<arg_mapper>().map(
+ static_cast<underlying_t<T>>(val))) {
+ return map(static_cast<underlying_t<T>>(val));
+ }
+
+ // Enums with a user-provided format_as: map the converted value.
+ template <typename T, FMT_ENABLE_IF(has_format_as<T>::value &&
+ !has_formatter<T, Context>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T& val)
+ -> decltype(std::declval<arg_mapper>().map(format_as(T()))) {
+ return map(format_as(val));
+ }
+
+ // A const argument is formattable if its formatter accepts const, the
+ // argument is not actually const, or a fallback formatter exists.
+ template <typename T, typename U = remove_cvref_t<T>>
+ struct formattable
+ : bool_constant<has_const_formatter<U, Context>() ||
+ !std::is_const<remove_reference_t<T>>::value ||
+ has_fallback_formatter<U, char_type>::value> {};
+
+#if (FMT_MSC_VERSION != 0 && FMT_MSC_VERSION < 1910) || \
+ FMT_ICC_VERSION != 0 || defined(__NVCC__)
+ // Workaround a bug in MSVC and Intel (Issue 2746).
+ template <typename T> FMT_CONSTEXPR FMT_INLINE auto do_map(T&& val) -> T& {
+ return val;
+ }
+#else
+ template <typename T, FMT_ENABLE_IF(formattable<T>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto do_map(T&& val) -> T& {
+ return val;
+ }
+ template <typename T, FMT_ENABLE_IF(!formattable<T>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto do_map(T&&) -> unformattable_const {
+ return {};
+ }
+#endif
+
+ // User-defined types with a (fallback) formatter: kept as-is and
+ // formatted through the custom_value machinery.
+ template <typename T, typename U = remove_cvref_t<T>,
+ FMT_ENABLE_IF(!is_string<U>::value && !is_char<U>::value &&
+ !std::is_array<U>::value &&
+ !std::is_pointer<U>::value &&
+ !has_format_as<U>::value &&
+ (has_formatter<U, Context>::value ||
+ has_fallback_formatter<U, char_type>::value))>
+ FMT_CONSTEXPR FMT_INLINE auto map(T&& val)
+ -> decltype(this->do_map(std::forward<T>(val))) {
+ return do_map(std::forward<T>(val));
+ }
+
+ // Named arguments: map the wrapped value.
+ template <typename T, FMT_ENABLE_IF(is_named_arg<T>::value)>
+ FMT_CONSTEXPR FMT_INLINE auto map(const T& named_arg)
+ -> decltype(std::declval<arg_mapper>().map(named_arg.value)) {
+ return map(named_arg.value);
+ }
+
+ // Catch-all: anything not matched above is not formattable.
+ auto map(...) -> unformattable { return {}; }
+};
+
+// A type constant after applying arg_mapper<Context>.
+template <typename T, typename Context>
+using mapped_type_constant =
+ type_constant<decltype(arg_mapper<Context>().map(std::declval<const T&>())),
+ typename Context::char_type>;
+
+// Argument types are packed 4 bits each into a 64-bit descriptor; the top
+// two bits flag an unpacked argument array and the presence of named args.
+enum { packed_arg_bits = 4 };
+// Maximum number of arguments with packed types.
+enum { max_packed_args = 62 / packed_arg_bits };
+enum : unsigned long long { is_unpacked_bit = 1ULL << 63 };
+enum : unsigned long long { has_named_args_bit = 1ULL << 62 };
+
+FMT_END_DETAIL_NAMESPACE
+
+// An output iterator that appends to a buffer.
+// It is used to reduce symbol sizes for the common case.
+class appender : public std::back_insert_iterator<detail::buffer<char>> {
+ using base = std::back_insert_iterator<detail::buffer<char>>;
+
+ // Hidden friend: recovers the underlying buffer from an appender.
+ template <typename T>
+ friend auto get_buffer(appender out) -> detail::buffer<char>& {
+ return detail::get_container(out);
+ }
+
+ public:
+ using std::back_insert_iterator<detail::buffer<char>>::back_insert_iterator;
+ appender(base it) noexcept : base(it) {}
+ FMT_UNCHECKED_ITERATOR(appender);
+
+ // Increment is a no-op: writes go through the base's operator= which
+ // pushes directly into the buffer.
+ auto operator++() noexcept -> appender& { return *this; }
+ auto operator++(int) noexcept -> appender { return *this; }
+};
+
+// A formatting argument. It is a trivially copyable/constructible type to
+// allow storage in basic_memory_buffer.
+// Pairs a detail::value union with the enum tag naming its active member;
+// construction goes through detail::make_arg (a friend).
+template <typename Context> class basic_format_arg {
+ private:
+ detail::value<Context> value_;
+ detail::type type_;
+
+ template <typename ContextType, typename T>
+ friend FMT_CONSTEXPR auto detail::make_arg(T&& value)
+ -> basic_format_arg<ContextType>;
+
+ template <typename Visitor, typename Ctx>
+ friend FMT_CONSTEXPR auto visit_format_arg(Visitor&& vis,
+ const basic_format_arg<Ctx>& arg)
+ -> decltype(vis(0));
+
+ friend class basic_format_args<Context>;
+ friend class dynamic_format_arg_store<Context>;
+
+ using char_type = typename Context::char_type;
+
+ template <typename T, typename Char, size_t NUM_ARGS, size_t NUM_NAMED_ARGS>
+ friend struct detail::arg_data;
+
+ // Constructs the named-args sentinel entry (see detail::arg_data).
+ basic_format_arg(const detail::named_arg_info<char_type>* args, size_t size)
+ : value_(args, size) {}
+
+ public:
+ // Type-erased handle to a user-defined argument, exposed to visitors.
+ class handle {
+ public:
+ explicit handle(detail::custom_value<Context> custom) : custom_(custom) {}
+
+ void format(typename Context::parse_context_type& parse_ctx,
+ Context& ctx) const {
+ custom_.format(custom_.value, parse_ctx, ctx);
+ }
+
+ private:
+ detail::custom_value<Context> custom_;
+ };
+
+ constexpr basic_format_arg() : type_(detail::type::none_type) {}
+
+ // True unless this argument is empty/absent (none_type).
+ constexpr explicit operator bool() const noexcept {
+ return type_ != detail::type::none_type;
+ }
+
+ auto type() const -> detail::type { return type_; }
+
+ auto is_integral() const -> bool { return detail::is_integral_type(type_); }
+ auto is_arithmetic() const -> bool {
+ return detail::is_arithmetic_type(type_);
+ }
+};
+
+/**
+ \rst
+ Visits an argument dispatching to the appropriate visit method based on
+ the argument type. For example, if the argument type is ``double`` then
+ ``vis(value)`` will be called with the value of type ``double``.
+ \endrst
+ */
+template <typename Visitor, typename Context>
+FMT_CONSTEXPR FMT_INLINE auto visit_format_arg(
+ Visitor&& vis, const basic_format_arg<Context>& arg) -> decltype(vis(0)) {
+ switch (arg.type_) {
+ case detail::type::none_type:
+ // Falls through to the monostate call below.
+ break;
+ case detail::type::int_type:
+ return vis(arg.value_.int_value);
+ case detail::type::uint_type:
+ return vis(arg.value_.uint_value);
+ case detail::type::long_long_type:
+ return vis(arg.value_.long_long_value);
+ case detail::type::ulong_long_type:
+ return vis(arg.value_.ulong_long_value);
+ case detail::type::int128_type:
+ return vis(detail::convert_for_visit(arg.value_.int128_value));
+ case detail::type::uint128_type:
+ return vis(detail::convert_for_visit(arg.value_.uint128_value));
+ case detail::type::bool_type:
+ return vis(arg.value_.bool_value);
+ case detail::type::char_type:
+ return vis(arg.value_.char_value);
+ case detail::type::float_type:
+ return vis(arg.value_.float_value);
+ case detail::type::double_type:
+ return vis(arg.value_.double_value);
+ case detail::type::long_double_type:
+ return vis(arg.value_.long_double_value);
+ case detail::type::cstring_type:
+ return vis(arg.value_.string.data);
+ case detail::type::string_type:
+ using sv = basic_string_view<typename Context::char_type>;
+ return vis(sv(arg.value_.string.data, arg.value_.string.size));
+ case detail::type::pointer_type:
+ return vis(arg.value_.pointer);
+ case detail::type::custom_type:
+ // User-defined types are visited through a type-erased handle.
+ return vis(typename basic_format_arg<Context>::handle(arg.value_.custom));
+ }
+ return vis(monostate());
+}
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+// copy_str overload for appender: appends the whole range to the
+// underlying buffer in one call instead of element by element.
+template <typename Char, typename InputIt>
+auto copy_str(InputIt begin, InputIt end, appender out) -> appender {
+ get_container(out).append(begin, end);
+ return out;
+}
+
+#if FMT_GCC_VERSION && FMT_GCC_VERSION < 500
+// A workaround for gcc 4.8 to make void_t work in a SFINAE context.
+template <typename... Ts> struct void_t_impl { using type = void; };
+template <typename... Ts>
+using void_t = typename detail::void_t_impl<Ts...>::type;
+#else
+template <typename...> using void_t = void;
+#endif
+
+// Detects whether It is an output iterator accepting values of type T:
+// it must have iterator_traits and support `*it = T{}`.
+template <typename It, typename T, typename Enable = void>
+struct is_output_iterator : std::false_type {};
+
+template <typename It, typename T>
+struct is_output_iterator<
+ It, T,
+ void_t<typename std::iterator_traits<It>::iterator_category,
+ decltype(*std::declval<It>() = std::declval<T>())>>
+ : std::true_type {};
+
+template <typename OutputIt>
+struct is_back_insert_iterator : std::false_type {};
+template <typename Container>
+struct is_back_insert_iterator<std::back_insert_iterator<Container>>
+ : std::true_type {};
+
+// True for back-insert iterators over contiguous containers (and for
+// appender, which inserts into detail::buffer<char>).
+template <typename OutputIt>
+struct is_contiguous_back_insert_iterator : std::false_type {};
+template <typename Container>
+struct is_contiguous_back_insert_iterator<std::back_insert_iterator<Container>>
+ : is_contiguous<Container> {};
+template <>
+struct is_contiguous_back_insert_iterator<appender> : std::true_type {};
+
+// A type-erased reference to an std::locale to avoid a heavy <locale> include.
+// The templated constructor and get() are defined where <locale> is
+// actually available.
+class locale_ref {
+ private:
+ const void* locale_; // A type-erased pointer to std::locale.
+
+ public:
+ constexpr locale_ref() : locale_(nullptr) {}
+ template <typename Locale> explicit locale_ref(const Locale& loc);
+
+ // True if a locale was supplied (non-default construction).
+ explicit operator bool() const noexcept { return locale_ != nullptr; }
+
+ template <typename Locale> auto get() const -> Locale;
+};
+
+template <typename> constexpr auto encode_types() -> unsigned long long {
+ return 0;
+}
+
+template <typename Context, typename Arg, typename... Args>
+constexpr auto encode_types() -> unsigned long long {
+ return static_cast<unsigned>(mapped_type_constant<Arg, Context>::value) |
+ (encode_types<Context, Args...>() << packed_arg_bits);
+}
+
+// Maps val through arg_mapper and wraps the result in a value<Context>.
+// Each static_assert turns one of the unformattable tag types returned by
+// arg_mapper into a specific, readable compile error.
+template <typename Context, typename T>
+FMT_CONSTEXPR FMT_INLINE auto make_value(T&& val) -> value<Context> {
+ const auto& arg = arg_mapper<Context>().map(std::forward<T>(val));
+
+ constexpr bool formattable_char =
+ !std::is_same<decltype(arg), const unformattable_char&>::value;
+ static_assert(formattable_char, "Mixing character types is disallowed.");
+
+ constexpr bool formattable_const =
+ !std::is_same<decltype(arg), const unformattable_const&>::value;
+ static_assert(formattable_const, "Cannot format a const argument.");
+
+ // Formatting of arbitrary pointers is disallowed. If you want to output
+ // a pointer cast it to "void *" or "const void *". In particular, this
+ // forbids formatting of "[const] volatile char *" which is printed as bool
+ // by iostreams.
+ constexpr bool formattable_pointer =
+ !std::is_same<decltype(arg), const unformattable_pointer&>::value;
+ static_assert(formattable_pointer,
+ "Formatting of non-void pointers is disallowed.");
+
+ constexpr bool formattable =
+ !std::is_same<decltype(arg), const unformattable&>::value;
+ static_assert(
+ formattable,
+ "Cannot format an argument. To make type T formattable provide a "
+ "formatter<T> specialization: https://fmt.dev/latest/api.html#udt");
+ return {arg};
+}
+
+// Builds a full basic_format_arg (value + type tag) for T.
+template <typename Context, typename T>
+FMT_CONSTEXPR auto make_arg(T&& value) -> basic_format_arg<Context> {
+ basic_format_arg<Context> arg;
+ arg.type_ = mapped_type_constant<T, Context>::value;
+ arg.value_ = make_value<Context>(value);
+ return arg;
+}
+
+// The type template parameter is there to avoid an ODR violation when using
+// a fallback formatter in one translation unit and an implicit conversion in
+// another (not recommended).
+// Packed storage: only the value is stored; the type tag lives in the
+// shared 64-bit descriptor (see encode_types).
+template <bool IS_PACKED, typename Context, type, typename T,
+ FMT_ENABLE_IF(IS_PACKED)>
+FMT_CONSTEXPR FMT_INLINE auto make_arg(T&& val) -> value<Context> {
+ return make_value<Context>(val);
+}
+
+// Unpacked storage: each argument carries its own type tag.
+template <bool IS_PACKED, typename Context, type, typename T,
+ FMT_ENABLE_IF(!IS_PACKED)>
+FMT_CONSTEXPR inline auto make_arg(T&& value) -> basic_format_arg<Context> {
+ return make_arg<Context>(value);
+}
+FMT_END_DETAIL_NAMESPACE
+
// Formatting context: bundles the output iterator, the type-erased argument
// view and an optional locale for one formatting call.
template <typename OutputIt, typename Char> class basic_format_context {
 public:
  /** The character type for the output. */
  using char_type = Char;

 private:
  // Current output position.
  OutputIt out_;
  // Non-owning view of the formatting arguments.
  basic_format_args<basic_format_context> args_;
  // Locale used only when the 'L' specifier is present.
  detail::locale_ref loc_;

 public:
  using iterator = OutputIt;
  using format_arg = basic_format_arg<basic_format_context>;
  using parse_context_type = basic_format_parse_context<Char>;
  template <typename T> using formatter_type = formatter<T, char_type>;

  // Move-only: copying a context would duplicate the output position.
  basic_format_context(basic_format_context&&) = default;
  basic_format_context(const basic_format_context&) = delete;
  void operator=(const basic_format_context&) = delete;
  /**
   Constructs a ``basic_format_context`` object. References to the arguments are
   stored in the object so make sure they have appropriate lifetimes.
   */
  constexpr basic_format_context(
      OutputIt out, basic_format_args<basic_format_context> ctx_args,
      detail::locale_ref loc = detail::locale_ref())
      : out_(out), args_(ctx_args), loc_(loc) {}

  // Argument access by positional id or by name.
  constexpr auto arg(int id) const -> format_arg { return args_.get(id); }
  FMT_CONSTEXPR auto arg(basic_string_view<char_type> name) -> format_arg {
    return args_.get(name);
  }
  FMT_CONSTEXPR auto arg_id(basic_string_view<char_type> name) -> int {
    return args_.get_id(name);
  }
  auto args() const -> const basic_format_args<basic_format_context>& {
    return args_;
  }

  FMT_CONSTEXPR auto error_handler() -> detail::error_handler { return {}; }
  void on_error(const char* message) { error_handler().on_error(message); }

  // Returns an iterator to the beginning of the output range.
  FMT_CONSTEXPR auto out() -> iterator { return out_; }

  // Advances the begin iterator to ``it``.
  // No-op for back_insert_iterator, which has no meaningful position.
  void advance_to(iterator it) {
    if (!detail::is_back_insert_iterator<iterator>()) out_ = it;
  }

  FMT_CONSTEXPR auto locale() -> detail::locale_ref { return loc_; }
};
+
// Context used when formatting into an internal growable buffer;
// format_context is its narrow-char instantiation.
template <typename Char>
using buffer_context =
    basic_format_context<detail::buffer_appender<Char>, Char>;
using format_context = buffer_context<char>;

// Workaround an alias issue: https://stackoverflow.com/q/62767544/471164.
#define FMT_BUFFER_CONTEXT(Char) \
  basic_format_context<detail::buffer_appender<Char>, Char>

// Trait: T is formattable iff its arg_mapper result is not `unformattable`
// and no (deprecated) fallback formatter is involved.
template <typename T, typename Char = char>
using is_formattable = bool_constant<
    !std::is_base_of<detail::unformattable,
                     decltype(detail::arg_mapper<buffer_context<Char>>().map(
                         std::declval<T>()))>::value &&
    !detail::has_fallback_formatter<T, Char>::value>;
+
/**
  \rst
  An array of references to arguments. It can be implicitly converted into
  `~fmt::basic_format_args` for passing into type-erased formatting functions
  such as `~fmt::vformat`.
  \endrst
 */
template <typename Context, typename... Args>
class format_arg_store
#if FMT_GCC_VERSION && FMT_GCC_VERSION < 409
    // Workaround a GCC template argument substitution bug.
    : public basic_format_args<Context>
#endif
{
 private:
  static const size_t num_args = sizeof...(Args);
  static const size_t num_named_args = detail::count_named_args<Args...>();
  // When all args fit in max_packed_args their types go into `desc` and the
  // store keeps bare values; otherwise it keeps full basic_format_args.
  static const bool is_packed = num_args <= detail::max_packed_args;

  using value_type = conditional_t<is_packed, detail::value<Context>,
                                   basic_format_arg<Context>>;

  detail::arg_data<value_type, typename Context::char_type, num_args,
                   num_named_args>
      data_;

  friend class basic_format_args<Context>;

  // Compile-time descriptor consumed by basic_format_args: packed type tags
  // or the unpacked bit plus count, and a flag for named arguments.
  static constexpr unsigned long long desc =
      (is_packed ? detail::encode_types<Context, Args...>()
                 : detail::is_unpacked_bit | num_args) |
      (num_named_args != 0
           ? static_cast<unsigned long long>(detail::has_named_args_bit)
           : 0);

 public:
  template <typename... T>
  FMT_CONSTEXPR FMT_INLINE format_arg_store(T&&... args)
      :
#if FMT_GCC_VERSION && FMT_GCC_VERSION < 409
        basic_format_args<Context>(*this),
#endif
        data_{detail::make_arg<
            is_packed, Context,
            detail::mapped_type_constant<remove_cvref_t<T>, Context>::value>(
            std::forward<T>(args))...} {
    detail::init_named_args(data_.named_args(), 0, 0, args...);
  }
};
+
/**
  \rst
  Constructs a `~fmt::format_arg_store` object that contains references to
  arguments and can be implicitly converted to `~fmt::format_args`. `Context`
  can be omitted in which case it defaults to `~fmt::context`.
  See `~fmt::arg` for lifetime considerations.
  \endrst
 */
// NOTE: the returned store only references its arguments; it must not
// outlive them (see the dangling-reference example at basic_format_args).
template <typename Context = format_context, typename... Args>
constexpr auto make_format_args(Args&&... args)
    -> format_arg_store<Context, remove_cvref_t<Args>...> {
  return {std::forward<Args>(args)...};
}
+
/**
  \rst
  Returns a named argument to be used in a formatting function.
  It should only be used in a call to a formatting function or
  `dynamic_format_arg_store::push_back`.

  **Example**::

    fmt::print("Elapsed time: {s:.2f} seconds", fmt::arg("s", 1.23));
  \endrst
 */
template <typename Char, typename T>
inline auto arg(const Char* name, const T& arg) -> detail::named_arg<Char, T> {
  // Wrapping an already-named argument would lose the outer name silently.
  static_assert(!detail::is_named_arg<T>(), "nested named arguments");
  return {name, arg};
}
+
/**
  \rst
  A view of a collection of formatting arguments. To avoid lifetime issues it
  should only be used as a parameter type in type-erased functions such as
  ``vformat``::

    void vlog(string_view format_str, format_args args); // OK
    format_args args = make_format_args(42); // Error: dangling reference
  \endrst
 */
template <typename Context> class basic_format_args {
 public:
  using size_type = int;
  using format_arg = basic_format_arg<Context>;

 private:
  // A descriptor that contains information about formatting arguments.
  // If the number of arguments is less or equal to max_packed_args then
  // argument types are passed in the descriptor. This reduces binary code size
  // per formatting function call.
  unsigned long long desc_;
  union {
    // If is_packed() returns true then argument values are stored in values_;
    // otherwise they are stored in args_. This is done to improve cache
    // locality and reduce compiled code size since storing larger objects
    // may require more code (at least on x86-64) even if the same amount of
    // data is actually copied to stack. It saves ~10% on the bloat test.
    const detail::value<Context>* values_;
    const format_arg* args_;
  };

  constexpr auto is_packed() const -> bool {
    return (desc_ & detail::is_unpacked_bit) == 0;
  }
  auto has_named_args() const -> bool {
    return (desc_ & detail::has_named_args_bit) != 0;
  }

  // Extracts the type tag of the argument at `index` from the packed
  // descriptor (packed_arg_bits bits per argument).
  FMT_CONSTEXPR auto type(int index) const -> detail::type {
    int shift = index * detail::packed_arg_bits;
    unsigned int mask = (1 << detail::packed_arg_bits) - 1;
    return static_cast<detail::type>((desc_ >> shift) & mask);
  }

  constexpr FMT_INLINE basic_format_args(unsigned long long desc,
                                         const detail::value<Context>* values)
      : desc_(desc), values_(values) {}
  constexpr basic_format_args(unsigned long long desc, const format_arg* args)
      : desc_(desc), args_(args) {}

 public:
  constexpr basic_format_args() : desc_(0), args_(nullptr) {}

  /**
   \rst
   Constructs a `basic_format_args` object from `~fmt::format_arg_store`.
   \endrst
   */
  template <typename... Args>
  constexpr FMT_INLINE basic_format_args(
      const format_arg_store<Context, Args...>& store)
      : basic_format_args(format_arg_store<Context, Args...>::desc,
                          store.data_.args()) {}

  /**
   \rst
   Constructs a `basic_format_args` object from
   `~fmt::dynamic_format_arg_store`.
   \endrst
   */
  constexpr FMT_INLINE basic_format_args(
      const dynamic_format_arg_store<Context>& store)
      : basic_format_args(store.get_types(), store.data()) {}

  /**
   \rst
   Constructs a `basic_format_args` object from a dynamic set of arguments.
   \endrst
   */
  constexpr basic_format_args(const format_arg* args, int count)
      : basic_format_args(detail::is_unpacked_bit | detail::to_unsigned(count),
                          args) {}

  /** Returns the argument with the specified id. */
  // Out-of-range ids yield a default (none-typed) format_arg rather than an
  // error; callers check the type.
  FMT_CONSTEXPR auto get(int id) const -> format_arg {
    format_arg arg;
    if (!is_packed()) {
      if (id < max_size()) arg = args_[id];
      return arg;
    }
    if (id >= detail::max_packed_args) return arg;
    arg.type_ = type(id);
    if (arg.type_ == detail::type::none_type) return arg;
    arg.value_ = values_[id];
    return arg;
  }

  template <typename Char>
  auto get(basic_string_view<Char> name) const -> format_arg {
    int id = get_id(name);
    return id >= 0 ? get(id) : format_arg();
  }

  // Linear lookup of a named argument; returns -1 if not found. The named
  // argument table is stored at index -1 of the value/arg array (see
  // arg_data layout).
  template <typename Char>
  auto get_id(basic_string_view<Char> name) const -> int {
    if (!has_named_args()) return -1;
    const auto& named_args =
        (is_packed() ? values_[-1] : args_[-1].value_).named_args;
    for (size_t i = 0; i < named_args.size; ++i) {
      if (named_args.data[i].name == name) return named_args.data[i].id;
    }
    return -1;
  }

  // Upper bound on valid argument ids: the exact count when unpacked,
  // max_packed_args when packed.
  auto max_size() const -> int {
    unsigned long long max_packed = detail::max_packed_args;
    return static_cast<int>(is_packed() ? max_packed
                                        : desc_ & ~detail::is_unpacked_bit);
  }
};
+
/** An alias to ``basic_format_args<format_context>``. */
// A separate type would result in shorter symbols but break ABI compatibility
// between clang and gcc on ARM (#1919).
using format_args = basic_format_args<format_context>;

// We cannot use enum classes as bit fields because of a gcc bug, so we put them
// in namespaces instead (https://gcc.gnu.org/bugzilla/show_bug.cgi?id=61414).
// Additionally, if an underlying type is specified, older gcc incorrectly warns
// that the type is too small. Both bugs are fixed in gcc 9.3.
#if FMT_GCC_VERSION && FMT_GCC_VERSION < 903
# define FMT_ENUM_UNDERLYING_TYPE(type)
#else
# define FMT_ENUM_UNDERLYING_TYPE(type) : type
#endif
// Horizontal alignment of a value within its field ('<', '>', '^').
namespace align {
enum type FMT_ENUM_UNDERLYING_TYPE(unsigned char){none, left, right, center,
                                                  numeric};
}
using align_t = align::type;
// Sign display policy for numeric values ('-', '+', ' ').
namespace sign {
enum type FMT_ENUM_UNDERLYING_TYPE(unsigned char){none, minus, plus, space};
}
using sign_t = sign::type;
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
// Workaround an array initialization issue in gcc 4.8.
// Small inline buffer holding the fill character(s); a fill may be a
// multi-code-unit UTF-8 code point, hence up to max_size units.
template <typename Char> struct fill_t {
 private:
  enum { max_size = 4 };
  // Defaults to a single space.
  Char data_[max_size] = {Char(' '), Char(0), Char(0), Char(0)};
  unsigned char size_ = 1;

 public:
  // Copies the fill from `s`; reports an error when it exceeds max_size.
  FMT_CONSTEXPR void operator=(basic_string_view<Char> s) {
    auto size = s.size();
    if (size > max_size) return throw_format_error("invalid fill");
    for (size_t i = 0; i < size; ++i) data_[i] = s[i];
    size_ = static_cast<unsigned char>(size);
  }

  constexpr auto size() const -> size_t { return size_; }
  constexpr auto data() const -> const Char* { return data_; }

  FMT_CONSTEXPR auto operator[](size_t index) -> Char& { return data_[index]; }
  FMT_CONSTEXPR auto operator[](size_t index) const -> const Char& {
    return data_[index];
  }
};
+FMT_END_DETAIL_NAMESPACE
+
// Presentation type parsed from the format spec's trailing letter.
// The relative order matters: code elsewhere compares against bin_upper to
// test for "integer-like" presentations.
enum class presentation_type : unsigned char {
  none,
  // Integer types should go first,
  dec,             // 'd'
  oct,             // 'o'
  hex_lower,       // 'x'
  hex_upper,       // 'X'
  bin_lower,       // 'b'
  bin_upper,       // 'B'
  hexfloat_lower,  // 'a'
  hexfloat_upper,  // 'A'
  exp_lower,       // 'e'
  exp_upper,       // 'E'
  fixed_lower,     // 'f'
  fixed_upper,     // 'F'
  general_lower,   // 'g'
  general_upper,   // 'G'
  chr,             // 'c'
  string,          // 's'
  pointer,         // 'p'
  debug            // '?'
};
+
// Format specifiers for built-in and string types.
// Bit-fields keep the struct small; see the align/sign namespace-enum
// workaround above for why plain enums are used here.
template <typename Char> struct basic_format_specs {
  int width;
  int precision;  // -1 means "not specified"
  presentation_type type;
  align_t align : 4;
  sign_t sign : 3;
  bool alt : 1;  // Alternate form ('#').
  bool localized : 1;  // Locale-specific formatting ('L').
  detail::fill_t<Char> fill;

  constexpr basic_format_specs()
      : width(0),
        precision(-1),
        type(presentation_type::none),
        align(align::none),
        sign(sign::none),
        alt(false),
        localized(false) {}
};

using format_specs = basic_format_specs<char>;
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
// How a dynamic width/precision refers to its argument: not at all, by
// positional index, or by name.
enum class arg_id_kind { none, index, name };

// An argument reference.
template <typename Char> struct arg_ref {
  FMT_CONSTEXPR arg_ref() : kind(arg_id_kind::none), val() {}

  FMT_CONSTEXPR explicit arg_ref(int index)
      : kind(arg_id_kind::index), val(index) {}
  FMT_CONSTEXPR explicit arg_ref(basic_string_view<Char> name)
      : kind(arg_id_kind::name), val(name) {}

  FMT_CONSTEXPR auto operator=(int idx) -> arg_ref& {
    kind = arg_id_kind::index;
    val.index = idx;
    return *this;
  }

  arg_id_kind kind;
  // Tagged by `kind`; only the matching member is valid.
  union value {
    FMT_CONSTEXPR value(int id = 0) : index{id} {}
    FMT_CONSTEXPR value(basic_string_view<Char> n) : name(n) {}

    int index;
    basic_string_view<Char> name;
  } val;
};
+
// Format specifiers with width and precision resolved at formatting rather
// than parsing time to allow re-using the same parsed specifiers with
// different sets of arguments (precompilation of format strings).
template <typename Char>
struct dynamic_format_specs : basic_format_specs<Char> {
  arg_ref<Char> width_ref;
  arg_ref<Char> precision_ref;
};

// Tag type signalling an automatically assigned argument id ("{}").
struct auto_id {};
+
// A format specifier handler that sets fields in basic_format_specs.
// Each on_* callback corresponds to one element of the spec grammar and is
// invoked by parse_format_specs.
template <typename Char> class specs_setter {
 protected:
  basic_format_specs<Char>& specs_;

 public:
  explicit FMT_CONSTEXPR specs_setter(basic_format_specs<Char>& specs)
      : specs_(specs) {}

  FMT_CONSTEXPR specs_setter(const specs_setter& other)
      : specs_(other.specs_) {}

  FMT_CONSTEXPR void on_align(align_t align) { specs_.align = align; }
  FMT_CONSTEXPR void on_fill(basic_string_view<Char> fill) {
    specs_.fill = fill;
  }
  FMT_CONSTEXPR void on_sign(sign_t s) { specs_.sign = s; }
  FMT_CONSTEXPR void on_hash() { specs_.alt = true; }
  FMT_CONSTEXPR void on_localized() { specs_.localized = true; }

  // '0' flag: zero-fill with numeric alignment unless an explicit alignment
  // was already given.
  FMT_CONSTEXPR void on_zero() {
    if (specs_.align == align::none) specs_.align = align::numeric;
    specs_.fill[0] = Char('0');
  }

  FMT_CONSTEXPR void on_width(int width) { specs_.width = width; }
  FMT_CONSTEXPR void on_precision(int precision) {
    specs_.precision = precision;
  }
  FMT_CONSTEXPR void end_precision() {}

  FMT_CONSTEXPR void on_type(presentation_type type) { specs_.type = type; }
};
+
// Format spec handler that saves references to arguments representing dynamic
// width and precision to be resolved at formatting time.
template <typename ParseContext>
class dynamic_specs_handler
    : public specs_setter<typename ParseContext::char_type> {
 public:
  using char_type = typename ParseContext::char_type;

  FMT_CONSTEXPR dynamic_specs_handler(dynamic_format_specs<char_type>& specs,
                                      ParseContext& ctx)
      : specs_setter<char_type>(specs), specs_(specs), context_(ctx) {}

  FMT_CONSTEXPR dynamic_specs_handler(const dynamic_specs_handler& other)
      : specs_setter<char_type>(other),
        specs_(other.specs_),
        context_(other.context_) {}

  template <typename Id> FMT_CONSTEXPR void on_dynamic_width(Id arg_id) {
    specs_.width_ref = make_arg_ref(arg_id);
  }

  template <typename Id> FMT_CONSTEXPR void on_dynamic_precision(Id arg_id) {
    specs_.precision_ref = make_arg_ref(arg_id);
  }

  FMT_CONSTEXPR void on_error(const char* message) {
    context_.on_error(message);
  }

 private:
  dynamic_format_specs<char_type>& specs_;
  ParseContext& context_;

  using arg_ref_type = arg_ref<char_type>;

  // Overloads map the three id forms (explicit index, automatic, name) to
  // arg_ref while validating them against the parse context.
  FMT_CONSTEXPR auto make_arg_ref(int arg_id) -> arg_ref_type {
    context_.check_arg_id(arg_id);
    return arg_ref_type(arg_id);
  }

  FMT_CONSTEXPR auto make_arg_ref(auto_id) -> arg_ref_type {
    return arg_ref_type(context_.next_arg_id());
  }

  FMT_CONSTEXPR auto make_arg_ref(basic_string_view<char_type> arg_id)
      -> arg_ref_type {
    context_.check_arg_id(arg_id);
    basic_string_view<char_type> format_str(
        context_.begin(), to_unsigned(context_.end() - context_.begin()));
    return arg_ref_type(arg_id);
  }
};
+
// Returns true iff `c` is an ASCII letter (A-Z or a-z).
template <typename Char> constexpr bool is_ascii_letter(Char c) {
  return ('A' <= c && c <= 'Z') || ('a' <= c && c <= 'z');
}
+
// Converts a character to ASCII. Returns a number > 127 on conversion failure.
// Integral character types pass through unchanged.
template <typename Char, FMT_ENABLE_IF(std::is_integral<Char>::value)>
constexpr auto to_ascii(Char c) -> Char {
  return c;
}
// Enum character types (e.g. char8_t emulations) convert to their underlying
// integral type.
template <typename Char, FMT_ENABLE_IF(std::is_enum<Char>::value)>
constexpr auto to_ascii(Char c) -> underlying_t<Char> {
  return c;
}
+
// Returns the length in code units of the code point starting at `begin`:
// always 1 for character types wider than one byte, otherwise derived from
// the UTF-8 lead byte via a lookup table indexed by its top five bits.
template <typename Char>
FMT_CONSTEXPR auto code_point_length(const Char* begin) -> int {
  if (const_check(sizeof(Char) != 1)) return 1;
  auto lengths =
      "\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\1\0\0\0\0\0\0\0\0\2\2\2\2\3\3\4";
  int len = lengths[static_cast<unsigned char>(*begin) >> 3];

  // The table maps invalid/continuation lead bytes to 0; `len + !len` turns
  // that into 1 so callers always make progress.
  return len + !len;
}
+
+// Return the result via the out param to workaround gcc bug 77539.
+template <bool IS_CONSTEXPR, typename T, typename Ptr = const T*>
+FMT_CONSTEXPR auto find(Ptr first, Ptr last, T value, Ptr& out) -> bool {
+ for (out = first; out != last; ++out) {
+ if (*out == value) return true;
+ }
+ return false;
+}
+
// Runtime narrow-char specialization: delegates to memchr, which is much
// faster than the generic loop on long ranges.
template <>
inline auto find<false, char>(const char* first, const char* last, char value,
                              const char*& out) -> bool {
  out = static_cast<const char*>(
      std::memchr(first, value, to_unsigned(last - first)));
  return out != nullptr;
}
+
// Parses the range [begin, end) as an unsigned integer. This function assumes
// that the range is non-empty and the first character is a digit.
// Advances `begin` past the digits; returns the value, or `error_value` on
// overflow of int.
template <typename Char>
FMT_CONSTEXPR auto parse_nonnegative_int(const Char*& begin, const Char* end,
                                         int error_value) noexcept -> int {
  FMT_ASSERT(begin != end && '0' <= *begin && *begin <= '9', "");
  unsigned value = 0, prev = 0;  // prev tracks value before the last digit
  auto p = begin;
  do {
    prev = value;
    value = value * 10 + unsigned(*p - '0');
    ++p;
  } while (p != end && '0' <= *p && *p <= '9');
  auto num_digits = p - begin;
  begin = p;
  // Short numbers cannot overflow int.
  if (num_digits <= std::numeric_limits<int>::digits10)
    return static_cast<int>(value);
  // Check for overflow.
  // Exactly digits10+1 digits may still fit; recompute the last step in
  // 64 bits from `prev` to decide.
  const unsigned max = to_unsigned((std::numeric_limits<int>::max)());
  return num_digits == std::numeric_limits<int>::digits10 + 1 &&
                 prev * 10ull + unsigned(p[-1] - '0') <= max
             ? static_cast<int>(value)
             : error_value;
}
+
// Parses fill and alignment.
// Grammar: [[fill]align], where fill is any code point except '{'. The loop
// first probes the character after a potential fill code point; if that is
// not an alignment character it retries at `begin` (no fill present).
template <typename Char, typename Handler>
FMT_CONSTEXPR auto parse_align(const Char* begin, const Char* end,
                               Handler&& handler) -> const Char* {
  FMT_ASSERT(begin != end, "");
  auto align = align::none;
  auto p = begin + code_point_length(begin);
  if (end - p <= 0) p = begin;
  for (;;) {
    switch (to_ascii(*p)) {
    case '<':
      align = align::left;
      break;
    case '>':
      align = align::right;
      break;
    case '^':
      align = align::center;
      break;
    default:
      break;
    }
    if (align != align::none) {
      if (p != begin) {
        // Alignment preceded by a fill code point.
        auto c = *begin;
        if (c == '{')
          return handler.on_error("invalid fill character '{'"), begin;
        handler.on_fill(basic_string_view<Char>(begin, to_unsigned(p - begin)));
        begin = p + 1;
      } else
        ++begin;
      handler.on_align(align);
      break;
    } else if (p == begin) {
      // Neither position had an alignment character; nothing to parse.
      break;
    }
    p = begin;  // retry without assuming a fill
  }
  return begin;
}
+
+template <typename Char> FMT_CONSTEXPR bool is_name_start(Char c) {
+ return ('a' <= c && c <= 'z') || ('A' <= c && c <= 'Z') || '_' == c;
+}
+
// Parses an argument id (a nonnegative index or an identifier) and reports
// it to `handler` via the matching call operator; reports parse errors via
// handler.on_error. Returns the position past the id.
template <typename Char, typename IDHandler>
FMT_CONSTEXPR auto do_parse_arg_id(const Char* begin, const Char* end,
                                   IDHandler&& handler) -> const Char* {
  FMT_ASSERT(begin != end, "");
  Char c = *begin;
  if (c >= '0' && c <= '9') {
    // Numeric id; a leading zero is only valid as the single digit "0".
    int index = 0;
    if (c != '0')
      index =
          parse_nonnegative_int(begin, end, (std::numeric_limits<int>::max)());
    else
      ++begin;
    if (begin == end || (*begin != '}' && *begin != ':'))
      handler.on_error("invalid format string");
    else
      handler(index);
    return begin;
  }
  if (!is_name_start(c)) {
    handler.on_error("invalid format string");
    return begin;
  }
  // Named id: [A-Za-z_][A-Za-z0-9_]*.
  auto it = begin;
  do {
    ++it;
  } while (it != end && (is_name_start(c = *it) || ('0' <= c && c <= '9')));
  handler(basic_string_view<Char>(begin, to_unsigned(it - begin)));
  return it;
}
+
// Parses an optional argument id: an empty id ("{}" or "{:...}") invokes the
// nullary handler (automatic indexing); otherwise delegates to
// do_parse_arg_id.
template <typename Char, typename IDHandler>
FMT_CONSTEXPR FMT_INLINE auto parse_arg_id(const Char* begin, const Char* end,
                                           IDHandler&& handler) -> const Char* {
  Char c = *begin;
  if (c != '}' && c != ':') return do_parse_arg_id(begin, end, handler);
  handler();
  return begin;
}
+
// Parses the width part of a format spec: either a literal nonnegative
// integer or a dynamic width in braces ("{}", "{n}" or "{name}").
template <typename Char, typename Handler>
FMT_CONSTEXPR auto parse_width(const Char* begin, const Char* end,
                               Handler&& handler) -> const Char* {
  using detail::auto_id;
  // Adapts the generic arg-id callbacks to the handler's width entry points.
  struct width_adapter {
    Handler& handler;

    FMT_CONSTEXPR void operator()() { handler.on_dynamic_width(auto_id()); }
    FMT_CONSTEXPR void operator()(int id) { handler.on_dynamic_width(id); }
    FMT_CONSTEXPR void operator()(basic_string_view<Char> id) {
      handler.on_dynamic_width(id);
    }
    FMT_CONSTEXPR void on_error(const char* message) {
      if (message) handler.on_error(message);
    }
  };

  FMT_ASSERT(begin != end, "");
  if ('0' <= *begin && *begin <= '9') {
    int width = parse_nonnegative_int(begin, end, -1);
    if (width != -1)
      handler.on_width(width);
    else
      handler.on_error("number is too big");
  } else if (*begin == '{') {
    ++begin;
    if (begin != end) begin = parse_arg_id(begin, end, width_adapter{handler});
    if (begin == end || *begin != '}')
      return handler.on_error("invalid format string"), begin;
    ++begin;
  }
  return begin;
}
+
// Parses the precision part of a format spec (the text after '.'): either a
// literal nonnegative integer or a dynamic precision in braces.
template <typename Char, typename Handler>
FMT_CONSTEXPR auto parse_precision(const Char* begin, const Char* end,
                                   Handler&& handler) -> const Char* {
  using detail::auto_id;
  // Adapts the generic arg-id callbacks to the handler's precision entry
  // points.
  struct precision_adapter {
    Handler& handler;

    FMT_CONSTEXPR void operator()() { handler.on_dynamic_precision(auto_id()); }
    FMT_CONSTEXPR void operator()(int id) { handler.on_dynamic_precision(id); }
    FMT_CONSTEXPR void operator()(basic_string_view<Char> id) {
      handler.on_dynamic_precision(id);
    }
    FMT_CONSTEXPR void on_error(const char* message) {
      if (message) handler.on_error(message);
    }
  };

  ++begin;  // skip the '.'
  auto c = begin != end ? *begin : Char();
  if ('0' <= c && c <= '9') {
    auto precision = parse_nonnegative_int(begin, end, -1);
    if (precision != -1)
      handler.on_precision(precision);
    else
      handler.on_error("number is too big");
  } else if (c == '{') {
    ++begin;
    if (begin != end)
      begin = parse_arg_id(begin, end, precision_adapter{handler});
    if (begin == end || *begin++ != '}')
      return handler.on_error("invalid format string"), begin;
  } else {
    // A bare '.' with nothing after it is an error.
    return handler.on_error("missing precision specifier"), begin;
  }
  handler.end_precision();
  return begin;
}
+
// Maps a presentation character ('d', 'x', 'f', ...) to the corresponding
// presentation_type; unknown characters map to presentation_type::none.
template <typename Char>
FMT_CONSTEXPR auto parse_presentation_type(Char type) -> presentation_type {
  switch (to_ascii(type)) {
  case 'd':
    return presentation_type::dec;
  case 'o':
    return presentation_type::oct;
  case 'x':
    return presentation_type::hex_lower;
  case 'X':
    return presentation_type::hex_upper;
  case 'b':
    return presentation_type::bin_lower;
  case 'B':
    return presentation_type::bin_upper;
  case 'a':
    return presentation_type::hexfloat_lower;
  case 'A':
    return presentation_type::hexfloat_upper;
  case 'e':
    return presentation_type::exp_lower;
  case 'E':
    return presentation_type::exp_upper;
  case 'f':
    return presentation_type::fixed_lower;
  case 'F':
    return presentation_type::fixed_upper;
  case 'g':
    return presentation_type::general_lower;
  case 'G':
    return presentation_type::general_upper;
  case 'c':
    return presentation_type::chr;
  case 's':
    return presentation_type::string;
  case 'p':
    return presentation_type::pointer;
  case '?':
    return presentation_type::debug;
  default:
    return presentation_type::none;
  }
}
+
// Parses standard format specifiers and sends notifications about parsed
// components to handler.
// Grammar: [[fill]align][sign]['#']['0'][width]['.' precision]['L'][type].
template <typename Char, typename SpecHandler>
FMT_CONSTEXPR FMT_INLINE auto parse_format_specs(const Char* begin,
                                                 const Char* end,
                                                 SpecHandler&& handler)
    -> const Char* {
  // Fast path: a spec consisting of a single type letter (e.g. "{:d}").
  // 'L' is excluded because it is the locale flag, not a type.
  if (1 < end - begin && begin[1] == '}' && is_ascii_letter(*begin) &&
      *begin != 'L') {
    presentation_type type = parse_presentation_type(*begin++);
    if (type == presentation_type::none)
      handler.on_error("invalid type specifier");
    handler.on_type(type);
    return begin;
  }

  if (begin == end) return begin;

  begin = parse_align(begin, end, handler);
  if (begin == end) return begin;

  // Parse sign.
  switch (to_ascii(*begin)) {
  case '+':
    handler.on_sign(sign::plus);
    ++begin;
    break;
  case '-':
    handler.on_sign(sign::minus);
    ++begin;
    break;
  case ' ':
    handler.on_sign(sign::space);
    ++begin;
    break;
  default:
    break;
  }
  if (begin == end) return begin;

  // Parse alternate form ('#').
  if (*begin == '#') {
    handler.on_hash();
    if (++begin == end) return begin;
  }

  // Parse zero flag.
  if (*begin == '0') {
    handler.on_zero();
    if (++begin == end) return begin;
  }

  begin = parse_width(begin, end, handler);
  if (begin == end) return begin;

  // Parse precision.
  if (*begin == '.') {
    begin = parse_precision(begin, end, handler);
    if (begin == end) return begin;
  }

  // Parse the locale flag.
  if (*begin == 'L') {
    handler.on_localized();
    ++begin;
  }

  // Parse type.
  if (begin != end && *begin != '}') {
    presentation_type type = parse_presentation_type(*begin++);
    if (type == presentation_type::none)
      handler.on_error("invalid type specifier");
    handler.on_type(type);
  }
  return begin;
}
+
// Parses one replacement field starting at the '{' pointed to by `begin`:
// handles "{{" escapes, an optional arg id and optional ":specs". Reports
// the field to `handler` and returns the position past the closing '}'.
template <typename Char, typename Handler>
FMT_CONSTEXPR auto parse_replacement_field(const Char* begin, const Char* end,
                                           Handler&& handler) -> const Char* {
  // Forwards arg-id callbacks to `handler` and records the resolved id.
  struct id_adapter {
    Handler& handler;
    int arg_id;

    FMT_CONSTEXPR void operator()() { arg_id = handler.on_arg_id(); }
    FMT_CONSTEXPR void operator()(int id) { arg_id = handler.on_arg_id(id); }
    FMT_CONSTEXPR void operator()(basic_string_view<Char> id) {
      arg_id = handler.on_arg_id(id);
    }
    FMT_CONSTEXPR void on_error(const char* message) {
      if (message) handler.on_error(message);
    }
  };

  ++begin;  // skip '{'
  if (begin == end) return handler.on_error("invalid format string"), end;
  if (*begin == '}') {
    // "{}": automatic argument id, no specs.
    handler.on_replacement_field(handler.on_arg_id(), begin);
  } else if (*begin == '{') {
    // "{{" escape: emit a literal '{'.
    handler.on_text(begin, begin + 1);
  } else {
    auto adapter = id_adapter{handler, 0};
    begin = parse_arg_id(begin, end, adapter);
    Char c = begin != end ? *begin : Char();
    if (c == '}') {
      handler.on_replacement_field(adapter.arg_id, begin);
    } else if (c == ':') {
      begin = handler.on_format_specs(adapter.arg_id, begin + 1, end);
      if (begin == end || *begin != '}')
        return handler.on_error("unknown format specifier"), end;
    } else {
      return handler.on_error("missing '}' in format string"), end;
    }
  }
  return begin + 1;
}
+
// Scans a whole format string, reporting literal text runs and replacement
// fields to `handler`. Uses a simple per-character loop for short strings
// and a memchr-based scan for longer ones.
template <bool IS_CONSTEXPR, typename Char, typename Handler>
FMT_CONSTEXPR FMT_INLINE void parse_format_string(
    basic_string_view<Char> format_str, Handler&& handler) {
  // Workaround a name-lookup bug in MSVC's modules implementation.
  using detail::find;

  auto begin = format_str.data();
  auto end = begin + format_str.size();
  if (end - begin < 32) {
    // Use a simple loop instead of memchr for small strings.
    const Char* p = begin;
    while (p != end) {
      auto c = *p++;
      if (c == '{') {
        handler.on_text(begin, p - 1);
        begin = p = parse_replacement_field(p - 1, end, handler);
      } else if (c == '}') {
        // A lone '}' is invalid; "}}" is an escaped '}'.
        if (p == end || *p != '}')
          return handler.on_error("unmatched '}' in format string");
        handler.on_text(begin, p);
        begin = ++p;
      }
    }
    handler.on_text(begin, end);
    return;
  }
  // Emits a text run, collapsing "}}" escapes and rejecting stray '}'.
  struct writer {
    FMT_CONSTEXPR void operator()(const Char* from, const Char* to) {
      if (from == to) return;
      for (;;) {
        const Char* p = nullptr;
        if (!find<IS_CONSTEXPR>(from, to, Char('}'), p))
          return handler_.on_text(from, to);
        ++p;
        if (p == to || *p != '}')
          return handler_.on_error("unmatched '}' in format string");
        handler_.on_text(from, p);
        from = p + 1;
      }
    }
    Handler& handler_;
  } write = {handler};
  while (begin != end) {
    // Doing two passes with memchr (one for '{' and another for '}') is up to
    // 2.5x faster than the naive one-pass implementation on big format strings.
    const Char* p = begin;
    if (*begin != '{' && !find<IS_CONSTEXPR>(begin + 1, end, Char('{'), p))
      return write(begin, end);
    write(begin, p);
    begin = parse_replacement_field(p, end, handler);
  }
}
+
// Unwraps a named_arg to the type of its wrapped value; other types are
// passed through unchanged.
template <typename T, bool = is_named_arg<T>::value> struct strip_named_arg {
  using type = T;
};
template <typename T> struct strip_named_arg<T, true> {
  using type = remove_cvref_t<decltype(T::value)>;
};
+
// Parses the specs for an argument of type T by instantiating its formatter
// (or the deprecated fallback formatter) and running its parse() method.
template <typename T, typename ParseContext>
FMT_CONSTEXPR auto parse_format_specs(ParseContext& ctx)
    -> decltype(ctx.begin()) {
  using char_type = typename ParseContext::char_type;
  using context = buffer_context<char_type>;
  using stripped_type = typename strip_named_arg<T>::type;
  // For non-custom types use the mapped (built-in) type's formatter;
  // custom types keep their own (possibly named-arg-stripped) type.
  using mapped_type = conditional_t<
      mapped_type_constant<T, context>::value != type::custom_type,
      decltype(arg_mapper<context>().map(std::declval<const T&>())),
      stripped_type>;
  auto f = conditional_t<has_formatter<mapped_type, context>::value,
                         formatter<mapped_type, char_type>,
                         fallback_formatter<stripped_type, char_type>>();
  return f.parse(ctx);
}
+
// Validates a presentation type for integer arguments: anything in the
// integer range of presentation_type (dec..bin_upper) or 'c' is allowed.
template <typename ErrorHandler>
FMT_CONSTEXPR void check_int_type_spec(presentation_type type,
                                       ErrorHandler&& eh) {
  if (type > presentation_type::bin_upper && type != presentation_type::chr)
    eh.on_error("invalid type specifier");
}
+
// Checks char specs and returns true if the type spec is char (and not int).
template <typename Char, typename ErrorHandler = error_handler>
FMT_CONSTEXPR auto check_char_specs(const basic_format_specs<Char>& specs,
                                    ErrorHandler&& eh = {}) -> bool {
  if (specs.type != presentation_type::none &&
      specs.type != presentation_type::chr &&
      specs.type != presentation_type::debug) {
    // A non-char presentation ('d', 'x', ...) formats the char as an integer.
    check_int_type_spec(specs.type, eh);
    return false;
  }
  // Numeric-only flags make no sense for a character.
  if (specs.align == align::numeric || specs.sign != sign::none || specs.alt)
    eh.on_error("invalid format specifier for char");
  return true;
}
+
// A floating-point presentation format.
enum class float_format : unsigned char {
  general,  // General: exponent notation or fixed point based on magnitude.
  exp,      // Exponent notation with the default precision of 6, e.g. 1.2e-3.
  fixed,    // Fixed point with the default precision of 6, e.g. 0.0012.
  hex
};

// Resolved floating-point formatting options, produced from
// basic_format_specs by parse_float_type_spec.
struct float_specs {
  int precision;
  float_format format : 8;
  sign_t sign : 8;
  bool upper : 1;      // uppercase variant ('E', 'F', 'G', 'A')
  bool locale : 1;     // locale-specific formatting ('L')
  bool binary32 : 1;   // format as single precision
  bool showpoint : 1;  // always show the decimal point ('#')
};
+
// Translates parsed format specs into float_specs for a floating-point
// argument, validating the presentation type along the way.
template <typename ErrorHandler = error_handler, typename Char>
FMT_CONSTEXPR auto parse_float_type_spec(const basic_format_specs<Char>& specs,
                                         ErrorHandler&& eh = {})
    -> float_specs {
  auto result = float_specs();
  result.showpoint = specs.alt;
  result.locale = specs.localized;
  switch (specs.type) {
  case presentation_type::none:
    result.format = float_format::general;
    break;
  // Uppercase cases set `upper` then fall through to the shared lowercase
  // handling.
  case presentation_type::general_upper:
    result.upper = true;
    FMT_FALLTHROUGH;
  case presentation_type::general_lower:
    result.format = float_format::general;
    break;
  case presentation_type::exp_upper:
    result.upper = true;
    FMT_FALLTHROUGH;
  case presentation_type::exp_lower:
    result.format = float_format::exp;
    result.showpoint |= specs.precision != 0;
    break;
  case presentation_type::fixed_upper:
    result.upper = true;
    FMT_FALLTHROUGH;
  case presentation_type::fixed_lower:
    result.format = float_format::fixed;
    result.showpoint |= specs.precision != 0;
    break;
  case presentation_type::hexfloat_upper:
    result.upper = true;
    FMT_FALLTHROUGH;
  case presentation_type::hexfloat_lower:
    result.format = float_format::hex;
    break;
  default:
    eh.on_error("invalid type specifier");
    break;
  }
  return result;
}
+
// Validates a presentation type for C strings. Returns true when the value
// should be formatted as a string, false when as a pointer ('p').
template <typename ErrorHandler = error_handler>
FMT_CONSTEXPR auto check_cstring_type_spec(presentation_type type,
                                           ErrorHandler&& eh = {}) -> bool {
  if (type == presentation_type::none || type == presentation_type::string)
    return true;
  if (type != presentation_type::pointer) eh.on_error("invalid type specifier");
  return false;
}
+
// Validates a presentation type for string arguments (only none, 's' and
// '?' are allowed).
template <typename ErrorHandler = error_handler>
FMT_CONSTEXPR void check_string_type_spec(presentation_type type,
                                          ErrorHandler&& eh = {}) {
  if (type != presentation_type::none && type != presentation_type::string &&
      type != presentation_type::debug)
    eh.on_error("invalid type specifier");
}
+
// Validates a presentation type for pointer arguments (only none and 'p'
// are allowed).
template <typename ErrorHandler>
FMT_CONSTEXPR void check_pointer_type_spec(presentation_type type,
                                           ErrorHandler&& eh) {
  if (type != presentation_type::none && type != presentation_type::pointer)
    eh.on_error("invalid type specifier");
}
+
// A parse_format_specs handler that checks if specifiers are consistent with
// the argument type.
// Wraps another handler and validates each spec component against
// `arg_type_` before forwarding to the wrapped handler.
template <typename Handler> class specs_checker : public Handler {
 private:
  detail::type arg_type_;

  FMT_CONSTEXPR void require_numeric_argument() {
    if (!is_arithmetic_type(arg_type_))
      this->on_error("format specifier requires numeric argument");
  }

 public:
  FMT_CONSTEXPR specs_checker(const Handler& handler, detail::type arg_type)
      : Handler(handler), arg_type_(arg_type) {}

  FMT_CONSTEXPR void on_align(align_t align) {
    // Numeric alignment ('=' behavior via '0') only applies to numbers.
    if (align == align::numeric) require_numeric_argument();
    Handler::on_align(align);
  }

  FMT_CONSTEXPR void on_sign(sign_t s) {
    require_numeric_argument();
    // A sign spec is rejected for unsigned integral types.
    if (is_integral_type(arg_type_) && arg_type_ != type::int_type &&
        arg_type_ != type::long_long_type && arg_type_ != type::int128_type &&
        arg_type_ != type::char_type) {
      this->on_error("format specifier requires signed argument");
    }
    Handler::on_sign(s);
  }

  FMT_CONSTEXPR void on_hash() {
    require_numeric_argument();
    Handler::on_hash();
  }

  FMT_CONSTEXPR void on_localized() {
    require_numeric_argument();
    Handler::on_localized();
  }

  FMT_CONSTEXPR void on_zero() {
    require_numeric_argument();
    Handler::on_zero();
  }

  FMT_CONSTEXPR void end_precision() {
    if (is_integral_type(arg_type_) || arg_type_ == type::pointer_type)
      this->on_error("precision not allowed for this argument type");
  }
};
+
+constexpr int invalid_arg_index = -1;
+
+#if FMT_USE_NONTYPE_TEMPLATE_ARGS
+template <int N, typename T, typename... Args, typename Char>
+constexpr auto get_arg_index_by_name(basic_string_view<Char> name) -> int {
+ if constexpr (detail::is_statically_named_arg<T>()) {
+ if (name == T::name) return N;
+ }
+ if constexpr (sizeof...(Args) > 0)
+ return get_arg_index_by_name<N + 1, Args...>(name);
+ (void)name; // Workaround an MSVC bug about "unused" parameter.
+ return invalid_arg_index;
+}
+#endif
+
+template <typename... Args, typename Char>
+FMT_CONSTEXPR auto get_arg_index_by_name(basic_string_view<Char> name) -> int {
+#if FMT_USE_NONTYPE_TEMPLATE_ARGS
+ if constexpr (sizeof...(Args) > 0)
+ return get_arg_index_by_name<0, Args...>(name);
+#endif
+ (void)name;
+ return invalid_arg_index;
+}
+
+template <typename Char, typename ErrorHandler, typename... Args>
+class format_string_checker {
+ private:
+ // In the future basic_format_parse_context will replace compile_parse_context
+ // here and will use is_constant_evaluated and downcasting to access the data
+ // needed for compile-time checks: https://godbolt.org/z/GvWzcTjh1.
+ using parse_context_type = compile_parse_context<Char, ErrorHandler>;
+ static constexpr int num_args = sizeof...(Args);
+
+ // Format specifier parsing function.
+ using parse_func = const Char* (*)(parse_context_type&);
+
+ parse_context_type context_;
+ parse_func parse_funcs_[num_args > 0 ? static_cast<size_t>(num_args) : 1];
+ type types_[num_args > 0 ? static_cast<size_t>(num_args) : 1];
+
+ public:
+ explicit FMT_CONSTEXPR format_string_checker(
+ basic_string_view<Char> format_str, ErrorHandler eh)
+ : context_(format_str, num_args, types_, eh),
+ parse_funcs_{&parse_format_specs<Args, parse_context_type>...},
+ types_{type_constant<Args, char>::value...} {}
+
+ FMT_CONSTEXPR void on_text(const Char*, const Char*) {}
+
+ FMT_CONSTEXPR auto on_arg_id() -> int { return context_.next_arg_id(); }
+ FMT_CONSTEXPR auto on_arg_id(int id) -> int {
+ return context_.check_arg_id(id), id;
+ }
+ FMT_CONSTEXPR auto on_arg_id(basic_string_view<Char> id) -> int {
+#if FMT_USE_NONTYPE_TEMPLATE_ARGS
+ auto index = get_arg_index_by_name<Args...>(id);
+ if (index == invalid_arg_index) on_error("named argument is not found");
+ return context_.check_arg_id(index), index;
+#else
+ (void)id;
+ on_error("compile-time checks for named arguments require C++20 support");
+ return 0;
+#endif
+ }
+
+ FMT_CONSTEXPR void on_replacement_field(int, const Char*) {}
+
+ FMT_CONSTEXPR auto on_format_specs(int id, const Char* begin, const Char*)
+ -> const Char* {
+ context_.advance_to(context_.begin() + (begin - &*context_.begin()));
+ // id >= 0 check is a workaround for gcc 10 bug (#2065).
+ return id >= 0 && id < num_args ? parse_funcs_[id](context_) : begin;
+ }
+
+ FMT_CONSTEXPR void on_error(const char* message) {
+ context_.on_error(message);
+ }
+};
+
+// Reports a compile-time error if S is not a valid format string.
+template <typename..., typename S, FMT_ENABLE_IF(!is_compile_string<S>::value)>
+FMT_INLINE void check_format_string(const S&) {
+#ifdef FMT_ENFORCE_COMPILE_STRING
+ static_assert(is_compile_string<S>::value,
+ "FMT_ENFORCE_COMPILE_STRING requires all format strings to use "
+ "FMT_STRING.");
+#endif
+}
+template <typename... Args, typename S,
+ FMT_ENABLE_IF(is_compile_string<S>::value)>
+void check_format_string(S format_str) {
+ FMT_CONSTEXPR auto s = basic_string_view<typename S::char_type>(format_str);
+ using checker = format_string_checker<typename S::char_type, error_handler,
+ remove_cvref_t<Args>...>;
+ FMT_CONSTEXPR bool invalid_format =
+ (parse_format_string<true>(s, checker(s, {})), true);
+ ignore_unused(invalid_format);
+}
+
+template <typename Char>
+void vformat_to(
+ buffer<Char>& buf, basic_string_view<Char> fmt,
+ basic_format_args<FMT_BUFFER_CONTEXT(type_identity_t<Char>)> args,
+ locale_ref loc = {});
+
+FMT_API void vprint_mojibake(std::FILE*, string_view, format_args);
+#ifndef _WIN32
+inline void vprint_mojibake(std::FILE*, string_view, format_args) {}
+#endif
+FMT_END_DETAIL_NAMESPACE
+
+// A formatter specialization for the core types corresponding to detail::type
+// constants.
+template <typename T, typename Char>
+struct formatter<T, Char,
+ enable_if_t<detail::type_constant<T, Char>::value !=
+ detail::type::custom_type>> {
+ private:
+ detail::dynamic_format_specs<Char> specs_;
+
+ public:
+ // Parses format specifiers stopping either at the end of the range or at the
+ // terminating '}'.
+ template <typename ParseContext>
+ FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) {
+ auto begin = ctx.begin(), end = ctx.end();
+ if (begin == end) return begin;
+ using handler_type = detail::dynamic_specs_handler<ParseContext>;
+ auto type = detail::type_constant<T, Char>::value;
+ auto checker =
+ detail::specs_checker<handler_type>(handler_type(specs_, ctx), type);
+ auto it = detail::parse_format_specs(begin, end, checker);
+ auto eh = ctx.error_handler();
+ switch (type) {
+ case detail::type::none_type:
+ FMT_ASSERT(false, "invalid argument type");
+ break;
+ case detail::type::bool_type:
+ if (specs_.type == presentation_type::none ||
+ specs_.type == presentation_type::string) {
+ break;
+ }
+ FMT_FALLTHROUGH;
+ case detail::type::int_type:
+ case detail::type::uint_type:
+ case detail::type::long_long_type:
+ case detail::type::ulong_long_type:
+ case detail::type::int128_type:
+ case detail::type::uint128_type:
+ detail::check_int_type_spec(specs_.type, eh);
+ break;
+ case detail::type::char_type:
+ detail::check_char_specs(specs_, eh);
+ break;
+ case detail::type::float_type:
+ if (detail::const_check(FMT_USE_FLOAT))
+ detail::parse_float_type_spec(specs_, eh);
+ else
+ FMT_ASSERT(false, "float support disabled");
+ break;
+ case detail::type::double_type:
+ if (detail::const_check(FMT_USE_DOUBLE))
+ detail::parse_float_type_spec(specs_, eh);
+ else
+ FMT_ASSERT(false, "double support disabled");
+ break;
+ case detail::type::long_double_type:
+ if (detail::const_check(FMT_USE_LONG_DOUBLE))
+ detail::parse_float_type_spec(specs_, eh);
+ else
+ FMT_ASSERT(false, "long double support disabled");
+ break;
+ case detail::type::cstring_type:
+ detail::check_cstring_type_spec(specs_.type, eh);
+ break;
+ case detail::type::string_type:
+ detail::check_string_type_spec(specs_.type, eh);
+ break;
+ case detail::type::pointer_type:
+ detail::check_pointer_type_spec(specs_.type, eh);
+ break;
+ case detail::type::custom_type:
+ // Custom format specifiers are checked in parse functions of
+ // formatter specializations.
+ break;
+ }
+ return it;
+ }
+
+ template <typename FormatContext>
+ FMT_CONSTEXPR auto format(const T& val, FormatContext& ctx) const
+ -> decltype(ctx.out());
+};
+
+#define FMT_FORMAT_AS(Type, Base) \
+ template <typename Char> \
+ struct formatter<Type, Char> : formatter<Base, Char> { \
+ template <typename FormatContext> \
+ auto format(Type const& val, FormatContext& ctx) const \
+ -> decltype(ctx.out()) { \
+ return formatter<Base, Char>::format(static_cast<Base>(val), ctx); \
+ } \
+ }
+
+FMT_FORMAT_AS(signed char, int);
+FMT_FORMAT_AS(unsigned char, unsigned);
+FMT_FORMAT_AS(short, int);
+FMT_FORMAT_AS(unsigned short, unsigned);
+FMT_FORMAT_AS(long, long long);
+FMT_FORMAT_AS(unsigned long, unsigned long long);
+FMT_FORMAT_AS(Char*, const Char*);
+FMT_FORMAT_AS(std::basic_string<Char>, basic_string_view<Char>);
+FMT_FORMAT_AS(std::nullptr_t, const void*);
+FMT_FORMAT_AS(detail::std_string_view<Char>, basic_string_view<Char>);
+
+template <typename Char> struct basic_runtime { basic_string_view<Char> str; };
+
+/** A compile-time format string. */
+template <typename Char, typename... Args> class basic_format_string {
+ private:
+ basic_string_view<Char> str_;
+
+ public:
+ template <typename S,
+ FMT_ENABLE_IF(
+ std::is_convertible<const S&, basic_string_view<Char>>::value)>
+ FMT_CONSTEVAL FMT_INLINE basic_format_string(const S& s) : str_(s) {
+ static_assert(
+ detail::count<
+ (std::is_base_of<detail::view, remove_reference_t<Args>>::value &&
+ std::is_reference<Args>::value)...>() == 0,
+ "passing views as lvalues is disallowed");
+#ifdef FMT_HAS_CONSTEVAL
+ if constexpr (detail::count_named_args<Args...>() ==
+ detail::count_statically_named_args<Args...>()) {
+ using checker = detail::format_string_checker<Char, detail::error_handler,
+ remove_cvref_t<Args>...>;
+ detail::parse_format_string<true>(str_, checker(s, {}));
+ }
+#else
+ detail::check_format_string<Args...>(s);
+#endif
+ }
+ basic_format_string(basic_runtime<Char> r) : str_(r.str) {}
+
+ FMT_INLINE operator basic_string_view<Char>() const { return str_; }
+};
+
+#if FMT_GCC_VERSION && FMT_GCC_VERSION < 409
+// Workaround broken conversion on older gcc.
+template <typename...> using format_string = string_view;
+inline auto runtime(string_view s) -> basic_string_view<char> { return s; }
+#else
+template <typename... Args>
+using format_string = basic_format_string<char, type_identity_t<Args>...>;
+/**
+ \rst
+ Creates a runtime format string.
+
+ **Example**::
+
+ // Check format string at runtime instead of compile-time.
+ fmt::print(fmt::runtime("{:d}"), "I am not a number");
+ \endrst
+ */
+inline auto runtime(string_view s) -> basic_runtime<char> { return {{s}}; }
+#endif
+
+FMT_API auto vformat(string_view fmt, format_args args) -> std::string;
+
+/**
+ \rst
+ Formats ``args`` according to specifications in ``fmt`` and returns the result
+ as a string.
+
+ **Example**::
+
+ #include <fmt/core.h>
+ std::string message = fmt::format("The answer is {}.", 42);
+ \endrst
+*/
+template <typename... T>
+FMT_NODISCARD FMT_INLINE auto format(format_string<T...> fmt, T&&... args)
+ -> std::string {
+ return vformat(fmt, fmt::make_format_args(args...));
+}
+
+/** Formats a string and writes the output to ``out``. */
+template <typename OutputIt,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value)>
+auto vformat_to(OutputIt out, string_view fmt, format_args args) -> OutputIt {
+ using detail::get_buffer;
+ auto&& buf = get_buffer<char>(out);
+ detail::vformat_to(buf, fmt, args, {});
+ return detail::get_iterator(buf);
+}
+
+/**
+ \rst
+ Formats ``args`` according to specifications in ``fmt``, writes the result to
+ the output iterator ``out`` and returns the iterator past the end of the output
+ range. `format_to` does not append a terminating null character.
+
+ **Example**::
+
+ auto out = std::vector<char>();
+ fmt::format_to(std::back_inserter(out), "{}", 42);
+ \endrst
+ */
+template <typename OutputIt, typename... T,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value)>
+FMT_INLINE auto format_to(OutputIt out, format_string<T...> fmt, T&&... args)
+ -> OutputIt {
+ return vformat_to(out, fmt, fmt::make_format_args(args...));
+}
+
+template <typename OutputIt> struct format_to_n_result {
+ /** Iterator past the end of the output range. */
+ OutputIt out;
+ /** Total (not truncated) output size. */
+ size_t size;
+};
+
+template <typename OutputIt, typename... T,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value)>
+auto vformat_to_n(OutputIt out, size_t n, string_view fmt, format_args args)
+ -> format_to_n_result<OutputIt> {
+ using traits = detail::fixed_buffer_traits;
+ auto buf = detail::iterator_buffer<OutputIt, char, traits>(out, n);
+ detail::vformat_to(buf, fmt, args, {});
+ return {buf.out(), buf.count()};
+}
+
+/**
+ \rst
+ Formats ``args`` according to specifications in ``fmt``, writes up to ``n``
+ characters of the result to the output iterator ``out`` and returns the total
+ (not truncated) output size and the iterator past the end of the output range.
+ `format_to_n` does not append a terminating null character.
+ \endrst
+ */
+template <typename OutputIt, typename... T,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value)>
+FMT_INLINE auto format_to_n(OutputIt out, size_t n, format_string<T...> fmt,
+ T&&... args) -> format_to_n_result<OutputIt> {
+ return vformat_to_n(out, n, fmt, fmt::make_format_args(args...));
+}
+
+/** Returns the number of chars in the output of ``format(fmt, args...)``. */
+template <typename... T>
+FMT_NODISCARD FMT_INLINE auto formatted_size(format_string<T...> fmt,
+ T&&... args) -> size_t {
+ auto buf = detail::counting_buffer<>();
+ detail::vformat_to(buf, string_view(fmt), fmt::make_format_args(args...), {});
+ return buf.count();
+}
+
+FMT_API void vprint(string_view fmt, format_args args);
+FMT_API void vprint(std::FILE* f, string_view fmt, format_args args);
+
+/**
+ \rst
+ Formats ``args`` according to specifications in ``fmt`` and writes the output
+ to ``stdout``.
+
+ **Example**::
+
+ fmt::print("Elapsed time: {0:.2f} seconds", 1.23);
+ \endrst
+ */
+template <typename... T>
+FMT_INLINE void print(format_string<T...> fmt, T&&... args) {
+ const auto& vargs = fmt::make_format_args(args...);
+ return detail::is_utf8() ? vprint(fmt, vargs)
+ : detail::vprint_mojibake(stdout, fmt, vargs);
+}
+
+/**
+ \rst
+ Formats ``args`` according to specifications in ``fmt`` and writes the
+ output to the file ``f``.
+
+ **Example**::
+
+ fmt::print(stderr, "Don't {}!", "panic");
+ \endrst
+ */
+template <typename... T>
+FMT_INLINE void print(std::FILE* f, format_string<T...> fmt, T&&... args) {
+ const auto& vargs = fmt::make_format_args(args...);
+ return detail::is_utf8() ? vprint(f, fmt, vargs)
+ : detail::vprint_mojibake(f, fmt, vargs);
+}
+
+FMT_MODULE_EXPORT_END
+FMT_GCC_PRAGMA("GCC pop_options")
+FMT_END_NAMESPACE
+
+#ifdef FMT_HEADER_ONLY
+# include "format.h"
+#endif
+#endif // FMT_CORE_H_
diff --git a/subprojects/fmt/include/fmt/format-inl.h b/subprojects/fmt/include/fmt/format-inl.h
new file mode 100644
index 0000000..f44df01
--- /dev/null
+++ b/subprojects/fmt/include/fmt/format-inl.h
@@ -0,0 +1,1733 @@
+// Formatting library for C++ - implementation
+//
+// Copyright (c) 2012 - 2016, Victor Zverovich
+// All rights reserved.
+//
+// For the license information refer to format.h.
+
+#ifndef FMT_FORMAT_INL_H_
+#define FMT_FORMAT_INL_H_
+
+#include <algorithm>
+#include <cctype>
+#include <cerrno> // errno
+#include <climits>
+#include <cmath>
+#include <cstdarg>
+#include <cstring> // std::memmove
+#include <cwchar>
+#include <exception>
+
+#ifndef FMT_STATIC_THOUSANDS_SEPARATOR
+# include <locale>
+#endif
+
+#ifdef _WIN32
+# include <io.h> // _isatty
+#endif
+
+#include "format.h"
+
+FMT_BEGIN_NAMESPACE
+namespace detail {
+
+FMT_FUNC void assert_fail(const char* file, int line, const char* message) {
+ // Use unchecked std::fprintf to avoid triggering another assertion when
+ // writing to stderr fails
+ std::fprintf(stderr, "%s:%d: assertion failed: %s", file, line, message);
+ // Chosen instead of std::abort to satisfy Clang in CUDA mode during device
+ // code pass.
+ std::terminate();
+}
+
+FMT_FUNC void throw_format_error(const char* message) {
+ FMT_THROW(format_error(message));
+}
+
+FMT_FUNC void format_error_code(detail::buffer<char>& out, int error_code,
+ string_view message) noexcept {
+ // Report error code making sure that the output fits into
+ // inline_buffer_size to avoid dynamic memory allocation and potential
+ // bad_alloc.
+ out.try_resize(0);
+ static const char SEP[] = ": ";
+ static const char ERROR_STR[] = "error ";
+ // Subtract 2 to account for terminating null characters in SEP and ERROR_STR.
+ size_t error_code_size = sizeof(SEP) + sizeof(ERROR_STR) - 2;
+ auto abs_value = static_cast<uint32_or_64_or_128_t<int>>(error_code);
+ if (detail::is_negative(error_code)) {
+ abs_value = 0 - abs_value;
+ ++error_code_size;
+ }
+ error_code_size += detail::to_unsigned(detail::count_digits(abs_value));
+ auto it = buffer_appender<char>(out);
+ if (message.size() <= inline_buffer_size - error_code_size)
+ format_to(it, FMT_STRING("{}{}"), message, SEP);
+ format_to(it, FMT_STRING("{}{}"), ERROR_STR, error_code);
+ FMT_ASSERT(out.size() <= inline_buffer_size, "");
+}
+
+FMT_FUNC void report_error(format_func func, int error_code,
+ const char* message) noexcept {
+ memory_buffer full_message;
+ func(full_message, error_code, message);
+ // Don't use fwrite_fully because the latter may throw.
+ if (std::fwrite(full_message.data(), full_message.size(), 1, stderr) > 0)
+ std::fputc('\n', stderr);
+}
+
+// A wrapper around fwrite that throws on error.
+inline void fwrite_fully(const void* ptr, size_t size, size_t count,
+ FILE* stream) {
+ size_t written = std::fwrite(ptr, size, count, stream);
+ if (written < count)
+ FMT_THROW(system_error(errno, FMT_STRING("cannot write to file")));
+}
+
+#ifndef FMT_STATIC_THOUSANDS_SEPARATOR
+template <typename Locale>
+locale_ref::locale_ref(const Locale& loc) : locale_(&loc) {
+ static_assert(std::is_same<Locale, std::locale>::value, "");
+}
+
+template <typename Locale> Locale locale_ref::get() const {
+ static_assert(std::is_same<Locale, std::locale>::value, "");
+ return locale_ ? *static_cast<const std::locale*>(locale_) : std::locale();
+}
+
+template <typename Char>
+FMT_FUNC auto thousands_sep_impl(locale_ref loc) -> thousands_sep_result<Char> {
+ auto& facet = std::use_facet<std::numpunct<Char>>(loc.get<std::locale>());
+ auto grouping = facet.grouping();
+ auto thousands_sep = grouping.empty() ? Char() : facet.thousands_sep();
+ return {std::move(grouping), thousands_sep};
+}
+template <typename Char> FMT_FUNC Char decimal_point_impl(locale_ref loc) {
+ return std::use_facet<std::numpunct<Char>>(loc.get<std::locale>())
+ .decimal_point();
+}
+#else
+template <typename Char>
+FMT_FUNC auto thousands_sep_impl(locale_ref) -> thousands_sep_result<Char> {
+ return {"\03", FMT_STATIC_THOUSANDS_SEPARATOR};
+}
+template <typename Char> FMT_FUNC Char decimal_point_impl(locale_ref) {
+ return '.';
+}
+#endif
+} // namespace detail
+
+#if !FMT_MSC_VERSION
+FMT_API FMT_FUNC format_error::~format_error() noexcept = default;
+#endif
+
+FMT_FUNC std::system_error vsystem_error(int error_code, string_view format_str,
+ format_args args) {
+ auto ec = std::error_code(error_code, std::generic_category());
+ return std::system_error(ec, vformat(format_str, args));
+}
+
+namespace detail {
+
+template <typename F> inline bool operator==(basic_fp<F> x, basic_fp<F> y) {
+ return x.f == y.f && x.e == y.e;
+}
+
+// Compilers should be able to optimize this into the ror instruction.
+FMT_CONSTEXPR inline uint32_t rotr(uint32_t n, uint32_t r) noexcept {
+ r &= 31;
+ return (n >> r) | (n << (32 - r));
+}
+FMT_CONSTEXPR inline uint64_t rotr(uint64_t n, uint32_t r) noexcept {
+ r &= 63;
+ return (n >> r) | (n << (64 - r));
+}
+
+// Computes 128-bit result of multiplication of two 64-bit unsigned integers.
+inline uint128_fallback umul128(uint64_t x, uint64_t y) noexcept {
+#if FMT_USE_INT128
+ auto p = static_cast<uint128_opt>(x) * static_cast<uint128_opt>(y);
+ return {static_cast<uint64_t>(p >> 64), static_cast<uint64_t>(p)};
+#elif defined(_MSC_VER) && defined(_M_X64)
+ auto result = uint128_fallback();
+ result.lo_ = _umul128(x, y, &result.hi_);
+ return result;
+#else
+ const uint64_t mask = static_cast<uint64_t>(max_value<uint32_t>());
+
+ uint64_t a = x >> 32;
+ uint64_t b = x & mask;
+ uint64_t c = y >> 32;
+ uint64_t d = y & mask;
+
+ uint64_t ac = a * c;
+ uint64_t bc = b * c;
+ uint64_t ad = a * d;
+ uint64_t bd = b * d;
+
+ uint64_t intermediate = (bd >> 32) + (ad & mask) + (bc & mask);
+
+ return {ac + (intermediate >> 32) + (ad >> 32) + (bc >> 32),
+ (intermediate << 32) + (bd & mask)};
+#endif
+}
+
+// Implementation of Dragonbox algorithm: https://github.com/jk-jeon/dragonbox.
+namespace dragonbox {
+// Computes upper 64 bits of multiplication of two 64-bit unsigned integers.
+inline uint64_t umul128_upper64(uint64_t x, uint64_t y) noexcept {
+#if FMT_USE_INT128
+ auto p = static_cast<uint128_opt>(x) * static_cast<uint128_opt>(y);
+ return static_cast<uint64_t>(p >> 64);
+#elif defined(_MSC_VER) && defined(_M_X64)
+ return __umulh(x, y);
+#else
+ return umul128(x, y).high();
+#endif
+}
+
+// Computes upper 128 bits of multiplication of a 64-bit unsigned integer and a
+// 128-bit unsigned integer.
+inline uint128_fallback umul192_upper128(uint64_t x,
+ uint128_fallback y) noexcept {
+ uint128_fallback r = umul128(x, y.high());
+ r += umul128_upper64(x, y.low());
+ return r;
+}
+
+// Computes upper 64 bits of multiplication of a 32-bit unsigned integer and a
+// 64-bit unsigned integer.
+inline uint64_t umul96_upper64(uint32_t x, uint64_t y) noexcept {
+ return umul128_upper64(static_cast<uint64_t>(x) << 32, y);
+}
+
+// Computes lower 128 bits of multiplication of a 64-bit unsigned integer and a
+// 128-bit unsigned integer.
+inline uint128_fallback umul192_lower128(uint64_t x,
+ uint128_fallback y) noexcept {
+ uint64_t high = x * y.high();
+ uint128_fallback high_low = umul128(x, y.low());
+ return {high + high_low.high(), high_low.low()};
+}
+
+// Computes lower 64 bits of multiplication of a 32-bit unsigned integer and a
+// 64-bit unsigned integer.
+inline uint64_t umul96_lower64(uint32_t x, uint64_t y) noexcept {
+ return x * y;
+}
+
+// Computes floor(log10(pow(2, e))) for e in [-2620, 2620] using the method from
+// https://fmt.dev/papers/Dragonbox.pdf#page=28, section 6.1.
+inline int floor_log10_pow2(int e) noexcept {
+ FMT_ASSERT(e <= 2620 && e >= -2620, "too large exponent");
+ static_assert((-1 >> 1) == -1, "right shift is not arithmetic");
+ return (e * 315653) >> 20;
+}
+
+// Various fast log computations.
+inline int floor_log2_pow10(int e) noexcept {
+ FMT_ASSERT(e <= 1233 && e >= -1233, "too large exponent");
+ return (e * 1741647) >> 19;
+}
+inline int floor_log10_pow2_minus_log10_4_over_3(int e) noexcept {
+ FMT_ASSERT(e <= 2936 && e >= -2985, "too large exponent");
+ return (e * 631305 - 261663) >> 21;
+}
+
+static constexpr struct {
+ uint32_t divisor;
+ int shift_amount;
+} div_small_pow10_infos[] = {{10, 16}, {100, 16}};
+
+// Replaces n by floor(n / pow(10, N)) returning true if and only if n is
+// divisible by pow(10, N).
+// Precondition: n <= pow(10, N + 1).
+template <int N>
+bool check_divisibility_and_divide_by_pow10(uint32_t& n) noexcept {
+ // The numbers below are chosen such that:
+ // 1. floor(n/d) = floor(nm / 2^k) where d=10 or d=100,
+ // 2. nm mod 2^k < m if and only if n is divisible by d,
+ // where m is magic_number, k is shift_amount
+ // and d is divisor.
+ //
+ // Item 1 is a common technique of replacing division by a constant with
+ // multiplication, see e.g. "Division by Invariant Integers Using
+ // Multiplication" by Granlund and Montgomery (1994). magic_number (m) is set
+ // to ceil(2^k/d) for large enough k.
+ // The idea for item 2 originates from Schubfach.
+ constexpr auto info = div_small_pow10_infos[N - 1];
+ FMT_ASSERT(n <= info.divisor * 10, "n is too large");
+ constexpr uint32_t magic_number =
+ (1u << info.shift_amount) / info.divisor + 1;
+ n *= magic_number;
+ const uint32_t comparison_mask = (1u << info.shift_amount) - 1;
+ bool result = (n & comparison_mask) < magic_number;
+ n >>= info.shift_amount;
+ return result;
+}
+
+// Computes floor(n / pow(10, N)) for small n and N.
+// Precondition: n <= pow(10, N + 1).
+template <int N> uint32_t small_division_by_pow10(uint32_t n) noexcept {
+ constexpr auto info = div_small_pow10_infos[N - 1];
+ FMT_ASSERT(n <= info.divisor * 10, "n is too large");
+ constexpr uint32_t magic_number =
+ (1u << info.shift_amount) / info.divisor + 1;
+ return (n * magic_number) >> info.shift_amount;
+}
+
+// Computes floor(n / 10^(kappa + 1)) (float)
+inline uint32_t divide_by_10_to_kappa_plus_1(uint32_t n) noexcept {
+ // 1374389535 = ceil(2^37/100)
+ return static_cast<uint32_t>((static_cast<uint64_t>(n) * 1374389535) >> 37);
+}
+// Computes floor(n / 10^(kappa + 1)) (double)
+inline uint64_t divide_by_10_to_kappa_plus_1(uint64_t n) noexcept {
+ // 2361183241434822607 = ceil(2^(64+7)/1000)
+ return umul128_upper64(n, 2361183241434822607ull) >> 7;
+}
+
+// Various subroutines using pow10 cache
+template <class T> struct cache_accessor;
+
+template <> struct cache_accessor<float> {
+ using carrier_uint = float_info<float>::carrier_uint;
+ using cache_entry_type = uint64_t;
+
+ static uint64_t get_cached_power(int k) noexcept {
+ FMT_ASSERT(k >= float_info<float>::min_k && k <= float_info<float>::max_k,
+ "k is out of range");
+ static constexpr const uint64_t pow10_significands[] = {
+ 0x81ceb32c4b43fcf5, 0xa2425ff75e14fc32, 0xcad2f7f5359a3b3f,
+ 0xfd87b5f28300ca0e, 0x9e74d1b791e07e49, 0xc612062576589ddb,
+ 0xf79687aed3eec552, 0x9abe14cd44753b53, 0xc16d9a0095928a28,
+ 0xf1c90080baf72cb2, 0x971da05074da7bef, 0xbce5086492111aeb,
+ 0xec1e4a7db69561a6, 0x9392ee8e921d5d08, 0xb877aa3236a4b44a,
+ 0xe69594bec44de15c, 0x901d7cf73ab0acda, 0xb424dc35095cd810,
+ 0xe12e13424bb40e14, 0x8cbccc096f5088cc, 0xafebff0bcb24aaff,
+ 0xdbe6fecebdedd5bf, 0x89705f4136b4a598, 0xabcc77118461cefd,
+ 0xd6bf94d5e57a42bd, 0x8637bd05af6c69b6, 0xa7c5ac471b478424,
+ 0xd1b71758e219652c, 0x83126e978d4fdf3c, 0xa3d70a3d70a3d70b,
+ 0xcccccccccccccccd, 0x8000000000000000, 0xa000000000000000,
+ 0xc800000000000000, 0xfa00000000000000, 0x9c40000000000000,
+ 0xc350000000000000, 0xf424000000000000, 0x9896800000000000,
+ 0xbebc200000000000, 0xee6b280000000000, 0x9502f90000000000,
+ 0xba43b74000000000, 0xe8d4a51000000000, 0x9184e72a00000000,
+ 0xb5e620f480000000, 0xe35fa931a0000000, 0x8e1bc9bf04000000,
+ 0xb1a2bc2ec5000000, 0xde0b6b3a76400000, 0x8ac7230489e80000,
+ 0xad78ebc5ac620000, 0xd8d726b7177a8000, 0x878678326eac9000,
+ 0xa968163f0a57b400, 0xd3c21bcecceda100, 0x84595161401484a0,
+ 0xa56fa5b99019a5c8, 0xcecb8f27f4200f3a, 0x813f3978f8940985,
+ 0xa18f07d736b90be6, 0xc9f2c9cd04674edf, 0xfc6f7c4045812297,
+ 0x9dc5ada82b70b59e, 0xc5371912364ce306, 0xf684df56c3e01bc7,
+ 0x9a130b963a6c115d, 0xc097ce7bc90715b4, 0xf0bdc21abb48db21,
+ 0x96769950b50d88f5, 0xbc143fa4e250eb32, 0xeb194f8e1ae525fe,
+ 0x92efd1b8d0cf37bf, 0xb7abc627050305ae, 0xe596b7b0c643c71a,
+ 0x8f7e32ce7bea5c70, 0xb35dbf821ae4f38c, 0xe0352f62a19e306f};
+ return pow10_significands[k - float_info<float>::min_k];
+ }
+
+ struct compute_mul_result {
+ carrier_uint result;
+ bool is_integer;
+ };
+ struct compute_mul_parity_result {
+ bool parity;
+ bool is_integer;
+ };
+
+ static compute_mul_result compute_mul(
+ carrier_uint u, const cache_entry_type& cache) noexcept {
+ auto r = umul96_upper64(u, cache);
+ return {static_cast<carrier_uint>(r >> 32),
+ static_cast<carrier_uint>(r) == 0};
+ }
+
+ static uint32_t compute_delta(const cache_entry_type& cache,
+ int beta) noexcept {
+ return static_cast<uint32_t>(cache >> (64 - 1 - beta));
+ }
+
+ static compute_mul_parity_result compute_mul_parity(
+ carrier_uint two_f, const cache_entry_type& cache, int beta) noexcept {
+ FMT_ASSERT(beta >= 1, "");
+ FMT_ASSERT(beta < 64, "");
+
+ auto r = umul96_lower64(two_f, cache);
+ return {((r >> (64 - beta)) & 1) != 0,
+ static_cast<uint32_t>(r >> (32 - beta)) == 0};
+ }
+
+ static carrier_uint compute_left_endpoint_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return static_cast<carrier_uint>(
+ (cache - (cache >> (num_significand_bits<float>() + 2))) >>
+ (64 - num_significand_bits<float>() - 1 - beta));
+ }
+
+ static carrier_uint compute_right_endpoint_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return static_cast<carrier_uint>(
+ (cache + (cache >> (num_significand_bits<float>() + 1))) >>
+ (64 - num_significand_bits<float>() - 1 - beta));
+ }
+
+ static carrier_uint compute_round_up_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return (static_cast<carrier_uint>(
+ cache >> (64 - num_significand_bits<float>() - 2 - beta)) +
+ 1) /
+ 2;
+ }
+};
+
+template <> struct cache_accessor<double> {
+ using carrier_uint = float_info<double>::carrier_uint;
+ using cache_entry_type = uint128_fallback;
+
+ static uint128_fallback get_cached_power(int k) noexcept {
+ FMT_ASSERT(k >= float_info<double>::min_k && k <= float_info<double>::max_k,
+ "k is out of range");
+
+ static constexpr const uint128_fallback pow10_significands[] = {
+#if FMT_USE_FULL_CACHE_DRAGONBOX
+ {0xff77b1fcbebcdc4f, 0x25e8e89c13bb0f7b},
+ {0x9faacf3df73609b1, 0x77b191618c54e9ad},
+ {0xc795830d75038c1d, 0xd59df5b9ef6a2418},
+ {0xf97ae3d0d2446f25, 0x4b0573286b44ad1e},
+ {0x9becce62836ac577, 0x4ee367f9430aec33},
+ {0xc2e801fb244576d5, 0x229c41f793cda740},
+ {0xf3a20279ed56d48a, 0x6b43527578c11110},
+ {0x9845418c345644d6, 0x830a13896b78aaaa},
+ {0xbe5691ef416bd60c, 0x23cc986bc656d554},
+ {0xedec366b11c6cb8f, 0x2cbfbe86b7ec8aa9},
+ {0x94b3a202eb1c3f39, 0x7bf7d71432f3d6aa},
+ {0xb9e08a83a5e34f07, 0xdaf5ccd93fb0cc54},
+ {0xe858ad248f5c22c9, 0xd1b3400f8f9cff69},
+ {0x91376c36d99995be, 0x23100809b9c21fa2},
+ {0xb58547448ffffb2d, 0xabd40a0c2832a78b},
+ {0xe2e69915b3fff9f9, 0x16c90c8f323f516d},
+ {0x8dd01fad907ffc3b, 0xae3da7d97f6792e4},
+ {0xb1442798f49ffb4a, 0x99cd11cfdf41779d},
+ {0xdd95317f31c7fa1d, 0x40405643d711d584},
+ {0x8a7d3eef7f1cfc52, 0x482835ea666b2573},
+ {0xad1c8eab5ee43b66, 0xda3243650005eed0},
+ {0xd863b256369d4a40, 0x90bed43e40076a83},
+ {0x873e4f75e2224e68, 0x5a7744a6e804a292},
+ {0xa90de3535aaae202, 0x711515d0a205cb37},
+ {0xd3515c2831559a83, 0x0d5a5b44ca873e04},
+ {0x8412d9991ed58091, 0xe858790afe9486c3},
+ {0xa5178fff668ae0b6, 0x626e974dbe39a873},
+ {0xce5d73ff402d98e3, 0xfb0a3d212dc81290},
+ {0x80fa687f881c7f8e, 0x7ce66634bc9d0b9a},
+ {0xa139029f6a239f72, 0x1c1fffc1ebc44e81},
+ {0xc987434744ac874e, 0xa327ffb266b56221},
+ {0xfbe9141915d7a922, 0x4bf1ff9f0062baa9},
+ {0x9d71ac8fada6c9b5, 0x6f773fc3603db4aa},
+ {0xc4ce17b399107c22, 0xcb550fb4384d21d4},
+ {0xf6019da07f549b2b, 0x7e2a53a146606a49},
+ {0x99c102844f94e0fb, 0x2eda7444cbfc426e},
+ {0xc0314325637a1939, 0xfa911155fefb5309},
+ {0xf03d93eebc589f88, 0x793555ab7eba27cb},
+ {0x96267c7535b763b5, 0x4bc1558b2f3458df},
+ {0xbbb01b9283253ca2, 0x9eb1aaedfb016f17},
+ {0xea9c227723ee8bcb, 0x465e15a979c1cadd},
+ {0x92a1958a7675175f, 0x0bfacd89ec191eca},
+ {0xb749faed14125d36, 0xcef980ec671f667c},
+ {0xe51c79a85916f484, 0x82b7e12780e7401b},
+ {0x8f31cc0937ae58d2, 0xd1b2ecb8b0908811},
+ {0xb2fe3f0b8599ef07, 0x861fa7e6dcb4aa16},
+ {0xdfbdcece67006ac9, 0x67a791e093e1d49b},
+ {0x8bd6a141006042bd, 0xe0c8bb2c5c6d24e1},
+ {0xaecc49914078536d, 0x58fae9f773886e19},
+ {0xda7f5bf590966848, 0xaf39a475506a899f},
+ {0x888f99797a5e012d, 0x6d8406c952429604},
+ {0xaab37fd7d8f58178, 0xc8e5087ba6d33b84},
+ {0xd5605fcdcf32e1d6, 0xfb1e4a9a90880a65},
+ {0x855c3be0a17fcd26, 0x5cf2eea09a550680},
+ {0xa6b34ad8c9dfc06f, 0xf42faa48c0ea481f},
+ {0xd0601d8efc57b08b, 0xf13b94daf124da27},
+ {0x823c12795db6ce57, 0x76c53d08d6b70859},
+ {0xa2cb1717b52481ed, 0x54768c4b0c64ca6f},
+ {0xcb7ddcdda26da268, 0xa9942f5dcf7dfd0a},
+ {0xfe5d54150b090b02, 0xd3f93b35435d7c4d},
+ {0x9efa548d26e5a6e1, 0xc47bc5014a1a6db0},
+ {0xc6b8e9b0709f109a, 0x359ab6419ca1091c},
+ {0xf867241c8cc6d4c0, 0xc30163d203c94b63},
+ {0x9b407691d7fc44f8, 0x79e0de63425dcf1e},
+ {0xc21094364dfb5636, 0x985915fc12f542e5},
+ {0xf294b943e17a2bc4, 0x3e6f5b7b17b2939e},
+ {0x979cf3ca6cec5b5a, 0xa705992ceecf9c43},
+ {0xbd8430bd08277231, 0x50c6ff782a838354},
+ {0xece53cec4a314ebd, 0xa4f8bf5635246429},
+ {0x940f4613ae5ed136, 0x871b7795e136be9a},
+ {0xb913179899f68584, 0x28e2557b59846e40},
+ {0xe757dd7ec07426e5, 0x331aeada2fe589d0},
+ {0x9096ea6f3848984f, 0x3ff0d2c85def7622},
+ {0xb4bca50b065abe63, 0x0fed077a756b53aa},
+ {0xe1ebce4dc7f16dfb, 0xd3e8495912c62895},
+ {0x8d3360f09cf6e4bd, 0x64712dd7abbbd95d},
+ {0xb080392cc4349dec, 0xbd8d794d96aacfb4},
+ {0xdca04777f541c567, 0xecf0d7a0fc5583a1},
+ {0x89e42caaf9491b60, 0xf41686c49db57245},
+ {0xac5d37d5b79b6239, 0x311c2875c522ced6},
+ {0xd77485cb25823ac7, 0x7d633293366b828c},
+ {0x86a8d39ef77164bc, 0xae5dff9c02033198},
+ {0xa8530886b54dbdeb, 0xd9f57f830283fdfd},
+ {0xd267caa862a12d66, 0xd072df63c324fd7c},
+ {0x8380dea93da4bc60, 0x4247cb9e59f71e6e},
+ {0xa46116538d0deb78, 0x52d9be85f074e609},
+ {0xcd795be870516656, 0x67902e276c921f8c},
+ {0x806bd9714632dff6, 0x00ba1cd8a3db53b7},
+ {0xa086cfcd97bf97f3, 0x80e8a40eccd228a5},
+ {0xc8a883c0fdaf7df0, 0x6122cd128006b2ce},
+ {0xfad2a4b13d1b5d6c, 0x796b805720085f82},
+ {0x9cc3a6eec6311a63, 0xcbe3303674053bb1},
+ {0xc3f490aa77bd60fc, 0xbedbfc4411068a9d},
+ {0xf4f1b4d515acb93b, 0xee92fb5515482d45},
+ {0x991711052d8bf3c5, 0x751bdd152d4d1c4b},
+ {0xbf5cd54678eef0b6, 0xd262d45a78a0635e},
+ {0xef340a98172aace4, 0x86fb897116c87c35},
+ {0x9580869f0e7aac0e, 0xd45d35e6ae3d4da1},
+ {0xbae0a846d2195712, 0x8974836059cca10a},
+ {0xe998d258869facd7, 0x2bd1a438703fc94c},
+ {0x91ff83775423cc06, 0x7b6306a34627ddd0},
+ {0xb67f6455292cbf08, 0x1a3bc84c17b1d543},
+ {0xe41f3d6a7377eeca, 0x20caba5f1d9e4a94},
+ {0x8e938662882af53e, 0x547eb47b7282ee9d},
+ {0xb23867fb2a35b28d, 0xe99e619a4f23aa44},
+ {0xdec681f9f4c31f31, 0x6405fa00e2ec94d5},
+ {0x8b3c113c38f9f37e, 0xde83bc408dd3dd05},
+ {0xae0b158b4738705e, 0x9624ab50b148d446},
+ {0xd98ddaee19068c76, 0x3badd624dd9b0958},
+ {0x87f8a8d4cfa417c9, 0xe54ca5d70a80e5d7},
+ {0xa9f6d30a038d1dbc, 0x5e9fcf4ccd211f4d},
+ {0xd47487cc8470652b, 0x7647c32000696720},
+ {0x84c8d4dfd2c63f3b, 0x29ecd9f40041e074},
+ {0xa5fb0a17c777cf09, 0xf468107100525891},
+ {0xcf79cc9db955c2cc, 0x7182148d4066eeb5},
+ {0x81ac1fe293d599bf, 0xc6f14cd848405531},
+ {0xa21727db38cb002f, 0xb8ada00e5a506a7d},
+ {0xca9cf1d206fdc03b, 0xa6d90811f0e4851d},
+ {0xfd442e4688bd304a, 0x908f4a166d1da664},
+ {0x9e4a9cec15763e2e, 0x9a598e4e043287ff},
+ {0xc5dd44271ad3cdba, 0x40eff1e1853f29fe},
+ {0xf7549530e188c128, 0xd12bee59e68ef47d},
+ {0x9a94dd3e8cf578b9, 0x82bb74f8301958cf},
+ {0xc13a148e3032d6e7, 0xe36a52363c1faf02},
+ {0xf18899b1bc3f8ca1, 0xdc44e6c3cb279ac2},
+ {0x96f5600f15a7b7e5, 0x29ab103a5ef8c0ba},
+ {0xbcb2b812db11a5de, 0x7415d448f6b6f0e8},
+ {0xebdf661791d60f56, 0x111b495b3464ad22},
+ {0x936b9fcebb25c995, 0xcab10dd900beec35},
+ {0xb84687c269ef3bfb, 0x3d5d514f40eea743},
+ {0xe65829b3046b0afa, 0x0cb4a5a3112a5113},
+ {0x8ff71a0fe2c2e6dc, 0x47f0e785eaba72ac},
+ {0xb3f4e093db73a093, 0x59ed216765690f57},
+ {0xe0f218b8d25088b8, 0x306869c13ec3532d},
+ {0x8c974f7383725573, 0x1e414218c73a13fc},
+ {0xafbd2350644eeacf, 0xe5d1929ef90898fb},
+ {0xdbac6c247d62a583, 0xdf45f746b74abf3a},
+ {0x894bc396ce5da772, 0x6b8bba8c328eb784},
+ {0xab9eb47c81f5114f, 0x066ea92f3f326565},
+ {0xd686619ba27255a2, 0xc80a537b0efefebe},
+ {0x8613fd0145877585, 0xbd06742ce95f5f37},
+ {0xa798fc4196e952e7, 0x2c48113823b73705},
+ {0xd17f3b51fca3a7a0, 0xf75a15862ca504c6},
+ {0x82ef85133de648c4, 0x9a984d73dbe722fc},
+ {0xa3ab66580d5fdaf5, 0xc13e60d0d2e0ebbb},
+ {0xcc963fee10b7d1b3, 0x318df905079926a9},
+ {0xffbbcfe994e5c61f, 0xfdf17746497f7053},
+ {0x9fd561f1fd0f9bd3, 0xfeb6ea8bedefa634},
+ {0xc7caba6e7c5382c8, 0xfe64a52ee96b8fc1},
+ {0xf9bd690a1b68637b, 0x3dfdce7aa3c673b1},
+ {0x9c1661a651213e2d, 0x06bea10ca65c084f},
+ {0xc31bfa0fe5698db8, 0x486e494fcff30a63},
+ {0xf3e2f893dec3f126, 0x5a89dba3c3efccfb},
+ {0x986ddb5c6b3a76b7, 0xf89629465a75e01d},
+ {0xbe89523386091465, 0xf6bbb397f1135824},
+ {0xee2ba6c0678b597f, 0x746aa07ded582e2d},
+ {0x94db483840b717ef, 0xa8c2a44eb4571cdd},
+ {0xba121a4650e4ddeb, 0x92f34d62616ce414},
+ {0xe896a0d7e51e1566, 0x77b020baf9c81d18},
+ {0x915e2486ef32cd60, 0x0ace1474dc1d122f},
+ {0xb5b5ada8aaff80b8, 0x0d819992132456bb},
+ {0xe3231912d5bf60e6, 0x10e1fff697ed6c6a},
+ {0x8df5efabc5979c8f, 0xca8d3ffa1ef463c2},
+ {0xb1736b96b6fd83b3, 0xbd308ff8a6b17cb3},
+ {0xddd0467c64bce4a0, 0xac7cb3f6d05ddbdf},
+ {0x8aa22c0dbef60ee4, 0x6bcdf07a423aa96c},
+ {0xad4ab7112eb3929d, 0x86c16c98d2c953c7},
+ {0xd89d64d57a607744, 0xe871c7bf077ba8b8},
+ {0x87625f056c7c4a8b, 0x11471cd764ad4973},
+ {0xa93af6c6c79b5d2d, 0xd598e40d3dd89bd0},
+ {0xd389b47879823479, 0x4aff1d108d4ec2c4},
+ {0x843610cb4bf160cb, 0xcedf722a585139bb},
+ {0xa54394fe1eedb8fe, 0xc2974eb4ee658829},
+ {0xce947a3da6a9273e, 0x733d226229feea33},
+ {0x811ccc668829b887, 0x0806357d5a3f5260},
+ {0xa163ff802a3426a8, 0xca07c2dcb0cf26f8},
+ {0xc9bcff6034c13052, 0xfc89b393dd02f0b6},
+ {0xfc2c3f3841f17c67, 0xbbac2078d443ace3},
+ {0x9d9ba7832936edc0, 0xd54b944b84aa4c0e},
+ {0xc5029163f384a931, 0x0a9e795e65d4df12},
+ {0xf64335bcf065d37d, 0x4d4617b5ff4a16d6},
+ {0x99ea0196163fa42e, 0x504bced1bf8e4e46},
+ {0xc06481fb9bcf8d39, 0xe45ec2862f71e1d7},
+ {0xf07da27a82c37088, 0x5d767327bb4e5a4d},
+ {0x964e858c91ba2655, 0x3a6a07f8d510f870},
+ {0xbbe226efb628afea, 0x890489f70a55368c},
+ {0xeadab0aba3b2dbe5, 0x2b45ac74ccea842f},
+ {0x92c8ae6b464fc96f, 0x3b0b8bc90012929e},
+ {0xb77ada0617e3bbcb, 0x09ce6ebb40173745},
+ {0xe55990879ddcaabd, 0xcc420a6a101d0516},
+ {0x8f57fa54c2a9eab6, 0x9fa946824a12232e},
+ {0xb32df8e9f3546564, 0x47939822dc96abfa},
+ {0xdff9772470297ebd, 0x59787e2b93bc56f8},
+ {0x8bfbea76c619ef36, 0x57eb4edb3c55b65b},
+ {0xaefae51477a06b03, 0xede622920b6b23f2},
+ {0xdab99e59958885c4, 0xe95fab368e45ecee},
+ {0x88b402f7fd75539b, 0x11dbcb0218ebb415},
+ {0xaae103b5fcd2a881, 0xd652bdc29f26a11a},
+ {0xd59944a37c0752a2, 0x4be76d3346f04960},
+ {0x857fcae62d8493a5, 0x6f70a4400c562ddc},
+ {0xa6dfbd9fb8e5b88e, 0xcb4ccd500f6bb953},
+ {0xd097ad07a71f26b2, 0x7e2000a41346a7a8},
+ {0x825ecc24c873782f, 0x8ed400668c0c28c9},
+ {0xa2f67f2dfa90563b, 0x728900802f0f32fb},
+ {0xcbb41ef979346bca, 0x4f2b40a03ad2ffba},
+ {0xfea126b7d78186bc, 0xe2f610c84987bfa9},
+ {0x9f24b832e6b0f436, 0x0dd9ca7d2df4d7ca},
+ {0xc6ede63fa05d3143, 0x91503d1c79720dbc},
+ {0xf8a95fcf88747d94, 0x75a44c6397ce912b},
+ {0x9b69dbe1b548ce7c, 0xc986afbe3ee11abb},
+ {0xc24452da229b021b, 0xfbe85badce996169},
+ {0xf2d56790ab41c2a2, 0xfae27299423fb9c4},
+ {0x97c560ba6b0919a5, 0xdccd879fc967d41b},
+ {0xbdb6b8e905cb600f, 0x5400e987bbc1c921},
+ {0xed246723473e3813, 0x290123e9aab23b69},
+ {0x9436c0760c86e30b, 0xf9a0b6720aaf6522},
+ {0xb94470938fa89bce, 0xf808e40e8d5b3e6a},
+ {0xe7958cb87392c2c2, 0xb60b1d1230b20e05},
+ {0x90bd77f3483bb9b9, 0xb1c6f22b5e6f48c3},
+ {0xb4ecd5f01a4aa828, 0x1e38aeb6360b1af4},
+ {0xe2280b6c20dd5232, 0x25c6da63c38de1b1},
+ {0x8d590723948a535f, 0x579c487e5a38ad0f},
+ {0xb0af48ec79ace837, 0x2d835a9df0c6d852},
+ {0xdcdb1b2798182244, 0xf8e431456cf88e66},
+ {0x8a08f0f8bf0f156b, 0x1b8e9ecb641b5900},
+ {0xac8b2d36eed2dac5, 0xe272467e3d222f40},
+ {0xd7adf884aa879177, 0x5b0ed81dcc6abb10},
+ {0x86ccbb52ea94baea, 0x98e947129fc2b4ea},
+ {0xa87fea27a539e9a5, 0x3f2398d747b36225},
+ {0xd29fe4b18e88640e, 0x8eec7f0d19a03aae},
+ {0x83a3eeeef9153e89, 0x1953cf68300424ad},
+ {0xa48ceaaab75a8e2b, 0x5fa8c3423c052dd8},
+ {0xcdb02555653131b6, 0x3792f412cb06794e},
+ {0x808e17555f3ebf11, 0xe2bbd88bbee40bd1},
+ {0xa0b19d2ab70e6ed6, 0x5b6aceaeae9d0ec5},
+ {0xc8de047564d20a8b, 0xf245825a5a445276},
+ {0xfb158592be068d2e, 0xeed6e2f0f0d56713},
+ {0x9ced737bb6c4183d, 0x55464dd69685606c},
+ {0xc428d05aa4751e4c, 0xaa97e14c3c26b887},
+ {0xf53304714d9265df, 0xd53dd99f4b3066a9},
+ {0x993fe2c6d07b7fab, 0xe546a8038efe402a},
+ {0xbf8fdb78849a5f96, 0xde98520472bdd034},
+ {0xef73d256a5c0f77c, 0x963e66858f6d4441},
+ {0x95a8637627989aad, 0xdde7001379a44aa9},
+ {0xbb127c53b17ec159, 0x5560c018580d5d53},
+ {0xe9d71b689dde71af, 0xaab8f01e6e10b4a7},
+ {0x9226712162ab070d, 0xcab3961304ca70e9},
+ {0xb6b00d69bb55c8d1, 0x3d607b97c5fd0d23},
+ {0xe45c10c42a2b3b05, 0x8cb89a7db77c506b},
+ {0x8eb98a7a9a5b04e3, 0x77f3608e92adb243},
+ {0xb267ed1940f1c61c, 0x55f038b237591ed4},
+ {0xdf01e85f912e37a3, 0x6b6c46dec52f6689},
+ {0x8b61313bbabce2c6, 0x2323ac4b3b3da016},
+ {0xae397d8aa96c1b77, 0xabec975e0a0d081b},
+ {0xd9c7dced53c72255, 0x96e7bd358c904a22},
+ {0x881cea14545c7575, 0x7e50d64177da2e55},
+ {0xaa242499697392d2, 0xdde50bd1d5d0b9ea},
+ {0xd4ad2dbfc3d07787, 0x955e4ec64b44e865},
+ {0x84ec3c97da624ab4, 0xbd5af13bef0b113f},
+ {0xa6274bbdd0fadd61, 0xecb1ad8aeacdd58f},
+ {0xcfb11ead453994ba, 0x67de18eda5814af3},
+ {0x81ceb32c4b43fcf4, 0x80eacf948770ced8},
+ {0xa2425ff75e14fc31, 0xa1258379a94d028e},
+ {0xcad2f7f5359a3b3e, 0x096ee45813a04331},
+ {0xfd87b5f28300ca0d, 0x8bca9d6e188853fd},
+ {0x9e74d1b791e07e48, 0x775ea264cf55347e},
+ {0xc612062576589dda, 0x95364afe032a819e},
+ {0xf79687aed3eec551, 0x3a83ddbd83f52205},
+ {0x9abe14cd44753b52, 0xc4926a9672793543},
+ {0xc16d9a0095928a27, 0x75b7053c0f178294},
+ {0xf1c90080baf72cb1, 0x5324c68b12dd6339},
+ {0x971da05074da7bee, 0xd3f6fc16ebca5e04},
+ {0xbce5086492111aea, 0x88f4bb1ca6bcf585},
+ {0xec1e4a7db69561a5, 0x2b31e9e3d06c32e6},
+ {0x9392ee8e921d5d07, 0x3aff322e62439fd0},
+ {0xb877aa3236a4b449, 0x09befeb9fad487c3},
+ {0xe69594bec44de15b, 0x4c2ebe687989a9b4},
+ {0x901d7cf73ab0acd9, 0x0f9d37014bf60a11},
+ {0xb424dc35095cd80f, 0x538484c19ef38c95},
+ {0xe12e13424bb40e13, 0x2865a5f206b06fba},
+ {0x8cbccc096f5088cb, 0xf93f87b7442e45d4},
+ {0xafebff0bcb24aafe, 0xf78f69a51539d749},
+ {0xdbe6fecebdedd5be, 0xb573440e5a884d1c},
+ {0x89705f4136b4a597, 0x31680a88f8953031},
+ {0xabcc77118461cefc, 0xfdc20d2b36ba7c3e},
+ {0xd6bf94d5e57a42bc, 0x3d32907604691b4d},
+ {0x8637bd05af6c69b5, 0xa63f9a49c2c1b110},
+ {0xa7c5ac471b478423, 0x0fcf80dc33721d54},
+ {0xd1b71758e219652b, 0xd3c36113404ea4a9},
+ {0x83126e978d4fdf3b, 0x645a1cac083126ea},
+ {0xa3d70a3d70a3d70a, 0x3d70a3d70a3d70a4},
+ {0xcccccccccccccccc, 0xcccccccccccccccd},
+ {0x8000000000000000, 0x0000000000000000},
+ {0xa000000000000000, 0x0000000000000000},
+ {0xc800000000000000, 0x0000000000000000},
+ {0xfa00000000000000, 0x0000000000000000},
+ {0x9c40000000000000, 0x0000000000000000},
+ {0xc350000000000000, 0x0000000000000000},
+ {0xf424000000000000, 0x0000000000000000},
+ {0x9896800000000000, 0x0000000000000000},
+ {0xbebc200000000000, 0x0000000000000000},
+ {0xee6b280000000000, 0x0000000000000000},
+ {0x9502f90000000000, 0x0000000000000000},
+ {0xba43b74000000000, 0x0000000000000000},
+ {0xe8d4a51000000000, 0x0000000000000000},
+ {0x9184e72a00000000, 0x0000000000000000},
+ {0xb5e620f480000000, 0x0000000000000000},
+ {0xe35fa931a0000000, 0x0000000000000000},
+ {0x8e1bc9bf04000000, 0x0000000000000000},
+ {0xb1a2bc2ec5000000, 0x0000000000000000},
+ {0xde0b6b3a76400000, 0x0000000000000000},
+ {0x8ac7230489e80000, 0x0000000000000000},
+ {0xad78ebc5ac620000, 0x0000000000000000},
+ {0xd8d726b7177a8000, 0x0000000000000000},
+ {0x878678326eac9000, 0x0000000000000000},
+ {0xa968163f0a57b400, 0x0000000000000000},
+ {0xd3c21bcecceda100, 0x0000000000000000},
+ {0x84595161401484a0, 0x0000000000000000},
+ {0xa56fa5b99019a5c8, 0x0000000000000000},
+ {0xcecb8f27f4200f3a, 0x0000000000000000},
+ {0x813f3978f8940984, 0x4000000000000000},
+ {0xa18f07d736b90be5, 0x5000000000000000},
+ {0xc9f2c9cd04674ede, 0xa400000000000000},
+ {0xfc6f7c4045812296, 0x4d00000000000000},
+ {0x9dc5ada82b70b59d, 0xf020000000000000},
+ {0xc5371912364ce305, 0x6c28000000000000},
+ {0xf684df56c3e01bc6, 0xc732000000000000},
+ {0x9a130b963a6c115c, 0x3c7f400000000000},
+ {0xc097ce7bc90715b3, 0x4b9f100000000000},
+ {0xf0bdc21abb48db20, 0x1e86d40000000000},
+ {0x96769950b50d88f4, 0x1314448000000000},
+ {0xbc143fa4e250eb31, 0x17d955a000000000},
+ {0xeb194f8e1ae525fd, 0x5dcfab0800000000},
+ {0x92efd1b8d0cf37be, 0x5aa1cae500000000},
+ {0xb7abc627050305ad, 0xf14a3d9e40000000},
+ {0xe596b7b0c643c719, 0x6d9ccd05d0000000},
+ {0x8f7e32ce7bea5c6f, 0xe4820023a2000000},
+ {0xb35dbf821ae4f38b, 0xdda2802c8a800000},
+ {0xe0352f62a19e306e, 0xd50b2037ad200000},
+ {0x8c213d9da502de45, 0x4526f422cc340000},
+ {0xaf298d050e4395d6, 0x9670b12b7f410000},
+ {0xdaf3f04651d47b4c, 0x3c0cdd765f114000},
+ {0x88d8762bf324cd0f, 0xa5880a69fb6ac800},
+ {0xab0e93b6efee0053, 0x8eea0d047a457a00},
+ {0xd5d238a4abe98068, 0x72a4904598d6d880},
+ {0x85a36366eb71f041, 0x47a6da2b7f864750},
+ {0xa70c3c40a64e6c51, 0x999090b65f67d924},
+ {0xd0cf4b50cfe20765, 0xfff4b4e3f741cf6d},
+ {0x82818f1281ed449f, 0xbff8f10e7a8921a5},
+ {0xa321f2d7226895c7, 0xaff72d52192b6a0e},
+ {0xcbea6f8ceb02bb39, 0x9bf4f8a69f764491},
+ {0xfee50b7025c36a08, 0x02f236d04753d5b5},
+ {0x9f4f2726179a2245, 0x01d762422c946591},
+ {0xc722f0ef9d80aad6, 0x424d3ad2b7b97ef6},
+ {0xf8ebad2b84e0d58b, 0xd2e0898765a7deb3},
+ {0x9b934c3b330c8577, 0x63cc55f49f88eb30},
+ {0xc2781f49ffcfa6d5, 0x3cbf6b71c76b25fc},
+ {0xf316271c7fc3908a, 0x8bef464e3945ef7b},
+ {0x97edd871cfda3a56, 0x97758bf0e3cbb5ad},
+ {0xbde94e8e43d0c8ec, 0x3d52eeed1cbea318},
+ {0xed63a231d4c4fb27, 0x4ca7aaa863ee4bde},
+ {0x945e455f24fb1cf8, 0x8fe8caa93e74ef6b},
+ {0xb975d6b6ee39e436, 0xb3e2fd538e122b45},
+ {0xe7d34c64a9c85d44, 0x60dbbca87196b617},
+ {0x90e40fbeea1d3a4a, 0xbc8955e946fe31ce},
+ {0xb51d13aea4a488dd, 0x6babab6398bdbe42},
+ {0xe264589a4dcdab14, 0xc696963c7eed2dd2},
+ {0x8d7eb76070a08aec, 0xfc1e1de5cf543ca3},
+ {0xb0de65388cc8ada8, 0x3b25a55f43294bcc},
+ {0xdd15fe86affad912, 0x49ef0eb713f39ebf},
+ {0x8a2dbf142dfcc7ab, 0x6e3569326c784338},
+ {0xacb92ed9397bf996, 0x49c2c37f07965405},
+ {0xd7e77a8f87daf7fb, 0xdc33745ec97be907},
+ {0x86f0ac99b4e8dafd, 0x69a028bb3ded71a4},
+ {0xa8acd7c0222311bc, 0xc40832ea0d68ce0d},
+ {0xd2d80db02aabd62b, 0xf50a3fa490c30191},
+ {0x83c7088e1aab65db, 0x792667c6da79e0fb},
+ {0xa4b8cab1a1563f52, 0x577001b891185939},
+ {0xcde6fd5e09abcf26, 0xed4c0226b55e6f87},
+ {0x80b05e5ac60b6178, 0x544f8158315b05b5},
+ {0xa0dc75f1778e39d6, 0x696361ae3db1c722},
+ {0xc913936dd571c84c, 0x03bc3a19cd1e38ea},
+ {0xfb5878494ace3a5f, 0x04ab48a04065c724},
+ {0x9d174b2dcec0e47b, 0x62eb0d64283f9c77},
+ {0xc45d1df942711d9a, 0x3ba5d0bd324f8395},
+ {0xf5746577930d6500, 0xca8f44ec7ee3647a},
+ {0x9968bf6abbe85f20, 0x7e998b13cf4e1ecc},
+ {0xbfc2ef456ae276e8, 0x9e3fedd8c321a67f},
+ {0xefb3ab16c59b14a2, 0xc5cfe94ef3ea101f},
+ {0x95d04aee3b80ece5, 0xbba1f1d158724a13},
+ {0xbb445da9ca61281f, 0x2a8a6e45ae8edc98},
+ {0xea1575143cf97226, 0xf52d09d71a3293be},
+ {0x924d692ca61be758, 0x593c2626705f9c57},
+ {0xb6e0c377cfa2e12e, 0x6f8b2fb00c77836d},
+ {0xe498f455c38b997a, 0x0b6dfb9c0f956448},
+ {0x8edf98b59a373fec, 0x4724bd4189bd5ead},
+ {0xb2977ee300c50fe7, 0x58edec91ec2cb658},
+ {0xdf3d5e9bc0f653e1, 0x2f2967b66737e3ee},
+ {0x8b865b215899f46c, 0xbd79e0d20082ee75},
+ {0xae67f1e9aec07187, 0xecd8590680a3aa12},
+ {0xda01ee641a708de9, 0xe80e6f4820cc9496},
+ {0x884134fe908658b2, 0x3109058d147fdcde},
+ {0xaa51823e34a7eede, 0xbd4b46f0599fd416},
+ {0xd4e5e2cdc1d1ea96, 0x6c9e18ac7007c91b},
+ {0x850fadc09923329e, 0x03e2cf6bc604ddb1},
+ {0xa6539930bf6bff45, 0x84db8346b786151d},
+ {0xcfe87f7cef46ff16, 0xe612641865679a64},
+ {0x81f14fae158c5f6e, 0x4fcb7e8f3f60c07f},
+ {0xa26da3999aef7749, 0xe3be5e330f38f09e},
+ {0xcb090c8001ab551c, 0x5cadf5bfd3072cc6},
+ {0xfdcb4fa002162a63, 0x73d9732fc7c8f7f7},
+ {0x9e9f11c4014dda7e, 0x2867e7fddcdd9afb},
+ {0xc646d63501a1511d, 0xb281e1fd541501b9},
+ {0xf7d88bc24209a565, 0x1f225a7ca91a4227},
+ {0x9ae757596946075f, 0x3375788de9b06959},
+ {0xc1a12d2fc3978937, 0x0052d6b1641c83af},
+ {0xf209787bb47d6b84, 0xc0678c5dbd23a49b},
+ {0x9745eb4d50ce6332, 0xf840b7ba963646e1},
+ {0xbd176620a501fbff, 0xb650e5a93bc3d899},
+ {0xec5d3fa8ce427aff, 0xa3e51f138ab4cebf},
+ {0x93ba47c980e98cdf, 0xc66f336c36b10138},
+ {0xb8a8d9bbe123f017, 0xb80b0047445d4185},
+ {0xe6d3102ad96cec1d, 0xa60dc059157491e6},
+ {0x9043ea1ac7e41392, 0x87c89837ad68db30},
+ {0xb454e4a179dd1877, 0x29babe4598c311fc},
+ {0xe16a1dc9d8545e94, 0xf4296dd6fef3d67b},
+ {0x8ce2529e2734bb1d, 0x1899e4a65f58660d},
+ {0xb01ae745b101e9e4, 0x5ec05dcff72e7f90},
+ {0xdc21a1171d42645d, 0x76707543f4fa1f74},
+ {0x899504ae72497eba, 0x6a06494a791c53a9},
+ {0xabfa45da0edbde69, 0x0487db9d17636893},
+ {0xd6f8d7509292d603, 0x45a9d2845d3c42b7},
+ {0x865b86925b9bc5c2, 0x0b8a2392ba45a9b3},
+ {0xa7f26836f282b732, 0x8e6cac7768d7141f},
+ {0xd1ef0244af2364ff, 0x3207d795430cd927},
+ {0x8335616aed761f1f, 0x7f44e6bd49e807b9},
+ {0xa402b9c5a8d3a6e7, 0x5f16206c9c6209a7},
+ {0xcd036837130890a1, 0x36dba887c37a8c10},
+ {0x802221226be55a64, 0xc2494954da2c978a},
+ {0xa02aa96b06deb0fd, 0xf2db9baa10b7bd6d},
+ {0xc83553c5c8965d3d, 0x6f92829494e5acc8},
+ {0xfa42a8b73abbf48c, 0xcb772339ba1f17fa},
+ {0x9c69a97284b578d7, 0xff2a760414536efc},
+ {0xc38413cf25e2d70d, 0xfef5138519684abb},
+ {0xf46518c2ef5b8cd1, 0x7eb258665fc25d6a},
+ {0x98bf2f79d5993802, 0xef2f773ffbd97a62},
+ {0xbeeefb584aff8603, 0xaafb550ffacfd8fb},
+ {0xeeaaba2e5dbf6784, 0x95ba2a53f983cf39},
+ {0x952ab45cfa97a0b2, 0xdd945a747bf26184},
+ {0xba756174393d88df, 0x94f971119aeef9e5},
+ {0xe912b9d1478ceb17, 0x7a37cd5601aab85e},
+ {0x91abb422ccb812ee, 0xac62e055c10ab33b},
+ {0xb616a12b7fe617aa, 0x577b986b314d600a},
+ {0xe39c49765fdf9d94, 0xed5a7e85fda0b80c},
+ {0x8e41ade9fbebc27d, 0x14588f13be847308},
+ {0xb1d219647ae6b31c, 0x596eb2d8ae258fc9},
+ {0xde469fbd99a05fe3, 0x6fca5f8ed9aef3bc},
+ {0x8aec23d680043bee, 0x25de7bb9480d5855},
+ {0xada72ccc20054ae9, 0xaf561aa79a10ae6b},
+ {0xd910f7ff28069da4, 0x1b2ba1518094da05},
+ {0x87aa9aff79042286, 0x90fb44d2f05d0843},
+ {0xa99541bf57452b28, 0x353a1607ac744a54},
+ {0xd3fa922f2d1675f2, 0x42889b8997915ce9},
+ {0x847c9b5d7c2e09b7, 0x69956135febada12},
+ {0xa59bc234db398c25, 0x43fab9837e699096},
+ {0xcf02b2c21207ef2e, 0x94f967e45e03f4bc},
+ {0x8161afb94b44f57d, 0x1d1be0eebac278f6},
+ {0xa1ba1ba79e1632dc, 0x6462d92a69731733},
+ {0xca28a291859bbf93, 0x7d7b8f7503cfdcff},
+ {0xfcb2cb35e702af78, 0x5cda735244c3d43f},
+ {0x9defbf01b061adab, 0x3a0888136afa64a8},
+ {0xc56baec21c7a1916, 0x088aaa1845b8fdd1},
+ {0xf6c69a72a3989f5b, 0x8aad549e57273d46},
+ {0x9a3c2087a63f6399, 0x36ac54e2f678864c},
+ {0xc0cb28a98fcf3c7f, 0x84576a1bb416a7de},
+ {0xf0fdf2d3f3c30b9f, 0x656d44a2a11c51d6},
+ {0x969eb7c47859e743, 0x9f644ae5a4b1b326},
+ {0xbc4665b596706114, 0x873d5d9f0dde1fef},
+ {0xeb57ff22fc0c7959, 0xa90cb506d155a7eb},
+ {0x9316ff75dd87cbd8, 0x09a7f12442d588f3},
+ {0xb7dcbf5354e9bece, 0x0c11ed6d538aeb30},
+ {0xe5d3ef282a242e81, 0x8f1668c8a86da5fb},
+ {0x8fa475791a569d10, 0xf96e017d694487bd},
+ {0xb38d92d760ec4455, 0x37c981dcc395a9ad},
+ {0xe070f78d3927556a, 0x85bbe253f47b1418},
+ {0x8c469ab843b89562, 0x93956d7478ccec8f},
+ {0xaf58416654a6babb, 0x387ac8d1970027b3},
+ {0xdb2e51bfe9d0696a, 0x06997b05fcc0319f},
+ {0x88fcf317f22241e2, 0x441fece3bdf81f04},
+ {0xab3c2fddeeaad25a, 0xd527e81cad7626c4},
+ {0xd60b3bd56a5586f1, 0x8a71e223d8d3b075},
+ {0x85c7056562757456, 0xf6872d5667844e4a},
+ {0xa738c6bebb12d16c, 0xb428f8ac016561dc},
+ {0xd106f86e69d785c7, 0xe13336d701beba53},
+ {0x82a45b450226b39c, 0xecc0024661173474},
+ {0xa34d721642b06084, 0x27f002d7f95d0191},
+ {0xcc20ce9bd35c78a5, 0x31ec038df7b441f5},
+ {0xff290242c83396ce, 0x7e67047175a15272},
+ {0x9f79a169bd203e41, 0x0f0062c6e984d387},
+ {0xc75809c42c684dd1, 0x52c07b78a3e60869},
+ {0xf92e0c3537826145, 0xa7709a56ccdf8a83},
+ {0x9bbcc7a142b17ccb, 0x88a66076400bb692},
+ {0xc2abf989935ddbfe, 0x6acff893d00ea436},
+ {0xf356f7ebf83552fe, 0x0583f6b8c4124d44},
+ {0x98165af37b2153de, 0xc3727a337a8b704b},
+ {0xbe1bf1b059e9a8d6, 0x744f18c0592e4c5d},
+ {0xeda2ee1c7064130c, 0x1162def06f79df74},
+ {0x9485d4d1c63e8be7, 0x8addcb5645ac2ba9},
+ {0xb9a74a0637ce2ee1, 0x6d953e2bd7173693},
+ {0xe8111c87c5c1ba99, 0xc8fa8db6ccdd0438},
+ {0x910ab1d4db9914a0, 0x1d9c9892400a22a3},
+ {0xb54d5e4a127f59c8, 0x2503beb6d00cab4c},
+ {0xe2a0b5dc971f303a, 0x2e44ae64840fd61e},
+ {0x8da471a9de737e24, 0x5ceaecfed289e5d3},
+ {0xb10d8e1456105dad, 0x7425a83e872c5f48},
+ {0xdd50f1996b947518, 0xd12f124e28f7771a},
+ {0x8a5296ffe33cc92f, 0x82bd6b70d99aaa70},
+ {0xace73cbfdc0bfb7b, 0x636cc64d1001550c},
+ {0xd8210befd30efa5a, 0x3c47f7e05401aa4f},
+ {0x8714a775e3e95c78, 0x65acfaec34810a72},
+ {0xa8d9d1535ce3b396, 0x7f1839a741a14d0e},
+ {0xd31045a8341ca07c, 0x1ede48111209a051},
+ {0x83ea2b892091e44d, 0x934aed0aab460433},
+ {0xa4e4b66b68b65d60, 0xf81da84d56178540},
+ {0xce1de40642e3f4b9, 0x36251260ab9d668f},
+ {0x80d2ae83e9ce78f3, 0xc1d72b7c6b42601a},
+ {0xa1075a24e4421730, 0xb24cf65b8612f820},
+ {0xc94930ae1d529cfc, 0xdee033f26797b628},
+ {0xfb9b7cd9a4a7443c, 0x169840ef017da3b2},
+ {0x9d412e0806e88aa5, 0x8e1f289560ee864f},
+ {0xc491798a08a2ad4e, 0xf1a6f2bab92a27e3},
+ {0xf5b5d7ec8acb58a2, 0xae10af696774b1dc},
+ {0x9991a6f3d6bf1765, 0xacca6da1e0a8ef2a},
+ {0xbff610b0cc6edd3f, 0x17fd090a58d32af4},
+ {0xeff394dcff8a948e, 0xddfc4b4cef07f5b1},
+ {0x95f83d0a1fb69cd9, 0x4abdaf101564f98f},
+ {0xbb764c4ca7a4440f, 0x9d6d1ad41abe37f2},
+ {0xea53df5fd18d5513, 0x84c86189216dc5ee},
+ {0x92746b9be2f8552c, 0x32fd3cf5b4e49bb5},
+ {0xb7118682dbb66a77, 0x3fbc8c33221dc2a2},
+ {0xe4d5e82392a40515, 0x0fabaf3feaa5334b},
+ {0x8f05b1163ba6832d, 0x29cb4d87f2a7400f},
+ {0xb2c71d5bca9023f8, 0x743e20e9ef511013},
+ {0xdf78e4b2bd342cf6, 0x914da9246b255417},
+ {0x8bab8eefb6409c1a, 0x1ad089b6c2f7548f},
+ {0xae9672aba3d0c320, 0xa184ac2473b529b2},
+ {0xda3c0f568cc4f3e8, 0xc9e5d72d90a2741f},
+ {0x8865899617fb1871, 0x7e2fa67c7a658893},
+ {0xaa7eebfb9df9de8d, 0xddbb901b98feeab8},
+ {0xd51ea6fa85785631, 0x552a74227f3ea566},
+ {0x8533285c936b35de, 0xd53a88958f872760},
+ {0xa67ff273b8460356, 0x8a892abaf368f138},
+ {0xd01fef10a657842c, 0x2d2b7569b0432d86},
+ {0x8213f56a67f6b29b, 0x9c3b29620e29fc74},
+ {0xa298f2c501f45f42, 0x8349f3ba91b47b90},
+ {0xcb3f2f7642717713, 0x241c70a936219a74},
+ {0xfe0efb53d30dd4d7, 0xed238cd383aa0111},
+ {0x9ec95d1463e8a506, 0xf4363804324a40ab},
+ {0xc67bb4597ce2ce48, 0xb143c6053edcd0d6},
+ {0xf81aa16fdc1b81da, 0xdd94b7868e94050b},
+ {0x9b10a4e5e9913128, 0xca7cf2b4191c8327},
+ {0xc1d4ce1f63f57d72, 0xfd1c2f611f63a3f1},
+ {0xf24a01a73cf2dccf, 0xbc633b39673c8ced},
+ {0x976e41088617ca01, 0xd5be0503e085d814},
+ {0xbd49d14aa79dbc82, 0x4b2d8644d8a74e19},
+ {0xec9c459d51852ba2, 0xddf8e7d60ed1219f},
+ {0x93e1ab8252f33b45, 0xcabb90e5c942b504},
+ {0xb8da1662e7b00a17, 0x3d6a751f3b936244},
+ {0xe7109bfba19c0c9d, 0x0cc512670a783ad5},
+ {0x906a617d450187e2, 0x27fb2b80668b24c6},
+ {0xb484f9dc9641e9da, 0xb1f9f660802dedf7},
+ {0xe1a63853bbd26451, 0x5e7873f8a0396974},
+ {0x8d07e33455637eb2, 0xdb0b487b6423e1e9},
+ {0xb049dc016abc5e5f, 0x91ce1a9a3d2cda63},
+ {0xdc5c5301c56b75f7, 0x7641a140cc7810fc},
+ {0x89b9b3e11b6329ba, 0xa9e904c87fcb0a9e},
+ {0xac2820d9623bf429, 0x546345fa9fbdcd45},
+ {0xd732290fbacaf133, 0xa97c177947ad4096},
+ {0x867f59a9d4bed6c0, 0x49ed8eabcccc485e},
+ {0xa81f301449ee8c70, 0x5c68f256bfff5a75},
+ {0xd226fc195c6a2f8c, 0x73832eec6fff3112},
+ {0x83585d8fd9c25db7, 0xc831fd53c5ff7eac},
+ {0xa42e74f3d032f525, 0xba3e7ca8b77f5e56},
+ {0xcd3a1230c43fb26f, 0x28ce1bd2e55f35ec},
+ {0x80444b5e7aa7cf85, 0x7980d163cf5b81b4},
+ {0xa0555e361951c366, 0xd7e105bcc3326220},
+ {0xc86ab5c39fa63440, 0x8dd9472bf3fefaa8},
+ {0xfa856334878fc150, 0xb14f98f6f0feb952},
+ {0x9c935e00d4b9d8d2, 0x6ed1bf9a569f33d4},
+ {0xc3b8358109e84f07, 0x0a862f80ec4700c9},
+ {0xf4a642e14c6262c8, 0xcd27bb612758c0fb},
+ {0x98e7e9cccfbd7dbd, 0x8038d51cb897789d},
+ {0xbf21e44003acdd2c, 0xe0470a63e6bd56c4},
+ {0xeeea5d5004981478, 0x1858ccfce06cac75},
+ {0x95527a5202df0ccb, 0x0f37801e0c43ebc9},
+ {0xbaa718e68396cffd, 0xd30560258f54e6bb},
+ {0xe950df20247c83fd, 0x47c6b82ef32a206a},
+ {0x91d28b7416cdd27e, 0x4cdc331d57fa5442},
+ {0xb6472e511c81471d, 0xe0133fe4adf8e953},
+ {0xe3d8f9e563a198e5, 0x58180fddd97723a7},
+ {0x8e679c2f5e44ff8f, 0x570f09eaa7ea7649},
+ {0xb201833b35d63f73, 0x2cd2cc6551e513db},
+ {0xde81e40a034bcf4f, 0xf8077f7ea65e58d2},
+ {0x8b112e86420f6191, 0xfb04afaf27faf783},
+ {0xadd57a27d29339f6, 0x79c5db9af1f9b564},
+ {0xd94ad8b1c7380874, 0x18375281ae7822bd},
+ {0x87cec76f1c830548, 0x8f2293910d0b15b6},
+ {0xa9c2794ae3a3c69a, 0xb2eb3875504ddb23},
+ {0xd433179d9c8cb841, 0x5fa60692a46151ec},
+ {0x849feec281d7f328, 0xdbc7c41ba6bcd334},
+ {0xa5c7ea73224deff3, 0x12b9b522906c0801},
+ {0xcf39e50feae16bef, 0xd768226b34870a01},
+ {0x81842f29f2cce375, 0xe6a1158300d46641},
+ {0xa1e53af46f801c53, 0x60495ae3c1097fd1},
+ {0xca5e89b18b602368, 0x385bb19cb14bdfc5},
+ {0xfcf62c1dee382c42, 0x46729e03dd9ed7b6},
+ {0x9e19db92b4e31ba9, 0x6c07a2c26a8346d2},
+ {0xc5a05277621be293, 0xc7098b7305241886},
+ { 0xf70867153aa2db38,
+ 0xb8cbee4fc66d1ea8 }
+#else
+ {0xff77b1fcbebcdc4f, 0x25e8e89c13bb0f7b},
+ {0xce5d73ff402d98e3, 0xfb0a3d212dc81290},
+ {0xa6b34ad8c9dfc06f, 0xf42faa48c0ea481f},
+ {0x86a8d39ef77164bc, 0xae5dff9c02033198},
+ {0xd98ddaee19068c76, 0x3badd624dd9b0958},
+ {0xafbd2350644eeacf, 0xe5d1929ef90898fb},
+ {0x8df5efabc5979c8f, 0xca8d3ffa1ef463c2},
+ {0xe55990879ddcaabd, 0xcc420a6a101d0516},
+ {0xb94470938fa89bce, 0xf808e40e8d5b3e6a},
+ {0x95a8637627989aad, 0xdde7001379a44aa9},
+ {0xf1c90080baf72cb1, 0x5324c68b12dd6339},
+ {0xc350000000000000, 0x0000000000000000},
+ {0x9dc5ada82b70b59d, 0xf020000000000000},
+ {0xfee50b7025c36a08, 0x02f236d04753d5b5},
+ {0xcde6fd5e09abcf26, 0xed4c0226b55e6f87},
+ {0xa6539930bf6bff45, 0x84db8346b786151d},
+ {0x865b86925b9bc5c2, 0x0b8a2392ba45a9b3},
+ {0xd910f7ff28069da4, 0x1b2ba1518094da05},
+ {0xaf58416654a6babb, 0x387ac8d1970027b3},
+ {0x8da471a9de737e24, 0x5ceaecfed289e5d3},
+ {0xe4d5e82392a40515, 0x0fabaf3feaa5334b},
+ {0xb8da1662e7b00a17, 0x3d6a751f3b936244},
+ { 0x95527a5202df0ccb,
+ 0x0f37801e0c43ebc9 }
+#endif
+ };
+
+#if FMT_USE_FULL_CACHE_DRAGONBOX
+ return pow10_significands[k - float_info<double>::min_k];
+#else
+ static constexpr const uint64_t powers_of_5_64[] = {
+ 0x0000000000000001, 0x0000000000000005, 0x0000000000000019,
+ 0x000000000000007d, 0x0000000000000271, 0x0000000000000c35,
+ 0x0000000000003d09, 0x000000000001312d, 0x000000000005f5e1,
+ 0x00000000001dcd65, 0x00000000009502f9, 0x0000000002e90edd,
+ 0x000000000e8d4a51, 0x0000000048c27395, 0x000000016bcc41e9,
+ 0x000000071afd498d, 0x0000002386f26fc1, 0x000000b1a2bc2ec5,
+ 0x000003782dace9d9, 0x00001158e460913d, 0x000056bc75e2d631,
+ 0x0001b1ae4d6e2ef5, 0x000878678326eac9, 0x002a5a058fc295ed,
+ 0x00d3c21bcecceda1, 0x0422ca8b0a00a425, 0x14adf4b7320334b9};
+
+ static const int compression_ratio = 27;
+
+ // Compute base index.
+ int cache_index = (k - float_info<double>::min_k) / compression_ratio;
+ int kb = cache_index * compression_ratio + float_info<double>::min_k;
+ int offset = k - kb;
+
+ // Get base cache.
+ uint128_fallback base_cache = pow10_significands[cache_index];
+ if (offset == 0) return base_cache;
+
+ // Compute the required amount of bit-shift.
+ int alpha = floor_log2_pow10(kb + offset) - floor_log2_pow10(kb) - offset;
+ FMT_ASSERT(alpha > 0 && alpha < 64, "shifting error detected");
+
+ // Try to recover the real cache.
+ uint64_t pow5 = powers_of_5_64[offset];
+ uint128_fallback recovered_cache = umul128(base_cache.high(), pow5);
+ uint128_fallback middle_low = umul128(base_cache.low(), pow5);
+
+ recovered_cache += middle_low.high();
+
+ uint64_t high_to_middle = recovered_cache.high() << (64 - alpha);
+ uint64_t middle_to_low = recovered_cache.low() << (64 - alpha);
+
+ recovered_cache =
+ uint128_fallback{(recovered_cache.low() >> alpha) | high_to_middle,
+ ((middle_low.low() >> alpha) | middle_to_low)};
+ FMT_ASSERT(recovered_cache.low() + 1 != 0, "");
+ return {recovered_cache.high(), recovered_cache.low() + 1};
+#endif
+ }
+
+ struct compute_mul_result {
+ carrier_uint result;
+ bool is_integer;
+ };
+ struct compute_mul_parity_result {
+ bool parity;
+ bool is_integer;
+ };
+
+ static compute_mul_result compute_mul(
+ carrier_uint u, const cache_entry_type& cache) noexcept {
+ auto r = umul192_upper128(u, cache);
+ return {r.high(), r.low() == 0};
+ }
+
+ static uint32_t compute_delta(cache_entry_type const& cache,
+ int beta) noexcept {
+ return static_cast<uint32_t>(cache.high() >> (64 - 1 - beta));
+ }
+
+ static compute_mul_parity_result compute_mul_parity(
+ carrier_uint two_f, const cache_entry_type& cache, int beta) noexcept {
+ FMT_ASSERT(beta >= 1, "");
+ FMT_ASSERT(beta < 64, "");
+
+ auto r = umul192_lower128(two_f, cache);
+ return {((r.high() >> (64 - beta)) & 1) != 0,
+ ((r.high() << beta) | (r.low() >> (64 - beta))) == 0};
+ }
+
+ static carrier_uint compute_left_endpoint_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return (cache.high() -
+ (cache.high() >> (num_significand_bits<double>() + 2))) >>
+ (64 - num_significand_bits<double>() - 1 - beta);
+ }
+
+ static carrier_uint compute_right_endpoint_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return (cache.high() +
+ (cache.high() >> (num_significand_bits<double>() + 1))) >>
+ (64 - num_significand_bits<double>() - 1 - beta);
+ }
+
+ static carrier_uint compute_round_up_for_shorter_interval_case(
+ const cache_entry_type& cache, int beta) noexcept {
+ return ((cache.high() >> (64 - num_significand_bits<double>() - 2 - beta)) +
+ 1) /
+ 2;
+ }
+};
+
+// Various integer checks
+template <class T>
+bool is_left_endpoint_integer_shorter_interval(int exponent) noexcept {
+ const int case_shorter_interval_left_endpoint_lower_threshold = 2;
+ const int case_shorter_interval_left_endpoint_upper_threshold = 3;
+ return exponent >= case_shorter_interval_left_endpoint_lower_threshold &&
+ exponent <= case_shorter_interval_left_endpoint_upper_threshold;
+}
+
+// Remove trailing zeros from n and return the number of zeros removed (float)
+FMT_INLINE int remove_trailing_zeros(uint32_t& n) noexcept {
+ FMT_ASSERT(n != 0, "");
+ const uint32_t mod_inv_5 = 0xcccccccd;
+ const uint32_t mod_inv_25 = mod_inv_5 * mod_inv_5;
+
+ int s = 0;
+ while (true) {
+ auto q = rotr(n * mod_inv_25, 2);
+ if (q > max_value<uint32_t>() / 100) break;
+ n = q;
+ s += 2;
+ }
+ auto q = rotr(n * mod_inv_5, 1);
+ if (q <= max_value<uint32_t>() / 10) {
+ n = q;
+ s |= 1;
+ }
+
+ return s;
+}
+
+// Removes trailing zeros and returns the number of zeros removed (double)
+FMT_INLINE int remove_trailing_zeros(uint64_t& n) noexcept {
+ FMT_ASSERT(n != 0, "");
+
+ // This magic number is ceil(2^90 / 10^8).
+ constexpr uint64_t magic_number = 12379400392853802749ull;
+ auto nm = umul128(n, magic_number);
+
+ // Is n is divisible by 10^8?
+ if ((nm.high() & ((1ull << (90 - 64)) - 1)) == 0 && nm.low() < magic_number) {
+ // If yes, work with the quotient.
+ auto n32 = static_cast<uint32_t>(nm.high() >> (90 - 64));
+
+ const uint32_t mod_inv_5 = 0xcccccccd;
+ const uint32_t mod_inv_25 = mod_inv_5 * mod_inv_5;
+
+ int s = 8;
+ while (true) {
+ auto q = rotr(n32 * mod_inv_25, 2);
+ if (q > max_value<uint32_t>() / 100) break;
+ n32 = q;
+ s += 2;
+ }
+ auto q = rotr(n32 * mod_inv_5, 1);
+ if (q <= max_value<uint32_t>() / 10) {
+ n32 = q;
+ s |= 1;
+ }
+
+ n = n32;
+ return s;
+ }
+
+ // If n is not divisible by 10^8, work with n itself.
+ const uint64_t mod_inv_5 = 0xcccccccccccccccd;
+ const uint64_t mod_inv_25 = mod_inv_5 * mod_inv_5;
+
+ int s = 0;
+ while (true) {
+ auto q = rotr(n * mod_inv_25, 2);
+ if (q > max_value<uint64_t>() / 100) break;
+ n = q;
+ s += 2;
+ }
+ auto q = rotr(n * mod_inv_5, 1);
+ if (q <= max_value<uint64_t>() / 10) {
+ n = q;
+ s |= 1;
+ }
+
+ return s;
+}
+
+// The main algorithm for shorter interval case
+template <class T>
+FMT_INLINE decimal_fp<T> shorter_interval_case(int exponent) noexcept {
+ decimal_fp<T> ret_value;
+ // Compute k and beta
+ const int minus_k = floor_log10_pow2_minus_log10_4_over_3(exponent);
+ const int beta = exponent + floor_log2_pow10(-minus_k);
+
+ // Compute xi and zi
+ using cache_entry_type = typename cache_accessor<T>::cache_entry_type;
+ const cache_entry_type cache = cache_accessor<T>::get_cached_power(-minus_k);
+
+ auto xi = cache_accessor<T>::compute_left_endpoint_for_shorter_interval_case(
+ cache, beta);
+ auto zi = cache_accessor<T>::compute_right_endpoint_for_shorter_interval_case(
+ cache, beta);
+
+ // If the left endpoint is not an integer, increase it
+ if (!is_left_endpoint_integer_shorter_interval<T>(exponent)) ++xi;
+
+ // Try bigger divisor
+ ret_value.significand = zi / 10;
+
+ // If succeed, remove trailing zeros if necessary and return
+ if (ret_value.significand * 10 >= xi) {
+ ret_value.exponent = minus_k + 1;
+ ret_value.exponent += remove_trailing_zeros(ret_value.significand);
+ return ret_value;
+ }
+
+ // Otherwise, compute the round-up of y
+ ret_value.significand =
+ cache_accessor<T>::compute_round_up_for_shorter_interval_case(cache,
+ beta);
+ ret_value.exponent = minus_k;
+
+ // When tie occurs, choose one of them according to the rule
+ if (exponent >= float_info<T>::shorter_interval_tie_lower_threshold &&
+ exponent <= float_info<T>::shorter_interval_tie_upper_threshold) {
+ ret_value.significand = ret_value.significand % 2 == 0
+ ? ret_value.significand
+ : ret_value.significand - 1;
+ } else if (ret_value.significand < xi) {
+ ++ret_value.significand;
+ }
+ return ret_value;
+}
+
+// Converts a finite nonzero floating-point value x into a decimal_fp
+// (significand, exponent) pair that round-trips with the shortest possible
+// significand, using the Dragonbox algorithm. The three labeled steps below
+// follow the structure of the reference implementation.
+template <typename T> decimal_fp<T> to_decimal(T x) noexcept {
+ // Step 1: integer promotion & Schubfach multiplier calculation.
+
+ using carrier_uint = typename float_info<T>::carrier_uint;
+ using cache_entry_type = typename cache_accessor<T>::cache_entry_type;
+ auto br = bit_cast<carrier_uint>(x);
+
+ // Extract significand bits and exponent bits.
+ const carrier_uint significand_mask =
+ (static_cast<carrier_uint>(1) << num_significand_bits<T>()) - 1;
+ carrier_uint significand = (br & significand_mask);
+ int exponent =
+ static_cast<int>((br & exponent_mask<T>()) >> num_significand_bits<T>());
+
+ if (exponent != 0) { // Check if normal.
+ exponent -= exponent_bias<T>() + num_significand_bits<T>();
+
+ // Shorter interval case; proceed like Schubfach.
+ // In fact, when exponent == 1 and significand == 0, the interval is
+ // regular. However, it can be shown that the end-results are anyway same.
+ if (significand == 0) return shorter_interval_case<T>(exponent);
+
+ significand |= (static_cast<carrier_uint>(1) << num_significand_bits<T>());
+ } else {
+ // Subnormal case; the interval is always regular.
+ if (significand == 0) return {0, 0};
+ exponent =
+ std::numeric_limits<T>::min_exponent - num_significand_bits<T>() - 1;
+ }
+
+ // Round-to-even: an even significand includes both interval endpoints.
+ const bool include_left_endpoint = (significand % 2 == 0);
+ const bool include_right_endpoint = include_left_endpoint;
+
+ // Compute k and beta.
+ const int minus_k = floor_log10_pow2(exponent) - float_info<T>::kappa;
+ const cache_entry_type cache = cache_accessor<T>::get_cached_power(-minus_k);
+ const int beta = exponent + floor_log2_pow10(-minus_k);
+
+ // Compute zi and deltai.
+ // 10^kappa <= deltai < 10^(kappa + 1)
+ const uint32_t deltai = cache_accessor<T>::compute_delta(cache, beta);
+ const carrier_uint two_fc = significand << 1;
+
+ // For the case of binary32, the result of integer check is not correct for
+ // 29711844 * 2^-82
+ // = 6.1442653300000000008655037797566933477355632930994033813476... * 10^-18
+ // and 29711844 * 2^-81
+ // = 1.2288530660000000001731007559513386695471126586198806762695... * 10^-17,
+ // and they are the unique counterexamples. However, since 29711844 is even,
+ // this does not cause any problem for the endpoints calculations; it can only
+ // cause a problem when we need to perform integer check for the center.
+ // Fortunately, with these inputs, that branch is never executed, so we are
+ // fine.
+ const typename cache_accessor<T>::compute_mul_result z_mul =
+ cache_accessor<T>::compute_mul((two_fc | 1) << beta, cache);
+
+ // Step 2: Try larger divisor; remove trailing zeros if necessary.
+
+ // Using an upper bound on zi, we might be able to optimize the division
+ // better than the compiler; we are computing zi / big_divisor here.
+ decimal_fp<T> ret_value;
+ ret_value.significand = divide_by_10_to_kappa_plus_1(z_mul.result);
+ uint32_t r = static_cast<uint32_t>(z_mul.result - float_info<T>::big_divisor *
+ ret_value.significand);
+
+ if (r < deltai) {
+ // Exclude the right endpoint if necessary.
+ if (r == 0 && z_mul.is_integer && !include_right_endpoint) {
+ --ret_value.significand;
+ r = float_info<T>::big_divisor;
+ goto small_divisor_case_label;
+ }
+ } else if (r > deltai) {
+ goto small_divisor_case_label;
+ } else {
+ // r == deltai; compare fractional parts.
+ const carrier_uint two_fl = two_fc - 1;
+
+ if (!include_left_endpoint ||
+ exponent < float_info<T>::case_fc_pm_half_lower_threshold ||
+ exponent > float_info<T>::divisibility_check_by_5_threshold) {
+ // If the left endpoint is not included, the condition for
+ // success is z^(f) < delta^(f) (odd parity).
+ // Otherwise, the inequalities on exponent ensure that
+ // x is not an integer, so if z^(f) >= delta^(f) (even parity), we in fact
+ // have strict inequality.
+ if (!cache_accessor<T>::compute_mul_parity(two_fl, cache, beta).parity) {
+ goto small_divisor_case_label;
+ }
+ } else {
+ const typename cache_accessor<T>::compute_mul_parity_result x_mul =
+ cache_accessor<T>::compute_mul_parity(two_fl, cache, beta);
+ if (!x_mul.parity && !x_mul.is_integer) {
+ goto small_divisor_case_label;
+ }
+ }
+ }
+ ret_value.exponent = minus_k + float_info<T>::kappa + 1;
+
+ // We may need to remove trailing zeros.
+ ret_value.exponent += remove_trailing_zeros(ret_value.significand);
+ return ret_value;
+
+ // Step 3: Find the significand with the smaller divisor.
+ // (goto is used to skip Step 2's zero removal once the larger divisor is
+ // known to fail; this mirrors the reference Dragonbox control flow.)
+
+small_divisor_case_label:
+ ret_value.significand *= 10;
+ ret_value.exponent = minus_k + float_info<T>::kappa;
+
+ uint32_t dist = r - (deltai / 2) + (float_info<T>::small_divisor / 2);
+ const bool approx_y_parity =
+ ((dist ^ (float_info<T>::small_divisor / 2)) & 1) != 0;
+
+ // Is dist divisible by 10^kappa?
+ const bool divisible_by_small_divisor =
+ check_divisibility_and_divide_by_pow10<float_info<T>::kappa>(dist);
+
+ // Add dist / 10^kappa to the significand.
+ ret_value.significand += dist;
+
+ if (!divisible_by_small_divisor) return ret_value;
+
+ // Check z^(f) >= epsilon^(f).
+ // We have either yi == zi - epsiloni or yi == (zi - epsiloni) - 1,
+ // where yi == zi - epsiloni if and only if z^(f) >= epsilon^(f).
+ // Since there are only 2 possibilities, we only need to care about the
+ // parity. Also, zi and r should have the same parity since the divisor
+ // is an even number.
+ const auto y_mul = cache_accessor<T>::compute_mul_parity(two_fc, cache, beta);
+
+ // If z^(f) >= epsilon^(f), we might have a tie when z^(f) == epsilon^(f),
+ // or equivalently, when y is an integer.
+ if (y_mul.parity != approx_y_parity)
+ --ret_value.significand;
+ else if (y_mul.is_integer && ret_value.significand % 2 != 0)
+ --ret_value.significand;
+ return ret_value;
+}
+} // namespace dragonbox
+
+#ifdef _MSC_VER
+// Variadic wrapper over the secure MSVC vsnprintf_s, using _TRUNCATE so an
+// over-long result is cut short instead of triggering the CRT's
+// invalid-parameter handler.
+FMT_FUNC auto fmt_snprintf(char* buf, size_t size, const char* fmt, ...)
+    -> int {
+  va_list vargs;
+  va_start(vargs, fmt);
+  const int written = vsnprintf_s(buf, size, _TRUNCATE, fmt, vargs);
+  va_end(vargs);
+  return written;
+}
+#endif
+} // namespace detail
+
+// Debug formatter for bigint: prints bigits most-significant first in hex,
+// followed by "p<bits>" when there is a nonzero binary exponent.
+template <> struct formatter<detail::bigint> {
+  FMT_CONSTEXPR auto parse(format_parse_context& ctx)
+      -> format_parse_context::iterator {
+    return ctx.begin();
+  }
+
+  template <typename FormatContext>
+  auto format(const detail::bigint& n, FormatContext& ctx) const ->
+      typename FormatContext::iterator {
+    auto out = ctx.out();
+    // Only the leading (most significant) bigit is printed without zero
+    // padding; the rest are fixed-width so digits concatenate correctly.
+    const auto num_bigits = n.bigits_.size();
+    for (auto i = num_bigits; i > 0; --i) {
+      auto bigit = n.bigits_[i - 1u];
+      out = (i == num_bigits) ? format_to(out, FMT_STRING("{:x}"), bigit)
+                              : format_to(out, FMT_STRING("{:08x}"), bigit);
+    }
+    if (n.exp_ > 0)
+      out = format_to(out, FMT_STRING("p{}"),
+                      n.exp_ * detail::bigint::bigit_bits);
+    return out;
+  }
+};
+
+// Transcodes a UTF-8 string view to UTF-16, surrogate-encoding code points
+// above the BMP, and null-terminates the result for c_str().
+FMT_FUNC detail::utf8_to_utf16::utf8_to_utf16(string_view s) {
+  for_each_codepoint(s, [this](uint32_t cp, string_view) {
+    if (cp == invalid_code_point) FMT_THROW(std::runtime_error("invalid utf8"));
+    if (cp > 0xFFFF) {
+      // Astral plane: emit a high/low surrogate pair.
+      cp -= 0x10000;
+      buffer_.push_back(static_cast<wchar_t>(0xD800 + (cp >> 10)));
+      buffer_.push_back(static_cast<wchar_t>(0xDC00 + (cp & 0x3FF)));
+    } else {
+      buffer_.push_back(static_cast<wchar_t>(cp));
+    }
+    return true;  // Keep iterating over all code points.
+  });
+  buffer_.push_back(0);  // Null terminator.
+}
+
+// Formats "message: <system error text>" for error_code into out.
+// Must not throw (it is noexcept and used on error paths), so any failure
+// in std::system_error / allocation falls back to format_error_code.
+FMT_FUNC void format_system_error(detail::buffer<char>& out, int error_code,
+ const char* message) noexcept {
+ FMT_TRY {
+ auto ec = std::error_code(error_code, std::generic_category());
+ write(std::back_inserter(out), std::system_error(ec, message).what());
+ return;
+ }
+ // Intentionally swallow everything: a fallback message follows.
+ FMT_CATCH(...) {}
+ format_error_code(out, error_code, message);
+}
+
+// Best-effort report of a system error via report_error; noexcept so it is
+// safe to call from destructors and other no-throw contexts.
+FMT_FUNC void report_system_error(int error_code,
+ const char* message) noexcept {
+ report_error(format_system_error, error_code, message);
+}
+
+// Formats args according to fmt and returns the result as a std::string.
+FMT_FUNC std::string vformat(string_view fmt, format_args args) {
+  // Don't optimize the "{}" case to keep the binary size small and because it
+  // can be better optimized in fmt::format anyway.
+  memory_buffer out;
+  detail::vformat_to(out, fmt, args);
+  return to_string(out);
+}
+
+#ifdef _WIN32
+namespace detail {
+using dword = conditional_t<sizeof(long) == 4, unsigned long, unsigned>;
+extern "C" __declspec(dllimport) int __stdcall WriteConsoleW( //
+ void*, const void*, dword, dword*, void*);
+} // namespace detail
+#endif
+
+namespace detail {
+// Writes text to f. On Windows, if f refers to a console, transcodes to
+// UTF-16 and uses WriteConsoleW so Unicode output is not mangled by the
+// ANSI code page; everywhere else (and on console-write failure) it falls
+// back to a plain byte-wise fwrite.
+FMT_FUNC void print(std::FILE* f, string_view text) {
+#ifdef _WIN32
+ auto fd = _fileno(f);
+ if (_isatty(fd)) {
+ detail::utf8_to_utf16 u16(string_view(text.data(), text.size()));
+ auto written = detail::dword();
+ if (detail::WriteConsoleW(reinterpret_cast<void*>(_get_osfhandle(fd)),
+ u16.c_str(), static_cast<uint32_t>(u16.size()),
+ &written, nullptr)) {
+ return;
+ }
+ // Fallback to fwrite on failure. It can happen if the output has been
+ // redirected to NUL.
+ }
+#endif
+ detail::fwrite_fully(text.data(), 1, text.size(), f);
+}
+} // namespace detail
+
+// Formats args per format_str into a temporary buffer and writes the
+// result to f via detail::print (which handles console Unicode on Windows).
+FMT_FUNC void vprint(std::FILE* f, string_view format_str, format_args args) {
+  auto out = memory_buffer();
+  detail::vformat_to(out, format_str, args);
+  detail::print(f, string_view(out.data(), out.size()));
+}
+
+#ifdef _WIN32
+// Print assuming legacy (non-Unicode) encoding.
+// Writes the raw formatted bytes directly, deliberately bypassing the
+// WriteConsoleW transcoding path used by detail::print.
+FMT_FUNC void detail::vprint_mojibake(std::FILE* f, string_view format_str,
+ format_args args) {
+ memory_buffer buffer;
+ detail::vformat_to(buffer, format_str,
+ basic_format_args<buffer_context<char>>(args));
+ fwrite_fully(buffer.data(), 1, buffer.size(), f);
+}
+#endif
+
+// Convenience overload: formats to stdout.
+FMT_FUNC void vprint(string_view format_str, format_args args) {
+ vprint(stdout, format_str, args);
+}
+
+namespace detail {
+
+// One entry of the compressed singleton tables used by is_printable below:
+// 'upper' is the high byte of a code point, 'lower_count' is how many low
+// bytes for that high byte appear in the parallel *_lower array.
+struct singleton {
+ unsigned char upper;
+ unsigned char lower_count;
+};
+
+// Decides printability of a 16-bit code point x against compressed tables:
+// - 'singletons' + 'singleton_lowers' list individual non-printable code
+//   points, grouped by high byte (see struct singleton above);
+// - 'normal' encodes alternating printable/non-printable run lengths,
+//   where a byte with the 0x80 bit set starts a two-byte big-endian length.
+inline auto is_printable(uint16_t x, const singleton* singletons,
+ size_t singletons_size,
+ const unsigned char* singleton_lowers,
+ const unsigned char* normal, size_t normal_size)
+ -> bool {
+ auto upper = x >> 8;
+ auto lower_start = 0;
+ // Singleton scan: if x appears in the singleton list it is not printable.
+ for (size_t i = 0; i < singletons_size; ++i) {
+ auto s = singletons[i];
+ auto lower_end = lower_start + s.lower_count;
+ if (upper < s.upper) break;
+ if (upper == s.upper) {
+ for (auto j = lower_start; j < lower_end; ++j) {
+ if (singleton_lowers[j] == (x & 0xff)) return false;
+ }
+ }
+ lower_start = lower_end;
+ }
+
+ // Run-length scan: walk alternating runs until the one containing x;
+ // 'current' tracks the printability of the current run.
+ auto xsigned = static_cast<int>(x);
+ auto current = true;
+ for (size_t i = 0; i < normal_size; ++i) {
+ auto v = static_cast<int>(normal[i]);
+ auto len = (v & 0x80) != 0 ? (v & 0x7f) << 8 | normal[++i] : v;
+ xsigned -= len;
+ if (xsigned < 0) break;
+ current = !current;
+ }
+ return current;
+}
+
+// This code is generated by support/printable.py.
+FMT_FUNC auto is_printable(uint32_t cp) -> bool {
+ static constexpr singleton singletons0[] = {
+ {0x00, 1}, {0x03, 5}, {0x05, 6}, {0x06, 3}, {0x07, 6}, {0x08, 8},
+ {0x09, 17}, {0x0a, 28}, {0x0b, 25}, {0x0c, 20}, {0x0d, 16}, {0x0e, 13},
+ {0x0f, 4}, {0x10, 3}, {0x12, 18}, {0x13, 9}, {0x16, 1}, {0x17, 5},
+ {0x18, 2}, {0x19, 3}, {0x1a, 7}, {0x1c, 2}, {0x1d, 1}, {0x1f, 22},
+ {0x20, 3}, {0x2b, 3}, {0x2c, 2}, {0x2d, 11}, {0x2e, 1}, {0x30, 3},
+ {0x31, 2}, {0x32, 1}, {0xa7, 2}, {0xa9, 2}, {0xaa, 4}, {0xab, 8},
+ {0xfa, 2}, {0xfb, 5}, {0xfd, 4}, {0xfe, 3}, {0xff, 9},
+ };
+ static constexpr unsigned char singletons0_lower[] = {
+ 0xad, 0x78, 0x79, 0x8b, 0x8d, 0xa2, 0x30, 0x57, 0x58, 0x8b, 0x8c, 0x90,
+ 0x1c, 0x1d, 0xdd, 0x0e, 0x0f, 0x4b, 0x4c, 0xfb, 0xfc, 0x2e, 0x2f, 0x3f,
+ 0x5c, 0x5d, 0x5f, 0xb5, 0xe2, 0x84, 0x8d, 0x8e, 0x91, 0x92, 0xa9, 0xb1,
+ 0xba, 0xbb, 0xc5, 0xc6, 0xc9, 0xca, 0xde, 0xe4, 0xe5, 0xff, 0x00, 0x04,
+ 0x11, 0x12, 0x29, 0x31, 0x34, 0x37, 0x3a, 0x3b, 0x3d, 0x49, 0x4a, 0x5d,
+ 0x84, 0x8e, 0x92, 0xa9, 0xb1, 0xb4, 0xba, 0xbb, 0xc6, 0xca, 0xce, 0xcf,
+ 0xe4, 0xe5, 0x00, 0x04, 0x0d, 0x0e, 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a,
+ 0x3b, 0x45, 0x46, 0x49, 0x4a, 0x5e, 0x64, 0x65, 0x84, 0x91, 0x9b, 0x9d,
+ 0xc9, 0xce, 0xcf, 0x0d, 0x11, 0x29, 0x45, 0x49, 0x57, 0x64, 0x65, 0x8d,
+ 0x91, 0xa9, 0xb4, 0xba, 0xbb, 0xc5, 0xc9, 0xdf, 0xe4, 0xe5, 0xf0, 0x0d,
+ 0x11, 0x45, 0x49, 0x64, 0x65, 0x80, 0x84, 0xb2, 0xbc, 0xbe, 0xbf, 0xd5,
+ 0xd7, 0xf0, 0xf1, 0x83, 0x85, 0x8b, 0xa4, 0xa6, 0xbe, 0xbf, 0xc5, 0xc7,
+ 0xce, 0xcf, 0xda, 0xdb, 0x48, 0x98, 0xbd, 0xcd, 0xc6, 0xce, 0xcf, 0x49,
+ 0x4e, 0x4f, 0x57, 0x59, 0x5e, 0x5f, 0x89, 0x8e, 0x8f, 0xb1, 0xb6, 0xb7,
+ 0xbf, 0xc1, 0xc6, 0xc7, 0xd7, 0x11, 0x16, 0x17, 0x5b, 0x5c, 0xf6, 0xf7,
+ 0xfe, 0xff, 0x80, 0x0d, 0x6d, 0x71, 0xde, 0xdf, 0x0e, 0x0f, 0x1f, 0x6e,
+ 0x6f, 0x1c, 0x1d, 0x5f, 0x7d, 0x7e, 0xae, 0xaf, 0xbb, 0xbc, 0xfa, 0x16,
+ 0x17, 0x1e, 0x1f, 0x46, 0x47, 0x4e, 0x4f, 0x58, 0x5a, 0x5c, 0x5e, 0x7e,
+ 0x7f, 0xb5, 0xc5, 0xd4, 0xd5, 0xdc, 0xf0, 0xf1, 0xf5, 0x72, 0x73, 0x8f,
+ 0x74, 0x75, 0x96, 0x2f, 0x5f, 0x26, 0x2e, 0x2f, 0xa7, 0xaf, 0xb7, 0xbf,
+ 0xc7, 0xcf, 0xd7, 0xdf, 0x9a, 0x40, 0x97, 0x98, 0x30, 0x8f, 0x1f, 0xc0,
+ 0xc1, 0xce, 0xff, 0x4e, 0x4f, 0x5a, 0x5b, 0x07, 0x08, 0x0f, 0x10, 0x27,
+ 0x2f, 0xee, 0xef, 0x6e, 0x6f, 0x37, 0x3d, 0x3f, 0x42, 0x45, 0x90, 0x91,
+ 0xfe, 0xff, 0x53, 0x67, 0x75, 0xc8, 0xc9, 0xd0, 0xd1, 0xd8, 0xd9, 0xe7,
+ 0xfe, 0xff,
+ };
+ static constexpr singleton singletons1[] = {
+ {0x00, 6}, {0x01, 1}, {0x03, 1}, {0x04, 2}, {0x08, 8}, {0x09, 2},
+ {0x0a, 5}, {0x0b, 2}, {0x0e, 4}, {0x10, 1}, {0x11, 2}, {0x12, 5},
+ {0x13, 17}, {0x14, 1}, {0x15, 2}, {0x17, 2}, {0x19, 13}, {0x1c, 5},
+ {0x1d, 8}, {0x24, 1}, {0x6a, 3}, {0x6b, 2}, {0xbc, 2}, {0xd1, 2},
+ {0xd4, 12}, {0xd5, 9}, {0xd6, 2}, {0xd7, 2}, {0xda, 1}, {0xe0, 5},
+ {0xe1, 2}, {0xe8, 2}, {0xee, 32}, {0xf0, 4}, {0xf8, 2}, {0xf9, 2},
+ {0xfa, 2}, {0xfb, 1},
+ };
+ static constexpr unsigned char singletons1_lower[] = {
+ 0x0c, 0x27, 0x3b, 0x3e, 0x4e, 0x4f, 0x8f, 0x9e, 0x9e, 0x9f, 0x06, 0x07,
+ 0x09, 0x36, 0x3d, 0x3e, 0x56, 0xf3, 0xd0, 0xd1, 0x04, 0x14, 0x18, 0x36,
+ 0x37, 0x56, 0x57, 0x7f, 0xaa, 0xae, 0xaf, 0xbd, 0x35, 0xe0, 0x12, 0x87,
+ 0x89, 0x8e, 0x9e, 0x04, 0x0d, 0x0e, 0x11, 0x12, 0x29, 0x31, 0x34, 0x3a,
+ 0x45, 0x46, 0x49, 0x4a, 0x4e, 0x4f, 0x64, 0x65, 0x5c, 0xb6, 0xb7, 0x1b,
+ 0x1c, 0x07, 0x08, 0x0a, 0x0b, 0x14, 0x17, 0x36, 0x39, 0x3a, 0xa8, 0xa9,
+ 0xd8, 0xd9, 0x09, 0x37, 0x90, 0x91, 0xa8, 0x07, 0x0a, 0x3b, 0x3e, 0x66,
+ 0x69, 0x8f, 0x92, 0x6f, 0x5f, 0xee, 0xef, 0x5a, 0x62, 0x9a, 0x9b, 0x27,
+ 0x28, 0x55, 0x9d, 0xa0, 0xa1, 0xa3, 0xa4, 0xa7, 0xa8, 0xad, 0xba, 0xbc,
+ 0xc4, 0x06, 0x0b, 0x0c, 0x15, 0x1d, 0x3a, 0x3f, 0x45, 0x51, 0xa6, 0xa7,
+ 0xcc, 0xcd, 0xa0, 0x07, 0x19, 0x1a, 0x22, 0x25, 0x3e, 0x3f, 0xc5, 0xc6,
+ 0x04, 0x20, 0x23, 0x25, 0x26, 0x28, 0x33, 0x38, 0x3a, 0x48, 0x4a, 0x4c,
+ 0x50, 0x53, 0x55, 0x56, 0x58, 0x5a, 0x5c, 0x5e, 0x60, 0x63, 0x65, 0x66,
+ 0x6b, 0x73, 0x78, 0x7d, 0x7f, 0x8a, 0xa4, 0xaa, 0xaf, 0xb0, 0xc0, 0xd0,
+ 0xae, 0xaf, 0x79, 0xcc, 0x6e, 0x6f, 0x93,
+ };
+ static constexpr unsigned char normal0[] = {
+ 0x00, 0x20, 0x5f, 0x22, 0x82, 0xdf, 0x04, 0x82, 0x44, 0x08, 0x1b, 0x04,
+ 0x06, 0x11, 0x81, 0xac, 0x0e, 0x80, 0xab, 0x35, 0x28, 0x0b, 0x80, 0xe0,
+ 0x03, 0x19, 0x08, 0x01, 0x04, 0x2f, 0x04, 0x34, 0x04, 0x07, 0x03, 0x01,
+ 0x07, 0x06, 0x07, 0x11, 0x0a, 0x50, 0x0f, 0x12, 0x07, 0x55, 0x07, 0x03,
+ 0x04, 0x1c, 0x0a, 0x09, 0x03, 0x08, 0x03, 0x07, 0x03, 0x02, 0x03, 0x03,
+ 0x03, 0x0c, 0x04, 0x05, 0x03, 0x0b, 0x06, 0x01, 0x0e, 0x15, 0x05, 0x3a,
+ 0x03, 0x11, 0x07, 0x06, 0x05, 0x10, 0x07, 0x57, 0x07, 0x02, 0x07, 0x15,
+ 0x0d, 0x50, 0x04, 0x43, 0x03, 0x2d, 0x03, 0x01, 0x04, 0x11, 0x06, 0x0f,
+ 0x0c, 0x3a, 0x04, 0x1d, 0x25, 0x5f, 0x20, 0x6d, 0x04, 0x6a, 0x25, 0x80,
+ 0xc8, 0x05, 0x82, 0xb0, 0x03, 0x1a, 0x06, 0x82, 0xfd, 0x03, 0x59, 0x07,
+ 0x15, 0x0b, 0x17, 0x09, 0x14, 0x0c, 0x14, 0x0c, 0x6a, 0x06, 0x0a, 0x06,
+ 0x1a, 0x06, 0x59, 0x07, 0x2b, 0x05, 0x46, 0x0a, 0x2c, 0x04, 0x0c, 0x04,
+ 0x01, 0x03, 0x31, 0x0b, 0x2c, 0x04, 0x1a, 0x06, 0x0b, 0x03, 0x80, 0xac,
+ 0x06, 0x0a, 0x06, 0x21, 0x3f, 0x4c, 0x04, 0x2d, 0x03, 0x74, 0x08, 0x3c,
+ 0x03, 0x0f, 0x03, 0x3c, 0x07, 0x38, 0x08, 0x2b, 0x05, 0x82, 0xff, 0x11,
+ 0x18, 0x08, 0x2f, 0x11, 0x2d, 0x03, 0x20, 0x10, 0x21, 0x0f, 0x80, 0x8c,
+ 0x04, 0x82, 0x97, 0x19, 0x0b, 0x15, 0x88, 0x94, 0x05, 0x2f, 0x05, 0x3b,
+ 0x07, 0x02, 0x0e, 0x18, 0x09, 0x80, 0xb3, 0x2d, 0x74, 0x0c, 0x80, 0xd6,
+ 0x1a, 0x0c, 0x05, 0x80, 0xff, 0x05, 0x80, 0xdf, 0x0c, 0xee, 0x0d, 0x03,
+ 0x84, 0x8d, 0x03, 0x37, 0x09, 0x81, 0x5c, 0x14, 0x80, 0xb8, 0x08, 0x80,
+ 0xcb, 0x2a, 0x38, 0x03, 0x0a, 0x06, 0x38, 0x08, 0x46, 0x08, 0x0c, 0x06,
+ 0x74, 0x0b, 0x1e, 0x03, 0x5a, 0x04, 0x59, 0x09, 0x80, 0x83, 0x18, 0x1c,
+ 0x0a, 0x16, 0x09, 0x4c, 0x04, 0x80, 0x8a, 0x06, 0xab, 0xa4, 0x0c, 0x17,
+ 0x04, 0x31, 0xa1, 0x04, 0x81, 0xda, 0x26, 0x07, 0x0c, 0x05, 0x05, 0x80,
+ 0xa5, 0x11, 0x81, 0x6d, 0x10, 0x78, 0x28, 0x2a, 0x06, 0x4c, 0x04, 0x80,
+ 0x8d, 0x04, 0x80, 0xbe, 0x03, 0x1b, 0x03, 0x0f, 0x0d,
+ };
+ static constexpr unsigned char normal1[] = {
+ 0x5e, 0x22, 0x7b, 0x05, 0x03, 0x04, 0x2d, 0x03, 0x66, 0x03, 0x01, 0x2f,
+ 0x2e, 0x80, 0x82, 0x1d, 0x03, 0x31, 0x0f, 0x1c, 0x04, 0x24, 0x09, 0x1e,
+ 0x05, 0x2b, 0x05, 0x44, 0x04, 0x0e, 0x2a, 0x80, 0xaa, 0x06, 0x24, 0x04,
+ 0x24, 0x04, 0x28, 0x08, 0x34, 0x0b, 0x01, 0x80, 0x90, 0x81, 0x37, 0x09,
+ 0x16, 0x0a, 0x08, 0x80, 0x98, 0x39, 0x03, 0x63, 0x08, 0x09, 0x30, 0x16,
+ 0x05, 0x21, 0x03, 0x1b, 0x05, 0x01, 0x40, 0x38, 0x04, 0x4b, 0x05, 0x2f,
+ 0x04, 0x0a, 0x07, 0x09, 0x07, 0x40, 0x20, 0x27, 0x04, 0x0c, 0x09, 0x36,
+ 0x03, 0x3a, 0x05, 0x1a, 0x07, 0x04, 0x0c, 0x07, 0x50, 0x49, 0x37, 0x33,
+ 0x0d, 0x33, 0x07, 0x2e, 0x08, 0x0a, 0x81, 0x26, 0x52, 0x4e, 0x28, 0x08,
+ 0x2a, 0x56, 0x1c, 0x14, 0x17, 0x09, 0x4e, 0x04, 0x1e, 0x0f, 0x43, 0x0e,
+ 0x19, 0x07, 0x0a, 0x06, 0x48, 0x08, 0x27, 0x09, 0x75, 0x0b, 0x3f, 0x41,
+ 0x2a, 0x06, 0x3b, 0x05, 0x0a, 0x06, 0x51, 0x06, 0x01, 0x05, 0x10, 0x03,
+ 0x05, 0x80, 0x8b, 0x62, 0x1e, 0x48, 0x08, 0x0a, 0x80, 0xa6, 0x5e, 0x22,
+ 0x45, 0x0b, 0x0a, 0x06, 0x0d, 0x13, 0x39, 0x07, 0x0a, 0x36, 0x2c, 0x04,
+ 0x10, 0x80, 0xc0, 0x3c, 0x64, 0x53, 0x0c, 0x48, 0x09, 0x0a, 0x46, 0x45,
+ 0x1b, 0x48, 0x08, 0x53, 0x1d, 0x39, 0x81, 0x07, 0x46, 0x0a, 0x1d, 0x03,
+ 0x47, 0x49, 0x37, 0x03, 0x0e, 0x08, 0x0a, 0x06, 0x39, 0x07, 0x0a, 0x81,
+ 0x36, 0x19, 0x80, 0xb7, 0x01, 0x0f, 0x32, 0x0d, 0x83, 0x9b, 0x66, 0x75,
+ 0x0b, 0x80, 0xc4, 0x8a, 0xbc, 0x84, 0x2f, 0x8f, 0xd1, 0x82, 0x47, 0xa1,
+ 0xb9, 0x82, 0x39, 0x07, 0x2a, 0x04, 0x02, 0x60, 0x26, 0x0a, 0x46, 0x0a,
+ 0x28, 0x05, 0x13, 0x82, 0xb0, 0x5b, 0x65, 0x4b, 0x04, 0x39, 0x07, 0x11,
+ 0x40, 0x05, 0x0b, 0x02, 0x0e, 0x97, 0xf8, 0x08, 0x84, 0xd6, 0x2a, 0x09,
+ 0xa2, 0xf7, 0x81, 0x1f, 0x31, 0x03, 0x11, 0x04, 0x08, 0x81, 0x8c, 0x89,
+ 0x04, 0x6b, 0x05, 0x0d, 0x03, 0x09, 0x07, 0x10, 0x93, 0x60, 0x80, 0xf6,
+ 0x0a, 0x73, 0x08, 0x6e, 0x17, 0x46, 0x80, 0x9a, 0x14, 0x0c, 0x57, 0x09,
+ 0x19, 0x80, 0x87, 0x81, 0x47, 0x03, 0x85, 0x42, 0x0f, 0x15, 0x85, 0x50,
+ 0x2b, 0x80, 0xd5, 0x2d, 0x03, 0x1a, 0x04, 0x02, 0x81, 0x70, 0x3a, 0x05,
+ 0x01, 0x85, 0x00, 0x80, 0xd7, 0x29, 0x4c, 0x04, 0x0a, 0x04, 0x02, 0x83,
+ 0x11, 0x44, 0x4c, 0x3d, 0x80, 0xc2, 0x3c, 0x06, 0x01, 0x04, 0x55, 0x05,
+ 0x1b, 0x34, 0x02, 0x81, 0x0e, 0x2c, 0x04, 0x64, 0x0c, 0x56, 0x0a, 0x80,
+ 0xae, 0x38, 0x1d, 0x0d, 0x2c, 0x04, 0x09, 0x07, 0x02, 0x0e, 0x06, 0x80,
+ 0x9a, 0x83, 0xd8, 0x08, 0x0d, 0x03, 0x0d, 0x03, 0x74, 0x0c, 0x59, 0x07,
+ 0x0c, 0x14, 0x0c, 0x04, 0x38, 0x08, 0x0a, 0x06, 0x28, 0x08, 0x22, 0x4e,
+ 0x81, 0x54, 0x0c, 0x15, 0x03, 0x03, 0x05, 0x07, 0x09, 0x19, 0x07, 0x07,
+ 0x09, 0x03, 0x0d, 0x07, 0x29, 0x80, 0xcb, 0x25, 0x0a, 0x84, 0x06,
+ };
+ auto lower = static_cast<uint16_t>(cp);
+ if (cp < 0x10000) {
+ return is_printable(lower, singletons0,
+ sizeof(singletons0) / sizeof(*singletons0),
+ singletons0_lower, normal0, sizeof(normal0));
+ }
+ if (cp < 0x20000) {
+ return is_printable(lower, singletons1,
+ sizeof(singletons1) / sizeof(*singletons1),
+ singletons1_lower, normal1, sizeof(normal1));
+ }
+ if (0x2a6de <= cp && cp < 0x2a700) return false;
+ if (0x2b735 <= cp && cp < 0x2b740) return false;
+ if (0x2b81e <= cp && cp < 0x2b820) return false;
+ if (0x2cea2 <= cp && cp < 0x2ceb0) return false;
+ if (0x2ebe1 <= cp && cp < 0x2f800) return false;
+ if (0x2fa1e <= cp && cp < 0x30000) return false;
+ if (0x3134b <= cp && cp < 0xe0100) return false;
+ if (0xe01f0 <= cp && cp < 0x110000) return false;
+ return cp < 0x110000;
+}
+
+} // namespace detail
+
+FMT_END_NAMESPACE
+
+#endif // FMT_FORMAT_INL_H_
diff --git a/subprojects/fmt/include/fmt/format.h b/subprojects/fmt/include/fmt/format.h
new file mode 100644
index 0000000..0bd2fdb
--- /dev/null
+++ b/subprojects/fmt/include/fmt/format.h
@@ -0,0 +1,4192 @@
+/*
+ Formatting library for C++
+
+ Copyright (c) 2012 - present, Victor Zverovich
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+ --- Optional exception to the license ---
+
+ As an exception, if, as a result of your compiling your source code, portions
+ of this Software are embedded into a machine-executable object form of such
+ source code, you may redistribute such embedded portions in such object form
+ without including the above copyright and permission notices.
+ */
+
+#ifndef FMT_FORMAT_H_
+#define FMT_FORMAT_H_
+
+#include <cmath> // std::signbit
+#include <cstdint> // uint32_t
+#include <cstring> // std::memcpy
+#include <limits> // std::numeric_limits
+#include <memory> // std::uninitialized_copy
+#include <stdexcept> // std::runtime_error
+#include <system_error> // std::system_error
+
+#ifdef __cpp_lib_bit_cast
+# include <bit>  // std::bit_cast
+#endif
+
+#include "core.h"
+
+#if FMT_GCC_VERSION
+# define FMT_GCC_VISIBILITY_HIDDEN __attribute__((visibility("hidden")))
+#else
+# define FMT_GCC_VISIBILITY_HIDDEN
+#endif
+
+#ifdef __NVCC__
+# define FMT_CUDA_VERSION (__CUDACC_VER_MAJOR__ * 100 + __CUDACC_VER_MINOR__)
+#else
+# define FMT_CUDA_VERSION 0
+#endif
+
+#ifdef __has_builtin
+# define FMT_HAS_BUILTIN(x) __has_builtin(x)
+#else
+# define FMT_HAS_BUILTIN(x) 0
+#endif
+
+#if FMT_GCC_VERSION || FMT_CLANG_VERSION
+# define FMT_NOINLINE __attribute__((noinline))
+#else
+# define FMT_NOINLINE
+#endif
+
+#if FMT_MSC_VERSION
+# define FMT_MSC_DEFAULT = default
+#else
+# define FMT_MSC_DEFAULT
+#endif
+
+#ifndef FMT_THROW
+# if FMT_EXCEPTIONS
+# if FMT_MSC_VERSION || defined(__NVCC__)
+FMT_BEGIN_NAMESPACE
+namespace detail {
+// Throws x through a volatile flag so MSVC/NVCC cannot prove the throw is
+// unconditional; this is what keeps FMT_THROW call sites warning-free.
+template <typename Exception> inline void do_throw(const Exception& x) {
+ // Silence unreachable code warnings in MSVC and NVCC because these
+ // are nearly impossible to fix in a generic code.
+ volatile bool b = true;
+ if (b) throw x;
+}
+} // namespace detail
+FMT_END_NAMESPACE
+# define FMT_THROW(x) detail::do_throw(x)
+# else
+# define FMT_THROW(x) throw x
+# endif
+# else
+# define FMT_THROW(x) \
+ do { \
+ FMT_ASSERT(false, (x).what()); \
+ } while (false)
+# endif
+#endif
+
+#if FMT_EXCEPTIONS
+# define FMT_TRY try
+# define FMT_CATCH(x) catch (x)
+#else
+# define FMT_TRY if (true)
+# define FMT_CATCH(x) if (false)
+#endif
+
+#ifndef FMT_MAYBE_UNUSED
+# if FMT_HAS_CPP17_ATTRIBUTE(maybe_unused)
+# define FMT_MAYBE_UNUSED [[maybe_unused]]
+# else
+# define FMT_MAYBE_UNUSED
+# endif
+#endif
+
+#ifndef FMT_USE_USER_DEFINED_LITERALS
+// EDG based compilers (Intel, NVIDIA, Elbrus, etc), GCC and MSVC support UDLs.
+# if (FMT_HAS_FEATURE(cxx_user_literals) || FMT_GCC_VERSION >= 407 || \
+ FMT_MSC_VERSION >= 1900) && \
+ (!defined(__EDG_VERSION__) || __EDG_VERSION__ >= /* UDL feature */ 480)
+# define FMT_USE_USER_DEFINED_LITERALS 1
+# else
+# define FMT_USE_USER_DEFINED_LITERALS 0
+# endif
+#endif
+
+// Defining FMT_REDUCE_INT_INSTANTIATIONS to 1, will reduce the number of
+// integer formatter template instantiations to just one by only using the
+// largest integer type. This results in a reduction in binary size but will
+// cause a decrease in integer formatting performance.
+#if !defined(FMT_REDUCE_INT_INSTANTIATIONS)
+# define FMT_REDUCE_INT_INSTANTIATIONS 0
+#endif
+
+// __builtin_clz is broken in clang with Microsoft CodeGen:
+// https://github.com/fmtlib/fmt/issues/519.
+#if !FMT_MSC_VERSION
+# if FMT_HAS_BUILTIN(__builtin_clz) || FMT_GCC_VERSION || FMT_ICC_VERSION
+# define FMT_BUILTIN_CLZ(n) __builtin_clz(n)
+# endif
+# if FMT_HAS_BUILTIN(__builtin_clzll) || FMT_GCC_VERSION || FMT_ICC_VERSION
+# define FMT_BUILTIN_CLZLL(n) __builtin_clzll(n)
+# endif
+#endif
+
+// __builtin_ctz is broken in Intel Compiler Classic on Windows:
+// https://github.com/fmtlib/fmt/issues/2510.
+#ifndef __ICL
+# if FMT_HAS_BUILTIN(__builtin_ctz) || FMT_GCC_VERSION || FMT_ICC_VERSION || \
+ defined(__NVCOMPILER)
+# define FMT_BUILTIN_CTZ(n) __builtin_ctz(n)
+# endif
+# if FMT_HAS_BUILTIN(__builtin_ctzll) || FMT_GCC_VERSION || \
+ FMT_ICC_VERSION || defined(__NVCOMPILER)
+# define FMT_BUILTIN_CTZLL(n) __builtin_ctzll(n)
+# endif
+#endif
+
+#if FMT_MSC_VERSION
+# include <intrin.h> // _BitScanReverse[64], _BitScanForward[64], _umul128
+#endif
+
+// Some compilers masquerade as both MSVC and GCC-likes or otherwise support
+// __builtin_clz and __builtin_clzll, so only define FMT_BUILTIN_CLZ using the
+// MSVC intrinsics if the clz and clzll builtins are not available.
+#if FMT_MSC_VERSION && !defined(FMT_BUILTIN_CLZLL) && \
+ !defined(FMT_BUILTIN_CTZLL)
+FMT_BEGIN_NAMESPACE
+namespace detail {
+// Avoid Clang with Microsoft CodeGen's -Wunknown-pragmas warning.
+# if !defined(__clang__)
+# pragma intrinsic(_BitScanForward)
+# pragma intrinsic(_BitScanReverse)
+# if defined(_WIN64)
+# pragma intrinsic(_BitScanForward64)
+# pragma intrinsic(_BitScanReverse64)
+# endif
+# endif
+
+// Counts leading zeros of a nonzero 32-bit value using the MSVC
+// _BitScanReverse intrinsic. Precondition: x != 0.
+inline auto clz(uint32_t x) -> int {
+ unsigned long r = 0;
+ _BitScanReverse(&r, x);
+ FMT_ASSERT(x != 0, "");
+ // Static analysis complains about using uninitialized data
+ // "r", but the only way that can happen is if "x" is 0,
+ // which the callers guarantee to not happen.
+ FMT_MSC_WARNING(suppress : 6102)
+ // r is the index of the highest set bit; 31 ^ r == 31 - r for r in [0, 31].
+ return 31 ^ static_cast<int>(r);
+}
+# define FMT_BUILTIN_CLZ(n) detail::clz(n)
+
+// Counts leading zeros of a nonzero 64-bit value. Uses _BitScanReverse64 on
+// 64-bit Windows; on 32-bit targets it scans the high then the low 32 bits.
+// Precondition: x != 0.
+inline auto clzll(uint64_t x) -> int {
+ unsigned long r = 0;
+# ifdef _WIN64
+ _BitScanReverse64(&r, x);
+# else
+ // Scan the high 32 bits.
+ if (_BitScanReverse(&r, static_cast<uint32_t>(x >> 32))) return 63 ^ (r + 32);
+ // Scan the low 32 bits.
+ _BitScanReverse(&r, static_cast<uint32_t>(x));
+# endif
+ FMT_ASSERT(x != 0, "");
+ FMT_MSC_WARNING(suppress : 6102) // Suppress a bogus static analysis warning.
+ return 63 ^ static_cast<int>(r);
+}
+# define FMT_BUILTIN_CLZLL(n) detail::clzll(n)
+
+// Counts trailing zeros of a nonzero 32-bit value using the MSVC
+// _BitScanForward intrinsic. Precondition: x != 0.
+inline auto ctz(uint32_t x) -> int {
+ unsigned long r = 0;
+ _BitScanForward(&r, x);
+ FMT_ASSERT(x != 0, "");
+ FMT_MSC_WARNING(suppress : 6102) // Suppress a bogus static analysis warning.
+ return static_cast<int>(r);
+}
+# define FMT_BUILTIN_CTZ(n) detail::ctz(n)
+
+// Counts trailing zeros of a nonzero 64-bit value. Uses _BitScanForward64 on
+// 64-bit Windows; on 32-bit targets it scans the low then the high 32 bits.
+// Precondition: x != 0.
+inline auto ctzll(uint64_t x) -> int {
+ unsigned long r = 0;
+ FMT_ASSERT(x != 0, "");
+ FMT_MSC_WARNING(suppress : 6102) // Suppress a bogus static analysis warning.
+# ifdef _WIN64
+ _BitScanForward64(&r, x);
+# else
+ // Scan the low 32 bits.
+ if (_BitScanForward(&r, static_cast<uint32_t>(x))) return static_cast<int>(r);
+ // Scan the high 32 bits.
+ _BitScanForward(&r, static_cast<uint32_t>(x >> 32));
+ r += 32;
+# endif
+ return static_cast<int>(r);
+}
+# define FMT_BUILTIN_CTZLL(n) detail::ctzll(n)
+} // namespace detail
+FMT_END_NAMESPACE
+#endif
+
+FMT_BEGIN_NAMESPACE
+namespace detail {
+
+// Aborts a fuzzing run (by throwing) when condition holds, so fuzzers do
+// not waste time on pathological inputs. A no-op unless FMT_FUZZ is defined.
+FMT_CONSTEXPR inline void abort_fuzzing_if(bool condition) {
+ ignore_unused(condition);
+#ifdef FMT_FUZZ
+ if (condition) throw std::runtime_error("fuzzing limit reached");
+#endif
+}
+
+// A streambuf adapter that redirects everything written through it into a
+// fmt buffer, letting std::ostream-based code produce output into fmt's
+// buffers without extra copies.
+template <typename Streambuf> class formatbuf : public Streambuf {
+ private:
+ using char_type = typename Streambuf::char_type;
+ using streamsize = decltype(std::declval<Streambuf>().sputn(nullptr, 0));
+ using int_type = typename Streambuf::int_type;
+ using traits_type = typename Streambuf::traits_type;
+
+ buffer<char_type>& buffer_; // Destination buffer; not owned.
+
+ public:
+ explicit formatbuf(buffer<char_type>& buf) : buffer_(buf) {}
+
+ protected:
+ // The put area is always empty. This makes the implementation simpler and has
+ // the advantage that the streambuf and the buffer are always in sync and
+ // sputc never writes into uninitialized memory. A disadvantage is that each
+ // call to sputc always results in a (virtual) call to overflow. There is no
+ // disadvantage here for sputn since this always results in a call to xsputn.
+
+ auto overflow(int_type ch) -> int_type override {
+ if (!traits_type::eq_int_type(ch, traits_type::eof()))
+ buffer_.push_back(static_cast<char_type>(ch));
+ return ch;
+ }
+
+ auto xsputn(const char_type* s, streamsize count) -> streamsize override {
+ buffer_.append(s, s + count);
+ return count;
+ }
+};
+
+// Implementation of std::bit_cast for pre-C++20.
+template <typename To, typename From, FMT_ENABLE_IF(sizeof(To) == sizeof(From))>
+FMT_CONSTEXPR20 auto bit_cast(const From& from) -> To {
+#ifdef __cpp_lib_bit_cast
+ if (is_constant_evaluated()) return std::bit_cast<To>(from);
+#endif
+ auto to = To();
+ std::memcpy(&to, &from, sizeof(to));
+ return to;
+}
+
+// Detects byte order. Windows is assumed little-endian; otherwise compiler
+// byte-order macros are consulted, with a bit_cast probe of the int 1 as
+// the last resort (its first byte is zero only on big-endian targets).
+inline auto is_big_endian() -> bool {
+#ifdef _WIN32
+ return false;
+#elif defined(__BIG_ENDIAN__)
+ return true;
+#elif defined(__BYTE_ORDER__) && defined(__ORDER_BIG_ENDIAN__)
+ return __BYTE_ORDER__ == __ORDER_BIG_ENDIAN__;
+#else
+ struct bytes {
+ char data[sizeof(int)];
+ };
+ return bit_cast<bytes>(1).data[0] == 0;
+#endif
+}
+
+// A software implementation of the subset of 128-bit unsigned arithmetic that
+// fmt needs, for platforms without a native 128-bit integer type.
+class uint128_fallback {
+ private:
+ uint64_t lo_, hi_;
+
+ friend uint128_fallback umul128(uint64_t x, uint64_t y) noexcept;
+
+ public:
+ constexpr uint128_fallback(uint64_t hi, uint64_t lo) : lo_(lo), hi_(hi) {}
+ constexpr uint128_fallback(uint64_t value = 0) : lo_(value), hi_(0) {}
+
+ constexpr uint64_t high() const noexcept { return hi_; }
+ constexpr uint64_t low() const noexcept { return lo_; }
+
+ // Truncating conversion: only the low 64 bits are preserved.
+ template <typename T, FMT_ENABLE_IF(std::is_integral<T>::value)>
+ constexpr explicit operator T() const {
+ return static_cast<T>(lo_);
+ }
+
+ friend constexpr auto operator==(const uint128_fallback& lhs,
+ const uint128_fallback& rhs) -> bool {
+ return lhs.hi_ == rhs.hi_ && lhs.lo_ == rhs.lo_;
+ }
+ friend constexpr auto operator!=(const uint128_fallback& lhs,
+ const uint128_fallback& rhs) -> bool {
+ return !(lhs == rhs);
+ }
+ friend constexpr auto operator>(const uint128_fallback& lhs,
+ const uint128_fallback& rhs) -> bool {
+ return lhs.hi_ != rhs.hi_ ? lhs.hi_ > rhs.hi_ : lhs.lo_ > rhs.lo_;
+ }
+ friend constexpr auto operator|(const uint128_fallback& lhs,
+ const uint128_fallback& rhs)
+ -> uint128_fallback {
+ return {lhs.hi_ | rhs.hi_, lhs.lo_ | rhs.lo_};
+ }
+ friend constexpr auto operator&(const uint128_fallback& lhs,
+ const uint128_fallback& rhs)
+ -> uint128_fallback {
+ return {lhs.hi_ & rhs.hi_, lhs.lo_ & rhs.lo_};
+ }
+ friend auto operator+(const uint128_fallback& lhs,
+ const uint128_fallback& rhs) -> uint128_fallback {
+ auto result = uint128_fallback(lhs);
+ result += rhs;
+ return result;
+ }
+ // Multiplies by a 32-bit value; the asserted precondition is that the high
+ // word is zero, i.e. lhs fits in 64 bits.
+ friend auto operator*(const uint128_fallback& lhs, uint32_t rhs)
+ -> uint128_fallback {
+ FMT_ASSERT(lhs.hi_ == 0, "");
+ uint64_t hi = (lhs.lo_ >> 32) * rhs;
+ // ~uint32_t() is 0xffffffff, so this takes the low 32 bits of lo_.
+ uint64_t lo = (lhs.lo_ & ~uint32_t()) * rhs;
+ uint64_t new_lo = (hi << 32) + lo;
+ // (new_lo < lo) detects the carry out of the low-word addition.
+ return {(hi >> 32) + (new_lo < lo ? 1 : 0), new_lo};
+ }
+ // Subtracts a 64-bit value, borrowing from the high word on underflow.
+ friend auto operator-(const uint128_fallback& lhs, uint64_t rhs)
+ -> uint128_fallback {
+ return {lhs.hi_ - (lhs.lo_ < rhs ? 1 : 0), lhs.lo_ - rhs};
+ }
+ // NOTE(review): assumes 0 < shift <= 64; shift == 0 would evaluate
+ // hi_ << 64 / lo_ >> 64 below, which is undefined — confirm callers.
+ FMT_CONSTEXPR auto operator>>(int shift) const -> uint128_fallback {
+ if (shift == 64) return {0, hi_};
+ return {hi_ >> shift, (hi_ << (64 - shift)) | (lo_ >> shift)};
+ }
+ // NOTE(review): same 0 < shift <= 64 assumption as operator>>.
+ FMT_CONSTEXPR auto operator<<(int shift) const -> uint128_fallback {
+ if (shift == 64) return {lo_, 0};
+ return {hi_ << shift | (lo_ >> (64 - shift)), (lo_ << shift)};
+ }
+ FMT_CONSTEXPR auto operator>>=(int shift) -> uint128_fallback& {
+ return *this = *this >> shift;
+ }
+ FMT_CONSTEXPR void operator+=(uint128_fallback n) {
+ uint64_t new_lo = lo_ + n.lo_;
+ // (new_lo < lo_) detects the carry out of the low-word addition.
+ uint64_t new_hi = hi_ + n.hi_ + (new_lo < lo_ ? 1 : 0);
+ FMT_ASSERT(new_hi >= hi_, "");
+ lo_ = new_lo;
+ hi_ = new_hi;
+ }
+
+ // Adds a 64-bit value, using add-with-carry intrinsics where available and
+ // falling back to a portable carry check otherwise.
+ FMT_CONSTEXPR20 uint128_fallback& operator+=(uint64_t n) noexcept {
+ if (is_constant_evaluated()) {
+ lo_ += n;
+ hi_ += (lo_ < n ? 1 : 0);
+ return *this;
+ }
+#if FMT_HAS_BUILTIN(__builtin_addcll)
+ unsigned long long carry;
+ lo_ = __builtin_addcll(lo_, n, 0, &carry);
+ hi_ += carry;
+#elif FMT_HAS_BUILTIN(__builtin_ia32_addcarryx_u64)
+ unsigned long long result;
+ auto carry = __builtin_ia32_addcarryx_u64(0, lo_, n, &result);
+ lo_ = result;
+ hi_ += carry;
+#elif defined(_MSC_VER) && defined(_M_X64)
+ auto carry = _addcarry_u64(0, lo_, n, &lo_);
+ _addcarry_u64(carry, hi_, 0, &hi_);
+#else
+ lo_ += n;
+ hi_ += (lo_ < n ? 1 : 0);
+#endif
+ return *this;
+ }
+};
+
+// Use the compiler-provided 128-bit type when available, the fallback
+// class otherwise.
+using uint128_t = conditional_t<FMT_USE_INT128, uint128_opt, uint128_fallback>;
+
+#ifdef UINTPTR_MAX
+using uintptr_t = ::uintptr_t;
+#else
+// uintptr_t is optional in the standard; substitute a 128-bit integer wide
+// enough for any pointer when it is absent.
+using uintptr_t = uint128_t;
+#endif
+
+// Returns the largest possible value for type T. Same as
+// std::numeric_limits<T>::max() but shorter and not affected by the max macro.
+template <typename T> constexpr auto max_value() -> T {
+ return (std::numeric_limits<T>::max)();
+}
+// Returns the number of value (non-sign) bits in T.
+template <typename T> constexpr auto num_bits() -> int {
+ return std::numeric_limits<T>::digits;
+}
+// std::numeric_limits<T>::digits may return 0 for 128-bit ints.
+template <> constexpr auto num_bits<int128_opt>() -> int { return 128; }
+template <> constexpr auto num_bits<uint128_t>() -> int { return 128; }
+
+// A heterogeneous bit_cast used for converting 96-bit long double to uint128_t
+// and 128-bit pointers to uint128_fallback.
+template <typename To, typename From, FMT_ENABLE_IF(sizeof(To) > sizeof(From))>
+inline auto bit_cast(const From& from) -> To {
+ // Reinterpret the source as an array of unsigned chunks, then assemble the
+ // wider result chunk by chunk in byte-order-aware fashion.
+ // NOTE(review): assumes sizeof(From) is a multiple of sizeof(unsigned);
+ // any remainder bytes would be dropped — confirm for the intended sources.
+ constexpr auto size = static_cast<int>(sizeof(From) / sizeof(unsigned));
+ struct data_t {
+ unsigned value[static_cast<unsigned>(size)];
+ } data = bit_cast<data_t>(from);
+ auto result = To();
+ if (const_check(is_big_endian())) {
+ // Most significant chunk comes first on big-endian targets.
+ for (int i = 0; i < size; ++i)
+ result = (result << num_bits<unsigned>()) | data.value[i];
+ } else {
+ for (int i = size - 1; i >= 0; --i)
+ result = (result << num_bits<unsigned>()) | data.value[i];
+ }
+ return result;
+}
+
+// Tells the optimizer that `condition` may be assumed true; a no-op when the
+// compiler has no __builtin_assume.
+FMT_INLINE void assume(bool condition) {
+ (void)condition;
+#if FMT_HAS_BUILTIN(__builtin_assume) && !FMT_ICC_VERSION
+ __builtin_assume(condition);
+#endif
+}
+
+// An approximation of iterator_t for pre-C++20 systems.
+template <typename T>
+using iterator_t = decltype(std::begin(std::declval<T&>()));
+template <typename T> using sentinel_t = decltype(std::end(std::declval<T&>()));
+
+// A workaround for std::string not having mutable data() until C++17.
+template <typename Char>
+inline auto get_data(std::basic_string<Char>& s) -> Char* {
+ return &s[0];
+}
+// Generic containers are assumed to expose a mutable data() member.
+template <typename Container>
+inline auto get_data(Container& c) -> typename Container::value_type* {
+ return c.data();
+}
+
+#if defined(_SECURE_SCL) && _SECURE_SCL
+// Make a checked iterator to avoid MSVC warnings.
+template <typename T> using checked_ptr = stdext::checked_array_iterator<T*>;
+template <typename T>
+constexpr auto make_checked(T* p, size_t size) -> checked_ptr<T> {
+ return {p, size};
+}
+#else
+// Without secure SCL a checked pointer is just a plain pointer.
+template <typename T> using checked_ptr = T*;
+template <typename T> constexpr auto make_checked(T* p, size_t) -> T* {
+ return p;
+}
+#endif
+
+// Attempts to reserve space for n extra characters in the output range.
+// Returns a pointer to the reserved range or a reference to it.
+template <typename Container, FMT_ENABLE_IF(is_contiguous<Container>::value)>
+#if FMT_CLANG_VERSION >= 307 && !FMT_ICC_VERSION
+__attribute__((no_sanitize("undefined")))
+#endif
+inline auto
+reserve(std::back_insert_iterator<Container> it, size_t n)
+ -> checked_ptr<typename Container::value_type> {
+ // Grow the container up front and hand back a pointer into the new tail so
+ // callers can write directly instead of going through the inserter.
+ Container& c = get_container(it);
+ size_t size = c.size();
+ c.resize(size + n);
+ return make_checked(get_data(c) + size, n);
+}
+
+// For buffers, only request capacity; writes still go through the appender.
+template <typename T>
+inline auto reserve(buffer_appender<T> it, size_t n) -> buffer_appender<T> {
+ buffer<T>& buf = get_container(it);
+ buf.try_reserve(buf.size() + n);
+ return it;
+}
+
+// Arbitrary iterators cannot reserve; return the iterator unchanged.
+template <typename Iterator>
+constexpr auto reserve(Iterator& it, size_t) -> Iterator& {
+ return it;
+}
+
+// The type reserve() yields for OutputIt: a (checked) pointer for contiguous
+// containers, the iterator itself otherwise.
+template <typename OutputIt>
+using reserve_iterator =
+ remove_reference_t<decltype(reserve(std::declval<OutputIt&>(), 0))>;
+
+// Returns a pointer to n writable elements of the underlying buffer, or
+// nullptr if direct writing is not possible for this output iterator.
+template <typename T, typename OutputIt>
+constexpr auto to_pointer(OutputIt, size_t) -> T* {
+ return nullptr;
+}
+template <typename T> auto to_pointer(buffer_appender<T> it, size_t n) -> T* {
+ buffer<T>& buf = get_container(it);
+ auto size = buf.size();
+ // Only claim the space if it fits within the existing capacity; otherwise
+ // the caller must fall back to iterator-based output.
+ if (buf.capacity() < size + n) return nullptr;
+ buf.try_resize(size + n);
+ return buf.data() + size;
+}
+
+// Converts the (checked) pointer returned by reserve() back into the original
+// output iterator type after the write is done.
+template <typename Container, FMT_ENABLE_IF(is_contiguous<Container>::value)>
+inline auto base_iterator(std::back_insert_iterator<Container>& it,
+ checked_ptr<typename Container::value_type>)
+ -> std::back_insert_iterator<Container> {
+ return it;
+}
+
+template <typename Iterator>
+constexpr auto base_iterator(Iterator, Iterator it) -> Iterator {
+ return it;
+}
+
+// <algorithm> is spectacularly slow to compile in C++20 so use a simple fill_n
+// instead (#1998).
+// Writes `value` to `out` `count` times; returns the iterator one past the
+// last element written.
+template <typename OutputIt, typename Size, typename T>
+FMT_CONSTEXPR auto fill_n(OutputIt out, Size count, const T& value)
+ -> OutputIt {
+ for (Size remaining = count; remaining > 0; --remaining) {
+ *out++ = value;
+ }
+ return out;
+}
+// Pointer specialization of fill_n for byte values: uses memset at run time
+// and the generic loop during constant evaluation.
+template <typename T, typename Size>
+FMT_CONSTEXPR20 auto fill_n(T* out, Size count, char value) -> T* {
+ if (is_constant_evaluated()) {
+ return fill_n<T*, Size, T>(out, count, value);
+ }
+ std::memset(out, value, to_unsigned(count));
+ return out + count;
+}
+
+#ifdef __cpp_char8_t
+using char8_type = char8_t;
+#else
+// Stand-in for char8_t on pre-C++20 compilers.
+enum char8_type : unsigned char {};
+#endif
+
+// Non-inlined copy_str wrapper to keep cold copy paths out of hot callers.
+template <typename OutChar, typename InputIt, typename OutputIt>
+FMT_CONSTEXPR FMT_NOINLINE auto copy_str_noinline(InputIt begin, InputIt end,
+ OutputIt out) -> OutputIt {
+ return copy_str<OutChar>(begin, end, out);
+}
+
+// A public domain branchless UTF-8 decoder by Christopher Wellons:
+// https://github.com/skeeto/branchless-utf8
+/* Decode the next character, c, from s, reporting errors in e.
+ *
+ * Since this is a branchless decoder, four bytes will be read from the
+ * buffer regardless of the actual length of the next character. This
+ * means the buffer _must_ have at least three bytes of zero padding
+ * following the end of the data stream.
+ *
+ * Errors are reported in e, which will be non-zero if the parsed
+ * character was somehow invalid: invalid byte sequence, non-canonical
+ * encoding, or a surrogate half.
+ *
+ * The function returns a pointer to the next character. When an error
+ * occurs, this pointer will be a guess that depends on the particular
+ * error, but it will always advance at least one byte.
+ */
+FMT_CONSTEXPR inline auto utf8_decode(const char* s, uint32_t* c, int* e)
+ -> const char* {
+ // Tables indexed by the sequence length (0 = invalid lead byte, 1-4 bytes):
+ // lead-byte payload masks, minimum code point per length (for detecting
+ // overlong encodings), and per-length shifts for the value and error bits.
+ constexpr const int masks[] = {0x00, 0x7f, 0x1f, 0x0f, 0x07};
+ constexpr const uint32_t mins[] = {4194304, 0, 128, 2048, 65536};
+ constexpr const int shiftc[] = {0, 18, 12, 6, 0};
+ constexpr const int shifte[] = {0, 6, 4, 2, 0};
+
+ int len = code_point_length(s);
+ const char* next = s + len;
+
+ // Assume a four-byte character and load four bytes. Unused bits are
+ // shifted out.
+ *c = uint32_t(s[0] & masks[len]) << 18;
+ *c |= uint32_t(s[1] & 0x3f) << 12;
+ *c |= uint32_t(s[2] & 0x3f) << 6;
+ *c |= uint32_t(s[3] & 0x3f) << 0;
+ *c >>= shiftc[len];
+
+ // Accumulate the various error conditions.
+ using uchar = unsigned char;
+ *e = (*c < mins[len]) << 6; // non-canonical encoding
+ *e |= ((*c >> 11) == 0x1b) << 7; // surrogate half?
+ *e |= (*c > 0x10FFFF) << 8; // out of range?
+ *e |= (uchar(s[1]) & 0xc0) >> 2;
+ *e |= (uchar(s[2]) & 0xc0) >> 4;
+ *e |= uchar(s[3]) >> 6;
+ *e ^= 0x2a; // top two bits of each tail byte correct?
+ *e >>= shifte[len];
+
+ return next;
+}
+
+// Sentinel value reported to the callback for malformed UTF-8 input.
+constexpr uint32_t invalid_code_point = ~uint32_t();
+
+// Invokes f(cp, sv) for every code point cp in s with sv being the string view
+// corresponding to the code point. cp is invalid_code_point on error.
+// Iteration stops early if f returns false.
+template <typename F>
+FMT_CONSTEXPR void for_each_codepoint(string_view s, F f) {
+ // Decodes one code point starting at buf_ptr and invokes f; ptr is where
+ // that code point lives in the original string (buf_ptr may point into a
+ // temporary padded buffer for the tail). Returns nullptr to stop.
+ auto decode = [f](const char* buf_ptr, const char* ptr) {
+ auto cp = uint32_t();
+ auto error = 0;
+ auto end = utf8_decode(buf_ptr, &cp, &error);
+ bool result = f(error ? invalid_code_point : cp,
+ string_view(ptr, to_unsigned(end - buf_ptr)));
+ return result ? end : nullptr;
+ };
+ auto p = s.data();
+ const size_t block_size = 4; // utf8_decode always reads blocks of 4 chars.
+ if (s.size() >= block_size) {
+ // Fast path: decode directly from the string while 4 readable bytes
+ // remain past the current position.
+ for (auto end = p + s.size() - block_size + 1; p < end;) {
+ p = decode(p, p);
+ if (!p) return;
+ }
+ }
+ if (auto num_chars_left = s.data() + s.size() - p) {
+ // Tail: copy the remaining bytes into a zero-padded local buffer so the
+ // decoder's requirement of 3 bytes of zero padding is satisfied.
+ char buf[2 * block_size - 1] = {};
+ copy_str<char>(p, p + num_chars_left, buf);
+ const char* buf_ptr = buf;
+ do {
+ auto end = decode(buf_ptr, p);
+ if (!end) return;
+ p += end - buf_ptr;
+ buf_ptr = end;
+ } while (buf_ptr - buf < num_chars_left);
+ }
+}
+
+// Display width for non-UTF-8 encodings: one column per code unit.
+template <typename Char>
+inline auto compute_width(basic_string_view<Char> s) -> size_t {
+ return s.size();
+}
+
+// Computes approximate display width of a UTF-8 string.
+// East Asian wide/fullwidth ranges and common emoji ranges count as two
+// columns; everything else counts as one.
+FMT_CONSTEXPR inline size_t compute_width(string_view s) {
+ size_t num_code_points = 0;
+ // It is not a lambda for compatibility with C++14.
+ struct count_code_points {
+ size_t* count;
+ FMT_CONSTEXPR auto operator()(uint32_t cp, string_view) const -> bool {
+ // Adds 1 per code point plus 1 more for double-width ranges.
+ *count += detail::to_unsigned(
+ 1 +
+ (cp >= 0x1100 &&
+ (cp <= 0x115f || // Hangul Jamo init. consonants
+ cp == 0x2329 || // LEFT-POINTING ANGLE BRACKET
+ cp == 0x232a || // RIGHT-POINTING ANGLE BRACKET
+ // CJK ... Yi except IDEOGRAPHIC HALF FILL SPACE:
+ (cp >= 0x2e80 && cp <= 0xa4cf && cp != 0x303f) ||
+ (cp >= 0xac00 && cp <= 0xd7a3) || // Hangul Syllables
+ (cp >= 0xf900 && cp <= 0xfaff) || // CJK Compatibility Ideographs
+ (cp >= 0xfe10 && cp <= 0xfe19) || // Vertical Forms
+ (cp >= 0xfe30 && cp <= 0xfe6f) || // CJK Compatibility Forms
+ (cp >= 0xff00 && cp <= 0xff60) || // Fullwidth Forms
+ (cp >= 0xffe0 && cp <= 0xffe6) || // Fullwidth Forms
+ (cp >= 0x20000 && cp <= 0x2fffd) || // CJK
+ (cp >= 0x30000 && cp <= 0x3fffd) ||
+ // Miscellaneous Symbols and Pictographs + Emoticons:
+ (cp >= 0x1f300 && cp <= 0x1f64f) ||
+ // Supplemental Symbols and Pictographs:
+ (cp >= 0x1f900 && cp <= 0x1f9ff))));
+ return true;
+ }
+ };
+ for_each_codepoint(s, count_code_points{&num_code_points});
+ return num_code_points;
+}
+
+// char8 strings are UTF-8 by definition; delegate to the UTF-8 overload.
+inline auto compute_width(basic_string_view<char8_type> s) -> size_t {
+ return compute_width(
+ string_view(reinterpret_cast<const char*>(s.data()), s.size()));
+}
+
+// Index of the nth code point for non-UTF-8 encodings: one code point per
+// code unit, so this is n clamped to the string size.
+template <typename Char>
+inline auto code_point_index(basic_string_view<Char> s, size_t n) -> size_t {
+ const size_t size = s.size();
+ if (n < size) return n;
+ return size;
+}
+
+// Calculates the index of the nth code point in a UTF-8 string.
+inline auto code_point_index(string_view s, size_t n) -> size_t {
+ const char* data = s.data();
+ size_t num_code_points = 0;
+ for (size_t i = 0, size = s.size(); i != size; ++i) {
+ // Bytes of the form 10xxxxxx are UTF-8 continuation bytes; every other
+ // byte starts a new code point.
+ if ((data[i] & 0xc0) != 0x80 && ++num_code_points > n) return i;
+ }
+ return s.size();
+}
+
+// char8 strings are UTF-8; delegate to the UTF-8 overload.
+inline auto code_point_index(basic_string_view<char8_type> s, size_t n)
+ -> size_t {
+ return code_point_index(
+ string_view(reinterpret_cast<const char*>(s.data()), s.size()), n);
+}
+
+#ifndef FMT_USE_FLOAT128
+# ifdef __SIZEOF_FLOAT128__
+# define FMT_USE_FLOAT128 1
+# else
+# define FMT_USE_FLOAT128 0
+# endif
+#endif
+#if FMT_USE_FLOAT128
+using float128 = __float128;
+#else
+// void disables float128 overloads when the compiler lacks __float128.
+using float128 = void;
+#endif
+template <typename T> using is_float128 = std::is_same<T, float128>;
+
+// Like std::is_floating_point but also true for __float128.
+template <typename T>
+using is_floating_point =
+ bool_constant<std::is_floating_point<T>::value || is_float128<T>::value>;
+
+// True for IEC 559 (IEEE 754) types no wider than double, which can take the
+// fast formatting paths.
+template <typename T, bool = std::is_floating_point<T>::value>
+struct is_fast_float : bool_constant<std::numeric_limits<T>::is_iec559 &&
+ sizeof(T) <= sizeof(double)> {};
+template <typename T> struct is_fast_float<T, false> : std::false_type {};
+
+// 106 significand digits identifies a double-double type (typically the
+// IBM/PowerPC long double representation).
+template <typename T>
+using is_double_double = bool_constant<std::numeric_limits<T>::digits == 106>;
+
+#ifndef FMT_USE_FULL_CACHE_DRAGONBOX
+# define FMT_USE_FULL_CACHE_DRAGONBOX 0
+#endif
+
+// Appends [begin, end) to the buffer. The copy may happen in several chunks:
+// each iteration reserves what it can, copies at most the free capacity, and
+// retries with the remainder.
+template <typename T>
+template <typename U>
+void buffer<T>::append(const U* begin, const U* end) {
+ while (begin != end) {
+ auto count = to_unsigned(end - begin);
+ try_reserve(size_ + count);
+ // try_reserve may not provide the full amount; clamp to what is free.
+ auto free_cap = capacity_ - size_;
+ if (free_cap < count) count = free_cap;
+ std::uninitialized_copy_n(begin, count, make_checked(ptr_ + size_, count));
+ size_ += count;
+ begin += count;
+ }
+}
+
+// Detects locale-like types via the presence of a static classic() member.
+template <typename T, typename Enable = void>
+struct is_locale : std::false_type {};
+template <typename T>
+struct is_locale<T, void_t<decltype(T::classic())>> : std::true_type {};
+} // namespace detail
+
+FMT_MODULE_EXPORT_BEGIN
+
+// The number of characters to store in the basic_memory_buffer object itself
+// to avoid dynamic memory allocation.
+enum { inline_buffer_size = 500 };
+
+/**
+ \rst
+ A dynamically growing memory buffer for trivially copyable/constructible types
+ with the first ``SIZE`` elements stored in the object itself.
+
+ You can use the ``memory_buffer`` type alias for ``char`` instead.
+
+ **Example**::
+
+ auto out = fmt::memory_buffer();
+ format_to(std::back_inserter(out), "The answer is {}.", 42);
+
+ This will append the following output to the ``out`` object:
+
+ .. code-block:: none
+
+ The answer is 42.
+
+ The output can be converted to an ``std::string`` with ``to_string(out)``.
+ \endrst
+ */
+template <typename T, size_t SIZE = inline_buffer_size,
+ typename Allocator = std::allocator<T>>
+class basic_memory_buffer final : public detail::buffer<T> {
+ private:
+ // Inline storage used until the contents outgrow SIZE elements.
+ T store_[SIZE];
+
+ // Don't inherit from Allocator to avoid generating type_info for it.
+ Allocator alloc_;
+
+ // Deallocate memory allocated by the buffer.
+ FMT_CONSTEXPR20 void deallocate() {
+ T* data = this->data();
+ // Nothing to free while still using the inline storage.
+ if (data != store_) alloc_.deallocate(data, this->capacity());
+ }
+
+ protected:
+ FMT_CONSTEXPR20 void grow(size_t size) override;
+
+ public:
+ using value_type = T;
+ using const_reference = const T&;
+
+ FMT_CONSTEXPR20 explicit basic_memory_buffer(
+ const Allocator& alloc = Allocator())
+ : alloc_(alloc) {
+ this->set(store_, SIZE);
+ // Constant evaluation requires initialized storage.
+ if (detail::is_constant_evaluated()) detail::fill_n(store_, SIZE, T());
+ }
+ FMT_CONSTEXPR20 ~basic_memory_buffer() { deallocate(); }
+
+ private:
+ // Move data from other to this buffer.
+ FMT_CONSTEXPR20 void move(basic_memory_buffer& other) {
+ alloc_ = std::move(other.alloc_);
+ T* data = other.data();
+ size_t size = other.size(), capacity = other.capacity();
+ if (data == other.store_) {
+ // Inline storage cannot be transferred; copy element by element.
+ this->set(store_, capacity);
+ detail::copy_str<T>(other.store_, other.store_ + size,
+ detail::make_checked(store_, capacity));
+ } else {
+ // Heap storage: steal the pointer.
+ this->set(data, capacity);
+ // Set pointer to the inline array so that delete is not called
+ // when deallocating.
+ other.set(other.store_, 0);
+ other.clear();
+ }
+ this->resize(size);
+ }
+
+ public:
+ /**
+ \rst
+ Constructs a :class:`fmt::basic_memory_buffer` object moving the content
+ of the other object to it.
+ \endrst
+ */
+ FMT_CONSTEXPR20 basic_memory_buffer(basic_memory_buffer&& other) noexcept {
+ move(other);
+ }
+
+ /**
+ \rst
+ Moves the content of the other ``basic_memory_buffer`` object to this one.
+ \endrst
+ */
+ auto operator=(basic_memory_buffer&& other) noexcept -> basic_memory_buffer& {
+ FMT_ASSERT(this != &other, "");
+ deallocate();
+ move(other);
+ return *this;
+ }
+
+ // Returns a copy of the allocator associated with this buffer.
+ auto get_allocator() const -> Allocator { return alloc_; }
+
+ /**
+ Resizes the buffer to contain *count* elements. If T is a POD type new
+ elements may not be initialized.
+ */
+ FMT_CONSTEXPR20 void resize(size_t count) { this->try_resize(count); }
+
+ /** Increases the buffer capacity to *new_capacity*. */
+ void reserve(size_t new_capacity) { this->try_reserve(new_capacity); }
+
+ // Directly append data into the buffer
+ using detail::buffer<T>::append;
+ template <typename ContiguousRange>
+ void append(const ContiguousRange& range) {
+ append(range.data(), range.data() + range.size());
+ }
+};
+
+// Grows the buffer to hold at least `size` elements, using a 1.5x growth
+// factor capped by the allocator's max_size.
+template <typename T, size_t SIZE, typename Allocator>
+FMT_CONSTEXPR20 void basic_memory_buffer<T, SIZE, Allocator>::grow(
+ size_t size) {
+ detail::abort_fuzzing_if(size > 5000);
+ const size_t max_size = std::allocator_traits<Allocator>::max_size(alloc_);
+ size_t old_capacity = this->capacity();
+ size_t new_capacity = old_capacity + old_capacity / 2;
+ if (size > new_capacity)
+ new_capacity = size;
+ else if (new_capacity > max_size)
+ new_capacity = size > max_size ? size : max_size;
+ T* old_data = this->data();
+ T* new_data =
+ std::allocator_traits<Allocator>::allocate(alloc_, new_capacity);
+ // The following code doesn't throw, so the raw pointer above doesn't leak.
+ std::uninitialized_copy(old_data, old_data + this->size(),
+ detail::make_checked(new_data, new_capacity));
+ this->set(new_data, new_capacity);
+ // deallocate must not throw according to the standard, but even if it does,
+ // the buffer already uses the new storage and will deallocate it in
+ // destructor.
+ if (old_data != store_) alloc_.deallocate(old_data, old_capacity);
+}
+
+using memory_buffer = basic_memory_buffer<char>;
+
+template <typename T, size_t SIZE, typename Allocator>
+struct is_contiguous<basic_memory_buffer<T, SIZE, Allocator>> : std::true_type {
+};
+
+namespace detail {
+FMT_API void print(std::FILE*, string_view);
+}
+
+/** A formatting error such as invalid format string. */
+FMT_CLASS_API
+class FMT_API format_error : public std::runtime_error {
+ public:
+ explicit format_error(const char* message) : std::runtime_error(message) {}
+ explicit format_error(const std::string& message)
+ : std::runtime_error(message) {}
+ format_error(const format_error&) = default;
+ format_error& operator=(const format_error&) = default;
+ format_error(format_error&&) = default;
+ format_error& operator=(format_error&&) = default;
+ // Defaulted out of line (FMT_MSC_DEFAULT) to anchor the vtable.
+ ~format_error() noexcept override FMT_MSC_DEFAULT;
+};
+
+namespace detail_exported {
+#if FMT_USE_NONTYPE_TEMPLATE_ARGS
+// A structural string type usable as a non-type template argument (C++20).
+template <typename Char, size_t N> struct fixed_string {
+ constexpr fixed_string(const Char (&str)[N]) {
+ detail::copy_str<Char, const Char*, Char*>(static_cast<const Char*>(str),
+ str + N, data);
+ }
+ Char data[N] = {};
+};
+#endif
+
+// Converts a compile-time string to basic_string_view.
+template <typename Char, size_t N>
+constexpr auto compile_string_to_view(const Char (&s)[N])
+ -> basic_string_view<Char> {
+ // Remove trailing NUL character if needed. Won't be present if this is used
+ // with a raw character array (i.e. not defined as a string).
+ return {s, N - (std::char_traits<Char>::to_int_type(s[N - 1]) == 0 ? 1 : 0)};
+}
+template <typename Char>
+constexpr auto compile_string_to_view(detail::std_string_view<Char> s)
+ -> basic_string_view<Char> {
+ return {s.data(), s.size()};
+}
+} // namespace detail_exported
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+// Like std::is_integral but also true for the 128-bit types.
+template <typename T> struct is_integral : std::is_integral<T> {};
+template <> struct is_integral<int128_opt> : std::true_type {};
+template <> struct is_integral<uint128_t> : std::true_type {};
+
+template <typename T>
+using is_signed =
+ std::integral_constant<bool, std::numeric_limits<T>::is_signed ||
+ std::is_same<T, int128_opt>::value>;
+
+// Returns true if value is negative, false otherwise.
+// Same as `value < 0` but doesn't produce warnings if T is an unsigned type.
+template <typename T, FMT_ENABLE_IF(is_signed<T>::value)>
+constexpr auto is_negative(T value) -> bool {
+ return value < 0;
+}
+template <typename T, FMT_ENABLE_IF(!is_signed<T>::value)>
+constexpr auto is_negative(T) -> bool {
+ return false;
+}
+
+// Returns whether formatting support for T's floating-point category is
+// compiled in (per the FMT_USE_* configuration macros).
+template <typename T>
+FMT_CONSTEXPR auto is_supported_floating_point(T) -> bool {
+ if (std::is_same<T, float>()) return FMT_USE_FLOAT;
+ if (std::is_same<T, double>()) return FMT_USE_DOUBLE;
+ if (std::is_same<T, long double>()) return FMT_USE_LONG_DOUBLE;
+ return true;
+}
+
+// Smallest of uint32_t, uint64_t, uint128_t that is large enough to
+// represent all values of an integral type T.
+template <typename T>
+using uint32_or_64_or_128_t =
+ conditional_t<num_bits<T>() <= 32 && !FMT_REDUCE_INT_INSTANTIATIONS,
+ uint32_t,
+ conditional_t<num_bits<T>() <= 64, uint64_t, uint128_t>>;
+template <typename T>
+using uint64_or_128_t = conditional_t<num_bits<T>() <= 64, uint64_t, uint128_t>;
+
+// Expands to factor*10, factor*100, ..., factor*10^9 for building tables of
+// powers of 10.
+#define FMT_POWERS_OF_10(factor) \
+ factor * 10, (factor)*100, (factor)*1000, (factor)*10000, (factor)*100000, \
+ (factor)*1000000, (factor)*10000000, (factor)*100000000, \
+ (factor)*1000000000
+
+// Converts value in the range [0, 100) to a string.
+// Returns a pointer to the two-character entry in a packed "00".."99" table.
+constexpr const char* digits2(size_t value) {
+ // GCC generates slightly better code when value is pointer-size.
+ return &"0001020304050607080910111213141516171819"
+ "2021222324252627282930313233343536373839"
+ "4041424344454647484950515253545556575859"
+ "6061626364656667686970717273747576777879"
+ "8081828384858687888990919293949596979899"[value * 2];
+}
+
+// Sign is a template parameter to workaround a bug in gcc 4.8.
+// Maps a sign_t value to its character: none, '-', '+' or ' '.
+template <typename Char, typename Sign> constexpr Char sign(Sign s) {
+#if !FMT_GCC_VERSION || FMT_GCC_VERSION >= 604
+ static_assert(std::is_same<Sign, sign_t>::value, "");
+#endif
+ return static_cast<Char>("\0-+ "[s]);
+}
+
+// Portable decimal digit count usable in constant evaluation.
+template <typename T> FMT_CONSTEXPR auto count_digits_fallback(T n) -> int {
+ int count = 1;
+ for (;;) {
+ // Integer division is slow so do it for a group of four digits instead
+ // of for every digit. The idea comes from the talk by Alexandrescu
+ // "Three Optimization Tips for C++". See speed-test for a comparison.
+ if (n < 10) return count;
+ if (n < 100) return count + 1;
+ if (n < 1000) return count + 2;
+ if (n < 10000) return count + 3;
+ n /= 10000u;
+ count += 4;
+ }
+}
+#if FMT_USE_INT128
+FMT_CONSTEXPR inline auto count_digits(uint128_opt n) -> int {
+ return count_digits_fallback(n);
+}
+#endif
+
+#ifdef FMT_BUILTIN_CLZLL
+// It is a separate function rather than a part of count_digits to workaround
+// the lack of static constexpr in constexpr functions.
+inline auto do_count_digits(uint64_t n) -> int {
+ // This has comparable performance to the version by Kendall Willets
+ // (https://github.com/fmtlib/format-benchmark/blob/master/digits10)
+ // but uses smaller tables.
+ // Maps bsr(n) to ceil(log10(pow(2, bsr(n) + 1) - 1)).
+ static constexpr uint8_t bsr2log10[] = {
+ 1, 1, 1, 2, 2, 2, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5,
+ 6, 6, 6, 7, 7, 7, 7, 8, 8, 8, 9, 9, 9, 10, 10, 10,
+ 10, 11, 11, 11, 12, 12, 12, 13, 13, 13, 13, 14, 14, 14, 15, 15,
+ 15, 16, 16, 16, 16, 17, 17, 17, 18, 18, 18, 19, 19, 19, 19, 20};
+ // n | 1 avoids clz(0), which is undefined for the builtin.
+ auto t = bsr2log10[FMT_BUILTIN_CLZLL(n | 1) ^ 63];
+ static constexpr const uint64_t zero_or_powers_of_10[] = {
+ 0, 0, FMT_POWERS_OF_10(1U), FMT_POWERS_OF_10(1000000000ULL),
+ 10000000000000000000ULL};
+ // Correct the estimate downward when n is below the power of 10 for t.
+ return t - (n < zero_or_powers_of_10[t]);
+}
+#endif
+
+// Returns the number of decimal digits in n. Leading zeros are not counted
+// except for n == 0 in which case count_digits returns 1.
+FMT_CONSTEXPR20 inline auto count_digits(uint64_t n) -> int {
+#ifdef FMT_BUILTIN_CLZLL
+ if (!is_constant_evaluated()) {
+ return do_count_digits(n);
+ }
+#endif
+ return count_digits_fallback(n);
+}
+
+// Counts the number of digits in n. BITS = log2(radix).
+template <int BITS, typename UInt>
+FMT_CONSTEXPR auto count_digits(UInt n) -> int {
+#ifdef FMT_BUILTIN_CLZ
+ // For 32-bit types one clz gives the position of the top set bit, from
+ // which the digit count in base 2^BITS follows directly.
+ if (!is_constant_evaluated() && num_bits<UInt>() == 32)
+ return (FMT_BUILTIN_CLZ(static_cast<uint32_t>(n) | 1) ^ 31) / BITS + 1;
+#endif
+ // Lambda avoids unreachable code warnings from NVHPC.
+ return [](UInt m) {
+ int num_digits = 0;
+ do {
+ ++num_digits;
+ } while ((m >>= BITS) != 0);
+ return num_digits;
+ }(n);
+}
+
+#ifdef FMT_BUILTIN_CLZ
+// It is a separate function rather than a part of count_digits to workaround
+// the lack of static constexpr in constexpr functions.
+FMT_INLINE auto do_count_digits(uint32_t n) -> int {
+// An optimization by Kendall Willets from https://bit.ly/3uOIQrB.
+// This increments the upper 32 bits (log10(T) - 1) when >= T is added.
+// sizeof(#T) - 1 is the number of decimal digits in the literal T.
+# define FMT_INC(T) (((sizeof(# T) - 1ull) << 32) - T)
+ static constexpr uint64_t table[] = {
+ FMT_INC(0), FMT_INC(0), FMT_INC(0), // 8
+ FMT_INC(10), FMT_INC(10), FMT_INC(10), // 64
+ FMT_INC(100), FMT_INC(100), FMT_INC(100), // 512
+ FMT_INC(1000), FMT_INC(1000), FMT_INC(1000), // 4096
+ FMT_INC(10000), FMT_INC(10000), FMT_INC(10000), // 32k
+ FMT_INC(100000), FMT_INC(100000), FMT_INC(100000), // 256k
+ FMT_INC(1000000), FMT_INC(1000000), FMT_INC(1000000), // 2048k
+ FMT_INC(10000000), FMT_INC(10000000), FMT_INC(10000000), // 16M
+ FMT_INC(100000000), FMT_INC(100000000), FMT_INC(100000000), // 128M
+ FMT_INC(1000000000), FMT_INC(1000000000), FMT_INC(1000000000), // 1024M
+ FMT_INC(1000000000), FMT_INC(1000000000) // 4B
+ };
+ // n | 1 avoids clz(0), which is undefined for the builtin.
+ auto inc = table[FMT_BUILTIN_CLZ(n | 1) ^ 31];
+ return static_cast<int>((n + inc) >> 32);
+}
+#endif
+
+// Optional version of count_digits for better performance on 32-bit platforms.
+FMT_CONSTEXPR20 inline auto count_digits(uint32_t n) -> int {
+#ifdef FMT_BUILTIN_CLZ
+ if (!is_constant_evaluated()) {
+ return do_count_digits(n);
+ }
+#endif
+ return count_digits_fallback(n);
+}
+
+// Number of decimal digits representable without loss in Int; specialized for
+// 128-bit types where numeric_limits may be unspecialized.
+template <typename Int> constexpr auto digits10() noexcept -> int {
+ return std::numeric_limits<Int>::digits10;
+}
+template <> constexpr auto digits10<int128_opt>() noexcept -> int { return 38; }
+template <> constexpr auto digits10<uint128_t>() noexcept -> int { return 38; }
+
+// Locale grouping specification and separator character.
+template <typename Char> struct thousands_sep_result {
+ std::string grouping;
+ Char thousands_sep;
+};
+
+template <typename Char>
+FMT_API auto thousands_sep_impl(locale_ref loc) -> thousands_sep_result<Char>;
+// Generic chars go through the char implementation and widen the separator;
+// wchar_t has its own implementation below.
+template <typename Char>
+inline auto thousands_sep(locale_ref loc) -> thousands_sep_result<Char> {
+ auto result = thousands_sep_impl<char>(loc);
+ return {result.grouping, Char(result.thousands_sep)};
+}
+template <>
+inline auto thousands_sep(locale_ref loc) -> thousands_sep_result<wchar_t> {
+ return thousands_sep_impl<wchar_t>(loc);
+}
+
+// Locale-specific decimal point, same char/wchar_t dispatch as above.
+template <typename Char>
+FMT_API auto decimal_point_impl(locale_ref loc) -> Char;
+template <typename Char> inline auto decimal_point(locale_ref loc) -> Char {
+ return Char(decimal_point_impl<char>(loc));
+}
+template <> inline auto decimal_point(locale_ref loc) -> wchar_t {
+ return decimal_point_impl<wchar_t>(loc);
+}
+
+// Compares two characters for equality.
+template <typename Char> auto equal2(const Char* lhs, const char* rhs) -> bool {
+ return lhs[0] == Char(rhs[0]) && lhs[1] == Char(rhs[1]);
+}
+// char/char comparison can use a single two-byte memcmp.
+inline auto equal2(const char* lhs, const char* rhs) -> bool {
+ return memcmp(lhs, rhs, 2) == 0;
+}
+
+// Copies two characters from src to dst.
+template <typename Char>
+FMT_CONSTEXPR20 FMT_INLINE void copy2(Char* dst, const char* src) {
+ // memcpy is not usable in constant evaluation; fall back to assignments.
+ if (!is_constant_evaluated() && sizeof(Char) == sizeof(char)) {
+ memcpy(dst, src, 2);
+ return;
+ }
+ *dst++ = static_cast<Char>(*src++);
+ *dst = static_cast<Char>(*src);
+}
+
+// Range of a formatted number within a caller-provided buffer.
+template <typename Iterator> struct format_decimal_result {
+ Iterator begin;
+ Iterator end;
+};
+
+// Formats a decimal unsigned integer value writing into out pointing to a
+// buffer of specified size. The caller must ensure that the buffer is large
+// enough. Digits are produced right-to-left, starting at out + size.
+template <typename Char, typename UInt>
+FMT_CONSTEXPR20 auto format_decimal(Char* out, UInt value, int size)
+ -> format_decimal_result<Char*> {
+ FMT_ASSERT(size >= count_digits(value), "invalid digit count");
+ out += size;
+ Char* end = out;
+ while (value >= 100) {
+ // Integer division is slow so do it for a group of two digits instead
+ // of for every digit. The idea comes from the talk by Alexandrescu
+ // "Three Optimization Tips for C++". See speed-test for a comparison.
+ out -= 2;
+ copy2(out, digits2(static_cast<size_t>(value % 100)));
+ value /= 100;
+ }
+ if (value < 10) {
+ *--out = static_cast<Char>('0' + value);
+ return {out, end};
+ }
+ out -= 2;
+ copy2(out, digits2(static_cast<size_t>(value)));
+ return {out, end};
+}
+
+// Iterator overload: formats into a stack buffer, then copies to out.
+template <typename Char, typename UInt, typename Iterator,
+ FMT_ENABLE_IF(!std::is_pointer<remove_cvref_t<Iterator>>::value)>
+inline auto format_decimal(Iterator out, UInt value, int size)
+ -> format_decimal_result<Iterator> {
+ // Buffer is large enough to hold all digits (digits10 + 1).
+ Char buffer[digits10<UInt>() + 1];
+ auto end = format_decimal(buffer, value, size).end;
+ return {out, detail::copy_str_noinline<Char>(buffer, end, out)};
+}
+
+// Formats value in a power-of-two base (radix 2^BASE_BITS), writing num_digits
+// characters ending at buffer + num_digits. Returns the end of the output.
+template <unsigned BASE_BITS, typename Char, typename UInt>
+FMT_CONSTEXPR auto format_uint(Char* buffer, UInt value, int num_digits,
+ bool upper = false) -> Char* {
+ buffer += num_digits;
+ Char* end = buffer;
+ do {
+ const char* digits = upper ? "0123456789ABCDEF" : "0123456789abcdef";
+ // Mask off one digit's worth of low bits per iteration.
+ unsigned digit = static_cast<unsigned>(value & ((1 << BASE_BITS) - 1));
+ *--buffer = static_cast<Char>(BASE_BITS < 4 ? static_cast<char>('0' + digit)
+ : digits[digit]);
+ } while ((value >>= BASE_BITS) != 0);
+ return end;
+}
+
+// Iterator overload: writes directly into the output buffer when possible,
+// otherwise formats into a stack buffer and copies.
+template <unsigned BASE_BITS, typename Char, typename It, typename UInt>
+inline auto format_uint(It out, UInt value, int num_digits, bool upper = false)
+ -> It {
+ if (auto ptr = to_pointer<Char>(out, to_unsigned(num_digits))) {
+ format_uint<BASE_BITS>(ptr, value, num_digits, upper);
+ return out;
+ }
+ // Buffer should be large enough to hold all digits (digits / BASE_BITS + 1).
+ char buffer[num_bits<UInt>() / BASE_BITS + 1];
+ format_uint<BASE_BITS>(buffer, value, num_digits, upper);
+ return detail::copy_str_noinline<Char>(buffer, buffer + num_digits, out);
+}
+
+// A converter from UTF-8 to UTF-16.
+// The internal buffer is NUL-terminated (size() excludes the terminator).
+class utf8_to_utf16 {
+ private:
+ basic_memory_buffer<wchar_t> buffer_;
+
+ public:
+ FMT_API explicit utf8_to_utf16(string_view s);
+ operator basic_string_view<wchar_t>() const { return {&buffer_[0], size()}; }
+ auto size() const -> size_t { return buffer_.size() - 1; }
+ auto c_str() const -> const wchar_t* { return &buffer_[0]; }
+ auto str() const -> std::wstring { return {&buffer_[0], size()}; }
+};
+
+namespace dragonbox {
+
+// Type-specific information that Dragonbox uses.
+// The constants are parameters of the Dragonbox shortest-round-trip
+// float-to-decimal algorithm for each carrier width.
+template <typename T, typename Enable = void> struct float_info;
+
+template <> struct float_info<float> {
+ using carrier_uint = uint32_t;
+ static const int exponent_bits = 8;
+ static const int kappa = 1;
+ static const int big_divisor = 100;
+ static const int small_divisor = 10;
+ static const int min_k = -31;
+ static const int max_k = 46;
+ static const int divisibility_check_by_5_threshold = 39;
+ static const int case_fc_pm_half_lower_threshold = -1;
+ static const int shorter_interval_tie_lower_threshold = -35;
+ static const int shorter_interval_tie_upper_threshold = -35;
+};
+
+template <> struct float_info<double> {
+ using carrier_uint = uint64_t;
+ static const int exponent_bits = 11;
+ static const int kappa = 2;
+ static const int big_divisor = 1000;
+ static const int small_divisor = 100;
+ static const int min_k = -292;
+ static const int max_k = 326;
+ static const int divisibility_check_by_5_threshold = 86;
+ static const int case_fc_pm_half_lower_threshold = -2;
+ static const int shorter_interval_tie_lower_threshold = -77;
+ static const int shorter_interval_tie_upper_threshold = -77;
+};
+
+// An 80- or 128-bit floating point number.
+template <typename T>
+struct float_info<T, enable_if_t<std::numeric_limits<T>::digits == 64 ||
+ std::numeric_limits<T>::digits == 113 ||
+ is_float128<T>::value>> {
+ using carrier_uint = detail::uint128_t;
+ static const int exponent_bits = 15;
+};
+
+// A double-double floating point number.
+template <typename T>
+struct float_info<T, enable_if_t<is_double_double<T>::value>> {
+ using carrier_uint = detail::uint128_t;
+};
+
+// Shortest decimal representation: significand * 10^exponent.
+template <typename T> struct decimal_fp {
+ using significand_type = typename float_info<T>::carrier_uint;
+ significand_type significand;
+ int exponent;
+};
+
+template <typename T> FMT_API auto to_decimal(T x) noexcept -> decimal_fp<T>;
+} // namespace dragonbox
+
+// Returns true iff Float has the implicit bit which is not stored.
+template <typename Float> constexpr bool has_implicit_bit() {
+  // An 80-bit FP number has a 64-bit significand and no implicit bit.
+ return std::numeric_limits<Float>::digits != 64;
+}
+
+// Returns the number of significand bits stored in Float. The implicit bit is
+// not counted since it is not stored.
+template <typename Float> constexpr int num_significand_bits() {
+ // std::numeric_limits may not support __float128.
+ return is_float128<Float>() ? 112
+ : (std::numeric_limits<Float>::digits -
+ (has_implicit_bit<Float>() ? 1 : 0));
+}
+
+template <typename Float>
+constexpr auto exponent_mask() ->
+ typename dragonbox::float_info<Float>::carrier_uint {
+ using uint = typename dragonbox::float_info<Float>::carrier_uint;
+ return ((uint(1) << dragonbox::float_info<Float>::exponent_bits) - 1)
+ << num_significand_bits<Float>();
+}
+template <typename Float> constexpr auto exponent_bias() -> int {
+ // std::numeric_limits may not support __float128.
+ return is_float128<Float>() ? 16383
+ : std::numeric_limits<Float>::max_exponent - 1;
+}
+
+// Writes the exponent exp in the form "[+-]d{2,4}" to buffer.
+template <typename Char, typename It>
+FMT_CONSTEXPR auto write_exponent(int exp, It it) -> It {
+ FMT_ASSERT(-10000 < exp && exp < 10000, "exponent out of range");
+ if (exp < 0) {
+ *it++ = static_cast<Char>('-');
+ exp = -exp;
+ } else {
+ *it++ = static_cast<Char>('+');
+ }
+ if (exp >= 100) {
+ const char* top = digits2(to_unsigned(exp / 100));
+ if (exp >= 1000) *it++ = static_cast<Char>(top[0]);
+ *it++ = static_cast<Char>(top[1]);
+ exp %= 100;
+ }
+ const char* d = digits2(to_unsigned(exp));
+ *it++ = static_cast<Char>(d[0]);
+ *it++ = static_cast<Char>(d[1]);
+ return it;
+}
+
+// A floating-point number f * pow(2, e) where F is an unsigned type.
+template <typename F> struct basic_fp {
+ F f;
+ int e;
+
+ static constexpr const int num_significand_bits =
+ static_cast<int>(sizeof(F) * num_bits<unsigned char>());
+
+ constexpr basic_fp() : f(0), e(0) {}
+ constexpr basic_fp(uint64_t f_val, int e_val) : f(f_val), e(e_val) {}
+
+ // Constructs fp from an IEEE754 floating-point number.
+ template <typename Float> FMT_CONSTEXPR basic_fp(Float n) { assign(n); }
+
+  // Assigns n to this and returns true iff the predecessor is closer than the successor.
+ template <typename Float, FMT_ENABLE_IF(!is_double_double<Float>::value)>
+ FMT_CONSTEXPR auto assign(Float n) -> bool {
+ static_assert(std::numeric_limits<Float>::digits <= 113, "unsupported FP");
+ // Assume Float is in the format [sign][exponent][significand].
+ using carrier_uint = typename dragonbox::float_info<Float>::carrier_uint;
+ const auto num_float_significand_bits =
+ detail::num_significand_bits<Float>();
+ const auto implicit_bit = carrier_uint(1) << num_float_significand_bits;
+ const auto significand_mask = implicit_bit - 1;
+ auto u = bit_cast<carrier_uint>(n);
+ f = static_cast<F>(u & significand_mask);
+ auto biased_e = static_cast<int>((u & exponent_mask<Float>()) >>
+ num_float_significand_bits);
+ // The predecessor is closer if n is a normalized power of 2 (f == 0)
+ // other than the smallest normalized number (biased_e > 1).
+ auto is_predecessor_closer = f == 0 && biased_e > 1;
+ if (biased_e == 0)
+ biased_e = 1; // Subnormals use biased exponent 1 (min exponent).
+ else if (has_implicit_bit<Float>())
+ f += static_cast<F>(implicit_bit);
+ e = biased_e - exponent_bias<Float>() - num_float_significand_bits;
+ if (!has_implicit_bit<Float>()) ++e;
+ return is_predecessor_closer;
+ }
+
+ template <typename Float, FMT_ENABLE_IF(is_double_double<Float>::value)>
+ FMT_CONSTEXPR auto assign(Float n) -> bool {
+ static_assert(std::numeric_limits<double>::is_iec559, "unsupported FP");
+ return assign(static_cast<double>(n));
+ }
+};
+
+using fp = basic_fp<unsigned long long>;
+
+// Normalizes the value converted from double and multiplied by (1 << SHIFT).
+template <int SHIFT = 0, typename F>
+FMT_CONSTEXPR basic_fp<F> normalize(basic_fp<F> value) {
+ // Handle subnormals.
+ const auto implicit_bit = F(1) << num_significand_bits<double>();
+ const auto shifted_implicit_bit = implicit_bit << SHIFT;
+ while ((value.f & shifted_implicit_bit) == 0) {
+ value.f <<= 1;
+ --value.e;
+ }
+ // Subtract 1 to account for hidden bit.
+ const auto offset = basic_fp<F>::num_significand_bits -
+ num_significand_bits<double>() - SHIFT - 1;
+ value.f <<= offset;
+ value.e -= offset;
+ return value;
+}
+
+// Computes lhs * rhs / pow(2, 64) rounded to nearest with half-up tie breaking.
+FMT_CONSTEXPR inline uint64_t multiply(uint64_t lhs, uint64_t rhs) {
+#if FMT_USE_INT128
+ auto product = static_cast<__uint128_t>(lhs) * rhs;
+ auto f = static_cast<uint64_t>(product >> 64);
+ return (static_cast<uint64_t>(product) & (1ULL << 63)) != 0 ? f + 1 : f;
+#else
+ // Multiply 32-bit parts of significands.
+ uint64_t mask = (1ULL << 32) - 1;
+ uint64_t a = lhs >> 32, b = lhs & mask;
+ uint64_t c = rhs >> 32, d = rhs & mask;
+ uint64_t ac = a * c, bc = b * c, ad = a * d, bd = b * d;
+ // Compute mid 64-bit of result and round.
+ uint64_t mid = (bd >> 32) + (ad & mask) + (bc & mask) + (1U << 31);
+ return ac + (ad >> 32) + (bc >> 32) + (mid >> 32);
+#endif
+}
+
+FMT_CONSTEXPR inline fp operator*(fp x, fp y) {
+ return {multiply(x.f, y.f), x.e + y.e + 64};
+}
+
+template <typename T = void> struct basic_data {
+ // Normalized 64-bit significands of pow(10, k), for k = -348, -340, ..., 340.
+ // These are generated by support/compute-powers.py.
+ static constexpr uint64_t pow10_significands[87] = {
+ 0xfa8fd5a0081c0288, 0xbaaee17fa23ebf76, 0x8b16fb203055ac76,
+ 0xcf42894a5dce35ea, 0x9a6bb0aa55653b2d, 0xe61acf033d1a45df,
+ 0xab70fe17c79ac6ca, 0xff77b1fcbebcdc4f, 0xbe5691ef416bd60c,
+ 0x8dd01fad907ffc3c, 0xd3515c2831559a83, 0x9d71ac8fada6c9b5,
+ 0xea9c227723ee8bcb, 0xaecc49914078536d, 0x823c12795db6ce57,
+ 0xc21094364dfb5637, 0x9096ea6f3848984f, 0xd77485cb25823ac7,
+ 0xa086cfcd97bf97f4, 0xef340a98172aace5, 0xb23867fb2a35b28e,
+ 0x84c8d4dfd2c63f3b, 0xc5dd44271ad3cdba, 0x936b9fcebb25c996,
+ 0xdbac6c247d62a584, 0xa3ab66580d5fdaf6, 0xf3e2f893dec3f126,
+ 0xb5b5ada8aaff80b8, 0x87625f056c7c4a8b, 0xc9bcff6034c13053,
+ 0x964e858c91ba2655, 0xdff9772470297ebd, 0xa6dfbd9fb8e5b88f,
+ 0xf8a95fcf88747d94, 0xb94470938fa89bcf, 0x8a08f0f8bf0f156b,
+ 0xcdb02555653131b6, 0x993fe2c6d07b7fac, 0xe45c10c42a2b3b06,
+ 0xaa242499697392d3, 0xfd87b5f28300ca0e, 0xbce5086492111aeb,
+ 0x8cbccc096f5088cc, 0xd1b71758e219652c, 0x9c40000000000000,
+ 0xe8d4a51000000000, 0xad78ebc5ac620000, 0x813f3978f8940984,
+ 0xc097ce7bc90715b3, 0x8f7e32ce7bea5c70, 0xd5d238a4abe98068,
+ 0x9f4f2726179a2245, 0xed63a231d4c4fb27, 0xb0de65388cc8ada8,
+ 0x83c7088e1aab65db, 0xc45d1df942711d9a, 0x924d692ca61be758,
+ 0xda01ee641a708dea, 0xa26da3999aef774a, 0xf209787bb47d6b85,
+ 0xb454e4a179dd1877, 0x865b86925b9bc5c2, 0xc83553c5c8965d3d,
+ 0x952ab45cfa97a0b3, 0xde469fbd99a05fe3, 0xa59bc234db398c25,
+ 0xf6c69a72a3989f5c, 0xb7dcbf5354e9bece, 0x88fcf317f22241e2,
+ 0xcc20ce9bd35c78a5, 0x98165af37b2153df, 0xe2a0b5dc971f303a,
+ 0xa8d9d1535ce3b396, 0xfb9b7cd9a4a7443c, 0xbb764c4ca7a44410,
+ 0x8bab8eefb6409c1a, 0xd01fef10a657842c, 0x9b10a4e5e9913129,
+ 0xe7109bfba19c0c9d, 0xac2820d9623bf429, 0x80444b5e7aa7cf85,
+ 0xbf21e44003acdd2d, 0x8e679c2f5e44ff8f, 0xd433179d9c8cb841,
+ 0x9e19db92b4e31ba9, 0xeb96bf6ebadf77d9, 0xaf87023b9bf0ee6b,
+ };
+
+#if FMT_GCC_VERSION && FMT_GCC_VERSION < 409
+# pragma GCC diagnostic push
+# pragma GCC diagnostic ignored "-Wnarrowing"
+#endif
+ // Binary exponents of pow(10, k), for k = -348, -340, ..., 340, corresponding
+ // to significands above.
+ static constexpr int16_t pow10_exponents[87] = {
+ -1220, -1193, -1166, -1140, -1113, -1087, -1060, -1034, -1007, -980, -954,
+ -927, -901, -874, -847, -821, -794, -768, -741, -715, -688, -661,
+ -635, -608, -582, -555, -529, -502, -475, -449, -422, -396, -369,
+ -343, -316, -289, -263, -236, -210, -183, -157, -130, -103, -77,
+ -50, -24, 3, 30, 56, 83, 109, 136, 162, 189, 216,
+ 242, 269, 295, 322, 348, 375, 402, 428, 455, 481, 508,
+ 534, 561, 588, 614, 641, 667, 694, 720, 747, 774, 800,
+ 827, 853, 880, 907, 933, 960, 986, 1013, 1039, 1066};
+#if FMT_GCC_VERSION && FMT_GCC_VERSION < 409
+# pragma GCC diagnostic pop
+#endif
+
+ static constexpr uint64_t power_of_10_64[20] = {
+ 1, FMT_POWERS_OF_10(1ULL), FMT_POWERS_OF_10(1000000000ULL),
+ 10000000000000000000ULL};
+};
+
+#if FMT_CPLUSPLUS < 201703L
+template <typename T> constexpr uint64_t basic_data<T>::pow10_significands[];
+template <typename T> constexpr int16_t basic_data<T>::pow10_exponents[];
+template <typename T> constexpr uint64_t basic_data<T>::power_of_10_64[];
+#endif
+
+// This is a struct rather than an alias to avoid shadowing warnings in gcc.
+struct data : basic_data<> {};
+
+// Returns a cached power of 10 `c_k = c_k.f * pow(2, c_k.e)` such that its
+// (binary) exponent satisfies `min_exponent <= c_k.e <= min_exponent + 28`.
+FMT_CONSTEXPR inline fp get_cached_power(int min_exponent,
+ int& pow10_exponent) {
+ const int shift = 32;
+ // log10(2) = 0x0.4d104d427de7fbcc...
+ const int64_t significand = 0x4d104d427de7fbcc;
+ int index = static_cast<int>(
+ ((min_exponent + fp::num_significand_bits - 1) * (significand >> shift) +
+ ((int64_t(1) << shift) - 1)) // ceil
+ >> 32 // arithmetic shift
+ );
+ // Decimal exponent of the first (smallest) cached power of 10.
+ const int first_dec_exp = -348;
+ // Difference between 2 consecutive decimal exponents in cached powers of 10.
+ const int dec_exp_step = 8;
+ index = (index - first_dec_exp - 1) / dec_exp_step + 1;
+ pow10_exponent = first_dec_exp + index * dec_exp_step;
+ return {data::pow10_significands[index], data::pow10_exponents[index]};
+}
+
+#ifndef _MSC_VER
+# define FMT_SNPRINTF snprintf
+#else
+FMT_API auto fmt_snprintf(char* buf, size_t size, const char* fmt, ...) -> int;
+# define FMT_SNPRINTF fmt_snprintf
+#endif // _MSC_VER
+
+// Formats a floating-point number with snprintf using the hexfloat format.
+template <typename T>
+auto snprintf_float(T value, int precision, float_specs specs,
+ buffer<char>& buf) -> int {
+ // Buffer capacity must be non-zero, otherwise MSVC's vsnprintf_s will fail.
+ FMT_ASSERT(buf.capacity() > buf.size(), "empty buffer");
+ FMT_ASSERT(specs.format == float_format::hex, "");
+ static_assert(!std::is_same<T, float>::value, "");
+
+ // Build the format string.
+ char format[7]; // The longest format is "%#.*Le".
+ char* format_ptr = format;
+ *format_ptr++ = '%';
+ if (specs.showpoint) *format_ptr++ = '#';
+ if (precision >= 0) {
+ *format_ptr++ = '.';
+ *format_ptr++ = '*';
+ }
+ if (std::is_same<T, long double>()) *format_ptr++ = 'L';
+ *format_ptr++ = specs.upper ? 'A' : 'a';
+ *format_ptr = '\0';
+
+ // Format using snprintf.
+ auto offset = buf.size();
+ for (;;) {
+ auto begin = buf.data() + offset;
+ auto capacity = buf.capacity() - offset;
+ abort_fuzzing_if(precision > 100000);
+ // Suppress the warning about a nonliteral format string.
+ // Cannot use auto because of a bug in MinGW (#1532).
+ int (*snprintf_ptr)(char*, size_t, const char*, ...) = FMT_SNPRINTF;
+ int result = precision >= 0
+ ? snprintf_ptr(begin, capacity, format, precision, value)
+ : snprintf_ptr(begin, capacity, format, value);
+ if (result < 0) {
+ // The buffer will grow exponentially.
+ buf.try_reserve(buf.capacity() + 1);
+ continue;
+ }
+ auto size = to_unsigned(result);
+ // Size equal to capacity means that the last character was truncated.
+ if (size < capacity) {
+ buf.try_resize(size + offset);
+ return 0;
+ }
+ buf.try_reserve(size + offset + 1); // Add 1 for the terminating '\0'.
+ }
+}
+
+template <typename T>
+using convert_float_result =
+ conditional_t<std::is_same<T, float>::value || sizeof(T) == sizeof(double),
+ double, T>;
+
+template <typename T>
+constexpr auto convert_float(T value) -> convert_float_result<T> {
+ return static_cast<convert_float_result<T>>(value);
+}
+
+template <typename OutputIt, typename Char>
+FMT_NOINLINE FMT_CONSTEXPR auto fill(OutputIt it, size_t n,
+ const fill_t<Char>& fill) -> OutputIt {
+ auto fill_size = fill.size();
+ if (fill_size == 1) return detail::fill_n(it, n, fill[0]);
+ auto data = fill.data();
+ for (size_t i = 0; i < n; ++i)
+ it = copy_str<Char>(data, data + fill_size, it);
+ return it;
+}
+
+// Writes the output of f, padded according to format specifications in specs.
+// size: output size in code units.
+// width: output display width in (terminal) column positions.
+template <align::type align = align::left, typename OutputIt, typename Char,
+ typename F>
+FMT_CONSTEXPR auto write_padded(OutputIt out,
+ const basic_format_specs<Char>& specs,
+ size_t size, size_t width, F&& f) -> OutputIt {
+ static_assert(align == align::left || align == align::right, "");
+ unsigned spec_width = to_unsigned(specs.width);
+ size_t padding = spec_width > width ? spec_width - width : 0;
+ // Shifts are encoded as string literals because static constexpr is not
+ // supported in constexpr functions.
+ auto* shifts = align == align::left ? "\x1f\x1f\x00\x01" : "\x00\x1f\x00\x01";
+ size_t left_padding = padding >> shifts[specs.align];
+ size_t right_padding = padding - left_padding;
+ auto it = reserve(out, size + padding * specs.fill.size());
+ if (left_padding != 0) it = fill(it, left_padding, specs.fill);
+ it = f(it);
+ if (right_padding != 0) it = fill(it, right_padding, specs.fill);
+ return base_iterator(out, it);
+}
+
+template <align::type align = align::left, typename OutputIt, typename Char,
+ typename F>
+constexpr auto write_padded(OutputIt out, const basic_format_specs<Char>& specs,
+ size_t size, F&& f) -> OutputIt {
+ return write_padded<align>(out, specs, size, size, f);
+}
+
+template <align::type align = align::left, typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write_bytes(OutputIt out, string_view bytes,
+ const basic_format_specs<Char>& specs)
+ -> OutputIt {
+ return write_padded<align>(
+ out, specs, bytes.size(), [bytes](reserve_iterator<OutputIt> it) {
+ const char* data = bytes.data();
+ return copy_str<Char>(data, data + bytes.size(), it);
+ });
+}
+
+template <typename Char, typename OutputIt, typename UIntPtr>
+auto write_ptr(OutputIt out, UIntPtr value,
+ const basic_format_specs<Char>* specs) -> OutputIt {
+ int num_digits = count_digits<4>(value);
+ auto size = to_unsigned(num_digits) + size_t(2);
+ auto write = [=](reserve_iterator<OutputIt> it) {
+ *it++ = static_cast<Char>('0');
+ *it++ = static_cast<Char>('x');
+ return format_uint<4, Char>(it, value, num_digits);
+ };
+ return specs ? write_padded<align::right>(out, *specs, size, write)
+ : base_iterator(out, write(reserve(out, size)));
+}
+
+// Returns true iff the code point cp is printable.
+FMT_API auto is_printable(uint32_t cp) -> bool;
+
+inline auto needs_escape(uint32_t cp) -> bool {
+ return cp < 0x20 || cp == 0x7f || cp == '"' || cp == '\\' ||
+ !is_printable(cp);
+}
+
+template <typename Char> struct find_escape_result {
+ const Char* begin;
+ const Char* end;
+ uint32_t cp;
+};
+
+template <typename Char>
+using make_unsigned_char =
+ typename conditional_t<std::is_integral<Char>::value,
+ std::make_unsigned<Char>,
+ type_identity<uint32_t>>::type;
+
+template <typename Char>
+auto find_escape(const Char* begin, const Char* end)
+ -> find_escape_result<Char> {
+ for (; begin != end; ++begin) {
+ uint32_t cp = static_cast<make_unsigned_char<Char>>(*begin);
+ if (const_check(sizeof(Char) == 1) && cp >= 0x80) continue;
+ if (needs_escape(cp)) return {begin, begin + 1, cp};
+ }
+ return {begin, nullptr, 0};
+}
+
+inline auto find_escape(const char* begin, const char* end)
+ -> find_escape_result<char> {
+ if (!is_utf8()) return find_escape<char>(begin, end);
+ auto result = find_escape_result<char>{end, nullptr, 0};
+ for_each_codepoint(string_view(begin, to_unsigned(end - begin)),
+ [&](uint32_t cp, string_view sv) {
+ if (needs_escape(cp)) {
+ result = {sv.begin(), sv.end(), cp};
+ return false;
+ }
+ return true;
+ });
+ return result;
+}
+
+#define FMT_STRING_IMPL(s, base, explicit) \
+ [] { \
+ /* Use the hidden visibility as a workaround for a GCC bug (#1973). */ \
+ /* Use a macro-like name to avoid shadowing warnings. */ \
+ struct FMT_GCC_VISIBILITY_HIDDEN FMT_COMPILE_STRING : base { \
+ using char_type = fmt::remove_cvref_t<decltype(s[0])>; \
+ FMT_MAYBE_UNUSED FMT_CONSTEXPR explicit \
+ operator fmt::basic_string_view<char_type>() const { \
+ return fmt::detail_exported::compile_string_to_view<char_type>(s); \
+ } \
+ }; \
+ return FMT_COMPILE_STRING(); \
+ }()
+
+/**
+ \rst
+ Constructs a compile-time format string from a string literal *s*.
+
+ **Example**::
+
+ // A compile-time error because 'd' is an invalid specifier for strings.
+ std::string s = fmt::format(FMT_STRING("{:d}"), "foo");
+ \endrst
+ */
+#define FMT_STRING(s) FMT_STRING_IMPL(s, fmt::detail::compile_string, )
+
+template <size_t width, typename Char, typename OutputIt>
+auto write_codepoint(OutputIt out, char prefix, uint32_t cp) -> OutputIt {
+ *out++ = static_cast<Char>('\\');
+ *out++ = static_cast<Char>(prefix);
+ Char buf[width];
+ fill_n(buf, width, static_cast<Char>('0'));
+ format_uint<4>(buf, cp, width);
+ return copy_str<Char>(buf, buf + width, out);
+}
+
+template <typename OutputIt, typename Char>
+auto write_escaped_cp(OutputIt out, const find_escape_result<Char>& escape)
+ -> OutputIt {
+ auto c = static_cast<Char>(escape.cp);
+ switch (escape.cp) {
+ case '\n':
+ *out++ = static_cast<Char>('\\');
+ c = static_cast<Char>('n');
+ break;
+ case '\r':
+ *out++ = static_cast<Char>('\\');
+ c = static_cast<Char>('r');
+ break;
+ case '\t':
+ *out++ = static_cast<Char>('\\');
+ c = static_cast<Char>('t');
+ break;
+ case '"':
+ FMT_FALLTHROUGH;
+ case '\'':
+ FMT_FALLTHROUGH;
+ case '\\':
+ *out++ = static_cast<Char>('\\');
+ break;
+ default:
+ if (is_utf8()) {
+ if (escape.cp < 0x100) {
+ return write_codepoint<2, Char>(out, 'x', escape.cp);
+ }
+ if (escape.cp < 0x10000) {
+ return write_codepoint<4, Char>(out, 'u', escape.cp);
+ }
+ if (escape.cp < 0x110000) {
+ return write_codepoint<8, Char>(out, 'U', escape.cp);
+ }
+ }
+ for (Char escape_char : basic_string_view<Char>(
+ escape.begin, to_unsigned(escape.end - escape.begin))) {
+ out = write_codepoint<2, Char>(out, 'x',
+ static_cast<uint32_t>(escape_char) & 0xFF);
+ }
+ return out;
+ }
+ *out++ = c;
+ return out;
+}
+
+template <typename Char, typename OutputIt>
+auto write_escaped_string(OutputIt out, basic_string_view<Char> str)
+ -> OutputIt {
+ *out++ = static_cast<Char>('"');
+ auto begin = str.begin(), end = str.end();
+ do {
+ auto escape = find_escape(begin, end);
+ out = copy_str<Char>(begin, escape.begin, out);
+ begin = escape.end;
+ if (!begin) break;
+ out = write_escaped_cp<OutputIt, Char>(out, escape);
+ } while (begin != end);
+ *out++ = static_cast<Char>('"');
+ return out;
+}
+
+template <typename Char, typename OutputIt>
+auto write_escaped_char(OutputIt out, Char v) -> OutputIt {
+ *out++ = static_cast<Char>('\'');
+ if ((needs_escape(static_cast<uint32_t>(v)) && v != static_cast<Char>('"')) ||
+ v == static_cast<Char>('\'')) {
+ out = write_escaped_cp(
+ out, find_escape_result<Char>{&v, &v + 1, static_cast<uint32_t>(v)});
+ } else {
+ *out++ = v;
+ }
+ *out++ = static_cast<Char>('\'');
+ return out;
+}
+
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write_char(OutputIt out, Char value,
+ const basic_format_specs<Char>& specs)
+ -> OutputIt {
+ bool is_debug = specs.type == presentation_type::debug;
+ return write_padded(out, specs, 1, [=](reserve_iterator<OutputIt> it) {
+ if (is_debug) return write_escaped_char(it, value);
+ *it++ = value;
+ return it;
+ });
+}
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out, Char value,
+ const basic_format_specs<Char>& specs,
+ locale_ref loc = {}) -> OutputIt {
+ return check_char_specs(specs)
+ ? write_char(out, value, specs)
+ : write(out, static_cast<int>(value), specs, loc);
+}
+
+// Data for write_int that doesn't depend on output iterator type. It is used to
+// avoid template code bloat.
+template <typename Char> struct write_int_data {
+ size_t size;
+ size_t padding;
+
+ FMT_CONSTEXPR write_int_data(int num_digits, unsigned prefix,
+ const basic_format_specs<Char>& specs)
+ : size((prefix >> 24) + to_unsigned(num_digits)), padding(0) {
+ if (specs.align == align::numeric) {
+ auto width = to_unsigned(specs.width);
+ if (width > size) {
+ padding = width - size;
+ size = width;
+ }
+ } else if (specs.precision > num_digits) {
+ size = (prefix >> 24) + to_unsigned(specs.precision);
+ padding = to_unsigned(specs.precision - num_digits);
+ }
+ }
+};
+
+// Writes an integer in the format
+// <left-padding><prefix><numeric-padding><digits><right-padding>
+// where <digits> are written by write_digits(it).
+// prefix contains chars in three lower bytes and the size in the fourth byte.
+template <typename OutputIt, typename Char, typename W>
+FMT_CONSTEXPR FMT_INLINE auto write_int(OutputIt out, int num_digits,
+ unsigned prefix,
+ const basic_format_specs<Char>& specs,
+ W write_digits) -> OutputIt {
+ // Slightly faster check for specs.width == 0 && specs.precision == -1.
+ if ((specs.width | (specs.precision + 1)) == 0) {
+ auto it = reserve(out, to_unsigned(num_digits) + (prefix >> 24));
+ if (prefix != 0) {
+ for (unsigned p = prefix & 0xffffff; p != 0; p >>= 8)
+ *it++ = static_cast<Char>(p & 0xff);
+ }
+ return base_iterator(out, write_digits(it));
+ }
+ auto data = write_int_data<Char>(num_digits, prefix, specs);
+ return write_padded<align::right>(
+ out, specs, data.size, [=](reserve_iterator<OutputIt> it) {
+ for (unsigned p = prefix & 0xffffff; p != 0; p >>= 8)
+ *it++ = static_cast<Char>(p & 0xff);
+ it = detail::fill_n(it, data.padding, static_cast<Char>('0'));
+ return write_digits(it);
+ });
+}
+
+template <typename Char> class digit_grouping {
+ private:
+ thousands_sep_result<Char> sep_;
+
+ struct next_state {
+ std::string::const_iterator group;
+ int pos;
+ };
+ next_state initial_state() const { return {sep_.grouping.begin(), 0}; }
+
+ // Returns the next digit group separator position.
+ int next(next_state& state) const {
+ if (!sep_.thousands_sep) return max_value<int>();
+ if (state.group == sep_.grouping.end())
+ return state.pos += sep_.grouping.back();
+ if (*state.group <= 0 || *state.group == max_value<char>())
+ return max_value<int>();
+ state.pos += *state.group++;
+ return state.pos;
+ }
+
+ public:
+ explicit digit_grouping(locale_ref loc, bool localized = true) {
+ if (localized)
+ sep_ = thousands_sep<Char>(loc);
+ else
+ sep_.thousands_sep = Char();
+ }
+ explicit digit_grouping(thousands_sep_result<Char> sep) : sep_(sep) {}
+
+ Char separator() const { return sep_.thousands_sep; }
+
+ int count_separators(int num_digits) const {
+ int count = 0;
+ auto state = initial_state();
+ while (num_digits > next(state)) ++count;
+ return count;
+ }
+
+  // Applies grouping to digits and writes the output to out.
+ template <typename Out, typename C>
+ Out apply(Out out, basic_string_view<C> digits) const {
+ auto num_digits = static_cast<int>(digits.size());
+ auto separators = basic_memory_buffer<int>();
+ separators.push_back(0);
+ auto state = initial_state();
+ while (int i = next(state)) {
+ if (i >= num_digits) break;
+ separators.push_back(i);
+ }
+ for (int i = 0, sep_index = static_cast<int>(separators.size() - 1);
+ i < num_digits; ++i) {
+ if (num_digits - i == separators[sep_index]) {
+ *out++ = separator();
+ --sep_index;
+ }
+ *out++ = static_cast<Char>(digits[to_unsigned(i)]);
+ }
+ return out;
+ }
+};
+
+template <typename OutputIt, typename UInt, typename Char>
+auto write_int_localized(OutputIt out, UInt value, unsigned prefix,
+ const basic_format_specs<Char>& specs,
+ const digit_grouping<Char>& grouping) -> OutputIt {
+ static_assert(std::is_same<uint64_or_128_t<UInt>, UInt>::value, "");
+ int num_digits = count_digits(value);
+ char digits[40];
+ format_decimal(digits, value, num_digits);
+ unsigned size = to_unsigned((prefix != 0 ? 1 : 0) + num_digits +
+ grouping.count_separators(num_digits));
+ return write_padded<align::right>(
+ out, specs, size, size, [&](reserve_iterator<OutputIt> it) {
+ if (prefix != 0) *it++ = static_cast<Char>(prefix);
+ return grouping.apply(it, string_view(digits, to_unsigned(num_digits)));
+ });
+}
+
+template <typename OutputIt, typename UInt, typename Char>
+auto write_int_localized(OutputIt& out, UInt value, unsigned prefix,
+ const basic_format_specs<Char>& specs, locale_ref loc)
+ -> bool {
+ auto grouping = digit_grouping<Char>(loc);
+ out = write_int_localized(out, value, prefix, specs, grouping);
+ return true;
+}
+
+FMT_CONSTEXPR inline void prefix_append(unsigned& prefix, unsigned value) {
+ prefix |= prefix != 0 ? value << 8 : value;
+ prefix += (1u + (value > 0xff ? 1 : 0)) << 24;
+}
+
+template <typename UInt> struct write_int_arg {
+ UInt abs_value;
+ unsigned prefix;
+};
+
+template <typename T>
+FMT_CONSTEXPR auto make_write_int_arg(T value, sign_t sign)
+ -> write_int_arg<uint32_or_64_or_128_t<T>> {
+ auto prefix = 0u;
+ auto abs_value = static_cast<uint32_or_64_or_128_t<T>>(value);
+ if (is_negative(value)) {
+ prefix = 0x01000000 | '-';
+ abs_value = 0 - abs_value;
+ } else {
+ constexpr const unsigned prefixes[4] = {0, 0, 0x1000000u | '+',
+ 0x1000000u | ' '};
+ prefix = prefixes[sign];
+ }
+ return {abs_value, prefix};
+}
+
+template <typename Char, typename OutputIt, typename T>
+FMT_CONSTEXPR FMT_INLINE auto write_int(OutputIt out, write_int_arg<T> arg,
+ const basic_format_specs<Char>& specs,
+ locale_ref loc) -> OutputIt {
+ static_assert(std::is_same<T, uint32_or_64_or_128_t<T>>::value, "");
+ auto abs_value = arg.abs_value;
+ auto prefix = arg.prefix;
+ switch (specs.type) {
+ case presentation_type::none:
+ case presentation_type::dec: {
+ if (specs.localized &&
+ write_int_localized(out, static_cast<uint64_or_128_t<T>>(abs_value),
+ prefix, specs, loc)) {
+ return out;
+ }
+ auto num_digits = count_digits(abs_value);
+ return write_int(
+ out, num_digits, prefix, specs, [=](reserve_iterator<OutputIt> it) {
+ return format_decimal<Char>(it, abs_value, num_digits).end;
+ });
+ }
+ case presentation_type::hex_lower:
+ case presentation_type::hex_upper: {
+ bool upper = specs.type == presentation_type::hex_upper;
+ if (specs.alt)
+ prefix_append(prefix, unsigned(upper ? 'X' : 'x') << 8 | '0');
+ int num_digits = count_digits<4>(abs_value);
+ return write_int(
+ out, num_digits, prefix, specs, [=](reserve_iterator<OutputIt> it) {
+ return format_uint<4, Char>(it, abs_value, num_digits, upper);
+ });
+ }
+ case presentation_type::bin_lower:
+ case presentation_type::bin_upper: {
+ bool upper = specs.type == presentation_type::bin_upper;
+ if (specs.alt)
+ prefix_append(prefix, unsigned(upper ? 'B' : 'b') << 8 | '0');
+ int num_digits = count_digits<1>(abs_value);
+ return write_int(out, num_digits, prefix, specs,
+ [=](reserve_iterator<OutputIt> it) {
+ return format_uint<1, Char>(it, abs_value, num_digits);
+ });
+ }
+ case presentation_type::oct: {
+ int num_digits = count_digits<3>(abs_value);
+ // Octal prefix '0' is counted as a digit, so only add it if precision
+ // is not greater than the number of digits.
+ if (specs.alt && specs.precision <= num_digits && abs_value != 0)
+ prefix_append(prefix, '0');
+ return write_int(out, num_digits, prefix, specs,
+ [=](reserve_iterator<OutputIt> it) {
+ return format_uint<3, Char>(it, abs_value, num_digits);
+ });
+ }
+ case presentation_type::chr:
+ return write_char(out, static_cast<Char>(abs_value), specs);
+ default:
+ throw_format_error("invalid type specifier");
+ }
+ return out;
+}
+template <typename Char, typename OutputIt, typename T>
+FMT_CONSTEXPR FMT_NOINLINE auto write_int_noinline(
+ OutputIt out, write_int_arg<T> arg, const basic_format_specs<Char>& specs,
+ locale_ref loc) -> OutputIt {
+ return write_int(out, arg, specs, loc);
+}
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_integral<T>::value &&
+ !std::is_same<T, bool>::value &&
+ std::is_same<OutputIt, buffer_appender<Char>>::value)>
+FMT_CONSTEXPR FMT_INLINE auto write(OutputIt out, T value,
+ const basic_format_specs<Char>& specs,
+ locale_ref loc) -> OutputIt {
+ return write_int_noinline(out, make_write_int_arg(value, specs.sign), specs,
+ loc);
+}
+// An inlined version of write used in format string compilation.
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_integral<T>::value &&
+ !std::is_same<T, bool>::value &&
+ !std::is_same<OutputIt, buffer_appender<Char>>::value)>
+FMT_CONSTEXPR FMT_INLINE auto write(OutputIt out, T value,
+ const basic_format_specs<Char>& specs,
+ locale_ref loc) -> OutputIt {
+ return write_int(out, make_write_int_arg(value, specs.sign), specs, loc);
+}
+
+// An output iterator that counts the number of objects written to it and
+// discards them.
+class counting_iterator {
+ private:
+ size_t count_;
+
+ public:
+ using iterator_category = std::output_iterator_tag;
+ using difference_type = std::ptrdiff_t;
+ using pointer = void;
+ using reference = void;
+ FMT_UNCHECKED_ITERATOR(counting_iterator);
+
+ struct value_type {
+ template <typename T> void operator=(const T&) {}
+ };
+
+ counting_iterator() : count_(0) {}
+
+ size_t count() const { return count_; }
+
+ counting_iterator& operator++() {
+ ++count_;
+ return *this;
+ }
+ counting_iterator operator++(int) {
+ auto it = *this;
+ ++*this;
+ return it;
+ }
+
+ friend counting_iterator operator+(counting_iterator it, difference_type n) {
+ it.count_ += static_cast<size_t>(n);
+ return it;
+ }
+
+ value_type operator*() const { return {}; }
+};
+
+// Writes a string applying precision and width/fill from specs. With the
+// debug presentation type the string is written in escaped form.
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out, basic_string_view<Char> s,
+ const basic_format_specs<Char>& specs) -> OutputIt {
+ auto data = s.data();
+ auto size = s.size();
+ // Precision truncates by code point, not by code unit.
+ if (specs.precision >= 0 && to_unsigned(specs.precision) < size)
+ size = code_point_index(s, to_unsigned(specs.precision));
+ bool is_debug = specs.type == presentation_type::debug;
+ size_t width = 0;
+ if (specs.width != 0) {
+ // Display width differs from size: escaping lengthens the output and
+ // multi-byte code points may occupy fewer columns than code units.
+ if (is_debug)
+ width = write_escaped_string(counting_iterator{}, s).count();
+ else
+ width = compute_width(basic_string_view<Char>(data, size));
+ }
+ return write_padded(out, specs, size, width,
+ [=](reserve_iterator<OutputIt> it) {
+ if (is_debug) return write_escaped_string(it, s);
+ return copy_str<Char>(data, data + size, it);
+ });
+}
+// Validates the type spec for strings, then forwards to the overload above.
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out,
+ basic_string_view<type_identity_t<Char>> s,
+ const basic_format_specs<Char>& specs, locale_ref)
+ -> OutputIt {
+ check_string_type_spec(specs.type);
+ return write(out, s, specs);
+}
+// Writes a null-terminated C string; when the type spec is not a string spec
+// the pointer value itself is formatted instead.
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out, const Char* s,
+ const basic_format_specs<Char>& specs, locale_ref)
+ -> OutputIt {
+ return check_cstring_type_spec(specs.type)
+ ? write(out, basic_string_view<Char>(s), specs, {})
+ : write_ptr<Char>(out, bit_cast<uintptr_t>(s), &specs);
+}
+
+// Writes an integer in decimal with no format specs applied.
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_integral<T>::value &&
+ !std::is_same<T, bool>::value &&
+ !std::is_same<T, Char>::value)>
+FMT_CONSTEXPR auto write(OutputIt out, T value) -> OutputIt {
+ auto abs_value = static_cast<uint32_or_64_or_128_t<T>>(value);
+ bool negative = is_negative(value);
+ // Don't do -abs_value since it trips unsigned-integer-overflow sanitizer.
+ if (negative) abs_value = ~abs_value + 1;
+ int num_digits = count_digits(abs_value);
+ auto size = (negative ? 1 : 0) + static_cast<size_t>(num_digits);
+ auto it = reserve(out, size);
+ // Fast path: write digits directly into contiguous storage if available.
+ if (auto ptr = to_pointer<Char>(it, size)) {
+ if (negative) *ptr++ = static_cast<Char>('-');
+ format_decimal<Char>(ptr, abs_value, num_digits);
+ return out;
+ }
+ if (negative) *it++ = static_cast<Char>('-');
+ it = format_decimal<Char>(it, abs_value, num_digits).end;
+ return base_iterator(out, it);
+}
+
+// Writes "inf"/"nan" (uppercase when fspecs.upper is set), honoring the sign
+// and the padding from specs.
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR20 auto write_nonfinite(OutputIt out, bool isnan,
+ basic_format_specs<Char> specs,
+ const float_specs& fspecs) -> OutputIt {
+ auto str =
+ isnan ? (fspecs.upper ? "NAN" : "nan") : (fspecs.upper ? "INF" : "inf");
+ constexpr size_t str_size = 3;
+ auto sign = fspecs.sign;
+ auto size = str_size + (sign ? 1 : 0);
+ // Replace '0'-padding with space for non-finite values.
+ const bool is_zero_fill =
+ specs.fill.size() == 1 && *specs.fill.data() == static_cast<Char>('0');
+ if (is_zero_fill) specs.fill[0] = static_cast<Char>(' ');
+ return write_padded(out, specs, size, [=](reserve_iterator<OutputIt> it) {
+ if (sign) *it++ = detail::sign<Char>(sign);
+ return copy_str<Char>(str, str + str_size, it);
+ });
+}
+
+// A decimal floating-point number significand * pow(10, exp).
+struct big_decimal_fp {
+ const char* significand; // Decimal significand digits.
+ int significand_size; // Number of digits in significand.
+ int exponent; // Decimal exponent.
+};
+
+// Returns the number of decimal digits in the significand of f.
+constexpr auto get_significand_size(const big_decimal_fp& f) -> int {
+ return f.significand_size;
+}
+// Same as above for a dragonbox decimal, whose significand is an integer.
+template <typename T>
+inline auto get_significand_size(const dragonbox::decimal_fp<T>& f) -> int {
+ return count_digits(f.significand);
+}
+
+// Copies a significand given as a decimal digit string.
+template <typename Char, typename OutputIt>
+constexpr auto write_significand(OutputIt out, const char* significand,
+ int significand_size) -> OutputIt {
+ return copy_str<Char>(significand, significand + significand_size, out);
+}
+// Writes a significand given as an unsigned integer in decimal.
+template <typename Char, typename OutputIt, typename UInt>
+inline auto write_significand(OutputIt out, UInt significand,
+ int significand_size) -> OutputIt {
+ return format_decimal<Char>(out, significand, significand_size).end;
+}
+// Writes the significand followed by `exponent` trailing zeros, inserting
+// locale digit-group separators via grouping when it has a separator.
+template <typename Char, typename OutputIt, typename T, typename Grouping>
+FMT_CONSTEXPR20 auto write_significand(OutputIt out, T significand,
+ int significand_size, int exponent,
+ const Grouping& grouping) -> OutputIt {
+ if (!grouping.separator()) {
+ // No grouping: emit the digits and the trailing zeros directly.
+ out = write_significand<Char>(out, significand, significand_size);
+ return detail::fill_n(out, exponent, static_cast<Char>('0'));
+ }
+ // With grouping: render to a temporary buffer first, then let the grouping
+ // object insert separators while copying to the output.
+ auto buffer = memory_buffer();
+ write_significand<char>(appender(buffer), significand, significand_size);
+ detail::fill_n(appender(buffer), exponent, '0');
+ return grouping.apply(out, string_view(buffer.data(), buffer.size()));
+}
+
+// Writes an integer significand into a contiguous buffer with a decimal
+// point placed after integral_size digits; returns the end of the output.
+// A null decimal_point means no point is inserted.
+template <typename Char, typename UInt,
+ FMT_ENABLE_IF(std::is_integral<UInt>::value)>
+inline auto write_significand(Char* out, UInt significand, int significand_size,
+ int integral_size, Char decimal_point) -> Char* {
+ if (!decimal_point)
+ return format_decimal(out, significand, significand_size).end;
+ out += significand_size + 1;
+ Char* end = out;
+ int floating_size = significand_size - integral_size;
+ // Fill the fractional digits backwards, two at a time.
+ for (int i = floating_size / 2; i > 0; --i) {
+ out -= 2;
+ copy2(out, digits2(static_cast<std::size_t>(significand % 100)));
+ significand /= 100;
+ }
+ if (floating_size % 2 != 0) {
+ *--out = static_cast<Char>('0' + significand % 10);
+ significand /= 10;
+ }
+ *--out = decimal_point;
+ // The remaining digits form the integral part.
+ format_decimal(out - integral_size, significand, integral_size);
+ return end;
+}
+
+// Non-pointer-iterator variant: renders the significand with decimal point
+// into a small stack buffer, then copies it to the output iterator.
+template <typename OutputIt, typename UInt, typename Char,
+ FMT_ENABLE_IF(!std::is_pointer<remove_cvref_t<OutputIt>>::value)>
+inline auto write_significand(OutputIt out, UInt significand,
+ int significand_size, int integral_size,
+ Char decimal_point) -> OutputIt {
+ // Buffer is large enough to hold digits (digits10 + 1) and a decimal point.
+ Char buffer[digits10<UInt>() + 2];
+ auto end = write_significand(buffer, significand, significand_size,
+ integral_size, decimal_point);
+ return detail::copy_str_noinline<Char>(buffer, end, out);
+}
+
+// Splits a digit-string significand at integral_size and inserts the decimal
+// point between the two halves (no point when decimal_point is null).
+template <typename OutputIt, typename Char>
+FMT_CONSTEXPR auto write_significand(OutputIt out, const char* significand,
+ int significand_size, int integral_size,
+ Char decimal_point) -> OutputIt {
+ out = detail::copy_str_noinline<Char>(significand,
+ significand + integral_size, out);
+ if (!decimal_point) return out;
+ *out++ = decimal_point;
+ return detail::copy_str_noinline<Char>(significand + integral_size,
+ significand + significand_size, out);
+}
+
+// Writes significand with a decimal point and locale digit grouping.
+// Grouping separators apply to the integral part only; the fractional part
+// (everything past integral_size) is copied verbatim.
+template <typename OutputIt, typename Char, typename T, typename Grouping>
+FMT_CONSTEXPR20 auto write_significand(OutputIt out, T significand,
+ int significand_size, int integral_size,
+ Char decimal_point,
+ const Grouping& grouping) -> OutputIt {
+ if (!grouping.separator()) {
+ return write_significand(out, significand, significand_size, integral_size,
+ decimal_point);
+ }
+ auto buffer = basic_memory_buffer<Char>();
+ write_significand(buffer_appender<Char>(buffer), significand,
+ significand_size, integral_size, decimal_point);
+ grouping.apply(
+ out, basic_string_view<Char>(buffer.data(), to_unsigned(integral_size)));
+ return detail::copy_str_noinline<Char>(buffer.data() + integral_size,
+ buffer.end(), out);
+}
+
+// Writes the decimal number f (significand * pow(10, exponent)) according to
+// specs/fspecs, choosing between exponent and fixed notation. Grouping
+// supplies locale digit grouping for the integral part.
+template <typename OutputIt, typename DecimalFP, typename Char,
+ typename Grouping = digit_grouping<Char>>
+FMT_CONSTEXPR20 auto do_write_float(OutputIt out, const DecimalFP& f,
+ const basic_format_specs<Char>& specs,
+ float_specs fspecs, locale_ref loc)
+ -> OutputIt {
+ auto significand = f.significand;
+ int significand_size = get_significand_size(f);
+ const Char zero = static_cast<Char>('0');
+ auto sign = fspecs.sign;
+ // size accumulates the total number of output code units for padding.
+ size_t size = to_unsigned(significand_size) + (sign ? 1 : 0);
+ using iterator = reserve_iterator<OutputIt>;
+
+ Char decimal_point =
+ fspecs.locale ? detail::decimal_point<Char>(loc) : static_cast<Char>('.');
+
+ int output_exp = f.exponent + significand_size - 1;
+ auto use_exp_format = [=]() {
+ if (fspecs.format == float_format::exp) return true;
+ if (fspecs.format != float_format::general) return false;
+ // Use the fixed notation if the exponent is in [exp_lower, exp_upper),
+ // e.g. 0.0001 instead of 1e-04. Otherwise use the exponent notation.
+ const int exp_lower = -4, exp_upper = 16;
+ return output_exp < exp_lower ||
+ output_exp >= (fspecs.precision > 0 ? fspecs.precision : exp_upper);
+ };
+ if (use_exp_format()) {
+ int num_zeros = 0;
+ if (fspecs.showpoint) {
+ // '#' pads the significand with zeros up to the precision.
+ num_zeros = fspecs.precision - significand_size;
+ if (num_zeros < 0) num_zeros = 0;
+ size += to_unsigned(num_zeros);
+ } else if (significand_size == 1) {
+ // A single digit needs no decimal point, e.g. 1e+00.
+ decimal_point = Char();
+ }
+ auto abs_output_exp = output_exp >= 0 ? output_exp : -output_exp;
+ int exp_digits = 2;
+ if (abs_output_exp >= 100) exp_digits = abs_output_exp >= 1000 ? 4 : 3;
+
+ // +2 accounts for the exponent character and its sign.
+ size += to_unsigned((decimal_point ? 1 : 0) + 2 + exp_digits);
+ char exp_char = fspecs.upper ? 'E' : 'e';
+ auto write = [=](iterator it) {
+ if (sign) *it++ = detail::sign<Char>(sign);
+ // Insert a decimal point after the first digit and add an exponent.
+ it = write_significand(it, significand, significand_size, 1,
+ decimal_point);
+ if (num_zeros > 0) it = detail::fill_n(it, num_zeros, zero);
+ *it++ = static_cast<Char>(exp_char);
+ return write_exponent<Char>(output_exp, it);
+ };
+ return specs.width > 0 ? write_padded<align::right>(out, specs, size, write)
+ : base_iterator(out, write(reserve(out, size)));
+ }
+
+ // exp is the number of digits before the decimal point in fixed notation.
+ int exp = f.exponent + significand_size;
+ if (f.exponent >= 0) {
+ // 1234e5 -> 123400000[.0+]
+ size += to_unsigned(f.exponent);
+ int num_zeros = fspecs.precision - exp;
+ abort_fuzzing_if(num_zeros > 5000);
+ if (fspecs.showpoint) {
+ ++size;
+ if (num_zeros <= 0 && fspecs.format != float_format::fixed) num_zeros = 1;
+ if (num_zeros > 0) size += to_unsigned(num_zeros);
+ }
+ auto grouping = Grouping(loc, fspecs.locale);
+ size += to_unsigned(grouping.count_separators(exp));
+ return write_padded<align::right>(out, specs, size, [&](iterator it) {
+ if (sign) *it++ = detail::sign<Char>(sign);
+ it = write_significand<Char>(it, significand, significand_size,
+ f.exponent, grouping);
+ if (!fspecs.showpoint) return it;
+ *it++ = decimal_point;
+ return num_zeros > 0 ? detail::fill_n(it, num_zeros, zero) : it;
+ });
+ } else if (exp > 0) {
+ // 1234e-2 -> 12.34[0+]
+ int num_zeros = fspecs.showpoint ? fspecs.precision - significand_size : 0;
+ size += 1 + to_unsigned(num_zeros > 0 ? num_zeros : 0);
+ auto grouping = Grouping(loc, fspecs.locale);
+ size += to_unsigned(grouping.count_separators(significand_size));
+ return write_padded<align::right>(out, specs, size, [&](iterator it) {
+ if (sign) *it++ = detail::sign<Char>(sign);
+ it = write_significand(it, significand, significand_size, exp,
+ decimal_point, grouping);
+ return num_zeros > 0 ? detail::fill_n(it, num_zeros, zero) : it;
+ });
+ }
+ // 1234e-6 -> 0.001234
+ int num_zeros = -exp;
+ if (significand_size == 0 && fspecs.precision >= 0 &&
+ fspecs.precision < num_zeros) {
+ num_zeros = fspecs.precision;
+ }
+ // pointy: whether a decimal point is written at all (e.g. not for plain 0).
+ bool pointy = num_zeros != 0 || significand_size != 0 || fspecs.showpoint;
+ size += 1 + (pointy ? 1 : 0) + to_unsigned(num_zeros);
+ return write_padded<align::right>(out, specs, size, [&](iterator it) {
+ if (sign) *it++ = detail::sign<Char>(sign);
+ *it++ = zero;
+ if (!pointy) return it;
+ *it++ = decimal_point;
+ it = detail::fill_n(it, num_zeros, zero);
+ return write_significand<Char>(it, significand, significand_size);
+ });
+}
+
+// A no-op digit grouping with no separator, used where locale-aware grouping
+// is unavailable (e.g. during constant evaluation in write_float below).
+template <typename Char> class fallback_digit_grouping {
+ public:
+ constexpr fallback_digit_grouping(locale_ref, bool) {}
+
+ constexpr Char separator() const { return Char(); }
+
+ constexpr int count_separators(int) const { return 0; }
+
+ template <typename Out, typename C>
+ constexpr Out apply(Out out, basic_string_view<C>) const {
+ return out;
+ }
+};
+
+// Dispatches to do_write_float, substituting the no-op grouping when
+// evaluated at compile time, where locale access is unavailable.
+template <typename OutputIt, typename DecimalFP, typename Char>
+FMT_CONSTEXPR20 auto write_float(OutputIt out, const DecimalFP& f,
+ const basic_format_specs<Char>& specs,
+ float_specs fspecs, locale_ref loc)
+ -> OutputIt {
+ if (is_constant_evaluated()) {
+ return do_write_float<OutputIt, DecimalFP, Char,
+ fallback_digit_grouping<Char>>(out, f, specs, fspecs,
+ loc);
+ } else {
+ return do_write_float(out, f, specs, fspecs, loc);
+ }
+}
+
+// NaN is the only value that compares unequal to itself.
+template <typename T> constexpr bool isnan(T value) {
+ return !(value >= value); // std::isnan doesn't support __float128.
+}
+
+// Detects via SFINAE whether std::isfinite can be called with a T argument.
+template <typename T, typename Enable = void>
+struct has_isfinite : std::false_type {};
+
+template <typename T>
+struct has_isfinite<T, enable_if_t<sizeof(std::isfinite(T())) != 0>>
+ : std::true_type {};
+
+// Finiteness check; falls back to comparisons during constant evaluation
+// where std::isfinite cannot be used portably.
+template <typename T, FMT_ENABLE_IF(std::is_floating_point<T>::value&&
+ has_isfinite<T>::value)>
+FMT_CONSTEXPR20 bool isfinite(T value) {
+ constexpr T inf = T(std::numeric_limits<double>::infinity());
+ if (is_constant_evaluated())
+ return !detail::isnan(value) && value != inf && value != -inf;
+ return std::isfinite(value);
+}
+// Fallback for types std::isfinite does not accept (e.g. __float128).
+template <typename T, FMT_ENABLE_IF(!has_isfinite<T>::value)>
+FMT_CONSTEXPR bool isfinite(T value) {
+ T inf = T(std::numeric_limits<double>::infinity());
+ // std::isfinite doesn't support __float128.
+ return !detail::isnan(value) && value != inf && value != -inf;
+}
+
+// Sign-bit check usable in constant evaluation: when doubles are IEC 559,
+// inspects the bit pattern directly (std::signbit is not constexpr here).
+template <typename T, FMT_ENABLE_IF(is_floating_point<T>::value)>
+FMT_INLINE FMT_CONSTEXPR bool signbit(T value) {
+ if (is_constant_evaluated()) {
+#ifdef __cpp_if_constexpr
+ if constexpr (std::numeric_limits<double>::is_iec559) {
+ auto bits = detail::bit_cast<uint64_t>(static_cast<double>(value));
+ return (bits >> (num_bits<uint64_t>() - 1)) != 0;
+ }
+#endif
+ }
+ return std::signbit(static_cast<double>(value));
+}
+
+// Result of get_round_direction below; unknown means the error bound makes
+// the direction ambiguous.
+enum class round_direction { unknown, up, down };
+
+// Given the divisor (normally a power of 10), the remainder = v % divisor for
+// some number v and the error, returns whether v should be rounded up, down, or
+// whether the rounding direction can't be determined due to error.
+// error should be less than divisor / 2.
+FMT_CONSTEXPR inline round_direction get_round_direction(uint64_t divisor,
+ uint64_t remainder,
+ uint64_t error) {
+ FMT_ASSERT(remainder < divisor, ""); // divisor - remainder won't overflow.
+ FMT_ASSERT(error < divisor, ""); // divisor - error won't overflow.
+ FMT_ASSERT(error < divisor - error, ""); // error * 2 won't overflow.
+ // Round down if (remainder + error) * 2 <= divisor.
+ if (remainder <= divisor - remainder && error * 2 <= divisor - remainder * 2)
+ return round_direction::down;
+ // Round up if (remainder - error) * 2 >= divisor.
+ if (remainder >= error &&
+ remainder - error >= divisor - (remainder - error)) {
+ return round_direction::up;
+ }
+ return round_direction::unknown;
+}
+
+// Result codes returned by gen_digits_handler::on_digit.
+namespace digits {
+enum result {
+ more, // Generate more digits.
+ done, // Done generating digits.
+ error // Digit generation cancelled due to an error.
+};
+}
+
+// Accumulates digits produced by grisu_gen_digits into buf and performs the
+// final rounding once the requested precision is reached.
+struct gen_digits_handler {
+ char* buf; // Output digit buffer (not null-terminated).
+ int size; // Number of digits written so far.
+ int precision; // Requested number of digits.
+ int exp10; // Decimal exponent adjustment.
+ bool fixed; // Whether fixed (as opposed to shortest) output is requested.
+
+ FMT_CONSTEXPR digits::result on_digit(char digit, uint64_t divisor,
+ uint64_t remainder, uint64_t error,
+ bool integral) {
+ FMT_ASSERT(remainder < divisor, "");
+ buf[size++] = digit;
+ if (!integral && error >= remainder) return digits::error;
+ if (size < precision) return digits::more;
+ if (!integral) {
+ // Check if error * 2 < divisor with overflow prevention.
+ // The check is not needed for the integral part because error = 1
+ // and divisor > (1 << 32) there.
+ if (error >= divisor || error >= divisor - error) return digits::error;
+ } else {
+ FMT_ASSERT(error == 1 && divisor > 2, "");
+ }
+ auto dir = get_round_direction(divisor, remainder, error);
+ if (dir != round_direction::up)
+ return dir == round_direction::down ? digits::done : digits::error;
+ // Round up: increment the last digit and propagate the carry.
+ ++buf[size - 1];
+ for (int i = size - 1; i > 0 && buf[i] > '9'; --i) {
+ buf[i] = '0';
+ ++buf[i - 1];
+ }
+ // Carry out of the first digit: 999... became 1000...
+ if (buf[0] > '9') {
+ buf[0] = '1';
+ if (fixed)
+ buf[size++] = '0';
+ else
+ ++exp10;
+ }
+ return digits::done;
+ }
+};
+
+// Adjust fixed precision by exponent because it is relative to decimal
+// point. Throws format_error on int overflow.
+inline FMT_CONSTEXPR20 void adjust_precision(int& precision, int exp10) {
+ if (exp10 > 0 && precision > max_value<int>() - exp10)
+ FMT_THROW(format_error("number is too big"));
+ precision += exp10;
+}
+
+// Generates output using the Grisu digit-gen algorithm.
+// error: the size of the region (lower, upper) outside of which numbers
+// definitely do not round to value (Delta in Grisu3).
+// Returns digits::error when correctness cannot be guaranteed within the
+// error bound, in which case the caller falls back to a slower algorithm.
+FMT_INLINE FMT_CONSTEXPR20 auto grisu_gen_digits(fp value, uint64_t error,
+ int& exp,
+ gen_digits_handler& handler)
+ -> digits::result {
+ const fp one(1ULL << -value.e, value.e);
+ // The integral part of scaled value (p1 in Grisu) = value / one. It cannot be
+ // zero because it contains a product of two 64-bit numbers with MSB set (due
+ // to normalization) - 1, shifted right by at most 60 bits.
+ auto integral = static_cast<uint32_t>(value.f >> -one.e);
+ FMT_ASSERT(integral != 0, "");
+ FMT_ASSERT(integral == value.f >> -one.e, "");
+ // The fractional part of scaled value (p2 in Grisu) c = value % one.
+ uint64_t fractional = value.f & (one.f - 1);
+ exp = count_digits(integral); // kappa in Grisu.
+ // Non-fixed formats require at least one digit and no precision adjustment.
+ if (handler.fixed) {
+ adjust_precision(handler.precision, exp + handler.exp10);
+ // Check if precision is satisfied just by leading zeros, e.g.
+ // format("{:.2f}", 0.001) gives "0.00" without generating any digits.
+ if (handler.precision <= 0) {
+ if (handler.precision < 0) return digits::done;
+ // Divide by 10 to prevent overflow.
+ uint64_t divisor = data::power_of_10_64[exp - 1] << -one.e;
+ auto dir = get_round_direction(divisor, value.f / 10, error * 10);
+ if (dir == round_direction::unknown) return digits::error;
+ handler.buf[handler.size++] = dir == round_direction::up ? '1' : '0';
+ return digits::done;
+ }
+ }
+ // Generate digits for the integral part. This can produce up to 10 digits.
+ do {
+ uint32_t digit = 0;
+ auto divmod_integral = [&](uint32_t divisor) {
+ digit = integral / divisor;
+ integral %= divisor;
+ };
+ // This optimization by Milo Yip reduces the number of integer divisions by
+ // one per iteration.
+ switch (exp) {
+ case 10:
+ divmod_integral(1000000000);
+ break;
+ case 9:
+ divmod_integral(100000000);
+ break;
+ case 8:
+ divmod_integral(10000000);
+ break;
+ case 7:
+ divmod_integral(1000000);
+ break;
+ case 6:
+ divmod_integral(100000);
+ break;
+ case 5:
+ divmod_integral(10000);
+ break;
+ case 4:
+ divmod_integral(1000);
+ break;
+ case 3:
+ divmod_integral(100);
+ break;
+ case 2:
+ divmod_integral(10);
+ break;
+ case 1:
+ digit = integral;
+ integral = 0;
+ break;
+ default:
+ FMT_ASSERT(false, "invalid number of digits");
+ }
+ --exp;
+ auto remainder = (static_cast<uint64_t>(integral) << -one.e) + fractional;
+ auto result = handler.on_digit(static_cast<char>('0' + digit),
+ data::power_of_10_64[exp] << -one.e,
+ remainder, error, true);
+ if (result != digits::more) return result;
+ } while (exp > 0);
+ // Generate digits for the fractional part. Each iteration produces one
+ // digit and scales the error bound by the same factor of 10.
+ for (;;) {
+ fractional *= 10;
+ error *= 10;
+ char digit = static_cast<char>('0' + (fractional >> -one.e));
+ fractional &= one.f - 1;
+ --exp;
+ auto result = handler.on_digit(digit, one.f, fractional, error, false);
+ if (result != digits::more) return result;
+ }
+}
+
+// A fixed-capacity arbitrary-precision unsigned integer used by the Dragon4
+// code in format_dragon below; stores base 2^32 digits (bigits) plus a
+// bigit-granularity exponent exp_ (trailing zero bigits are not stored).
+class bigint {
+ private:
+ // A bigint is stored as an array of bigits (big digits), with bigit at index
+ // 0 being the least significant one.
+ using bigit = uint32_t;
+ using double_bigit = uint64_t;
+ enum { bigits_capacity = 32 };
+ basic_memory_buffer<bigit, bigits_capacity> bigits_;
+ int exp_;
+
+ FMT_CONSTEXPR20 bigit operator[](int index) const {
+ return bigits_[to_unsigned(index)];
+ }
+ FMT_CONSTEXPR20 bigit& operator[](int index) {
+ return bigits_[to_unsigned(index)];
+ }
+
+ static constexpr const int bigit_bits = num_bits<bigit>();
+
+ friend struct formatter<bigint>;
+
+ // Subtracts other + borrow from the bigit at index, updating borrow.
+ FMT_CONSTEXPR20 void subtract_bigits(int index, bigit other, bigit& borrow) {
+ auto result = static_cast<double_bigit>((*this)[index]) - other - borrow;
+ (*this)[index] = static_cast<bigit>(result);
+ borrow = static_cast<bigit>(result >> (bigit_bits * 2 - 1));
+ }
+
+ // Drops high zero bigits, always keeping at least one bigit.
+ FMT_CONSTEXPR20 void remove_leading_zeros() {
+ int num_bigits = static_cast<int>(bigits_.size()) - 1;
+ while (num_bigits > 0 && (*this)[num_bigits] == 0) --num_bigits;
+ bigits_.resize(to_unsigned(num_bigits + 1));
+ }
+
+ // Computes *this -= other assuming aligned bigints and *this >= other.
+ FMT_CONSTEXPR20 void subtract_aligned(const bigint& other) {
+ FMT_ASSERT(other.exp_ >= exp_, "unaligned bigints");
+ FMT_ASSERT(compare(*this, other) >= 0, "");
+ bigit borrow = 0;
+ int i = other.exp_ - exp_;
+ for (size_t j = 0, n = other.bigits_.size(); j != n; ++i, ++j)
+ subtract_bigits(i, other.bigits_[j], borrow);
+ while (borrow > 0) subtract_bigits(i, 0, borrow);
+ remove_leading_zeros();
+ }
+
+ // Multiplies by a 32-bit value with carry propagation.
+ FMT_CONSTEXPR20 void multiply(uint32_t value) {
+ const double_bigit wide_value = value;
+ bigit carry = 0;
+ for (size_t i = 0, n = bigits_.size(); i < n; ++i) {
+ double_bigit result = bigits_[i] * wide_value + carry;
+ bigits_[i] = static_cast<bigit>(result);
+ carry = static_cast<bigit>(result >> bigit_bits);
+ }
+ if (carry != 0) bigits_.push_back(carry);
+ }
+
+ // Multiplies by a 64- or 128-bit value, splitting it into two halves so the
+ // partial products fit in UInt.
+ template <typename UInt, FMT_ENABLE_IF(std::is_same<UInt, uint64_t>::value ||
+ std::is_same<UInt, uint128_t>::value)>
+ FMT_CONSTEXPR20 void multiply(UInt value) {
+ using half_uint =
+ conditional_t<std::is_same<UInt, uint128_t>::value, uint64_t, uint32_t>;
+ const int shift = num_bits<half_uint>() - bigit_bits;
+ const UInt lower = static_cast<half_uint>(value);
+ const UInt upper = value >> num_bits<half_uint>();
+ UInt carry = 0;
+ for (size_t i = 0, n = bigits_.size(); i < n; ++i) {
+ UInt result = lower * bigits_[i] + static_cast<bigit>(carry);
+ carry = (upper * bigits_[i] << shift) + (result >> bigit_bits) +
+ (carry >> bigit_bits);
+ bigits_[i] = static_cast<bigit>(result);
+ }
+ while (carry != 0) {
+ bigits_.push_back(static_cast<bigit>(carry));
+ carry >>= bigit_bits;
+ }
+ }
+
+ // Assigns an unsigned integer, least significant bigit first.
+ template <typename UInt, FMT_ENABLE_IF(std::is_same<UInt, uint64_t>::value ||
+ std::is_same<UInt, uint128_t>::value)>
+ FMT_CONSTEXPR20 void assign(UInt n) {
+ size_t num_bigits = 0;
+ do {
+ bigits_[num_bigits++] = static_cast<bigit>(n);
+ n >>= bigit_bits;
+ } while (n != 0);
+ bigits_.resize(num_bigits);
+ exp_ = 0;
+ }
+
+ public:
+ FMT_CONSTEXPR20 bigint() : exp_(0) {}
+ explicit bigint(uint64_t n) { assign(n); }
+
+ bigint(const bigint&) = delete;
+ void operator=(const bigint&) = delete;
+
+ FMT_CONSTEXPR20 void assign(const bigint& other) {
+ auto size = other.bigits_.size();
+ bigits_.resize(size);
+ auto data = other.bigits_.data();
+ std::copy(data, data + size, make_checked(bigits_.data(), size));
+ exp_ = other.exp_;
+ }
+
+ // Assigns a positive integer of any supported width.
+ template <typename Int> FMT_CONSTEXPR20 void operator=(Int n) {
+ FMT_ASSERT(n > 0, "");
+ assign(uint64_or_128_t<Int>(n));
+ }
+
+ // Total number of bigits including the exp_ implicit zero bigits.
+ FMT_CONSTEXPR20 int num_bigits() const {
+ return static_cast<int>(bigits_.size()) + exp_;
+ }
+
+ // Multiplies by pow(2, shift); whole-bigit shifts only adjust exp_.
+ FMT_NOINLINE FMT_CONSTEXPR20 bigint& operator<<=(int shift) {
+ FMT_ASSERT(shift >= 0, "");
+ exp_ += shift / bigit_bits;
+ shift %= bigit_bits;
+ if (shift == 0) return *this;
+ bigit carry = 0;
+ for (size_t i = 0, n = bigits_.size(); i < n; ++i) {
+ bigit c = bigits_[i] >> (bigit_bits - shift);
+ bigits_[i] = (bigits_[i] << shift) + carry;
+ carry = c;
+ }
+ if (carry != 0) bigits_.push_back(carry);
+ return *this;
+ }
+
+ // Multiplies by a positive integer value.
+ template <typename Int> FMT_CONSTEXPR20 bigint& operator*=(Int value) {
+ FMT_ASSERT(value > 0, "");
+ multiply(uint32_or_64_or_128_t<Int>(value));
+ return *this;
+ }
+
+ // Three-way comparison; returns -1, 0 or 1.
+ friend FMT_CONSTEXPR20 int compare(const bigint& lhs, const bigint& rhs) {
+ int num_lhs_bigits = lhs.num_bigits(), num_rhs_bigits = rhs.num_bigits();
+ if (num_lhs_bigits != num_rhs_bigits)
+ return num_lhs_bigits > num_rhs_bigits ? 1 : -1;
+ int i = static_cast<int>(lhs.bigits_.size()) - 1;
+ int j = static_cast<int>(rhs.bigits_.size()) - 1;
+ int end = i - j;
+ if (end < 0) end = 0;
+ for (; i >= end; --i, --j) {
+ bigit lhs_bigit = lhs[i], rhs_bigit = rhs[j];
+ if (lhs_bigit != rhs_bigit) return lhs_bigit > rhs_bigit ? 1 : -1;
+ }
+ if (i != j) return i > j ? 1 : -1;
+ return 0;
+ }
+
+ // Returns compare(lhs1 + lhs2, rhs).
+ friend FMT_CONSTEXPR20 int add_compare(const bigint& lhs1, const bigint& lhs2,
+ const bigint& rhs) {
+ auto minimum = [](int a, int b) { return a < b ? a : b; };
+ auto maximum = [](int a, int b) { return a > b ? a : b; };
+ int max_lhs_bigits = maximum(lhs1.num_bigits(), lhs2.num_bigits());
+ int num_rhs_bigits = rhs.num_bigits();
+ if (max_lhs_bigits + 1 < num_rhs_bigits) return -1;
+ if (max_lhs_bigits > num_rhs_bigits) return 1;
+ auto get_bigit = [](const bigint& n, int i) -> bigit {
+ return i >= n.exp_ && i < n.num_bigits() ? n[i - n.exp_] : 0;
+ };
+ double_bigit borrow = 0;
+ int min_exp = minimum(minimum(lhs1.exp_, lhs2.exp_), rhs.exp_);
+ for (int i = num_rhs_bigits - 1; i >= min_exp; --i) {
+ double_bigit sum =
+ static_cast<double_bigit>(get_bigit(lhs1, i)) + get_bigit(lhs2, i);
+ bigit rhs_bigit = get_bigit(rhs, i);
+ if (sum > rhs_bigit + borrow) return 1;
+ borrow = rhs_bigit + borrow - sum;
+ if (borrow > 1) return -1;
+ borrow <<= bigit_bits;
+ }
+ return borrow != 0 ? -1 : 0;
+ }
+
+ // Assigns pow(10, exp) to this bigint.
+ FMT_CONSTEXPR20 void assign_pow10(int exp) {
+ FMT_ASSERT(exp >= 0, "");
+ if (exp == 0) return *this = 1;
+ // Find the top bit.
+ int bitmask = 1;
+ while (exp >= bitmask) bitmask <<= 1;
+ bitmask >>= 1;
+ // pow(10, exp) = pow(5, exp) * pow(2, exp). First compute pow(5, exp) by
+ // repeated squaring and multiplication.
+ *this = 5;
+ bitmask >>= 1;
+ while (bitmask != 0) {
+ square();
+ if ((exp & bitmask) != 0) *this *= 5;
+ bitmask >>= 1;
+ }
+ *this <<= exp; // Multiply by pow(2, exp) by shifting.
+ }
+
+ // Squares this bigint in place by summing cross-product terms.
+ FMT_CONSTEXPR20 void square() {
+ int num_bigits = static_cast<int>(bigits_.size());
+ int num_result_bigits = 2 * num_bigits;
+ basic_memory_buffer<bigit, bigits_capacity> n(std::move(bigits_));
+ bigits_.resize(to_unsigned(num_result_bigits));
+ auto sum = uint128_t();
+ for (int bigit_index = 0; bigit_index < num_bigits; ++bigit_index) {
+ // Compute bigit at position bigit_index of the result by adding
+ // cross-product terms n[i] * n[j] such that i + j == bigit_index.
+ for (int i = 0, j = bigit_index; j >= 0; ++i, --j) {
+ // Most terms are multiplied twice which can be optimized in the future.
+ sum += static_cast<double_bigit>(n[i]) * n[j];
+ }
+ (*this)[bigit_index] = static_cast<bigit>(sum);
+ sum >>= num_bits<bigit>(); // Compute the carry.
+ }
+ // Do the same for the top half.
+ for (int bigit_index = num_bigits; bigit_index < num_result_bigits;
+ ++bigit_index) {
+ for (int j = num_bigits - 1, i = bigit_index - j; i < num_bigits;)
+ sum += static_cast<double_bigit>(n[i++]) * n[j--];
+ (*this)[bigit_index] = static_cast<bigit>(sum);
+ sum >>= num_bits<bigit>();
+ }
+ remove_leading_zeros();
+ exp_ *= 2;
+ }
+
+ // If this bigint has a bigger exponent than other, adds trailing zero to make
+ // exponents equal. This simplifies some operations such as subtraction.
+ FMT_CONSTEXPR20 void align(const bigint& other) {
+ int exp_difference = exp_ - other.exp_;
+ if (exp_difference <= 0) return;
+ int num_bigits = static_cast<int>(bigits_.size());
+ bigits_.resize(to_unsigned(num_bigits + exp_difference));
+ for (int i = num_bigits - 1, j = i + exp_difference; i >= 0; --i, --j)
+ bigits_[j] = bigits_[i];
+ std::uninitialized_fill_n(bigits_.data(), exp_difference, 0);
+ exp_ -= exp_difference;
+ }
+
+ // Divides this bignum by divisor, assigning the remainder to this and
+ // returning the quotient.
+ FMT_CONSTEXPR20 int divmod_assign(const bigint& divisor) {
+ FMT_ASSERT(this != &divisor, "");
+ if (compare(*this, divisor) < 0) return 0;
+ FMT_ASSERT(divisor.bigits_[divisor.bigits_.size() - 1u] != 0, "");
+ align(divisor);
+ int quotient = 0;
+ // Repeated subtraction; the quotient is a single decimal digit in the
+ // Dragon4 use below, so the loop runs at most a few times.
+ do {
+ subtract_aligned(divisor);
+ ++quotient;
+ } while (compare(*this, divisor) >= 0);
+ return quotient;
+ }
+};
+
+// format_dragon flags (bitmask).
+enum dragon {
+ predecessor_closer = 1,
+ fixup = 2, // Run fixup to correct exp10 which can be off by one.
+ fixed = 4,
+};
+
+// Formats a floating-point number using a variation of the Fixed-Precision
+// Positive Floating-Point Printout ((FPP)^2) algorithm by Steele & White:
+// https://fmt.dev/papers/p372-steele.pdf.
+// num_digits < 0 requests the shortest round-trip representation; exp10 is
+// the decimal exponent estimate and is adjusted in place.
+FMT_CONSTEXPR20 inline void format_dragon(basic_fp<uint128_t> value,
+ unsigned flags, int num_digits,
+ buffer<char>& buf, int& exp10) {
+ bigint numerator; // 2 * R in (FPP)^2.
+ bigint denominator; // 2 * S in (FPP)^2.
+ // lower and upper are differences between value and corresponding boundaries.
+ bigint lower; // (M^- in (FPP)^2).
+ bigint upper_store; // upper's value if different from lower.
+ bigint* upper = nullptr; // (M^+ in (FPP)^2).
+ // Shift numerator and denominator by an extra bit or two (if lower boundary
+ // is closer) to make lower and upper integers. This eliminates multiplication
+ // by 2 during later computations.
+ bool is_predecessor_closer = (flags & dragon::predecessor_closer) != 0;
+ int shift = is_predecessor_closer ? 2 : 1;
+ if (value.e >= 0) {
+ numerator = value.f;
+ numerator <<= value.e + shift;
+ lower = 1;
+ lower <<= value.e;
+ if (is_predecessor_closer) {
+ upper_store = 1;
+ upper_store <<= value.e + 1;
+ upper = &upper_store;
+ }
+ denominator.assign_pow10(exp10);
+ denominator <<= shift;
+ } else if (exp10 < 0) {
+ numerator.assign_pow10(-exp10);
+ lower.assign(numerator);
+ if (is_predecessor_closer) {
+ upper_store.assign(numerator);
+ upper_store <<= 1;
+ upper = &upper_store;
+ }
+ numerator *= value.f;
+ numerator <<= shift;
+ denominator = 1;
+ denominator <<= shift - value.e;
+ } else {
+ numerator = value.f;
+ numerator <<= shift;
+ denominator.assign_pow10(exp10);
+ denominator <<= shift - value.e;
+ lower = 1;
+ if (is_predecessor_closer) {
+ upper_store = 1ULL << 1;
+ upper = &upper_store;
+ }
+ }
+ // Even significands round-to-even towards the boundaries (<= vs <).
+ bool even = (value.f & 1) == 0;
+ if (!upper) upper = &lower;
+ if ((flags & dragon::fixup) != 0) {
+ if (add_compare(numerator, *upper, denominator) + even <= 0) {
+ --exp10;
+ numerator *= 10;
+ if (num_digits < 0) {
+ lower *= 10;
+ if (upper != &lower) *upper *= 10;
+ }
+ }
+ if ((flags & dragon::fixed) != 0) adjust_precision(num_digits, exp10 + 1);
+ }
+ // Invariant: value == (numerator / denominator) * pow(10, exp10).
+ if (num_digits < 0) {
+ // Generate the shortest representation.
+ num_digits = 0;
+ char* data = buf.data();
+ for (;;) {
+ int digit = numerator.divmod_assign(denominator);
+ bool low = compare(numerator, lower) - even < 0; // numerator <[=] lower.
+ // numerator + upper >[=] pow10:
+ bool high = add_compare(numerator, *upper, denominator) + even > 0;
+ data[num_digits++] = static_cast<char>('0' + digit);
+ if (low || high) {
+ if (!low) {
+ ++data[num_digits - 1];
+ } else if (high) {
+ int result = add_compare(numerator, numerator, denominator);
+ // Round half to even.
+ if (result > 0 || (result == 0 && (digit % 2) != 0))
+ ++data[num_digits - 1];
+ }
+ buf.try_resize(to_unsigned(num_digits));
+ exp10 -= num_digits - 1;
+ return;
+ }
+ numerator *= 10;
+ lower *= 10;
+ if (upper != &lower) *upper *= 10;
+ }
+ }
+ // Generate the given number of digits.
+ exp10 -= num_digits - 1;
+ if (num_digits == 0) {
+ denominator *= 10;
+ auto digit = add_compare(numerator, numerator, denominator) > 0 ? '1' : '0';
+ buf.push_back(digit);
+ return;
+ }
+ buf.try_resize(to_unsigned(num_digits));
+ for (int i = 0; i < num_digits - 1; ++i) {
+ int digit = numerator.divmod_assign(denominator);
+ buf[i] = static_cast<char>('0' + digit);
+ numerator *= 10;
+ }
+ int digit = numerator.divmod_assign(denominator);
+ // Round the last digit half to even.
+ auto result = add_compare(numerator, numerator, denominator);
+ if (result > 0 || (result == 0 && (digit % 2) != 0)) {
+ if (digit == 9) {
+ const auto overflow = '0' + 10;
+ buf[num_digits - 1] = overflow;
+ // Propagate the carry.
+ for (int i = num_digits - 1; i > 0 && buf[i] == overflow; --i) {
+ buf[i] = '0';
+ ++buf[i - 1];
+ }
+ if (buf[0] == overflow) {
+ buf[0] = '1';
+ ++exp10;
+ }
+ return;
+ }
+ ++digit;
+ }
+ buf[num_digits - 1] = static_cast<char>('0' + digit);
+}
+
+template <typename Float>
+FMT_CONSTEXPR20 auto format_float(Float value, int precision, float_specs specs,
+ buffer<char>& buf) -> int {
+ // float is passed as double to reduce the number of instantiations.
+ static_assert(!std::is_same<Float, float>::value, "");
+ FMT_ASSERT(value >= 0, "value is negative");
+ auto converted_value = convert_float(value);
+
+ const bool fixed = specs.format == float_format::fixed;
+ if (value <= 0) { // <= instead of == to silence a warning.
+ if (precision <= 0 || !fixed) {
+ buf.push_back('0');
+ return 0;
+ }
+ buf.try_resize(to_unsigned(precision));
+ fill_n(buf.data(), precision, '0');
+ return -precision;
+ }
+
+ int exp = 0;
+ bool use_dragon = true;
+ unsigned dragon_flags = 0;
+ if (!is_fast_float<Float>()) {
+ const auto inv_log2_10 = 0.3010299956639812; // 1 / log2(10)
+ using info = dragonbox::float_info<decltype(converted_value)>;
+ const auto f = basic_fp<typename info::carrier_uint>(converted_value);
+ // Compute exp, an approximate power of 10, such that
+ // 10^(exp - 1) <= value < 10^exp or 10^exp <= value < 10^(exp + 1).
+ // This is based on log10(value) == log2(value) / log2(10) and approximation
+ // of log2(value) by e + num_fraction_bits idea from double-conversion.
+ exp = static_cast<int>(
+ std::ceil((f.e + count_digits<1>(f.f) - 1) * inv_log2_10 - 1e-10));
+ dragon_flags = dragon::fixup;
+ } else if (!is_constant_evaluated() && precision < 0) {
+ // Use Dragonbox for the shortest format.
+ if (specs.binary32) {
+ auto dec = dragonbox::to_decimal(static_cast<float>(value));
+ write<char>(buffer_appender<char>(buf), dec.significand);
+ return dec.exponent;
+ }
+ auto dec = dragonbox::to_decimal(static_cast<double>(value));
+ write<char>(buffer_appender<char>(buf), dec.significand);
+ return dec.exponent;
+ } else {
+ // Use Grisu + Dragon4 for the given precision:
+ // https://www.cs.tufts.edu/~nr/cs257/archive/florian-loitsch/printf.pdf.
+ const int min_exp = -60; // alpha in Grisu.
+ int cached_exp10 = 0; // K in Grisu.
+ fp normalized = normalize(fp(converted_value));
+ const auto cached_pow = get_cached_power(
+ min_exp - (normalized.e + fp::num_significand_bits), cached_exp10);
+ normalized = normalized * cached_pow;
+ gen_digits_handler handler{buf.data(), 0, precision, -cached_exp10, fixed};
+ if (grisu_gen_digits(normalized, 1, exp, handler) != digits::error &&
+ !is_constant_evaluated()) {
+ exp += handler.exp10;
+ buf.try_resize(to_unsigned(handler.size));
+ use_dragon = false;
+ } else {
+ exp += handler.size - cached_exp10 - 1;
+ precision = handler.precision;
+ }
+ }
+ if (use_dragon) {
+ auto f = basic_fp<uint128_t>();
+ bool is_predecessor_closer = specs.binary32
+ ? f.assign(static_cast<float>(value))
+ : f.assign(converted_value);
+ if (is_predecessor_closer) dragon_flags |= dragon::predecessor_closer;
+ if (fixed) dragon_flags |= dragon::fixed;
+ // Limit precision to the maximum possible number of significant digits in
+ // an IEEE754 double because we don't need to generate zeros.
+ const int max_double_digits = 767;
+ if (precision > max_double_digits) precision = max_double_digits;
+ format_dragon(f, dragon_flags, precision, buf, exp);
+ }
+ if (!fixed && !specs.showpoint) {
+ // Remove trailing zeros.
+ auto num_digits = buf.size();
+ while (num_digits > 0 && buf[num_digits - 1] == '0') {
+ --num_digits;
+ ++exp;
+ }
+ buf.try_resize(num_digits);
+ }
+ return exp;
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_floating_point<T>::value)>
+FMT_CONSTEXPR20 auto write(OutputIt out, T value,
+ basic_format_specs<Char> specs, locale_ref loc = {})
+ -> OutputIt {
+ if (const_check(!is_supported_floating_point(value))) return out;
+ float_specs fspecs = parse_float_type_spec(specs);
+ fspecs.sign = specs.sign;
+ if (detail::signbit(value)) { // value < 0 is false for NaN so use signbit.
+ fspecs.sign = sign::minus;
+ value = -value;
+ } else if (fspecs.sign == sign::minus) {
+ fspecs.sign = sign::none;
+ }
+
+ if (!detail::isfinite(value))
+ return write_nonfinite(out, detail::isnan(value), specs, fspecs);
+
+ if (specs.align == align::numeric && fspecs.sign) {
+ auto it = reserve(out, 1);
+ *it++ = detail::sign<Char>(fspecs.sign);
+ out = base_iterator(out, it);
+ fspecs.sign = sign::none;
+ if (specs.width != 0) --specs.width;
+ }
+
+ memory_buffer buffer;
+ if (fspecs.format == float_format::hex) {
+ if (fspecs.sign) buffer.push_back(detail::sign<char>(fspecs.sign));
+ snprintf_float(convert_float(value), specs.precision, fspecs, buffer);
+ return write_bytes<align::right>(out, {buffer.data(), buffer.size()},
+ specs);
+ }
+ int precision = specs.precision >= 0 || specs.type == presentation_type::none
+ ? specs.precision
+ : 6;
+ if (fspecs.format == float_format::exp) {
+ if (precision == max_value<int>())
+ throw_format_error("number is too big");
+ else
+ ++precision;
+ } else if (fspecs.format != float_format::fixed && precision == 0) {
+ precision = 1;
+ }
+ if (const_check(std::is_same<T, float>())) fspecs.binary32 = true;
+ int exp = format_float(convert_float(value), precision, fspecs, buffer);
+ fspecs.precision = precision;
+ auto f = big_decimal_fp{buffer.data(), static_cast<int>(buffer.size()), exp};
+ return write_float(out, f, specs, fspecs, loc);
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_fast_float<T>::value)>
+FMT_CONSTEXPR20 auto write(OutputIt out, T value) -> OutputIt {
+ if (is_constant_evaluated())
+ return write(out, value, basic_format_specs<Char>());
+ if (const_check(!is_supported_floating_point(value))) return out;
+
+ auto fspecs = float_specs();
+ if (detail::signbit(value)) {
+ fspecs.sign = sign::minus;
+ value = -value;
+ }
+
+ constexpr auto specs = basic_format_specs<Char>();
+ using floaty = conditional_t<std::is_same<T, long double>::value, double, T>;
+ using uint = typename dragonbox::float_info<floaty>::carrier_uint;
+ uint mask = exponent_mask<floaty>();
+ if ((bit_cast<uint>(value) & mask) == mask)
+ return write_nonfinite(out, std::isnan(value), specs, fspecs);
+
+ auto dec = dragonbox::to_decimal(static_cast<floaty>(value));
+ return write_float(out, dec, specs, fspecs, {});
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_floating_point<T>::value &&
+ !is_fast_float<T>::value)>
+inline auto write(OutputIt out, T value) -> OutputIt {
+ return write(out, value, basic_format_specs<Char>());
+}
+
+template <typename Char, typename OutputIt>
+auto write(OutputIt out, monostate, basic_format_specs<Char> = {},
+ locale_ref = {}) -> OutputIt {
+ FMT_ASSERT(false, "");
+ return out;
+}
+
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out, basic_string_view<Char> value)
+ -> OutputIt {
+ auto it = reserve(out, value.size());
+ it = copy_str_noinline<Char>(value.begin(), value.end(), it);
+ return base_iterator(out, it);
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(is_string<T>::value)>
+constexpr auto write(OutputIt out, const T& value) -> OutputIt {
+ return write<Char>(out, to_string_view(value));
+}
+
+// FMT_ENABLE_IF() condition separated to workaround an MSVC bug.
+template <
+ typename Char, typename OutputIt, typename T,
+ bool check =
+ std::is_enum<T>::value && !std::is_same<T, Char>::value &&
+ mapped_type_constant<T, basic_format_context<OutputIt, Char>>::value !=
+ type::custom_type,
+ FMT_ENABLE_IF(check)>
+FMT_CONSTEXPR auto write(OutputIt out, T value) -> OutputIt {
+ return write<Char>(out, static_cast<underlying_t<T>>(value));
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(std::is_same<T, bool>::value)>
+FMT_CONSTEXPR auto write(OutputIt out, T value,
+ const basic_format_specs<Char>& specs = {},
+ locale_ref = {}) -> OutputIt {
+ return specs.type != presentation_type::none &&
+ specs.type != presentation_type::string
+ ? write(out, value ? 1 : 0, specs, {})
+ : write_bytes(out, value ? "true" : "false", specs);
+}
+
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR auto write(OutputIt out, Char value) -> OutputIt {
+ auto it = reserve(out, 1);
+ *it++ = value;
+ return base_iterator(out, it);
+}
+
+template <typename Char, typename OutputIt>
+FMT_CONSTEXPR_CHAR_TRAITS auto write(OutputIt out, const Char* value)
+ -> OutputIt {
+ if (!value) {
+ throw_format_error("string pointer is null");
+ } else {
+ out = write(out, basic_string_view<Char>(value));
+ }
+ return out;
+}
+
+template <typename Char, typename OutputIt, typename T,
+ FMT_ENABLE_IF(std::is_same<T, void>::value)>
+auto write(OutputIt out, const T* value,
+ const basic_format_specs<Char>& specs = {}, locale_ref = {})
+ -> OutputIt {
+ check_pointer_type_spec(specs.type, error_handler());
+ return write_ptr<Char>(out, bit_cast<uintptr_t>(value), &specs);
+}
+
+// A write overload that handles implicit conversions.
+template <typename Char, typename OutputIt, typename T,
+ typename Context = basic_format_context<OutputIt, Char>>
+FMT_CONSTEXPR auto write(OutputIt out, const T& value) -> enable_if_t<
+ std::is_class<T>::value && !is_string<T>::value &&
+ !is_floating_point<T>::value && !std::is_same<T, Char>::value &&
+ !std::is_same<const T&,
+ decltype(arg_mapper<Context>().map(value))>::value,
+ OutputIt> {
+ return write<Char>(out, arg_mapper<Context>().map(value));
+}
+
+template <typename Char, typename OutputIt, typename T,
+ typename Context = basic_format_context<OutputIt, Char>>
+FMT_CONSTEXPR auto write(OutputIt out, const T& value)
+ -> enable_if_t<mapped_type_constant<T, Context>::value == type::custom_type,
+ OutputIt> {
+ using formatter_type =
+ conditional_t<has_formatter<T, Context>::value,
+ typename Context::template formatter_type<T>,
+ fallback_formatter<T, Char>>;
+ auto ctx = Context(out, {}, {});
+ return formatter_type().format(value, ctx);
+}
+
+// An argument visitor that formats the argument and writes it via the output
+// iterator. It's a class and not a generic lambda for compatibility with C++11.
+template <typename Char> struct default_arg_formatter {
+ using iterator = buffer_appender<Char>;
+ using context = buffer_context<Char>;
+
+ iterator out;
+ basic_format_args<context> args;
+ locale_ref loc;
+
+ template <typename T> auto operator()(T value) -> iterator {
+ return write<Char>(out, value);
+ }
+ auto operator()(typename basic_format_arg<context>::handle h) -> iterator {
+ basic_format_parse_context<Char> parse_ctx({});
+ context format_ctx(out, args, loc);
+ h.format(parse_ctx, format_ctx);
+ return format_ctx.out();
+ }
+};
+
+template <typename Char> struct arg_formatter {
+ using iterator = buffer_appender<Char>;
+ using context = buffer_context<Char>;
+
+ iterator out;
+ const basic_format_specs<Char>& specs;
+ locale_ref locale;
+
+ template <typename T>
+ FMT_CONSTEXPR FMT_INLINE auto operator()(T value) -> iterator {
+ return detail::write(out, value, specs, locale);
+ }
+ auto operator()(typename basic_format_arg<context>::handle) -> iterator {
+ // User-defined types are handled separately because they require access
+ // to the parse context.
+ return out;
+ }
+};
+
+template <typename Char> struct custom_formatter {
+ basic_format_parse_context<Char>& parse_ctx;
+ buffer_context<Char>& ctx;
+
+ void operator()(
+ typename basic_format_arg<buffer_context<Char>>::handle h) const {
+ h.format(parse_ctx, ctx);
+ }
+ template <typename T> void operator()(T) const {}
+};
+
+template <typename T>
+using is_integer =
+ bool_constant<is_integral<T>::value && !std::is_same<T, bool>::value &&
+ !std::is_same<T, char>::value &&
+ !std::is_same<T, wchar_t>::value>;
+
+template <typename ErrorHandler> class width_checker {
+ public:
+ explicit FMT_CONSTEXPR width_checker(ErrorHandler& eh) : handler_(eh) {}
+
+ template <typename T, FMT_ENABLE_IF(is_integer<T>::value)>
+ FMT_CONSTEXPR auto operator()(T value) -> unsigned long long {
+ if (is_negative(value)) handler_.on_error("negative width");
+ return static_cast<unsigned long long>(value);
+ }
+
+ template <typename T, FMT_ENABLE_IF(!is_integer<T>::value)>
+ FMT_CONSTEXPR auto operator()(T) -> unsigned long long {
+ handler_.on_error("width is not integer");
+ return 0;
+ }
+
+ private:
+ ErrorHandler& handler_;
+};
+
+template <typename ErrorHandler> class precision_checker {
+ public:
+ explicit FMT_CONSTEXPR precision_checker(ErrorHandler& eh) : handler_(eh) {}
+
+ template <typename T, FMT_ENABLE_IF(is_integer<T>::value)>
+ FMT_CONSTEXPR auto operator()(T value) -> unsigned long long {
+ if (is_negative(value)) handler_.on_error("negative precision");
+ return static_cast<unsigned long long>(value);
+ }
+
+ template <typename T, FMT_ENABLE_IF(!is_integer<T>::value)>
+ FMT_CONSTEXPR auto operator()(T) -> unsigned long long {
+ handler_.on_error("precision is not integer");
+ return 0;
+ }
+
+ private:
+ ErrorHandler& handler_;
+};
+
+template <template <typename> class Handler, typename FormatArg,
+ typename ErrorHandler>
+FMT_CONSTEXPR auto get_dynamic_spec(FormatArg arg, ErrorHandler eh) -> int {
+ unsigned long long value = visit_format_arg(Handler<ErrorHandler>(eh), arg);
+ if (value > to_unsigned(max_value<int>())) eh.on_error("number is too big");
+ return static_cast<int>(value);
+}
+
+template <typename Context, typename ID>
+FMT_CONSTEXPR auto get_arg(Context& ctx, ID id) ->
+ typename Context::format_arg {
+ auto arg = ctx.arg(id);
+ if (!arg) ctx.on_error("argument not found");
+ return arg;
+}
+
+// The standard format specifier handler with checking.
+template <typename Char> class specs_handler : public specs_setter<Char> {
+ private:
+ basic_format_parse_context<Char>& parse_context_;
+ buffer_context<Char>& context_;
+
+ // This is only needed for compatibility with gcc 4.4.
+ using format_arg = basic_format_arg<buffer_context<Char>>;
+
+ FMT_CONSTEXPR auto get_arg(auto_id) -> format_arg {
+ return detail::get_arg(context_, parse_context_.next_arg_id());
+ }
+
+ FMT_CONSTEXPR auto get_arg(int arg_id) -> format_arg {
+ parse_context_.check_arg_id(arg_id);
+ return detail::get_arg(context_, arg_id);
+ }
+
+ FMT_CONSTEXPR auto get_arg(basic_string_view<Char> arg_id) -> format_arg {
+ parse_context_.check_arg_id(arg_id);
+ return detail::get_arg(context_, arg_id);
+ }
+
+ public:
+ FMT_CONSTEXPR specs_handler(basic_format_specs<Char>& specs,
+ basic_format_parse_context<Char>& parse_ctx,
+ buffer_context<Char>& ctx)
+ : specs_setter<Char>(specs), parse_context_(parse_ctx), context_(ctx) {}
+
+ template <typename Id> FMT_CONSTEXPR void on_dynamic_width(Id arg_id) {
+ this->specs_.width = get_dynamic_spec<width_checker>(
+ get_arg(arg_id), context_.error_handler());
+ }
+
+ template <typename Id> FMT_CONSTEXPR void on_dynamic_precision(Id arg_id) {
+ this->specs_.precision = get_dynamic_spec<precision_checker>(
+ get_arg(arg_id), context_.error_handler());
+ }
+
+ void on_error(const char* message) { context_.on_error(message); }
+};
+
+template <template <typename> class Handler, typename Context>
+FMT_CONSTEXPR void handle_dynamic_spec(int& value,
+ arg_ref<typename Context::char_type> ref,
+ Context& ctx) {
+ switch (ref.kind) {
+ case arg_id_kind::none:
+ break;
+ case arg_id_kind::index:
+ value = detail::get_dynamic_spec<Handler>(ctx.arg(ref.val.index),
+ ctx.error_handler());
+ break;
+ case arg_id_kind::name:
+ value = detail::get_dynamic_spec<Handler>(ctx.arg(ref.val.name),
+ ctx.error_handler());
+ break;
+ }
+}
+
+#if FMT_USE_USER_DEFINED_LITERALS
+template <typename Char> struct udl_formatter {
+ basic_string_view<Char> str;
+
+ template <typename... T>
+ auto operator()(T&&... args) const -> std::basic_string<Char> {
+ return vformat(str, fmt::make_format_args<buffer_context<Char>>(args...));
+ }
+};
+
+# if FMT_USE_NONTYPE_TEMPLATE_ARGS
+template <typename T, typename Char, size_t N,
+ fmt::detail_exported::fixed_string<Char, N> Str>
+struct statically_named_arg : view {
+ static constexpr auto name = Str.data;
+
+ const T& value;
+ statically_named_arg(const T& v) : value(v) {}
+};
+
+template <typename T, typename Char, size_t N,
+ fmt::detail_exported::fixed_string<Char, N> Str>
+struct is_named_arg<statically_named_arg<T, Char, N, Str>> : std::true_type {};
+
+template <typename T, typename Char, size_t N,
+ fmt::detail_exported::fixed_string<Char, N> Str>
+struct is_statically_named_arg<statically_named_arg<T, Char, N, Str>>
+ : std::true_type {};
+
+template <typename Char, size_t N,
+ fmt::detail_exported::fixed_string<Char, N> Str>
+struct udl_arg {
+ template <typename T> auto operator=(T&& value) const {
+ return statically_named_arg<T, Char, N, Str>(std::forward<T>(value));
+ }
+};
+# else
+template <typename Char> struct udl_arg {
+ const Char* str;
+
+ template <typename T> auto operator=(T&& value) const -> named_arg<Char, T> {
+ return {str, std::forward<T>(value)};
+ }
+};
+# endif
+#endif // FMT_USE_USER_DEFINED_LITERALS
+
+template <typename Locale, typename Char>
+auto vformat(const Locale& loc, basic_string_view<Char> format_str,
+ basic_format_args<buffer_context<type_identity_t<Char>>> args)
+ -> std::basic_string<Char> {
+ basic_memory_buffer<Char> buffer;
+ detail::vformat_to(buffer, format_str, args, detail::locale_ref(loc));
+ return {buffer.data(), buffer.size()};
+}
+
+using format_func = void (*)(detail::buffer<char>&, int, const char*);
+
+FMT_API void format_error_code(buffer<char>& out, int error_code,
+ string_view message) noexcept;
+
+FMT_API void report_error(format_func func, int error_code,
+ const char* message) noexcept;
+FMT_END_DETAIL_NAMESPACE
+
+FMT_API auto vsystem_error(int error_code, string_view format_str,
+ format_args args) -> std::system_error;
+
+/**
+ \rst
+ Constructs :class:`std::system_error` with a message formatted with
+ ``fmt::format(fmt, args...)``.
+ *error_code* is a system error code as given by ``errno``.
+
+ **Example**::
+
+ // This throws std::system_error with the description
+ // cannot open file 'madeup': No such file or directory
+ // or similar (system message may vary).
+ const char* filename = "madeup";
+ std::FILE* file = std::fopen(filename, "r");
+ if (!file)
+ throw fmt::system_error(errno, "cannot open file '{}'", filename);
+ \endrst
+*/
+template <typename... T>
+auto system_error(int error_code, format_string<T...> fmt, T&&... args)
+ -> std::system_error {
+ return vsystem_error(error_code, fmt, fmt::make_format_args(args...));
+}
+
+/**
+ \rst
+ Formats an error message for an error returned by an operating system or a
+ language runtime, for example a file opening error, and writes it to *out*.
+ The format is the same as the one used by ``std::system_error(ec, message)``
+ where ``ec`` is ``std::error_code(error_code, std::generic_category()})``.
+ It is implementation-defined but normally looks like:
+
+ .. parsed-literal::
+ *<message>*: *<system-message>*
+
+ where *<message>* is the passed message and *<system-message>* is the system
+ message corresponding to the error code.
+ *error_code* is a system error code as given by ``errno``.
+ \endrst
+ */
+FMT_API void format_system_error(detail::buffer<char>& out, int error_code,
+ const char* message) noexcept;
+
+// Reports a system error without throwing an exception.
+// Can be used to report errors from destructors.
+FMT_API void report_system_error(int error_code, const char* message) noexcept;
+
+/** Fast integer formatter. */
+class format_int {
+ private:
+ // Buffer should be large enough to hold all digits (digits10 + 1),
+ // a sign and a null character.
+ enum { buffer_size = std::numeric_limits<unsigned long long>::digits10 + 3 };
+ mutable char buffer_[buffer_size];
+ char* str_;
+
+ template <typename UInt> auto format_unsigned(UInt value) -> char* {
+ auto n = static_cast<detail::uint32_or_64_or_128_t<UInt>>(value);
+ return detail::format_decimal(buffer_, n, buffer_size - 1).begin;
+ }
+
+ template <typename Int> auto format_signed(Int value) -> char* {
+ auto abs_value = static_cast<detail::uint32_or_64_or_128_t<Int>>(value);
+ bool negative = value < 0;
+ if (negative) abs_value = 0 - abs_value;
+ auto begin = format_unsigned(abs_value);
+ if (negative) *--begin = '-';
+ return begin;
+ }
+
+ public:
+ explicit format_int(int value) : str_(format_signed(value)) {}
+ explicit format_int(long value) : str_(format_signed(value)) {}
+ explicit format_int(long long value) : str_(format_signed(value)) {}
+ explicit format_int(unsigned value) : str_(format_unsigned(value)) {}
+ explicit format_int(unsigned long value) : str_(format_unsigned(value)) {}
+ explicit format_int(unsigned long long value)
+ : str_(format_unsigned(value)) {}
+
+ /** Returns the number of characters written to the output buffer. */
+ auto size() const -> size_t {
+ return detail::to_unsigned(buffer_ - str_ + buffer_size - 1);
+ }
+
+ /**
+ Returns a pointer to the output buffer content. No terminating null
+ character is appended.
+ */
+ auto data() const -> const char* { return str_; }
+
+ /**
+ Returns a pointer to the output buffer content with terminating null
+ character appended.
+ */
+ auto c_str() const -> const char* {
+ buffer_[buffer_size - 1] = '\0';
+ return str_;
+ }
+
+ /**
+ \rst
+ Returns the content of the output buffer as an ``std::string``.
+ \endrst
+ */
+ auto str() const -> std::string { return std::string(str_, size()); }
+};
+
+template <typename T, typename Char>
+template <typename FormatContext>
+FMT_CONSTEXPR FMT_INLINE auto
+formatter<T, Char,
+ enable_if_t<detail::type_constant<T, Char>::value !=
+ detail::type::custom_type>>::format(const T& val,
+ FormatContext& ctx)
+ const -> decltype(ctx.out()) {
+ if (specs_.width_ref.kind != detail::arg_id_kind::none ||
+ specs_.precision_ref.kind != detail::arg_id_kind::none) {
+ auto specs = specs_;
+ detail::handle_dynamic_spec<detail::width_checker>(specs.width,
+ specs.width_ref, ctx);
+ detail::handle_dynamic_spec<detail::precision_checker>(
+ specs.precision, specs.precision_ref, ctx);
+ return detail::write<Char>(ctx.out(), val, specs, ctx.locale());
+ }
+ return detail::write<Char>(ctx.out(), val, specs_, ctx.locale());
+}
+
+template <typename Char>
+struct formatter<void*, Char> : formatter<const void*, Char> {
+ template <typename FormatContext>
+ auto format(void* val, FormatContext& ctx) const -> decltype(ctx.out()) {
+ return formatter<const void*, Char>::format(val, ctx);
+ }
+};
+
+template <typename Char, size_t N>
+struct formatter<Char[N], Char> : formatter<basic_string_view<Char>, Char> {
+ template <typename FormatContext>
+ FMT_CONSTEXPR auto format(const Char* val, FormatContext& ctx) const
+ -> decltype(ctx.out()) {
+ return formatter<basic_string_view<Char>, Char>::format(val, ctx);
+ }
+};
+
+// A formatter for types known only at run time such as variant alternatives.
+//
+// Usage:
+// using variant = std::variant<int, std::string>;
+// template <>
+// struct formatter<variant>: dynamic_formatter<> {
+// auto format(const variant& v, format_context& ctx) {
+// return visit([&](const auto& val) {
+// return dynamic_formatter<>::format(val, ctx);
+// }, v);
+// }
+// };
+template <typename Char = char> class dynamic_formatter {
+ private:
+ detail::dynamic_format_specs<Char> specs_;
+ const Char* format_str_;
+
+ struct null_handler : detail::error_handler {
+ void on_align(align_t) {}
+ void on_sign(sign_t) {}
+ void on_hash() {}
+ };
+
+ template <typename Context> void handle_specs(Context& ctx) {
+ detail::handle_dynamic_spec<detail::width_checker>(specs_.width,
+ specs_.width_ref, ctx);
+ detail::handle_dynamic_spec<detail::precision_checker>(
+ specs_.precision, specs_.precision_ref, ctx);
+ }
+
+ public:
+ template <typename ParseContext>
+ FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) {
+ format_str_ = ctx.begin();
+ // Checks are deferred to formatting time when the argument type is known.
+ detail::dynamic_specs_handler<ParseContext> handler(specs_, ctx);
+ return detail::parse_format_specs(ctx.begin(), ctx.end(), handler);
+ }
+
+ template <typename T, typename FormatContext>
+ auto format(const T& val, FormatContext& ctx) -> decltype(ctx.out()) {
+ handle_specs(ctx);
+ detail::specs_checker<null_handler> checker(
+ null_handler(), detail::mapped_type_constant<T, FormatContext>::value);
+ checker.on_align(specs_.align);
+ if (specs_.sign != sign::none) checker.on_sign(specs_.sign);
+ if (specs_.alt) checker.on_hash();
+ if (specs_.precision >= 0) checker.end_precision();
+ return detail::write<Char>(ctx.out(), val, specs_, ctx.locale());
+ }
+};
+
+/**
+ \rst
+ Converts ``p`` to ``const void*`` for pointer formatting.
+
+ **Example**::
+
+ auto s = fmt::format("{}", fmt::ptr(p));
+ \endrst
+ */
+template <typename T> auto ptr(T p) -> const void* {
+ static_assert(std::is_pointer<T>::value, "");
+ return detail::bit_cast<const void*>(p);
+}
+template <typename T> auto ptr(const std::unique_ptr<T>& p) -> const void* {
+ return p.get();
+}
+template <typename T> auto ptr(const std::shared_ptr<T>& p) -> const void* {
+ return p.get();
+}
+
+/**
+ \rst
+ Converts ``e`` to the underlying type.
+
+ **Example**::
+
+ enum class color { red, green, blue };
+ auto s = fmt::format("{}", fmt::underlying(color::red));
+ \endrst
+ */
+template <typename Enum>
+constexpr auto underlying(Enum e) noexcept -> underlying_t<Enum> {
+ return static_cast<underlying_t<Enum>>(e);
+}
+
+namespace enums {
+template <typename Enum, FMT_ENABLE_IF(std::is_enum<Enum>::value)>
+constexpr auto format_as(Enum e) noexcept -> underlying_t<Enum> {
+ return static_cast<underlying_t<Enum>>(e);
+}
+} // namespace enums
+
+class bytes {
+ private:
+ string_view data_;
+ friend struct formatter<bytes>;
+
+ public:
+ explicit bytes(string_view data) : data_(data) {}
+};
+
+template <> struct formatter<bytes> {
+ private:
+ detail::dynamic_format_specs<char> specs_;
+
+ public:
+ template <typename ParseContext>
+ FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) {
+ using handler_type = detail::dynamic_specs_handler<ParseContext>;
+ detail::specs_checker<handler_type> handler(handler_type(specs_, ctx),
+ detail::type::string_type);
+ auto it = parse_format_specs(ctx.begin(), ctx.end(), handler);
+ detail::check_string_type_spec(specs_.type, ctx.error_handler());
+ return it;
+ }
+
+ template <typename FormatContext>
+ auto format(bytes b, FormatContext& ctx) -> decltype(ctx.out()) {
+ detail::handle_dynamic_spec<detail::width_checker>(specs_.width,
+ specs_.width_ref, ctx);
+ detail::handle_dynamic_spec<detail::precision_checker>(
+ specs_.precision, specs_.precision_ref, ctx);
+ return detail::write_bytes(ctx.out(), b.data_, specs_);
+ }
+};
+
+// group_digits_view is not derived from view because it copies the argument.
+template <typename T> struct group_digits_view { T value; };
+
+/**
+ \rst
+ Returns a view that formats an integer value using ',' as a locale-independent
+ thousands separator.
+
+ **Example**::
+
+ fmt::print("{}", fmt::group_digits(12345));
+ // Output: "12,345"
+ \endrst
+ */
+template <typename T> auto group_digits(T value) -> group_digits_view<T> {
+ return {value};
+}
+
+template <typename T> struct formatter<group_digits_view<T>> : formatter<T> {
+ private:
+ detail::dynamic_format_specs<char> specs_;
+
+ public:
+ template <typename ParseContext>
+ FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) {
+ using handler_type = detail::dynamic_specs_handler<ParseContext>;
+ detail::specs_checker<handler_type> handler(handler_type(specs_, ctx),
+ detail::type::int_type);
+ auto it = parse_format_specs(ctx.begin(), ctx.end(), handler);
+ detail::check_string_type_spec(specs_.type, ctx.error_handler());
+ return it;
+ }
+
+ template <typename FormatContext>
+ auto format(group_digits_view<T> t, FormatContext& ctx)
+ -> decltype(ctx.out()) {
+ detail::handle_dynamic_spec<detail::width_checker>(specs_.width,
+ specs_.width_ref, ctx);
+ detail::handle_dynamic_spec<detail::precision_checker>(
+ specs_.precision, specs_.precision_ref, ctx);
+ return detail::write_int_localized(
+ ctx.out(), static_cast<detail::uint64_or_128_t<T>>(t.value), 0, specs_,
+ detail::digit_grouping<char>({"\3", ','}));
+ }
+};
+
+template <typename It, typename Sentinel, typename Char = char>
+struct join_view : detail::view {
+ It begin;
+ Sentinel end;
+ basic_string_view<Char> sep;
+
+ join_view(It b, Sentinel e, basic_string_view<Char> s)
+ : begin(b), end(e), sep(s) {}
+};
+
+template <typename It, typename Sentinel, typename Char>
+struct formatter<join_view<It, Sentinel, Char>, Char> {
+ private:
+ using value_type =
+#ifdef __cpp_lib_ranges
+ std::iter_value_t<It>;
+#else
+ typename std::iterator_traits<It>::value_type;
+#endif
+ using context = buffer_context<Char>;
+ using mapper = detail::arg_mapper<context>;
+
+ template <typename T, FMT_ENABLE_IF(has_formatter<T, context>::value)>
+ static auto map(const T& value) -> const T& {
+ return value;
+ }
+ template <typename T, FMT_ENABLE_IF(!has_formatter<T, context>::value)>
+ static auto map(const T& value) -> decltype(mapper().map(value)) {
+ return mapper().map(value);
+ }
+
+ using formatter_type =
+ conditional_t<is_formattable<value_type, Char>::value,
+ formatter<remove_cvref_t<decltype(map(
+ std::declval<const value_type&>()))>,
+ Char>,
+ detail::fallback_formatter<value_type, Char>>;
+
+ formatter_type value_formatter_;
+
+ public:
+ template <typename ParseContext>
+ FMT_CONSTEXPR auto parse(ParseContext& ctx) -> decltype(ctx.begin()) {
+ return value_formatter_.parse(ctx);
+ }
+
+ template <typename FormatContext>
+ auto format(const join_view<It, Sentinel, Char>& value,
+ FormatContext& ctx) const -> decltype(ctx.out()) {
+ auto it = value.begin;
+ auto out = ctx.out();
+ if (it != value.end) {
+ out = value_formatter_.format(map(*it), ctx);
+ ++it;
+ while (it != value.end) {
+ out = detail::copy_str<Char>(value.sep.begin(), value.sep.end(), out);
+ ctx.advance_to(out);
+ out = value_formatter_.format(map(*it), ctx);
+ ++it;
+ }
+ }
+ return out;
+ }
+};
+
+/**
+ Returns a view that formats the iterator range `[begin, end)` with elements
+ separated by `sep`.
+ */
+template <typename It, typename Sentinel>
+auto join(It begin, Sentinel end, string_view sep) -> join_view<It, Sentinel> {
+ return {begin, end, sep};
+}
+
+/**
+ \rst
+ Returns a view that formats `range` with elements separated by `sep`.
+
+ **Example**::
+
+ std::vector<int> v = {1, 2, 3};
+ fmt::print("{}", fmt::join(v, ", "));
+ // Output: "1, 2, 3"
+
+ ``fmt::join`` applies passed format specifiers to the range elements::
+
+ fmt::print("{:02}", fmt::join(v, ", "));
+ // Output: "01, 02, 03"
+ \endrst
+ */
+template <typename Range>
+auto join(Range&& range, string_view sep)
+ -> join_view<detail::iterator_t<Range>, detail::sentinel_t<Range>> {
+ return join(std::begin(range), std::end(range), sep);
+}
+
+/**
+ \rst
+ Converts *value* to ``std::string`` using the default format for type *T*.
+
+ **Example**::
+
+ #include <fmt/format.h>
+
+ std::string answer = fmt::to_string(42);
+ \endrst
+ */
+template <typename T, FMT_ENABLE_IF(!std::is_integral<T>::value)>
+inline auto to_string(const T& value) -> std::string {
+ auto result = std::string();
+ detail::write<char>(std::back_inserter(result), value);
+ return result;
+}
+
+template <typename T, FMT_ENABLE_IF(std::is_integral<T>::value)>
+FMT_NODISCARD inline auto to_string(T value) -> std::string {
+ // The buffer should be large enough to store the number including the sign
+ // or "false" for bool.
+ constexpr int max_size = detail::digits10<T>() + 2;
+ char buffer[max_size > 5 ? static_cast<unsigned>(max_size) : 5];
+ char* begin = buffer;
+ return std::string(begin, detail::write<char>(begin, value));
+}
+
+template <typename Char, size_t SIZE>
+FMT_NODISCARD auto to_string(const basic_memory_buffer<Char, SIZE>& buf)
+ -> std::basic_string<Char> {
+ auto size = buf.size();
+ detail::assume(size < std::basic_string<Char>().max_size());
+ return std::basic_string<Char>(buf.data(), size);
+}
+
+FMT_BEGIN_DETAIL_NAMESPACE
+
+template <typename Char>
+void vformat_to(
+ buffer<Char>& buf, basic_string_view<Char> fmt,
+ basic_format_args<FMT_BUFFER_CONTEXT(type_identity_t<Char>)> args,
+ locale_ref loc) {
+ // workaround for msvc bug regarding name-lookup in module
+ // link names into function scope
+ using detail::arg_formatter;
+ using detail::buffer_appender;
+ using detail::custom_formatter;
+ using detail::default_arg_formatter;
+ using detail::get_arg;
+ using detail::locale_ref;
+ using detail::parse_format_specs;
+ using detail::specs_checker;
+ using detail::specs_handler;
+ using detail::to_unsigned;
+ using detail::type;
+ using detail::write;
+ auto out = buffer_appender<Char>(buf);
+ if (fmt.size() == 2 && equal2(fmt.data(), "{}")) {
+ auto arg = args.get(0);
+ if (!arg) error_handler().on_error("argument not found");
+ visit_format_arg(default_arg_formatter<Char>{out, args, loc}, arg);
+ return;
+ }
+
+ struct format_handler : error_handler {
+ basic_format_parse_context<Char> parse_context;
+ buffer_context<Char> context;
+
+ format_handler(buffer_appender<Char> p_out, basic_string_view<Char> str,
+ basic_format_args<buffer_context<Char>> p_args,
+ locale_ref p_loc)
+ : parse_context(str), context(p_out, p_args, p_loc) {}
+
+ void on_text(const Char* begin, const Char* end) {
+ auto text = basic_string_view<Char>(begin, to_unsigned(end - begin));
+ context.advance_to(write<Char>(context.out(), text));
+ }
+
+ FMT_CONSTEXPR auto on_arg_id() -> int {
+ return parse_context.next_arg_id();
+ }
+ FMT_CONSTEXPR auto on_arg_id(int id) -> int {
+ return parse_context.check_arg_id(id), id;
+ }
+ FMT_CONSTEXPR auto on_arg_id(basic_string_view<Char> id) -> int {
+ int arg_id = context.arg_id(id);
+ if (arg_id < 0) on_error("argument not found");
+ return arg_id;
+ }
+
+ FMT_INLINE void on_replacement_field(int id, const Char*) {
+ auto arg = get_arg(context, id);
+ context.advance_to(visit_format_arg(
+ default_arg_formatter<Char>{context.out(), context.args(),
+ context.locale()},
+ arg));
+ }
+
+ auto on_format_specs(int id, const Char* begin, const Char* end)
+ -> const Char* {
+ auto arg = get_arg(context, id);
+ if (arg.type() == type::custom_type) {
+ parse_context.advance_to(parse_context.begin() +
+ (begin - &*parse_context.begin()));
+ visit_format_arg(custom_formatter<Char>{parse_context, context}, arg);
+ return parse_context.begin();
+ }
+ auto specs = basic_format_specs<Char>();
+ specs_checker<specs_handler<Char>> handler(
+ specs_handler<Char>(specs, parse_context, context), arg.type());
+ begin = parse_format_specs(begin, end, handler);
+ if (begin == end || *begin != '}')
+ on_error("missing '}' in format string");
+ auto f = arg_formatter<Char>{context.out(), specs, context.locale()};
+ context.advance_to(visit_format_arg(f, arg));
+ return begin;
+ }
+ };
+ detail::parse_format_string<false>(fmt, format_handler(out, fmt, args, loc));
+}
+
+#ifndef FMT_HEADER_ONLY
+extern template FMT_API auto thousands_sep_impl<char>(locale_ref)
+ -> thousands_sep_result<char>;
+extern template FMT_API auto thousands_sep_impl<wchar_t>(locale_ref)
+ -> thousands_sep_result<wchar_t>;
+extern template FMT_API auto decimal_point_impl(locale_ref) -> char;
+extern template FMT_API auto decimal_point_impl(locale_ref) -> wchar_t;
+#endif // FMT_HEADER_ONLY
+
+FMT_END_DETAIL_NAMESPACE
+
+#if FMT_USE_USER_DEFINED_LITERALS
+inline namespace literals {
+/**
+ \rst
+ User-defined literal equivalent of :func:`fmt::arg`.
+
+ **Example**::
+
+ using namespace fmt::literals;
+ fmt::print("Elapsed time: {s:.2f} seconds", "s"_a=1.23);
+ \endrst
+ */
+# if FMT_USE_NONTYPE_TEMPLATE_ARGS
+template <detail_exported::fixed_string Str> constexpr auto operator""_a() {
+ using char_t = remove_cvref_t<decltype(Str.data[0])>;
+ return detail::udl_arg<char_t, sizeof(Str.data) / sizeof(char_t), Str>();
+}
+# else
+constexpr auto operator"" _a(const char* s, size_t) -> detail::udl_arg<char> {
+ return {s};
+}
+# endif
+} // namespace literals
+#endif // FMT_USE_USER_DEFINED_LITERALS
+
+template <typename Locale, FMT_ENABLE_IF(detail::is_locale<Locale>::value)>
+inline auto vformat(const Locale& loc, string_view fmt, format_args args)
+ -> std::string {
+ return detail::vformat(loc, fmt, args);
+}
+
+template <typename Locale, typename... T,
+ FMT_ENABLE_IF(detail::is_locale<Locale>::value)>
+inline auto format(const Locale& loc, format_string<T...> fmt, T&&... args)
+ -> std::string {
+ return vformat(loc, string_view(fmt), fmt::make_format_args(args...));
+}
+
+template <typename OutputIt, typename Locale,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value&&
+ detail::is_locale<Locale>::value)>
+auto vformat_to(OutputIt out, const Locale& loc, string_view fmt,
+ format_args args) -> OutputIt {
+ using detail::get_buffer;
+ auto&& buf = get_buffer<char>(out);
+ detail::vformat_to(buf, fmt, args, detail::locale_ref(loc));
+ return detail::get_iterator(buf);
+}
+
+template <typename OutputIt, typename Locale, typename... T,
+ FMT_ENABLE_IF(detail::is_output_iterator<OutputIt, char>::value&&
+ detail::is_locale<Locale>::value)>
+FMT_INLINE auto format_to(OutputIt out, const Locale& loc,
+ format_string<T...> fmt, T&&... args) -> OutputIt {
+ return vformat_to(out, loc, fmt, fmt::make_format_args(args...));
+}
+
+FMT_MODULE_EXPORT_END
+FMT_END_NAMESPACE
+
+#ifdef FMT_HEADER_ONLY
+# define FMT_FUNC inline
+# include "format-inl.h"
+#else
+# define FMT_FUNC
+#endif
+
+#endif // FMT_FORMAT_H_
diff --git a/subprojects/fmt/include/fmt/ostream.h b/subprojects/fmt/include/fmt/ostream.h
new file mode 100644
index 0000000..394d947
--- /dev/null
+++ b/subprojects/fmt/include/fmt/ostream.h
@@ -0,0 +1,213 @@
+// Formatting library for C++ - std::ostream support
+//
+// Copyright (c) 2012 - present, Victor Zverovich
+// All rights reserved.
+//
+// For the license information refer to format.h.
+
+#ifndef FMT_OSTREAM_H_
+#define FMT_OSTREAM_H_
+
+#include <fstream>
+#include <ostream>
+
+#include "format.h"
+
+FMT_BEGIN_NAMESPACE
+
+template <typename OutputIt, typename Char> class basic_printf_context;
+
+namespace detail {
+
+// Checks if T has a user-defined operator<<.
+template <typename T, typename Char, typename Enable = void>
+class is_streamable {
+ private:
+ template <typename U>
+ static auto test(int)
+ -> bool_constant<sizeof(std::declval<std::basic_ostream<Char>&>()
+ << std::declval<U>()) != 0>;
+
+ template <typename> static auto test(...) -> std::false_type;
+
+ using result = decltype(test<T>(0));
+
+ public:
+ is_streamable() = default;
+
+ static const bool value = result::value;
+};
+
+// Formatting of built-in types and arrays is intentionally disabled because
+// it's handled by standard (non-ostream) formatters.
+template <typename T, typename Char>
+struct is_streamable<
+ T, Char,
+ enable_if_t<
+ std::is_arithmetic<T>::value || std::is_array<T>::value ||
+ std::is_pointer<T>::value || std::is_same<T, char8_type>::value ||
+ std::is_convertible<T, fmt::basic_string_view<Char>>::value ||
+ std::is_same<T, std_string_view<Char>>::value ||
+ (std::is_convertible<T, int>::value && !std::is_enum<T>::value)>>
+ : std::false_type {};
+
+template <typename Char> FILE* get_file(std::basic_filebuf<Char>&) {
+ return nullptr;
+}
+
+struct dummy_filebuf {
+ FILE* _Myfile;
+};
+template <typename T, typename U = int> struct ms_filebuf {
+ using type = dummy_filebuf;
+};
+template <typename T> struct ms_filebuf<T, decltype(T::_Myfile, 0)> {
+ using type = T;
+};
+using filebuf_type = ms_filebuf<std::filebuf>::type;
+
+FILE* get_file(filebuf_type& buf);
+
+// Generate a unique explicit instantion in every translation unit using a tag
+// type in an anonymous namespace.
+namespace {
+struct filebuf_access_tag {};
+} // namespace
+template <typename Tag, typename FileMemberPtr, FileMemberPtr file>
+class filebuf_access {
+ friend FILE* get_file(filebuf_type& buf) { return buf.*file; }
+};
+template class filebuf_access<filebuf_access_tag,
+ decltype(&filebuf_type::_Myfile),
+ &filebuf_type::_Myfile>;
+
+inline bool write(std::filebuf& buf, fmt::string_view data) {
+ FILE* f = get_file(buf);
+ if (!f) return false;
+ print(f, data);
+ return true;
+}
+inline bool write(std::wfilebuf&, fmt::basic_string_view<wchar_t>) {
+ return false;
+}
+
+// Write the content of buf to os.
+// It is a separate function rather than a part of vprint to simplify testing.
+template <typename Char>
+void write_buffer(std::basic_ostream<Char>& os, buffer<Char>& buf) {
+ if (const_check(FMT_MSC_VERSION)) {
+ auto filebuf = dynamic_cast<std::basic_filebuf<Char>*>(os.rdbuf());
+ if (filebuf && write(*filebuf, {buf.data(), buf.size()})) return;
+ }
+ const Char* buf_data = buf.data();
+ using unsigned_streamsize = std::make_unsigned<std::streamsize>::type;
+ unsigned_streamsize size = buf.size();
+ unsigned_streamsize max_size = to_unsigned(max_value<std::streamsize>());
+ do {
+ unsigned_streamsize n = size <= max_size ? size : max_size;
+ os.write(buf_data, static_cast<std::streamsize>(n));
+ buf_data += n;
+ size -= n;
+ } while (size != 0);
+}
+
+template <typename Char, typename T>
+void format_value(buffer<Char>& buf, const T& value,
+ locale_ref loc = locale_ref()) {
+ auto&& format_buf = formatbuf<std::basic_streambuf<Char>>(buf);
+ auto&& output = std::basic_ostream<Char>(&format_buf);
+#if !defined(FMT_STATIC_THOUSANDS_SEPARATOR)
+ if (loc) output.imbue(loc.get<std::locale>());
+#endif
+ output << value;
+ output.exceptions(std::ios_base::failbit | std::ios_base::badbit);
+}
+
+template <typename T> struct streamed_view { const T& value; };
+
+} // namespace detail
+
+// Formats an object of type T that has an overloaded ostream operator<<.
+template <typename Char>
+struct basic_ostream_formatter : formatter<basic_string_view<Char>, Char> {
+ template <typename T, typename OutputIt>
+ auto format(const T& value, basic_format_context<OutputIt, Char>& ctx) const
+ -> OutputIt {
+ auto buffer = basic_memory_buffer<Char>();
+ format_value(buffer, value, ctx.locale());
+ return formatter<basic_string_view<Char>, Char>::format(
+ {buffer.data(), buffer.size()}, ctx);
+ }
+};
+
+using ostream_formatter = basic_ostream_formatter<char>;
+
+template <typename T>
+struct formatter<detail::streamed_view<T>> : ostream_formatter {
+ template <typename OutputIt>
+ auto format(detail::streamed_view<T> view,
+ basic_format_context<OutputIt, char>& ctx) const -> OutputIt {
+ return ostream_formatter::format(view.value, ctx);
+ }
+};
+
+/**
+ \rst
+ Returns a view that formats `value` via an ostream ``operator<<``.
+
+ **Example**::
+
+ fmt::print("Current thread id: {}\n",
+ fmt::streamed(std::this_thread::get_id()));
+ \endrst
+ */
+template <typename T>
+auto streamed(const T& value) -> detail::streamed_view<T> {
+ return {value};
+}
+
+namespace detail {
+
+// Formats an object of type T that has an overloaded ostream operator<<.
+template <typename T, typename Char>
+struct fallback_formatter<T, Char, enable_if_t<is_streamable<T, Char>::value>>
+ : basic_ostream_formatter<Char> {
+ using basic_ostream_formatter<Char>::format;
+};
+
+} // namespace detail
+
+FMT_MODULE_EXPORT template <typename Char>
+void vprint(std::basic_ostream<Char>& os,
+ basic_string_view<type_identity_t<Char>> format_str,
+ basic_format_args<buffer_context<type_identity_t<Char>>> args) {
+ auto buffer = basic_memory_buffer<Char>();
+ detail::vformat_to(buffer, format_str, args);
+ detail::write_buffer(os, buffer);
+}
+
+/**
+ \rst
+ Prints formatted data to the stream *os*.
+
+ **Example**::
+
+ fmt::print(cerr, "Don't {}!", "panic");
+ \endrst
+ */
+FMT_MODULE_EXPORT template <typename... T>
+void print(std::ostream& os, format_string<T...> fmt, T&&... args) {
+ vprint(os, fmt, fmt::make_format_args(args...));
+}
+
+FMT_MODULE_EXPORT
+template <typename... Args>
+void print(std::wostream& os,
+ basic_format_string<wchar_t, type_identity_t<Args>...> fmt,
+ Args&&... args) {
+ vprint(os, fmt, fmt::make_format_args<buffer_context<wchar_t>>(args...));
+}
+
+FMT_END_NAMESPACE
+
+#endif // FMT_OSTREAM_H_
diff --git a/subprojects/fmt/meson.build b/subprojects/fmt/meson.build
new file mode 100644
index 0000000..cf9f6b5
--- /dev/null
+++ b/subprojects/fmt/meson.build
@@ -0,0 +1,19 @@
+# Copyright 2022 David Robillard <d@drobilla.net>
+# SPDX-License-Identifier: 0BSD OR GPL-3.0-or-later
+
+project(
+ 'fmt',
+ ['cpp'],
+ default_options: [
+ 'b_ndebug=if-release',
+ 'buildtype=release',
+ 'cpp_std=c++17',
+ ],
+ license: 'MIT',
+ meson_version: '>= 0.56.0',
+ version: '9.0.0',
+)
+
+include_dirs = include_directories('include')
+
+fmt_dep = declare_dependency(include_directories: include_dirs)
diff --git a/waf b/waf
deleted file mode 100755
index e22930a..0000000
--- a/waf
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-
-# Minimal waf script for projects that include waflib directly
-
-from waflib import Context, Scripting
-
-import inspect
-import os
-
-def main():
- script_path = os.path.abspath(inspect.getfile(inspect.getmodule(main)))
- project_path = os.path.dirname(script_path)
- Scripting.waf_entry_point(os.getcwd(), Context.WAFVERSION, project_path)
-
-if __name__ == '__main__':
- main()
diff --git a/waflib/.gitignore b/waflib/.gitignore
deleted file mode 100644
index 8d35cb3..0000000
--- a/waflib/.gitignore
+++ /dev/null
@@ -1,2 +0,0 @@
-__pycache__
-*.pyc
diff --git a/waflib/Build.py b/waflib/Build.py
deleted file mode 100644
index 1afcba6..0000000
--- a/waflib/Build.py
+++ /dev/null
@@ -1,1491 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Classes related to the build phase (build, clean, install, step, etc)
-
-The inheritance tree is the following:
-
-"""
-
-import os, sys, errno, re, shutil, stat
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-from waflib import Node, Runner, TaskGen, Utils, ConfigSet, Task, Logs, Options, Context, Errors
-
-CACHE_DIR = 'c4che'
-"""Name of the cache directory"""
-
-CACHE_SUFFIX = '_cache.py'
-"""ConfigSet cache files for variants are written under :py:attr:´waflib.Build.CACHE_DIR´ in the form ´variant_name´_cache.py"""
-
-INSTALL = 1337
-"""Positive value '->' install, see :py:attr:`waflib.Build.BuildContext.is_install`"""
-
-UNINSTALL = -1337
-"""Negative value '<-' uninstall, see :py:attr:`waflib.Build.BuildContext.is_install`"""
-
-SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
-"""Build class members to save between the runs; these should be all dicts
-except for `root` which represents a :py:class:`waflib.Node.Node` instance
-"""
-
-CFG_FILES = 'cfg_files'
-"""Files from the build directory to hash before starting the build (``config.h`` written during the configuration)"""
-
-POST_AT_ONCE = 0
-"""Post mode: all task generators are posted before any task executed"""
-
-POST_LAZY = 1
-"""Post mode: post the task generators group after group, the tasks in the next group are created when the tasks in the previous groups are done"""
-
-PROTOCOL = -1
-if sys.platform == 'cli':
- PROTOCOL = 0
-
-class BuildContext(Context.Context):
- '''executes the build'''
-
- cmd = 'build'
- variant = ''
-
- def __init__(self, **kw):
- super(BuildContext, self).__init__(**kw)
-
- self.is_install = 0
- """Non-zero value when installing or uninstalling file"""
-
- self.top_dir = kw.get('top_dir', Context.top_dir)
- """See :py:attr:`waflib.Context.top_dir`; prefer :py:attr:`waflib.Build.BuildContext.srcnode`"""
-
- self.out_dir = kw.get('out_dir', Context.out_dir)
- """See :py:attr:`waflib.Context.out_dir`; prefer :py:attr:`waflib.Build.BuildContext.bldnode`"""
-
- self.run_dir = kw.get('run_dir', Context.run_dir)
- """See :py:attr:`waflib.Context.run_dir`"""
-
- self.launch_dir = Context.launch_dir
- """See :py:attr:`waflib.Context.out_dir`; prefer :py:meth:`waflib.Build.BuildContext.launch_node`"""
-
- self.post_mode = POST_LAZY
- """Whether to post the task generators at once or group-by-group (default is group-by-group)"""
-
- self.cache_dir = kw.get('cache_dir')
- if not self.cache_dir:
- self.cache_dir = os.path.join(self.out_dir, CACHE_DIR)
-
- self.all_envs = {}
- """Map names to :py:class:`waflib.ConfigSet.ConfigSet`, the empty string must map to the default environment"""
-
- # ======================================= #
- # cache variables
-
- self.node_sigs = {}
- """Dict mapping build nodes to task identifier (uid), it indicates whether a task created a particular file (persists across builds)"""
-
- self.task_sigs = {}
- """Dict mapping task identifiers (uid) to task signatures (persists across builds)"""
-
- self.imp_sigs = {}
- """Dict mapping task identifiers (uid) to implicit task dependencies used for scanning targets (persists across builds)"""
-
- self.node_deps = {}
- """Dict mapping task identifiers (uid) to node dependencies found by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
-
- self.raw_deps = {}
- """Dict mapping task identifiers (uid) to custom data returned by :py:meth:`waflib.Task.Task.scan` (persists across builds)"""
-
- self.task_gen_cache_names = {}
-
- self.jobs = Options.options.jobs
- """Amount of jobs to run in parallel"""
-
- self.targets = Options.options.targets
- """List of targets to build (default: \*)"""
-
- self.keep = Options.options.keep
- """Whether the build should continue past errors"""
-
- self.progress_bar = Options.options.progress_bar
- """
- Level of progress status:
-
- 0. normal output
- 1. progress bar
- 2. IDE output
- 3. No output at all
- """
-
- # Manual dependencies.
- self.deps_man = Utils.defaultdict(list)
- """Manual dependencies set by :py:meth:`waflib.Build.BuildContext.add_manual_dependency`"""
-
- # just the structure here
- self.current_group = 0
- """
- Current build group
- """
-
- self.groups = []
- """
- List containing lists of task generators
- """
-
- self.group_names = {}
- """
- Map group names to the group lists. See :py:meth:`waflib.Build.BuildContext.add_group`
- """
-
- for v in SAVED_ATTRS:
- if not hasattr(self, v):
- setattr(self, v, {})
-
- def get_variant_dir(self):
- """Getter for the variant_dir attribute"""
- if not self.variant:
- return self.out_dir
- return os.path.join(self.out_dir, os.path.normpath(self.variant))
- variant_dir = property(get_variant_dir, None)
-
- def __call__(self, *k, **kw):
- """
- Create a task generator and add it to the current build group. The following forms are equivalent::
-
- def build(bld):
- tg = bld(a=1, b=2)
-
- def build(bld):
- tg = bld()
- tg.a = 1
- tg.b = 2
-
- def build(bld):
- tg = TaskGen.task_gen(a=1, b=2)
- bld.add_to_group(tg, None)
-
- :param group: group name to add the task generator to
- :type group: string
- """
- kw['bld'] = self
- ret = TaskGen.task_gen(*k, **kw)
- self.task_gen_cache_names = {} # reset the cache, each time
- self.add_to_group(ret, group=kw.get('group'))
- return ret
-
- def __copy__(self):
- """
- Build contexts cannot be copied
-
- :raises: :py:class:`waflib.Errors.WafError`
- """
- raise Errors.WafError('build contexts cannot be copied')
-
- def load_envs(self):
- """
- The configuration command creates files of the form ``build/c4che/NAMEcache.py``. This method
- creates a :py:class:`waflib.ConfigSet.ConfigSet` instance for each ``NAME`` by reading those
- files and stores them in :py:attr:`waflib.Build.BuildContext.allenvs`.
- """
- node = self.root.find_node(self.cache_dir)
- if not node:
- raise Errors.WafError('The project was not configured: run "waf configure" first!')
- lst = node.ant_glob('**/*%s' % CACHE_SUFFIX, quiet=True)
-
- if not lst:
- raise Errors.WafError('The cache directory is empty: reconfigure the project')
-
- for x in lst:
- name = x.path_from(node).replace(CACHE_SUFFIX, '').replace('\\', '/')
- env = ConfigSet.ConfigSet(x.abspath())
- self.all_envs[name] = env
- for f in env[CFG_FILES]:
- newnode = self.root.find_resource(f)
- if not newnode or not newnode.exists():
- raise Errors.WafError('Missing configuration file %r, reconfigure the project!' % f)
-
- def init_dirs(self):
- """
- Initialize the project directory and the build directory by creating the nodes
- :py:attr:`waflib.Build.BuildContext.srcnode` and :py:attr:`waflib.Build.BuildContext.bldnode`
- corresponding to ``top_dir`` and ``variant_dir`` respectively. The ``bldnode`` directory is
- created if necessary.
- """
- if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
- raise Errors.WafError('The project was not configured: run "waf configure" first!')
-
- self.path = self.srcnode = self.root.find_dir(self.top_dir)
- self.bldnode = self.root.make_node(self.variant_dir)
- self.bldnode.mkdir()
-
- def execute(self):
- """
- Restore data from previous builds and call :py:meth:`waflib.Build.BuildContext.execute_build`.
- Overrides from :py:func:`waflib.Context.Context.execute`
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.execute_build()
-
- def execute_build(self):
- """
- Execute the build by:
-
- * reading the scripts (see :py:meth:`waflib.Context.Context.recurse`)
- * calling :py:meth:`waflib.Build.BuildContext.pre_build` to call user build functions
- * calling :py:meth:`waflib.Build.BuildContext.compile` to process the tasks
- * calling :py:meth:`waflib.Build.BuildContext.post_build` to call user build functions
- """
-
- Logs.info("Waf: Entering directory `%s'", self.variant_dir)
- self.recurse([self.run_dir])
- self.pre_build()
-
- # display the time elapsed in the progress bar
- self.timer = Utils.Timer()
-
- try:
- self.compile()
- finally:
- if self.progress_bar == 1 and sys.stderr.isatty():
- c = self.producer.processed or 1
- m = self.progress_line(c, c, Logs.colors.BLUE, Logs.colors.NORMAL)
- Logs.info(m, extra={'stream': sys.stderr, 'c1': Logs.colors.cursor_off, 'c2' : Logs.colors.cursor_on})
- Logs.info("Waf: Leaving directory `%s'", self.variant_dir)
- try:
- self.producer.bld = None
- del self.producer
- except AttributeError:
- pass
- self.post_build()
-
- def restore(self):
- """
- Load data from a previous run, sets the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`
- """
- try:
- env = ConfigSet.ConfigSet(os.path.join(self.cache_dir, 'build.config.py'))
- except EnvironmentError:
- pass
- else:
- if env.version < Context.HEXVERSION:
- raise Errors.WafError('Project was configured with a different version of Waf, please reconfigure it')
-
- for t in env.tools:
- self.setup(**t)
-
- dbfn = os.path.join(self.variant_dir, Context.DBFILE)
- try:
- data = Utils.readf(dbfn, 'rb')
- except (EnvironmentError, EOFError):
- # handle missing file/empty file
- Logs.debug('build: Could not load the build cache %s (missing)', dbfn)
- else:
- try:
- Node.pickle_lock.acquire()
- Node.Nod3 = self.node_class
- try:
- data = cPickle.loads(data)
- except Exception as e:
- Logs.debug('build: Could not pickle the build cache %s: %r', dbfn, e)
- else:
- for x in SAVED_ATTRS:
- setattr(self, x, data.get(x, {}))
- finally:
- Node.pickle_lock.release()
-
- self.init_dirs()
-
- def store(self):
- """
- Store data for next runs, set the attributes listed in :py:const:`waflib.Build.SAVED_ATTRS`. Uses a temporary
- file to avoid problems on ctrl+c.
- """
- data = {}
- for x in SAVED_ATTRS:
- data[x] = getattr(self, x)
- db = os.path.join(self.variant_dir, Context.DBFILE)
-
- try:
- Node.pickle_lock.acquire()
- Node.Nod3 = self.node_class
- x = cPickle.dumps(data, PROTOCOL)
- finally:
- Node.pickle_lock.release()
-
- Utils.writef(db + '.tmp', x, m='wb')
-
- try:
- st = os.stat(db)
- os.remove(db)
- if not Utils.is_win32: # win32 has no chown but we're paranoid
- os.chown(db + '.tmp', st.st_uid, st.st_gid)
- except (AttributeError, OSError):
- pass
-
- # do not use shutil.move (copy is not thread-safe)
- os.rename(db + '.tmp', db)
-
- def compile(self):
- """
- Run the build by creating an instance of :py:class:`waflib.Runner.Parallel`
- The cache file is written when at least a task was executed.
-
- :raises: :py:class:`waflib.Errors.BuildError` in case the build fails
- """
- Logs.debug('build: compile()')
-
- # delegate the producer-consumer logic to another object to reduce the complexity
- self.producer = Runner.Parallel(self, self.jobs)
- self.producer.biter = self.get_build_iterator()
- try:
- self.producer.start()
- except KeyboardInterrupt:
- if self.is_dirty():
- self.store()
- raise
- else:
- if self.is_dirty():
- self.store()
-
- if self.producer.error:
- raise Errors.BuildError(self.producer.error)
-
- def is_dirty(self):
- return self.producer.dirty
-
- def setup(self, tool, tooldir=None, funs=None):
- """
- Import waf tools defined during the configuration::
-
- def configure(conf):
- conf.load('glib2')
-
- def build(bld):
- pass # glib2 is imported implicitly
-
- :param tool: tool list
- :type tool: list
- :param tooldir: optional tool directory (sys.path)
- :type tooldir: list of string
- :param funs: unused variable
- """
- if isinstance(tool, list):
- for i in tool:
- self.setup(i, tooldir)
- return
-
- module = Context.load_tool(tool, tooldir)
- if hasattr(module, "setup"):
- module.setup(self)
-
- def get_env(self):
- """Getter for the env property"""
- try:
- return self.all_envs[self.variant]
- except KeyError:
- return self.all_envs['']
- def set_env(self, val):
- """Setter for the env property"""
- self.all_envs[self.variant] = val
-
- env = property(get_env, set_env)
-
- def add_manual_dependency(self, path, value):
- """
- Adds a dependency from a node object to a value::
-
- def build(bld):
- bld.add_manual_dependency(
- bld.path.find_resource('wscript'),
- bld.root.find_resource('/etc/fstab'))
-
- :param path: file path
- :type path: string or :py:class:`waflib.Node.Node`
- :param value: value to depend
- :type value: :py:class:`waflib.Node.Node`, byte object, or function returning a byte object
- """
- if not path:
- raise ValueError('Invalid input path %r' % path)
-
- if isinstance(path, Node.Node):
- node = path
- elif os.path.isabs(path):
- node = self.root.find_resource(path)
- else:
- node = self.path.find_resource(path)
- if not node:
- raise ValueError('Could not find the path %r' % path)
-
- if isinstance(value, list):
- self.deps_man[node].extend(value)
- else:
- self.deps_man[node].append(value)
-
- def launch_node(self):
- """Returns the launch directory as a :py:class:`waflib.Node.Node` object (cached)"""
- try:
- # private cache
- return self.p_ln
- except AttributeError:
- self.p_ln = self.root.find_dir(self.launch_dir)
- return self.p_ln
-
- def hash_env_vars(self, env, vars_lst):
- """
- Hashes configuration set variables::
-
- def build(bld):
- bld.hash_env_vars(bld.env, ['CXX', 'CC'])
-
- This method uses an internal cache.
-
- :param env: Configuration Set
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :param vars_lst: list of variables
- :type vars_list: list of string
- """
-
- if not env.table:
- env = env.parent
- if not env:
- return Utils.SIG_NIL
-
- idx = str(id(env)) + str(vars_lst)
- try:
- cache = self.cache_env
- except AttributeError:
- cache = self.cache_env = {}
- else:
- try:
- return self.cache_env[idx]
- except KeyError:
- pass
-
- lst = [env[a] for a in vars_lst]
- cache[idx] = ret = Utils.h_list(lst)
- Logs.debug('envhash: %s %r', Utils.to_hex(ret), lst)
- return ret
-
- def get_tgen_by_name(self, name):
- """
- Fetches a task generator by its name or its target attribute;
- the name must be unique in a build::
-
- def build(bld):
- tg = bld(name='foo')
- tg == bld.get_tgen_by_name('foo')
-
- This method use a private internal cache.
-
- :param name: Task generator name
- :raises: :py:class:`waflib.Errors.WafError` in case there is no task genenerator by that name
- """
- cache = self.task_gen_cache_names
- if not cache:
- # create the index lazily
- for g in self.groups:
- for tg in g:
- try:
- cache[tg.name] = tg
- except AttributeError:
- # raised if not a task generator, which should be uncommon
- pass
- try:
- return cache[name]
- except KeyError:
- raise Errors.WafError('Could not find a task generator for the name %r' % name)
-
- def progress_line(self, idx, total, col1, col2):
- """
- Computes a progress bar line displayed when running ``waf -p``
-
- :returns: progress bar line
- :rtype: string
- """
- if not sys.stderr.isatty():
- return ''
-
- n = len(str(total))
-
- Utils.rot_idx += 1
- ind = Utils.rot_chr[Utils.rot_idx % 4]
-
- pc = (100. * idx)/total
- fs = "[%%%dd/%%d][%%s%%2d%%%%%%s][%s][" % (n, ind)
- left = fs % (idx, total, col1, pc, col2)
- right = '][%s%s%s]' % (col1, self.timer, col2)
-
- cols = Logs.get_term_cols() - len(left) - len(right) + 2*len(col1) + 2*len(col2)
- if cols < 7:
- cols = 7
-
- ratio = ((cols * idx)//total) - 1
-
- bar = ('='*ratio+'>').ljust(cols)
- msg = Logs.indicator % (left, bar, right)
-
- return msg
-
- def declare_chain(self, *k, **kw):
- """
- Wraps :py:func:`waflib.TaskGen.declare_chain` for convenience
- """
- return TaskGen.declare_chain(*k, **kw)
-
- def pre_build(self):
- """Executes user-defined methods before the build starts, see :py:meth:`waflib.Build.BuildContext.add_pre_fun`"""
- for m in getattr(self, 'pre_funs', []):
- m(self)
-
- def post_build(self):
- """Executes user-defined methods after the build is successful, see :py:meth:`waflib.Build.BuildContext.add_post_fun`"""
- for m in getattr(self, 'post_funs', []):
- m(self)
-
- def add_pre_fun(self, meth):
- """
- Binds a callback method to execute after the scripts are read and before the build starts::
-
- def mycallback(bld):
- print("Hello, world!")
-
- def build(bld):
- bld.add_pre_fun(mycallback)
- """
- try:
- self.pre_funs.append(meth)
- except AttributeError:
- self.pre_funs = [meth]
-
- def add_post_fun(self, meth):
- """
- Binds a callback method to execute immediately after the build is successful::
-
- def call_ldconfig(bld):
- bld.exec_command('/sbin/ldconfig')
-
- def build(bld):
- if bld.cmd == 'install':
- bld.add_pre_fun(call_ldconfig)
- """
- try:
- self.post_funs.append(meth)
- except AttributeError:
- self.post_funs = [meth]
-
- def get_group(self, x):
- """
- Returns the build group named `x`, or the current group if `x` is None
-
- :param x: name or number or None
- :type x: string, int or None
- """
- if not self.groups:
- self.add_group()
- if x is None:
- return self.groups[self.current_group]
- if x in self.group_names:
- return self.group_names[x]
- return self.groups[x]
-
- def add_to_group(self, tgen, group=None):
- """Adds a task or a task generator to the build; there is no attempt to remove it if it was already added."""
- assert(isinstance(tgen, TaskGen.task_gen) or isinstance(tgen, Task.Task))
- tgen.bld = self
- self.get_group(group).append(tgen)
-
- def get_group_name(self, g):
- """
- Returns the name of the input build group
-
- :param g: build group object or build group index
- :type g: integer or list
- :return: name
- :rtype: string
- """
- if not isinstance(g, list):
- g = self.groups[g]
- for x in self.group_names:
- if id(self.group_names[x]) == id(g):
- return x
- return ''
-
- def get_group_idx(self, tg):
- """
- Returns the index of the group containing the task generator given as argument::
-
- def build(bld):
- tg = bld(name='nada')
- 0 == bld.get_group_idx(tg)
-
- :param tg: Task generator object
- :type tg: :py:class:`waflib.TaskGen.task_gen`
- :rtype: int
- """
- se = id(tg)
- for i, tmp in enumerate(self.groups):
- for t in tmp:
- if id(t) == se:
- return i
- return None
-
- def add_group(self, name=None, move=True):
- """
- Adds a new group of tasks/task generators. By default the new group becomes
- the default group for new task generators (make sure to create build groups in order).
-
- :param name: name for this group
- :type name: string
- :param move: set this new group as default group (True by default)
- :type move: bool
- :raises: :py:class:`waflib.Errors.WafError` if a group by the name given already exists
- """
- if name and name in self.group_names:
- raise Errors.WafError('add_group: name %s already present', name)
- g = []
- self.group_names[name] = g
- self.groups.append(g)
- if move:
- self.current_group = len(self.groups) - 1
-
- def set_group(self, idx):
- """
- Sets the build group at position idx as current so that newly added
- task generators are added to this one by default::
-
- def build(bld):
- bld(rule='touch ${TGT}', target='foo.txt')
- bld.add_group() # now the current group is 1
- bld(rule='touch ${TGT}', target='bar.txt')
- bld.set_group(0) # now the current group is 0
- bld(rule='touch ${TGT}', target='truc.txt') # build truc.txt before bar.txt
-
- :param idx: group name or group index
- :type idx: string or int
- """
- if isinstance(idx, str):
- g = self.group_names[idx]
- for i, tmp in enumerate(self.groups):
- if id(g) == id(tmp):
- self.current_group = i
- break
- else:
- self.current_group = idx
-
- def total(self):
- """
- Approximate task count: this value may be inaccurate if task generators
- are posted lazily (see :py:attr:`waflib.Build.BuildContext.post_mode`).
- The value :py:attr:`waflib.Runner.Parallel.total` is updated during the task execution.
-
- :rtype: int
- """
- total = 0
- for group in self.groups:
- for tg in group:
- try:
- total += len(tg.tasks)
- except AttributeError:
- total += 1
- return total
-
- def get_targets(self):
- """
- This method returns a pair containing the index of the last build group to post,
- and the list of task generator objects corresponding to the target names.
-
- This is used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
- to perform partial builds::
-
- $ waf --targets=myprogram,myshlib
-
- :return: the minimum build group index, and list of task generators
- :rtype: tuple
- """
- to_post = []
- min_grp = 0
- for name in self.targets.split(','):
- tg = self.get_tgen_by_name(name)
- m = self.get_group_idx(tg)
- if m > min_grp:
- min_grp = m
- to_post = [tg]
- elif m == min_grp:
- to_post.append(tg)
- return (min_grp, to_post)
-
- def get_all_task_gen(self):
- """
- Returns a list of all task generators for troubleshooting purposes.
- """
- lst = []
- for g in self.groups:
- lst.extend(g)
- return lst
-
- def post_group(self):
- """
- Post task generators from the group indexed by self.current_group; used internally
- by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
- """
- def tgpost(tg):
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
-
- if self.targets == '*':
- for tg in self.groups[self.current_group]:
- tgpost(tg)
- elif self.targets:
- if self.current_group < self._min_grp:
- for tg in self.groups[self.current_group]:
- tgpost(tg)
- else:
- for tg in self._exact_tg:
- tg.post()
- else:
- ln = self.launch_node()
- if ln.is_child_of(self.bldnode):
- Logs.warn('Building from the build directory, forcing --targets=*')
- ln = self.srcnode
- elif not ln.is_child_of(self.srcnode):
- Logs.warn('CWD %s is not under %s, forcing --targets=* (run distclean?)', ln.abspath(), self.srcnode.abspath())
- ln = self.srcnode
-
- def is_post(tg, ln):
- try:
- p = tg.path
- except AttributeError:
- pass
- else:
- if p.is_child_of(ln):
- return True
-
- def is_post_group():
- for i, g in enumerate(self.groups):
- if i > self.current_group:
- for tg in g:
- if is_post(tg, ln):
- return True
-
- if self.post_mode == POST_LAZY and ln != self.srcnode:
- # partial folder builds require all targets from a previous build group
- if is_post_group():
- ln = self.srcnode
-
- for tg in self.groups[self.current_group]:
- if is_post(tg, ln):
- tgpost(tg)
-
- def get_tasks_group(self, idx):
- """
- Returns all task instances for the build group at position idx,
- used internally by :py:meth:`waflib.Build.BuildContext.get_build_iterator`
-
- :rtype: list of :py:class:`waflib.Task.Task`
- """
- tasks = []
- for tg in self.groups[idx]:
- try:
- tasks.extend(tg.tasks)
- except AttributeError: # not a task generator
- tasks.append(tg)
- return tasks
-
- def get_build_iterator(self):
- """
- Creates a Python generator object that returns lists of tasks that may be processed in parallel.
-
- :return: tasks which can be executed immediately
- :rtype: generator returning lists of :py:class:`waflib.Task.Task`
- """
- if self.targets and self.targets != '*':
- (self._min_grp, self._exact_tg) = self.get_targets()
-
- if self.post_mode != POST_LAZY:
- for self.current_group, _ in enumerate(self.groups):
- self.post_group()
-
- for self.current_group, _ in enumerate(self.groups):
- # first post the task generators for the group
- if self.post_mode != POST_AT_ONCE:
- self.post_group()
-
- # then extract the tasks
- tasks = self.get_tasks_group(self.current_group)
-
- # if the constraints are set properly (ext_in/ext_out, before/after)
- # the call to set_file_constraints may be removed (can be a 15% penalty on no-op rebuilds)
- # (but leave set_file_constraints for the installation step)
- #
- # if the tasks have only files, set_file_constraints is required but set_precedence_constraints is not necessary
- #
- Task.set_file_constraints(tasks)
- Task.set_precedence_constraints(tasks)
-
- self.cur_tasks = tasks
- if tasks:
- yield tasks
-
- while 1:
- # the build stops once there are no tasks to process
- yield []
-
- def install_files(self, dest, files, **kw):
- """
- Creates a task generator to install files on the system::
-
- def build(bld):
- bld.install_files('${DATADIR}', self.path.find_resource('wscript'))
-
- :param dest: path representing the destination directory
- :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
- :param files: input files
- :type files: list of strings or list of :py:class:`waflib.Node.Node`
- :param env: configuration set to expand *dest*
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :param relative_trick: preserve the folder hierarchy when installing whole folders
- :type relative_trick: bool
- :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node`
- :type cwd: :py:class:`waflib.Node.Node`
- :param postpone: execute the task immediately to perform the installation (False by default)
- :type postpone: bool
- """
- assert(dest)
- tg = self(features='install_task', install_to=dest, install_from=files, **kw)
- tg.dest = tg.install_to
- tg.type = 'install_files'
- if not kw.get('postpone', True):
- tg.post()
- return tg
-
- def install_as(self, dest, srcfile, **kw):
- """
- Creates a task generator to install a file on the system with a different name::
-
- def build(bld):
- bld.install_as('${PREFIX}/bin', 'myapp', chmod=Utils.O755)
-
- :param dest: destination file
- :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
- :param srcfile: input file
- :type srcfile: string or :py:class:`waflib.Node.Node`
- :param cwd: parent node for searching srcfile, when srcfile is not an instance of :py:class:`waflib.Node.Node`
- :type cwd: :py:class:`waflib.Node.Node`
- :param env: configuration set for performing substitutions in dest
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :param postpone: execute the task immediately to perform the installation (False by default)
- :type postpone: bool
- """
- assert(dest)
- tg = self(features='install_task', install_to=dest, install_from=srcfile, **kw)
- tg.dest = tg.install_to
- tg.type = 'install_as'
- if not kw.get('postpone', True):
- tg.post()
- return tg
-
- def symlink_as(self, dest, src, **kw):
- """
- Creates a task generator to install a symlink::
-
- def build(bld):
- bld.symlink_as('${PREFIX}/lib/libfoo.so', 'libfoo.so.1.2.3')
-
- :param dest: absolute path of the symlink
- :type dest: :py:class:`waflib.Node.Node` or string (absolute path)
- :param src: link contents, which is a relative or absolute path which may exist or not
- :type src: string
- :param env: configuration set for performing substitutions in dest
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :param add: add the task created to a build group - set ``False`` only if the installation task is created after the build has started
- :type add: bool
- :param postpone: execute the task immediately to perform the installation
- :type postpone: bool
- :param relative_trick: make the symlink relative (default: ``False``)
- :type relative_trick: bool
- """
- assert(dest)
- tg = self(features='install_task', install_to=dest, install_from=src, **kw)
- tg.dest = tg.install_to
- tg.type = 'symlink_as'
- tg.link = src
- # TODO if add: self.add_to_group(tsk)
- if not kw.get('postpone', True):
- tg.post()
- return tg
-
-@TaskGen.feature('install_task')
-@TaskGen.before_method('process_rule', 'process_source')
-def process_install_task(self):
- """Creates the installation task for the current task generator; uses :py:func:`waflib.Build.add_install_task` internally."""
- self.add_install_task(**self.__dict__)
-
-@TaskGen.taskgen_method
-def add_install_task(self, **kw):
- """
- Creates the installation task for the current task generator, and executes it immediately if necessary
-
- :returns: An installation task
- :rtype: :py:class:`waflib.Build.inst`
- """
- if not self.bld.is_install:
- return
- if not kw['install_to']:
- return
-
- if kw['type'] == 'symlink_as' and Utils.is_win32:
- if kw.get('win32_install'):
- kw['type'] = 'install_as'
- else:
- # just exit
- return
-
- tsk = self.install_task = self.create_task('inst')
- tsk.chmod = kw.get('chmod', Utils.O644)
- tsk.link = kw.get('link', '') or kw.get('install_from', '')
- tsk.relative_trick = kw.get('relative_trick', False)
- tsk.type = kw['type']
- tsk.install_to = tsk.dest = kw['install_to']
- tsk.install_from = kw['install_from']
- tsk.relative_base = kw.get('cwd') or kw.get('relative_base', self.path)
- tsk.install_user = kw.get('install_user')
- tsk.install_group = kw.get('install_group')
- tsk.init_files()
- if not kw.get('postpone', True):
- tsk.run_now()
- return tsk
-
-@TaskGen.taskgen_method
-def add_install_files(self, **kw):
- """
- Creates an installation task for files
-
- :returns: An installation task
- :rtype: :py:class:`waflib.Build.inst`
- """
- kw['type'] = 'install_files'
- return self.add_install_task(**kw)
-
-@TaskGen.taskgen_method
-def add_install_as(self, **kw):
- """
- Creates an installation task for a single file
-
- :returns: An installation task
- :rtype: :py:class:`waflib.Build.inst`
- """
- kw['type'] = 'install_as'
- return self.add_install_task(**kw)
-
-@TaskGen.taskgen_method
-def add_symlink_as(self, **kw):
- """
- Creates an installation task for a symbolic link
-
- :returns: An installation task
- :rtype: :py:class:`waflib.Build.inst`
- """
- kw['type'] = 'symlink_as'
- return self.add_install_task(**kw)
-
-class inst(Task.Task):
- """Task that installs files or symlinks; it is typically executed by :py:class:`waflib.Build.InstallContext` and :py:class:`waflib.Build.UnInstallContext`"""
- def __str__(self):
- """Returns an empty string to disable the standard task display"""
- return ''
-
- def uid(self):
- """Returns a unique identifier for the task"""
- lst = self.inputs + self.outputs + [self.link, self.generator.path.abspath()]
- return Utils.h_list(lst)
-
- def init_files(self):
- """
- Initializes the task input and output nodes
- """
- if self.type == 'symlink_as':
- inputs = []
- else:
- inputs = self.generator.to_nodes(self.install_from)
- if self.type == 'install_as':
- assert len(inputs) == 1
- self.set_inputs(inputs)
-
- dest = self.get_install_path()
- outputs = []
- if self.type == 'symlink_as':
- if self.relative_trick:
- self.link = os.path.relpath(self.link, os.path.dirname(dest))
- outputs.append(self.generator.bld.root.make_node(dest))
- elif self.type == 'install_as':
- outputs.append(self.generator.bld.root.make_node(dest))
- else:
- for y in inputs:
- if self.relative_trick:
- destfile = os.path.join(dest, y.path_from(self.relative_base))
- else:
- destfile = os.path.join(dest, y.name)
- outputs.append(self.generator.bld.root.make_node(destfile))
- self.set_outputs(outputs)
-
- def runnable_status(self):
- """
- Installation tasks are always executed, so this method returns either :py:const:`waflib.Task.ASK_LATER` or :py:const:`waflib.Task.RUN_ME`.
- """
- ret = super(inst, self).runnable_status()
- if ret == Task.SKIP_ME and self.generator.bld.is_install:
- return Task.RUN_ME
- return ret
-
- def post_run(self):
- """
- Disables any post-run operations
- """
- pass
-
- def get_install_path(self, destdir=True):
- """
- Returns the destination path where files will be installed, pre-pending `destdir`.
-
- :rtype: string
- """
- if isinstance(self.install_to, Node.Node):
- dest = self.install_to.abspath()
- else:
- dest = Utils.subst_vars(self.install_to, self.env)
- if destdir and Options.options.destdir:
- dest = os.path.join(Options.options.destdir, os.path.splitdrive(dest)[1].lstrip(os.sep))
- return dest
-
- def copy_fun(self, src, tgt):
- """
- Copies a file from src to tgt, preserving permissions and trying to work
- around path limitations on Windows platforms. On Unix-like platforms,
- the owner/group of the target file may be set through install_user/install_group
-
- :param src: absolute path
- :type src: string
- :param tgt: absolute path
- :type tgt: string
- """
- # override this if you want to strip executables
- # kw['tsk'].source is the task that created the files in the build
- if Utils.is_win32 and len(tgt) > 259 and not tgt.startswith('\\\\?\\'):
- tgt = '\\\\?\\' + tgt
- shutil.copy2(src, tgt)
- self.fix_perms(tgt)
-
- def rm_empty_dirs(self, tgt):
- """
- Removes empty folders recursively when uninstalling.
-
- :param tgt: absolute path
- :type tgt: string
- """
- while tgt:
- tgt = os.path.dirname(tgt)
- try:
- os.rmdir(tgt)
- except OSError:
- break
-
- def run(self):
- """
- Performs file or symlink installation
- """
- is_install = self.generator.bld.is_install
- if not is_install: # unnecessary?
- return
-
- for x in self.outputs:
- if is_install == INSTALL:
- x.parent.mkdir()
- if self.type == 'symlink_as':
- fun = is_install == INSTALL and self.do_link or self.do_unlink
- fun(self.link, self.outputs[0].abspath())
- else:
- fun = is_install == INSTALL and self.do_install or self.do_uninstall
- launch_node = self.generator.bld.launch_node()
- for x, y in zip(self.inputs, self.outputs):
- fun(x.abspath(), y.abspath(), x.path_from(launch_node))
-
- def run_now(self):
- """
- Try executing the installation task right now
-
- :raises: :py:class:`waflib.Errors.TaskNotReady`
- """
- status = self.runnable_status()
- if status not in (Task.RUN_ME, Task.SKIP_ME):
- raise Errors.TaskNotReady('Could not process %r: status %r' % (self, status))
- self.run()
- self.hasrun = Task.SUCCESS
-
- def do_install(self, src, tgt, lbl, **kw):
- """
- Copies a file from src to tgt with given file permissions. The actual copy is only performed
- if the source and target file sizes or timestamps differ. When the copy occurs,
- the file is always first removed and then copied so as to prevent stale inodes.
-
- :param src: file name as absolute path
- :type src: string
- :param tgt: file destination, as absolute path
- :type tgt: string
- :param lbl: file source description
- :type lbl: string
- :param chmod: installation mode
- :type chmod: int
- :raises: :py:class:`waflib.Errors.WafError` if the file cannot be written
- """
- if not Options.options.force:
- # check if the file is already there to avoid a copy
- try:
- st1 = os.stat(tgt)
- st2 = os.stat(src)
- except OSError:
- pass
- else:
- # same size and identical timestamps -> make no copy
- if st1.st_mtime + 2 >= st2.st_mtime and st1.st_size == st2.st_size:
- if not self.generator.bld.progress_bar:
- Logs.info('- install %s (from %s)', tgt, lbl)
- return False
-
- if not self.generator.bld.progress_bar:
- Logs.info('+ install %s (from %s)', tgt, lbl)
-
- # Give best attempt at making destination overwritable,
- # like the 'install' utility used by 'make install' does.
- try:
- os.chmod(tgt, Utils.O644 | stat.S_IMODE(os.stat(tgt).st_mode))
- except EnvironmentError:
- pass
-
- # following is for shared libs and stale inodes (-_-)
- try:
- os.remove(tgt)
- except OSError:
- pass
-
- try:
- self.copy_fun(src, tgt)
- except EnvironmentError as e:
- if not os.path.exists(src):
- Logs.error('File %r does not exist', src)
- elif not os.path.isfile(src):
- Logs.error('Input %r is not a file', src)
- raise Errors.WafError('Could not install the file %r' % tgt, e)
-
- def fix_perms(self, tgt):
- """
- Change the ownership of the file/folder/link pointed by the given path
- This looks up for `install_user` or `install_group` attributes
- on the task or on the task generator::
-
- def build(bld):
- bld.install_as('${PREFIX}/wscript',
- 'wscript',
- install_user='nobody', install_group='nogroup')
- bld.symlink_as('${PREFIX}/wscript_link',
- Utils.subst_vars('${PREFIX}/wscript', bld.env),
- install_user='nobody', install_group='nogroup')
- """
- if not Utils.is_win32:
- user = getattr(self, 'install_user', None) or getattr(self.generator, 'install_user', None)
- group = getattr(self, 'install_group', None) or getattr(self.generator, 'install_group', None)
- if user or group:
- Utils.lchown(tgt, user or -1, group or -1)
- if not os.path.islink(tgt):
- os.chmod(tgt, self.chmod)
-
- def do_link(self, src, tgt, **kw):
- """
- Creates a symlink from tgt to src.
-
- :param src: file name as absolute path
- :type src: string
- :param tgt: file destination, as absolute path
- :type tgt: string
- """
- if os.path.islink(tgt) and os.readlink(tgt) == src:
- if not self.generator.bld.progress_bar:
- Logs.info('- symlink %s (to %s)', tgt, src)
- else:
- try:
- os.remove(tgt)
- except OSError:
- pass
- if not self.generator.bld.progress_bar:
- Logs.info('+ symlink %s (to %s)', tgt, src)
- os.symlink(src, tgt)
- self.fix_perms(tgt)
-
- def do_uninstall(self, src, tgt, lbl, **kw):
- """
- See :py:meth:`waflib.Build.inst.do_install`
- """
- if not self.generator.bld.progress_bar:
- Logs.info('- remove %s', tgt)
-
- #self.uninstall.append(tgt)
- try:
- os.remove(tgt)
- except OSError as e:
- if e.errno != errno.ENOENT:
- if not getattr(self, 'uninstall_error', None):
- self.uninstall_error = True
- Logs.warn('build: some files could not be uninstalled (retry with -vv to list them)')
- if Logs.verbose > 1:
- Logs.warn('Could not remove %s (error code %r)', e.filename, e.errno)
- self.rm_empty_dirs(tgt)
-
- def do_unlink(self, src, tgt, **kw):
- """
- See :py:meth:`waflib.Build.inst.do_link`
- """
- try:
- if not self.generator.bld.progress_bar:
- Logs.info('- remove %s', tgt)
- os.remove(tgt)
- except OSError:
- pass
- self.rm_empty_dirs(tgt)
-
-class InstallContext(BuildContext):
- '''installs the targets on the system'''
- cmd = 'install'
-
- def __init__(self, **kw):
- super(InstallContext, self).__init__(**kw)
- self.is_install = INSTALL
-
-class UninstallContext(InstallContext):
- '''removes the targets installed'''
- cmd = 'uninstall'
-
- def __init__(self, **kw):
- super(UninstallContext, self).__init__(**kw)
- self.is_install = UNINSTALL
-
-class CleanContext(BuildContext):
- '''cleans the project'''
- cmd = 'clean'
- def execute(self):
- """
- See :py:func:`waflib.Build.BuildContext.execute`.
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
-
- self.recurse([self.run_dir])
- try:
- self.clean()
- finally:
- self.store()
-
- def clean(self):
- """
- Remove most files from the build directory, and reset all caches.
-
- Custom lists of files to clean can be declared as `bld.clean_files`.
- For example, exclude `build/program/myprogram` from getting removed::
-
- def build(bld):
- bld.clean_files = bld.bldnode.ant_glob('**',
- excl='.lock* config.log c4che/* config.h program/myprogram',
- quiet=True, generator=True)
- """
- Logs.debug('build: clean called')
-
- if hasattr(self, 'clean_files'):
- for n in self.clean_files:
- n.delete()
- elif self.bldnode != self.srcnode:
- # would lead to a disaster if top == out
- lst = []
- for env in self.all_envs.values():
- lst.extend(self.root.find_or_declare(f) for f in env[CFG_FILES])
- for n in self.bldnode.ant_glob('**/*', excl='.lock* *conf_check_*/** config.log c4che/*', quiet=True):
- if n in lst:
- continue
- n.delete()
- self.root.children = {}
-
- for v in SAVED_ATTRS:
- if v == 'root':
- continue
- setattr(self, v, {})
-
-class ListContext(BuildContext):
- '''lists the targets to execute'''
- cmd = 'list'
-
- def execute(self):
- """
- In addition to printing the name of each build target,
- a description column will include text for each task
- generator which has a "description" field set.
-
- See :py:func:`waflib.Build.BuildContext.execute`.
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
-
- self.recurse([self.run_dir])
- self.pre_build()
-
- # display the time elapsed in the progress bar
- self.timer = Utils.Timer()
-
- for g in self.groups:
- for tg in g:
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
-
- try:
- # force the cache initialization
- self.get_tgen_by_name('')
- except Errors.WafError:
- pass
-
- targets = sorted(self.task_gen_cache_names)
-
- # figure out how much to left-justify, for largest target name
- line_just = max(len(t) for t in targets) if targets else 0
-
- for target in targets:
- tgen = self.task_gen_cache_names[target]
-
- # Support displaying the description for the target
- # if it was set on the tgen
- descript = getattr(tgen, 'description', '')
- if descript:
- target = target.ljust(line_just)
- descript = ': %s' % descript
-
- Logs.pprint('GREEN', target, label=descript)
-
-class StepContext(BuildContext):
- '''executes tasks in a step-by-step fashion, for debugging'''
- cmd = 'step'
-
- def __init__(self, **kw):
- super(StepContext, self).__init__(**kw)
- self.files = Options.options.files
-
- def compile(self):
- """
- Overrides :py:meth:`waflib.Build.BuildContext.compile` to perform a partial build
- on tasks matching the input/output pattern given (regular expression matching)::
-
- $ waf step --files=foo.c,bar.c,in:truc.c,out:bar.o
- $ waf step --files=in:foo.cpp.1.o # link task only
-
- """
- if not self.files:
- Logs.warn('Add a pattern for the debug build, for example "waf step --files=main.c,app"')
- BuildContext.compile(self)
- return
-
- targets = []
- if self.targets and self.targets != '*':
- targets = self.targets.split(',')
-
- for g in self.groups:
- for tg in g:
- if targets and tg.name not in targets:
- continue
-
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
-
- for pat in self.files.split(','):
- matcher = self.get_matcher(pat)
- for tg in g:
- if isinstance(tg, Task.Task):
- lst = [tg]
- else:
- lst = tg.tasks
- for tsk in lst:
- do_exec = False
- for node in tsk.inputs:
- if matcher(node, output=False):
- do_exec = True
- break
- for node in tsk.outputs:
- if matcher(node, output=True):
- do_exec = True
- break
- if do_exec:
- ret = tsk.run()
- Logs.info('%s -> exit %r', tsk, ret)
-
- def get_matcher(self, pat):
- """
- Converts a step pattern into a function
-
- :param: pat: pattern of the form in:truc.c,out:bar.o
- :returns: Python function that uses Node objects as inputs and returns matches
- :rtype: function
- """
- # this returns a function
- inn = True
- out = True
- if pat.startswith('in:'):
- out = False
- pat = pat.replace('in:', '')
- elif pat.startswith('out:'):
- inn = False
- pat = pat.replace('out:', '')
-
- anode = self.root.find_node(pat)
- pattern = None
- if not anode:
- if not pat.startswith('^'):
- pat = '^.+?%s' % pat
- if not pat.endswith('$'):
- pat = '%s$' % pat
- pattern = re.compile(pat)
-
- def match(node, output):
- if output and not out:
- return False
- if not output and not inn:
- return False
-
- if anode:
- return anode == node
- else:
- return pattern.match(node.abspath())
- return match
-
-class EnvContext(BuildContext):
- """Subclass EnvContext to create commands that require configuration data in 'env'"""
- fun = cmd = None
- def execute(self):
- """
- See :py:func:`waflib.Build.BuildContext.execute`.
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
-
diff --git a/waflib/COPYING b/waflib/COPYING
deleted file mode 100644
index a4147d2..0000000
--- a/waflib/COPYING
+++ /dev/null
@@ -1,25 +0,0 @@
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-3. The name of the author may not be used to endorse or promote products
- derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
diff --git a/waflib/ConfigSet.py b/waflib/ConfigSet.py
deleted file mode 100644
index b300bb5..0000000
--- a/waflib/ConfigSet.py
+++ /dev/null
@@ -1,361 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-
-ConfigSet: a special dict
-
-The values put in :py:class:`ConfigSet` must be serializable (dicts, lists, strings)
-"""
-
-import copy, re, os
-from waflib import Logs, Utils
-re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
-
-class ConfigSet(object):
- """
- A copy-on-write dict with human-readable serialized format. The serialization format
- is human-readable (python-like) and performed by using eval() and repr().
- For high performance prefer pickle. Do not store functions as they are not serializable.
-
- The values can be accessed by attributes or by keys::
-
- from waflib.ConfigSet import ConfigSet
- env = ConfigSet()
- env.FOO = 'test'
- env['FOO'] = 'test'
- """
- __slots__ = ('table', 'parent')
- def __init__(self, filename=None):
- self.table = {}
- """
- Internal dict holding the object values
- """
- #self.parent = None
-
- if filename:
- self.load(filename)
-
- def __contains__(self, key):
- """
- Enables the *in* syntax::
-
- if 'foo' in env:
- print(env['foo'])
- """
- if key in self.table:
- return True
- try:
- return self.parent.__contains__(key)
- except AttributeError:
- return False # parent may not exist
-
- def keys(self):
- """Dict interface"""
- keys = set()
- cur = self
- while cur:
- keys.update(cur.table.keys())
- cur = getattr(cur, 'parent', None)
- keys = list(keys)
- keys.sort()
- return keys
-
- def __iter__(self):
- return iter(self.keys())
-
- def __str__(self):
- """Text representation of the ConfigSet (for debugging purposes)"""
- return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
-
- def __getitem__(self, key):
- """
- Dictionary interface: get value from key::
-
- def configure(conf):
- conf.env['foo'] = {}
- print(env['foo'])
- """
- try:
- while 1:
- x = self.table.get(key)
- if not x is None:
- return x
- self = self.parent
- except AttributeError:
- return []
-
- def __setitem__(self, key, value):
- """
- Dictionary interface: set value from key
- """
- self.table[key] = value
-
- def __delitem__(self, key):
- """
- Dictionary interface: mark the value as missing
- """
- self[key] = []
-
- def __getattr__(self, name):
- """
- Attribute access provided for convenience. The following forms are equivalent::
-
- def configure(conf):
- conf.env.value
- conf.env['value']
- """
- if name in self.__slots__:
- return object.__getattribute__(self, name)
- else:
- return self[name]
-
- def __setattr__(self, name, value):
- """
- Attribute access provided for convenience. The following forms are equivalent::
-
- def configure(conf):
- conf.env.value = x
- env['value'] = x
- """
- if name in self.__slots__:
- object.__setattr__(self, name, value)
- else:
- self[name] = value
-
- def __delattr__(self, name):
- """
- Attribute access provided for convenience. The following forms are equivalent::
-
- def configure(conf):
- del env.value
- del env['value']
- """
- if name in self.__slots__:
- object.__delattr__(self, name)
- else:
- del self[name]
-
- def derive(self):
- """
- Returns a new ConfigSet deriving from self. The copy returned
- will be a shallow copy::
-
- from waflib.ConfigSet import ConfigSet
- env = ConfigSet()
- env.append_value('CFLAGS', ['-O2'])
- child = env.derive()
- child.CFLAGS.append('test') # warning! this will modify 'env'
- child.CFLAGS = ['-O3'] # new list, ok
- child.append_value('CFLAGS', ['-O3']) # ok
-
- Use :py:func:`ConfigSet.detach` to detach the child from the parent.
- """
- newenv = ConfigSet()
- newenv.parent = self
- return newenv
-
- def detach(self):
- """
- Detaches this instance from its parent (if present)
-
- Modifying the parent :py:class:`ConfigSet` will not change the current object
- Modifying this :py:class:`ConfigSet` will not modify the parent one.
- """
- tbl = self.get_merged_dict()
- try:
- delattr(self, 'parent')
- except AttributeError:
- pass
- else:
- keys = tbl.keys()
- for x in keys:
- tbl[x] = copy.deepcopy(tbl[x])
- self.table = tbl
- return self
-
- def get_flat(self, key):
- """
- Returns a value as a string. If the input is a list, the value returned is space-separated.
-
- :param key: key to use
- :type key: string
- """
- s = self[key]
- if isinstance(s, str):
- return s
- return ' '.join(s)
-
- def _get_list_value_for_modification(self, key):
- """
- Returns a list value for further modification.
-
- The list may be modified inplace and there is no need to do this afterwards::
-
- self.table[var] = value
- """
- try:
- value = self.table[key]
- except KeyError:
- try:
- value = self.parent[key]
- except AttributeError:
- value = []
- else:
- if isinstance(value, list):
- # force a copy
- value = value[:]
- else:
- value = [value]
- self.table[key] = value
- else:
- if not isinstance(value, list):
- self.table[key] = value = [value]
- return value
-
- def append_value(self, var, val):
- """
- Appends a value to the specified config key::
-
- def build(bld):
- bld.env.append_value('CFLAGS', ['-O2'])
-
- The value must be a list or a tuple
- """
- if isinstance(val, str): # if there were string everywhere we could optimize this
- val = [val]
- current_value = self._get_list_value_for_modification(var)
- current_value.extend(val)
-
- def prepend_value(self, var, val):
- """
- Prepends a value to the specified item::
-
- def configure(conf):
- conf.env.prepend_value('CFLAGS', ['-O2'])
-
- The value must be a list or a tuple
- """
- if isinstance(val, str):
- val = [val]
- self.table[var] = val + self._get_list_value_for_modification(var)
-
- def append_unique(self, var, val):
- """
- Appends a value to the specified item only if it's not already present::
-
- def build(bld):
- bld.env.append_unique('CFLAGS', ['-O2', '-g'])
-
- The value must be a list or a tuple
- """
- if isinstance(val, str):
- val = [val]
- current_value = self._get_list_value_for_modification(var)
-
- for x in val:
- if x not in current_value:
- current_value.append(x)
-
- def get_merged_dict(self):
- """
- Computes the merged dictionary from the fusion of self and all its parent
-
- :rtype: a ConfigSet object
- """
- table_list = []
- env = self
- while 1:
- table_list.insert(0, env.table)
- try:
- env = env.parent
- except AttributeError:
- break
- merged_table = {}
- for table in table_list:
- merged_table.update(table)
- return merged_table
-
- def store(self, filename):
- """
- Serializes the :py:class:`ConfigSet` data to a file. See :py:meth:`ConfigSet.load` for reading such files.
-
- :param filename: file to use
- :type filename: string
- """
- try:
- os.makedirs(os.path.split(filename)[0])
- except OSError:
- pass
-
- buf = []
- merged_table = self.get_merged_dict()
- keys = list(merged_table.keys())
- keys.sort()
-
- try:
- fun = ascii
- except NameError:
- fun = repr
-
- for k in keys:
- if k != 'undo_stack':
- buf.append('%s = %s\n' % (k, fun(merged_table[k])))
- Utils.writef(filename, ''.join(buf))
-
- def load(self, filename):
- """
- Restores contents from a file (current values are not cleared). Files are written using :py:meth:`ConfigSet.store`.
-
- :param filename: file to use
- :type filename: string
- """
- tbl = self.table
- code = Utils.readf(filename, m='rU')
- for m in re_imp.finditer(code):
- g = m.group
- tbl[g(2)] = eval(g(3))
- Logs.debug('env: %s', self.table)
-
- def update(self, d):
- """
- Dictionary interface: replace values with the ones from another dict
-
- :param d: object to use the value from
- :type d: dict-like object
- """
- self.table.update(d)
-
- def stash(self):
- """
- Stores the object state to provide transactionality semantics::
-
- env = ConfigSet()
- env.stash()
- try:
- env.append_value('CFLAGS', '-O3')
- call_some_method(env)
- finally:
- env.revert()
-
- The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
- """
- orig = self.table
- tbl = self.table = self.table.copy()
- for x in tbl.keys():
- tbl[x] = copy.deepcopy(tbl[x])
- self.undo_stack = self.undo_stack + [orig]
-
- def commit(self):
- """
- Commits transactional changes. See :py:meth:`ConfigSet.stash`
- """
- self.undo_stack.pop(-1)
-
- def revert(self):
- """
- Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
- """
- self.table = self.undo_stack.pop(-1)
-
diff --git a/waflib/Configure.py b/waflib/Configure.py
deleted file mode 100644
index d0a4793..0000000
--- a/waflib/Configure.py
+++ /dev/null
@@ -1,638 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Configuration system
-
-A :py:class:`waflib.Configure.ConfigurationContext` instance is created when ``waf configure`` is called, it is used to:
-
-* create data dictionaries (ConfigSet instances)
-* store the list of modules to import
-* hold configuration routines such as ``find_program``, etc
-"""
-
-import os, re, shlex, shutil, sys, time, traceback
-from waflib import ConfigSet, Utils, Options, Logs, Context, Build, Errors
-
-WAF_CONFIG_LOG = 'config.log'
-"""Name of the configuration log file"""
-
-autoconfig = False
-"""Execute the configuration automatically"""
-
-conf_template = '''# project %(app)s configured on %(now)s by
-# waf %(wafver)s (abi %(abi)s, python %(pyver)x on %(systype)s)
-# using %(args)s
-#'''
-
-class ConfigurationContext(Context.Context):
- '''configures the project'''
-
- cmd = 'configure'
-
- error_handlers = []
- """
- Additional functions to handle configuration errors
- """
-
- def __init__(self, **kw):
- super(ConfigurationContext, self).__init__(**kw)
- self.environ = dict(os.environ)
- self.all_envs = {}
-
- self.top_dir = None
- self.out_dir = None
-
- self.tools = [] # tools loaded in the configuration, and that will be loaded when building
-
- self.hash = 0
- self.files = []
-
- self.tool_cache = []
-
- self.setenv('')
-
- def setenv(self, name, env=None):
- """
- Set a new config set for conf.env. If a config set of that name already exists,
- recall it without modification.
-
- The name is the filename prefix to save to ``c4che/NAME_cache.py``, and it
- is also used as *variants* by the build commands.
- Though related to variants, whatever kind of data may be stored in the config set::
-
- def configure(cfg):
- cfg.env.ONE = 1
- cfg.setenv('foo')
- cfg.env.ONE = 2
-
- def build(bld):
- 2 == bld.env_of_name('foo').ONE
-
- :param name: name of the configuration set
- :type name: string
- :param env: ConfigSet to copy, or an empty ConfigSet is created
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- """
- if name not in self.all_envs or env:
- if not env:
- env = ConfigSet.ConfigSet()
- self.prepare_env(env)
- else:
- env = env.derive()
- self.all_envs[name] = env
- self.variant = name
-
- def get_env(self):
- """Getter for the env property"""
- return self.all_envs[self.variant]
- def set_env(self, val):
- """Setter for the env property"""
- self.all_envs[self.variant] = val
-
- env = property(get_env, set_env)
-
- def init_dirs(self):
- """
- Initialize the project directory and the build directory
- """
-
- top = self.top_dir
- if not top:
- top = Options.options.top
- if not top:
- top = getattr(Context.g_module, Context.TOP, None)
- if not top:
- top = self.path.abspath()
- top = os.path.abspath(top)
-
- self.srcnode = (os.path.isabs(top) and self.root or self.path).find_dir(top)
- assert(self.srcnode)
-
- out = self.out_dir
- if not out:
- out = Options.options.out
- if not out:
- out = getattr(Context.g_module, Context.OUT, None)
- if not out:
- out = Options.lockfile.replace('.lock-waf_%s_' % sys.platform, '').replace('.lock-waf', '')
-
- # someone can be messing with symlinks
- out = os.path.realpath(out)
-
- self.bldnode = (os.path.isabs(out) and self.root or self.path).make_node(out)
- self.bldnode.mkdir()
-
- if not os.path.isdir(self.bldnode.abspath()):
- conf.fatal('Could not create the build directory %s' % self.bldnode.abspath())
-
- def execute(self):
- """
- See :py:func:`waflib.Context.Context.execute`
- """
- self.init_dirs()
-
- self.cachedir = self.bldnode.make_node(Build.CACHE_DIR)
- self.cachedir.mkdir()
-
- path = os.path.join(self.bldnode.abspath(), WAF_CONFIG_LOG)
- self.logger = Logs.make_logger(path, 'cfg')
-
- app = getattr(Context.g_module, 'APPNAME', '')
- if app:
- ver = getattr(Context.g_module, 'VERSION', '')
- if ver:
- app = "%s (%s)" % (app, ver)
-
- params = {'now': time.ctime(), 'pyver': sys.hexversion, 'systype': sys.platform, 'args': " ".join(sys.argv), 'wafver': Context.WAFVERSION, 'abi': Context.ABI, 'app': app}
- self.to_log(conf_template % params)
- self.msg('Setting top to', self.srcnode.abspath())
- self.msg('Setting out to', self.bldnode.abspath())
-
- if id(self.srcnode) == id(self.bldnode):
- Logs.warn('Setting top == out')
- elif id(self.path) != id(self.srcnode):
- if self.srcnode.is_child_of(self.path):
- Logs.warn('Are you certain that you do not want to set top="." ?')
-
- super(ConfigurationContext, self).execute()
-
- self.store()
-
- Context.top_dir = self.srcnode.abspath()
- Context.out_dir = self.bldnode.abspath()
-
- # this will write a configure lock so that subsequent builds will
- # consider the current path as the root directory (see prepare_impl).
- # to remove: use 'waf distclean'
- env = ConfigSet.ConfigSet()
- env.argv = sys.argv
- env.options = Options.options.__dict__
- env.config_cmd = self.cmd
-
- env.run_dir = Context.run_dir
- env.top_dir = Context.top_dir
- env.out_dir = Context.out_dir
-
- # conf.hash & conf.files hold wscript files paths and hash
- # (used only by Configure.autoconfig)
- env.hash = self.hash
- env.files = self.files
- env.environ = dict(self.environ)
-
- if not (self.env.NO_LOCK_IN_RUN or env.environ.get('NO_LOCK_IN_RUN') or getattr(Options.options, 'no_lock_in_run')):
- env.store(os.path.join(Context.run_dir, Options.lockfile))
- if not (self.env.NO_LOCK_IN_TOP or env.environ.get('NO_LOCK_IN_TOP') or getattr(Options.options, 'no_lock_in_top')):
- env.store(os.path.join(Context.top_dir, Options.lockfile))
- if not (self.env.NO_LOCK_IN_OUT or env.environ.get('NO_LOCK_IN_OUT') or getattr(Options.options, 'no_lock_in_out')):
- env.store(os.path.join(Context.out_dir, Options.lockfile))
-
- def prepare_env(self, env):
- """
- Insert *PREFIX*, *BINDIR* and *LIBDIR* values into ``env``
-
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :param env: a ConfigSet, usually ``conf.env``
- """
- if not env.PREFIX:
- if Options.options.prefix or Utils.is_win32:
- env.PREFIX = Options.options.prefix
- else:
- env.PREFIX = '/'
- if not env.BINDIR:
- if Options.options.bindir:
- env.BINDIR = Options.options.bindir
- else:
- env.BINDIR = Utils.subst_vars('${PREFIX}/bin', env)
- if not env.LIBDIR:
- if Options.options.libdir:
- env.LIBDIR = Options.options.libdir
- else:
- env.LIBDIR = Utils.subst_vars('${PREFIX}/lib%s' % Utils.lib64(), env)
-
- def store(self):
- """Save the config results into the cache file"""
- n = self.cachedir.make_node('build.config.py')
- n.write('version = 0x%x\ntools = %r\n' % (Context.HEXVERSION, self.tools))
-
- if not self.all_envs:
- self.fatal('nothing to store in the configuration context!')
-
- for key in self.all_envs:
- tmpenv = self.all_envs[key]
- tmpenv.store(os.path.join(self.cachedir.abspath(), key + Build.CACHE_SUFFIX))
-
- def load(self, tool_list, tooldir=None, funs=None, with_sys_path=True, cache=False):
- """
- Load Waf tools, which will be imported whenever a build is started.
-
- :param tool_list: waf tools to import
- :type tool_list: list of string
- :param tooldir: paths for the imports
- :type tooldir: list of string
- :param funs: functions to execute from the waf tools
- :type funs: list of string
- :param cache: whether to prevent the tool from running twice
- :type cache: bool
- """
-
- tools = Utils.to_list(tool_list)
- if tooldir:
- tooldir = Utils.to_list(tooldir)
- for tool in tools:
- # avoid loading the same tool more than once with the same functions
- # used by composite projects
-
- if cache:
- mag = (tool, id(self.env), tooldir, funs)
- if mag in self.tool_cache:
- self.to_log('(tool %s is already loaded, skipping)' % tool)
- continue
- self.tool_cache.append(mag)
-
- module = None
- try:
- module = Context.load_tool(tool, tooldir, ctx=self, with_sys_path=with_sys_path)
- except ImportError as e:
- self.fatal('Could not load the Waf tool %r from %r\n%s' % (tool, getattr(e, 'waf_sys_path', sys.path), e))
- except Exception as e:
- self.to_log('imp %r (%r & %r)' % (tool, tooldir, funs))
- self.to_log(traceback.format_exc())
- raise
-
- if funs is not None:
- self.eval_rules(funs)
- else:
- func = getattr(module, 'configure', None)
- if func:
- if type(func) is type(Utils.readf):
- func(self)
- else:
- self.eval_rules(func)
-
- self.tools.append({'tool':tool, 'tooldir':tooldir, 'funs':funs})
-
- def post_recurse(self, node):
- """
- Records the path and a hash of the scripts visited, see :py:meth:`waflib.Context.Context.post_recurse`
-
- :param node: script
- :type node: :py:class:`waflib.Node.Node`
- """
- super(ConfigurationContext, self).post_recurse(node)
- self.hash = Utils.h_list((self.hash, node.read('rb')))
- self.files.append(node.abspath())
-
- def eval_rules(self, rules):
- """
- Execute configuration tests provided as list of functions to run
-
- :param rules: list of configuration method names
- :type rules: list of string
- """
- self.rules = Utils.to_list(rules)
- for x in self.rules:
- f = getattr(self, x)
- if not f:
- self.fatal('No such configuration function %r' % x)
- f()
-
-def conf(f):
- """
- Decorator: attach new configuration functions to :py:class:`waflib.Build.BuildContext` and
- :py:class:`waflib.Configure.ConfigurationContext`. The methods bound will accept a parameter
- named 'mandatory' to disable the configuration errors::
-
- def configure(conf):
- conf.find_program('abc', mandatory=False)
-
- :param f: method to bind
- :type f: function
- """
- def fun(*k, **kw):
- mandatory = kw.pop('mandatory', True)
- try:
- return f(*k, **kw)
- except Errors.ConfigurationError:
- if mandatory:
- raise
-
- fun.__name__ = f.__name__
- setattr(ConfigurationContext, f.__name__, fun)
- setattr(Build.BuildContext, f.__name__, fun)
- return f
-
-@conf
-def add_os_flags(self, var, dest=None, dup=False):
- """
- Import operating system environment values into ``conf.env`` dict::
-
- def configure(conf):
- conf.add_os_flags('CFLAGS')
-
- :param var: variable to use
- :type var: string
- :param dest: destination variable, by default the same as var
- :type dest: string
- :param dup: add the same set of flags again
- :type dup: bool
- """
- try:
- flags = shlex.split(self.environ[var])
- except KeyError:
- return
- if dup or ''.join(flags) not in ''.join(Utils.to_list(self.env[dest or var])):
- self.env.append_value(dest or var, flags)
-
-@conf
-def cmd_to_list(self, cmd):
- """
- Detect if a command is written in pseudo shell like ``ccache g++`` and return a list.
-
- :param cmd: command
- :type cmd: a string or a list of string
- """
- if isinstance(cmd, str):
- if os.path.isfile(cmd):
- # do not take any risk
- return [cmd]
- if os.sep == '/':
- return shlex.split(cmd)
- else:
- try:
- return shlex.split(cmd, posix=False)
- except TypeError:
- # Python 2.5 on windows?
- return shlex.split(cmd)
- return cmd
-
-@conf
-def check_waf_version(self, mini='1.9.99', maxi='2.1.0', **kw):
- """
- Raise a Configuration error if the Waf version does not strictly match the given bounds::
-
- conf.check_waf_version(mini='1.9.99', maxi='2.1.0')
-
- :type mini: number, tuple or string
- :param mini: Minimum required version
- :type maxi: number, tuple or string
- :param maxi: Maximum allowed version
- """
- self.start_msg('Checking for waf version in %s-%s' % (str(mini), str(maxi)), **kw)
- ver = Context.HEXVERSION
- if Utils.num2ver(mini) > ver:
- self.fatal('waf version should be at least %r (%r found)' % (Utils.num2ver(mini), ver))
- if Utils.num2ver(maxi) < ver:
- self.fatal('waf version should be at most %r (%r found)' % (Utils.num2ver(maxi), ver))
- self.end_msg('ok', **kw)
-
-@conf
-def find_file(self, filename, path_list=[]):
- """
- Find a file in a list of paths
-
- :param filename: name of the file to search for
- :param path_list: list of directories to search
- :return: the first matching filename; else a configuration exception is raised
- """
- for n in Utils.to_list(filename):
- for d in Utils.to_list(path_list):
- p = os.path.expanduser(os.path.join(d, n))
- if os.path.exists(p):
- return p
- self.fatal('Could not find %r' % filename)
-
-@conf
-def find_program(self, filename, **kw):
- """
- Search for a program on the operating system
-
- When var is used, you may set os.environ[var] to help find a specific program version, for example::
-
- $ CC='ccache gcc' waf configure
-
- :param path_list: paths to use for searching
- :type param_list: list of string
- :param var: store the result to conf.env[var] where var defaults to filename.upper() if not provided; the result is stored as a list of strings
- :type var: string
- :param value: obtain the program from the value passed exclusively
- :type value: list or string (list is preferred)
- :param exts: list of extensions for the binary (do not add an extension for portability)
- :type exts: list of string
- :param msg: name to display in the log, by default filename is used
- :type msg: string
- :param interpreter: interpreter for the program
- :type interpreter: ConfigSet variable key
- :raises: :py:class:`waflib.Errors.ConfigurationError`
- """
-
- exts = kw.get('exts', Utils.is_win32 and '.exe,.com,.bat,.cmd' or ',.sh,.pl,.py')
-
- environ = kw.get('environ', getattr(self, 'environ', os.environ))
-
- ret = ''
-
- filename = Utils.to_list(filename)
- msg = kw.get('msg', ', '.join(filename))
-
- var = kw.get('var', '')
- if not var:
- var = re.sub(r'[-.]', '_', filename[0].upper())
-
- path_list = kw.get('path_list', '')
- if path_list:
- path_list = Utils.to_list(path_list)
- else:
- path_list = environ.get('PATH', '').split(os.pathsep)
-
- if kw.get('value'):
- # user-provided in command-line options and passed to find_program
- ret = self.cmd_to_list(kw['value'])
- elif environ.get(var):
- # user-provided in the os environment
- ret = self.cmd_to_list(environ[var])
- elif self.env[var]:
- # a default option in the wscript file
- ret = self.cmd_to_list(self.env[var])
- else:
- if not ret:
- ret = self.find_binary(filename, exts.split(','), path_list)
- if not ret and Utils.winreg:
- ret = Utils.get_registry_app_path(Utils.winreg.HKEY_CURRENT_USER, filename)
- if not ret and Utils.winreg:
- ret = Utils.get_registry_app_path(Utils.winreg.HKEY_LOCAL_MACHINE, filename)
- ret = self.cmd_to_list(ret)
-
- if ret:
- if len(ret) == 1:
- retmsg = ret[0]
- else:
- retmsg = ret
- else:
- retmsg = False
-
- self.msg('Checking for program %r' % msg, retmsg, **kw)
- if not kw.get('quiet'):
- self.to_log('find program=%r paths=%r var=%r -> %r' % (filename, path_list, var, ret))
-
- if not ret:
- self.fatal(kw.get('errmsg', '') or 'Could not find the program %r' % filename)
-
- interpreter = kw.get('interpreter')
- if interpreter is None:
- if not Utils.check_exe(ret[0], env=environ):
- self.fatal('Program %r is not executable' % ret)
- self.env[var] = ret
- else:
- self.env[var] = self.env[interpreter] + ret
-
- return ret
-
-@conf
-def find_binary(self, filenames, exts, paths):
- for f in filenames:
- for ext in exts:
- exe_name = f + ext
- if os.path.isabs(exe_name):
- if os.path.isfile(exe_name):
- return exe_name
- else:
- for path in paths:
- x = os.path.expanduser(os.path.join(path, exe_name))
- if os.path.isfile(x):
- return x
- return None
-
-@conf
-def run_build(self, *k, **kw):
- """
- Create a temporary build context to execute a build. A reference to that build
- context is kept on self.test_bld for debugging purposes, and you should not rely
- on it too much (read the note on the cache below).
- The parameters given in the arguments to this function are passed as arguments for
- a single task generator created in the build. Only three parameters are obligatory:
-
- :param features: features to pass to a task generator created in the build
- :type features: list of string
- :param compile_filename: file to create for the compilation (default: *test.c*)
- :type compile_filename: string
- :param code: code to write in the filename to compile
- :type code: string
-
- Though this function returns *0* by default, the build may set an attribute named *retval* on the
- build context object to return a particular value. See :py:func:`waflib.Tools.c_config.test_exec_fun` for example.
-
- This function also provides a limited cache. To use it, provide the following option::
-
- def options(opt):
- opt.add_option('--confcache', dest='confcache', default=0,
- action='count', help='Use a configuration cache')
-
- And execute the configuration with the following command-line::
-
- $ waf configure --confcache
-
- """
- lst = [str(v) for (p, v) in kw.items() if p != 'env']
- h = Utils.h_list(lst)
- dir = self.bldnode.abspath() + os.sep + (not Utils.is_win32 and '.' or '') + 'conf_check_' + Utils.to_hex(h)
-
- try:
- os.makedirs(dir)
- except OSError:
- pass
-
- try:
- os.stat(dir)
- except OSError:
- self.fatal('cannot use the configuration test folder %r' % dir)
-
- cachemode = getattr(Options.options, 'confcache', None)
- if cachemode == 1:
- try:
- proj = ConfigSet.ConfigSet(os.path.join(dir, 'cache_run_build'))
- except EnvironmentError:
- pass
- else:
- ret = proj['cache_run_build']
- if isinstance(ret, str) and ret.startswith('Test does not build'):
- self.fatal(ret)
- return ret
-
- bdir = os.path.join(dir, 'testbuild')
-
- if not os.path.exists(bdir):
- os.makedirs(bdir)
-
- cls_name = kw.get('run_build_cls') or getattr(self, 'run_build_cls', 'build')
- self.test_bld = bld = Context.create_context(cls_name, top_dir=dir, out_dir=bdir)
- bld.init_dirs()
- bld.progress_bar = 0
- bld.targets = '*'
-
- bld.logger = self.logger
- bld.all_envs.update(self.all_envs) # not really necessary
- bld.env = kw['env']
-
- bld.kw = kw
- bld.conf = self
- kw['build_fun'](bld)
- ret = -1
- try:
- try:
- bld.compile()
- except Errors.WafError:
- ret = 'Test does not build: %s' % traceback.format_exc()
- self.fatal(ret)
- else:
- ret = getattr(bld, 'retval', 0)
- finally:
- if cachemode == 1:
- # cache the results each time
- proj = ConfigSet.ConfigSet()
- proj['cache_run_build'] = ret
- proj.store(os.path.join(dir, 'cache_run_build'))
- else:
- shutil.rmtree(dir)
- return ret
-
-@conf
-def ret_msg(self, msg, args):
- if isinstance(msg, str):
- return msg
- return msg(args)
-
-@conf
-def test(self, *k, **kw):
-
- if not 'env' in kw:
- kw['env'] = self.env.derive()
-
- # validate_c for example
- if kw.get('validate'):
- kw['validate'](kw)
-
- self.start_msg(kw['msg'], **kw)
- ret = None
- try:
- ret = self.run_build(*k, **kw)
- except self.errors.ConfigurationError:
- self.end_msg(kw['errmsg'], 'YELLOW', **kw)
- if Logs.verbose > 1:
- raise
- else:
- self.fatal('The configuration failed')
- else:
- kw['success'] = ret
-
- if kw.get('post_check'):
- ret = kw['post_check'](kw)
-
- if ret:
- self.end_msg(kw['errmsg'], 'YELLOW', **kw)
- self.fatal('The configuration failed %r' % ret)
- else:
- self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
- return ret
-
diff --git a/waflib/Context.py b/waflib/Context.py
deleted file mode 100644
index bb47c92..0000000
--- a/waflib/Context.py
+++ /dev/null
@@ -1,737 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010-2018 (ita)
-
-"""
-Classes and functions enabling the command system
-"""
-
-import os, re, imp, sys
-from waflib import Utils, Errors, Logs
-import waflib.Node
-
-# the following 3 constants are updated on each new release (do not touch)
-HEXVERSION=0x2000b00
-"""Constant updated on new releases"""
-
-WAFVERSION="2.0.11"
-"""Constant updated on new releases"""
-
-WAFREVISION="a97f6fb0941091b4966b625f15ec32fa783a8bec"
-"""Git revision when the waf version is updated"""
-
-ABI = 20
-"""Version of the build data cache file format (used in :py:const:`waflib.Context.DBFILE`)"""
-
-DBFILE = '.wafpickle-%s-%d-%d' % (sys.platform, sys.hexversion, ABI)
-"""Name of the pickle file for storing the build data"""
-
-APPNAME = 'APPNAME'
-"""Default application name (used by ``waf dist``)"""
-
-VERSION = 'VERSION'
-"""Default application version (used by ``waf dist``)"""
-
-TOP = 'top'
-"""The variable name for the top-level directory in wscript files"""
-
-OUT = 'out'
-"""The variable name for the output directory in wscript files"""
-
-WSCRIPT_FILE = 'wscript'
-"""Name of the waf script files"""
-
-launch_dir = ''
-"""Directory from which waf has been called"""
-run_dir = ''
-"""Location of the wscript file to use as the entry point"""
-top_dir = ''
-"""Location of the project directory (top), if the project was configured"""
-out_dir = ''
-"""Location of the build directory (out), if the project was configured"""
-waf_dir = ''
-"""Directory containing the waf modules"""
-
-default_encoding = Utils.console_encoding()
-"""Encoding to use when reading outputs from other processes"""
-
-g_module = None
-"""
-Module representing the top-level wscript file (see :py:const:`waflib.Context.run_dir`)
-"""
-
-STDOUT = 1
-STDERR = -1
-BOTH = 0
-
-classes = []
-"""
-List of :py:class:`waflib.Context.Context` subclasses that can be used as waf commands. The classes
-are added automatically by a metaclass.
-"""
-
-def create_context(cmd_name, *k, **kw):
- """
- Returns a new :py:class:`waflib.Context.Context` instance corresponding to the given command.
- Used in particular by :py:func:`waflib.Scripting.run_command`
-
- :param cmd_name: command name
- :type cmd_name: string
- :param k: arguments to give to the context class initializer
- :type k: list
- :param k: keyword arguments to give to the context class initializer
- :type k: dict
- :return: Context object
- :rtype: :py:class:`waflib.Context.Context`
- """
- for x in classes:
- if x.cmd == cmd_name:
- return x(*k, **kw)
- ctx = Context(*k, **kw)
- ctx.fun = cmd_name
- return ctx
-
-class store_context(type):
- """
- Metaclass that registers command classes into the list :py:const:`waflib.Context.classes`
- Context classes must provide an attribute 'cmd' representing the command name, and a function
- attribute 'fun' representing the function name that the command uses.
- """
- def __init__(cls, name, bases, dct):
- super(store_context, cls).__init__(name, bases, dct)
- name = cls.__name__
-
- if name in ('ctx', 'Context'):
- return
-
- try:
- cls.cmd
- except AttributeError:
- raise Errors.WafError('Missing command for the context class %r (cmd)' % name)
-
- if not getattr(cls, 'fun', None):
- cls.fun = cls.cmd
-
- classes.insert(0, cls)
-
-ctx = store_context('ctx', (object,), {})
-"""Base class for all :py:class:`waflib.Context.Context` classes"""
-
-class Context(ctx):
- """
- Default context for waf commands, and base class for new command contexts.
-
- Context objects are passed to top-level functions::
-
- def foo(ctx):
- print(ctx.__class__.__name__) # waflib.Context.Context
-
- Subclasses must define the class attributes 'cmd' and 'fun':
-
- :param cmd: command to execute as in ``waf cmd``
- :type cmd: string
- :param fun: function name to execute when the command is called
- :type fun: string
-
- .. inheritance-diagram:: waflib.Context.Context waflib.Build.BuildContext waflib.Build.InstallContext waflib.Build.UninstallContext waflib.Build.StepContext waflib.Build.ListContext waflib.Configure.ConfigurationContext waflib.Scripting.Dist waflib.Scripting.DistCheck waflib.Build.CleanContext
-
- """
-
- errors = Errors
- """
- Shortcut to :py:mod:`waflib.Errors` provided for convenience
- """
-
- tools = {}
- """
- A module cache for wscript files; see :py:meth:`Context.Context.load`
- """
-
- def __init__(self, **kw):
- try:
- rd = kw['run_dir']
- except KeyError:
- rd = run_dir
-
- # binds the context to the nodes in use to avoid a context singleton
- self.node_class = type('Nod3', (waflib.Node.Node,), {})
- self.node_class.__module__ = 'waflib.Node'
- self.node_class.ctx = self
-
- self.root = self.node_class('', None)
- self.cur_script = None
- self.path = self.root.find_dir(rd)
-
- self.stack_path = []
- self.exec_dict = {'ctx':self, 'conf':self, 'bld':self, 'opt':self}
- self.logger = None
-
- def finalize(self):
- """
- Called to free resources such as logger files
- """
- try:
- logger = self.logger
- except AttributeError:
- pass
- else:
- Logs.free_logger(logger)
- delattr(self, 'logger')
-
- def load(self, tool_list, *k, **kw):
- """
- Loads a Waf tool as a module, and try calling the function named :py:const:`waflib.Context.Context.fun`
- from it. A ``tooldir`` argument may be provided as a list of module paths.
-
- :param tool_list: list of Waf tool names to load
- :type tool_list: list of string or space-separated string
- """
- tools = Utils.to_list(tool_list)
- path = Utils.to_list(kw.get('tooldir', ''))
- with_sys_path = kw.get('with_sys_path', True)
-
- for t in tools:
- module = load_tool(t, path, with_sys_path=with_sys_path)
- fun = getattr(module, kw.get('name', self.fun), None)
- if fun:
- fun(self)
-
- def execute(self):
- """
- Here, it calls the function name in the top-level wscript file. Most subclasses
- redefine this method to provide additional functionality.
- """
- self.recurse([os.path.dirname(g_module.root_path)])
-
- def pre_recurse(self, node):
- """
- Method executed immediately before a folder is read by :py:meth:`waflib.Context.Context.recurse`.
- The current script is bound as a Node object on ``self.cur_script``, and the current path
- is bound to ``self.path``
-
- :param node: script
- :type node: :py:class:`waflib.Node.Node`
- """
- self.stack_path.append(self.cur_script)
-
- self.cur_script = node
- self.path = node.parent
-
- def post_recurse(self, node):
- """
- Restores ``self.cur_script`` and ``self.path`` right after :py:meth:`waflib.Context.Context.recurse` terminates.
-
- :param node: script
- :type node: :py:class:`waflib.Node.Node`
- """
- self.cur_script = self.stack_path.pop()
- if self.cur_script:
- self.path = self.cur_script.parent
-
- def recurse(self, dirs, name=None, mandatory=True, once=True, encoding=None):
- """
- Runs user-provided functions from the supplied list of directories.
- The directories can be either absolute, or relative to the directory
- of the wscript file
-
- The methods :py:meth:`waflib.Context.Context.pre_recurse` and
- :py:meth:`waflib.Context.Context.post_recurse` are called immediately before
- and after a script has been executed.
-
- :param dirs: List of directories to visit
- :type dirs: list of string or space-separated string
- :param name: Name of function to invoke from the wscript
- :type name: string
- :param mandatory: whether sub wscript files are required to exist
- :type mandatory: bool
- :param once: read the script file once for a particular context
- :type once: bool
- """
- try:
- cache = self.recurse_cache
- except AttributeError:
- cache = self.recurse_cache = {}
-
- for d in Utils.to_list(dirs):
-
- if not os.path.isabs(d):
- # absolute paths only
- d = os.path.join(self.path.abspath(), d)
-
- WSCRIPT = os.path.join(d, WSCRIPT_FILE)
- WSCRIPT_FUN = WSCRIPT + '_' + (name or self.fun)
-
- node = self.root.find_node(WSCRIPT_FUN)
- if node and (not once or node not in cache):
- cache[node] = True
- self.pre_recurse(node)
- try:
- function_code = node.read('rU', encoding)
- exec(compile(function_code, node.abspath(), 'exec'), self.exec_dict)
- finally:
- self.post_recurse(node)
- elif not node:
- node = self.root.find_node(WSCRIPT)
- tup = (node, name or self.fun)
- if node and (not once or tup not in cache):
- cache[tup] = True
- self.pre_recurse(node)
- try:
- wscript_module = load_module(node.abspath(), encoding=encoding)
- user_function = getattr(wscript_module, (name or self.fun), None)
- if not user_function:
- if not mandatory:
- continue
- raise Errors.WafError('No function %r defined in %s' % (name or self.fun, node.abspath()))
- user_function(self)
- finally:
- self.post_recurse(node)
- elif not node:
- if not mandatory:
- continue
- try:
- os.listdir(d)
- except OSError:
- raise Errors.WafError('Cannot read the folder %r' % d)
- raise Errors.WafError('No wscript file in directory %s' % d)
-
- def log_command(self, cmd, kw):
- if Logs.verbose:
- fmt = os.environ.get('WAF_CMD_FORMAT')
- if fmt == 'string':
- if not isinstance(cmd, str):
- cmd = Utils.shell_escape(cmd)
- Logs.debug('runner: %r', cmd)
- Logs.debug('runner_env: kw=%s', kw)
-
- def exec_command(self, cmd, **kw):
- """
- Runs an external process and returns the exit status::
-
- def run(tsk):
- ret = tsk.generator.bld.exec_command('touch foo.txt')
- return ret
-
- If the context has the attribute 'log', then captures and logs the process stderr/stdout.
- Unlike :py:meth:`waflib.Context.Context.cmd_and_log`, this method does not return the
- stdout/stderr values captured.
-
- :param cmd: command argument for subprocess.Popen
- :type cmd: string or list
- :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
- :type kw: dict
- :returns: process exit status
- :rtype: integer
- :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
- :raises: :py:class:`waflib.Errors.WafError` in case of execution failure
- """
- subprocess = Utils.subprocess
- kw['shell'] = isinstance(cmd, str)
- self.log_command(cmd, kw)
-
- if self.logger:
- self.logger.info(cmd)
-
- if 'stdout' not in kw:
- kw['stdout'] = subprocess.PIPE
- if 'stderr' not in kw:
- kw['stderr'] = subprocess.PIPE
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError('Program %s not found!' % cmd[0])
-
- cargs = {}
- if 'timeout' in kw:
- if sys.hexversion >= 0x3030000:
- cargs['timeout'] = kw['timeout']
- if not 'start_new_session' in kw:
- kw['start_new_session'] = True
- del kw['timeout']
- if 'input' in kw:
- if kw['input']:
- cargs['input'] = kw['input']
- kw['stdin'] = subprocess.PIPE
- del kw['input']
-
- if 'cwd' in kw:
- if not isinstance(kw['cwd'], str):
- kw['cwd'] = kw['cwd'].abspath()
-
- encoding = kw.pop('decode_as', default_encoding)
-
- try:
- ret, out, err = Utils.run_process(cmd, kw, cargs)
- except Exception as e:
- raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
-
- if out:
- if not isinstance(out, str):
- out = out.decode(encoding, errors='replace')
- if self.logger:
- self.logger.debug('out: %s', out)
- else:
- Logs.info(out, extra={'stream':sys.stdout, 'c1': ''})
- if err:
- if not isinstance(err, str):
- err = err.decode(encoding, errors='replace')
- if self.logger:
- self.logger.error('err: %s' % err)
- else:
- Logs.info(err, extra={'stream':sys.stderr, 'c1': ''})
-
- return ret
-
- def cmd_and_log(self, cmd, **kw):
- """
- Executes a process and returns stdout/stderr if the execution is successful.
- An exception is thrown when the exit status is non-0. In that case, both stderr and stdout
- will be bound to the WafError object (configuration tests)::
-
- def configure(conf):
- out = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.STDOUT, quiet=waflib.Context.BOTH)
- (out, err) = conf.cmd_and_log(['echo', 'hello'], output=waflib.Context.BOTH)
- (out, err) = conf.cmd_and_log(cmd, input='\\n'.encode(), output=waflib.Context.STDOUT)
- try:
- conf.cmd_and_log(['which', 'someapp'], output=waflib.Context.BOTH)
- except Errors.WafError as e:
- print(e.stdout, e.stderr)
-
- :param cmd: args for subprocess.Popen
- :type cmd: list or string
- :param kw: keyword arguments for subprocess.Popen. The parameters input/timeout will be passed to wait/communicate.
- :type kw: dict
- :returns: a tuple containing the contents of stdout and stderr
- :rtype: string
- :raises: :py:class:`waflib.Errors.WafError` if an invalid executable is specified for a non-shell process
- :raises: :py:class:`waflib.Errors.WafError` in case of execution failure; stdout/stderr/returncode are bound to the exception object
- """
- subprocess = Utils.subprocess
- kw['shell'] = isinstance(cmd, str)
- self.log_command(cmd, kw)
-
- quiet = kw.pop('quiet', None)
- to_ret = kw.pop('output', STDOUT)
-
- if Logs.verbose and not kw['shell'] and not Utils.check_exe(cmd[0]):
- raise Errors.WafError('Program %r not found!' % cmd[0])
-
- kw['stdout'] = kw['stderr'] = subprocess.PIPE
- if quiet is None:
- self.to_log(cmd)
-
- cargs = {}
- if 'timeout' in kw:
- if sys.hexversion >= 0x3030000:
- cargs['timeout'] = kw['timeout']
- if not 'start_new_session' in kw:
- kw['start_new_session'] = True
- del kw['timeout']
- if 'input' in kw:
- if kw['input']:
- cargs['input'] = kw['input']
- kw['stdin'] = subprocess.PIPE
- del kw['input']
-
- if 'cwd' in kw:
- if not isinstance(kw['cwd'], str):
- kw['cwd'] = kw['cwd'].abspath()
-
- encoding = kw.pop('decode_as', default_encoding)
-
- try:
- ret, out, err = Utils.run_process(cmd, kw, cargs)
- except Exception as e:
- raise Errors.WafError('Execution failure: %s' % str(e), ex=e)
-
- if not isinstance(out, str):
- out = out.decode(encoding, errors='replace')
- if not isinstance(err, str):
- err = err.decode(encoding, errors='replace')
-
- if out and quiet != STDOUT and quiet != BOTH:
- self.to_log('out: %s' % out)
- if err and quiet != STDERR and quiet != BOTH:
- self.to_log('err: %s' % err)
-
- if ret:
- e = Errors.WafError('Command %r returned %r' % (cmd, ret))
- e.returncode = ret
- e.stderr = err
- e.stdout = out
- raise e
-
- if to_ret == BOTH:
- return (out, err)
- elif to_ret == STDERR:
- return err
- return out
-
- def fatal(self, msg, ex=None):
- """
- Prints an error message in red and stops command execution; this is
- usually used in the configuration section::
-
- def configure(conf):
- conf.fatal('a requirement is missing')
-
- :param msg: message to display
- :type msg: string
- :param ex: optional exception object
- :type ex: exception
- :raises: :py:class:`waflib.Errors.ConfigurationError`
- """
- if self.logger:
- self.logger.info('from %s: %s' % (self.path.abspath(), msg))
- try:
- logfile = self.logger.handlers[0].baseFilename
- except AttributeError:
- pass
- else:
- if os.environ.get('WAF_PRINT_FAILURE_LOG'):
- # see #1930
- msg = 'Log from (%s):\n%s\n' % (logfile, Utils.readf(logfile))
- else:
- msg = '%s\n(complete log in %s)' % (msg, logfile)
- raise self.errors.ConfigurationError(msg, ex=ex)
-
- def to_log(self, msg):
- """
- Logs information to the logger (if present), or to stderr.
- Empty messages are not printed::
-
- def build(bld):
- bld.to_log('starting the build')
-
- Provide a logger on the context class or override this method if necessary.
-
- :param msg: message
- :type msg: string
- """
- if not msg:
- return
- if self.logger:
- self.logger.info(msg)
- else:
- sys.stderr.write(str(msg))
- sys.stderr.flush()
-
-
- def msg(self, *k, **kw):
- """
- Prints a configuration message of the form ``msg: result``.
- The second part of the message will be in colors. The output
- can be disabled easly by setting ``in_msg`` to a positive value::
-
- def configure(conf):
- self.in_msg = 1
- conf.msg('Checking for library foo', 'ok')
- # no output
-
- :param msg: message to display to the user
- :type msg: string
- :param result: result to display
- :type result: string or boolean
- :param color: color to use, see :py:const:`waflib.Logs.colors_lst`
- :type color: string
- """
- try:
- msg = kw['msg']
- except KeyError:
- msg = k[0]
-
- self.start_msg(msg, **kw)
-
- try:
- result = kw['result']
- except KeyError:
- result = k[1]
-
- color = kw.get('color')
- if not isinstance(color, str):
- color = result and 'GREEN' or 'YELLOW'
-
- self.end_msg(result, color, **kw)
-
- def start_msg(self, *k, **kw):
- """
- Prints the beginning of a 'Checking for xxx' message. See :py:meth:`waflib.Context.Context.msg`
- """
- if kw.get('quiet'):
- return
-
- msg = kw.get('msg') or k[0]
- try:
- if self.in_msg:
- self.in_msg += 1
- return
- except AttributeError:
- self.in_msg = 0
- self.in_msg += 1
-
- try:
- self.line_just = max(self.line_just, len(msg))
- except AttributeError:
- self.line_just = max(40, len(msg))
- for x in (self.line_just * '-', msg):
- self.to_log(x)
- Logs.pprint('NORMAL', "%s :" % msg.ljust(self.line_just), sep='')
-
- def end_msg(self, *k, **kw):
- """Prints the end of a 'Checking for' message. See :py:meth:`waflib.Context.Context.msg`"""
- if kw.get('quiet'):
- return
- self.in_msg -= 1
- if self.in_msg:
- return
-
- result = kw.get('result') or k[0]
-
- defcolor = 'GREEN'
- if result is True:
- msg = 'ok'
- elif not result:
- msg = 'not found'
- defcolor = 'YELLOW'
- else:
- msg = str(result)
-
- self.to_log(msg)
- try:
- color = kw['color']
- except KeyError:
- if len(k) > 1 and k[1] in Logs.colors_lst:
- # compatibility waf 1.7
- color = k[1]
- else:
- color = defcolor
- Logs.pprint(color, msg)
-
- def load_special_tools(self, var, ban=[]):
- """
- Loads third-party extensions modules for certain programming languages
- by trying to list certain files in the extras/ directory. This method
- is typically called once for a programming language group, see for
- example :py:mod:`waflib.Tools.compiler_c`
-
- :param var: glob expression, for example 'cxx\_\*.py'
- :type var: string
- :param ban: list of exact file names to exclude
- :type ban: list of string
- """
- if os.path.isdir(waf_dir):
- lst = self.root.find_node(waf_dir).find_node('waflib/extras').ant_glob(var)
- for x in lst:
- if not x.name in ban:
- load_tool(x.name.replace('.py', ''))
- else:
- from zipfile import PyZipFile
- waflibs = PyZipFile(waf_dir)
- lst = waflibs.namelist()
- for x in lst:
- if not re.match('waflib/extras/%s' % var.replace('*', '.*'), var):
- continue
- f = os.path.basename(x)
- doban = False
- for b in ban:
- r = b.replace('*', '.*')
- if re.match(r, f):
- doban = True
- if not doban:
- f = f.replace('.py', '')
- load_tool(f)
-
-cache_modules = {}
-"""
-Dictionary holding already loaded modules (wscript), indexed by their absolute path.
-The modules are added automatically by :py:func:`waflib.Context.load_module`
-"""
-
-def load_module(path, encoding=None):
- """
- Loads a wscript file as a python module. This method caches results in :py:attr:`waflib.Context.cache_modules`
-
- :param path: file path
- :type path: string
- :return: Loaded Python module
- :rtype: module
- """
- try:
- return cache_modules[path]
- except KeyError:
- pass
-
- module = imp.new_module(WSCRIPT_FILE)
- try:
- code = Utils.readf(path, m='rU', encoding=encoding)
- except EnvironmentError:
- raise Errors.WafError('Could not read the file %r' % path)
-
- module_dir = os.path.dirname(path)
- sys.path.insert(0, module_dir)
- try:
- exec(compile(code, path, 'exec'), module.__dict__)
- finally:
- sys.path.remove(module_dir)
-
- cache_modules[path] = module
- return module
-
-def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
- """
- Importx a Waf tool as a python module, and stores it in the dict :py:const:`waflib.Context.Context.tools`
-
- :type tool: string
- :param tool: Name of the tool
- :type tooldir: list
- :param tooldir: List of directories to search for the tool module
- :type with_sys_path: boolean
- :param with_sys_path: whether or not to search the regular sys.path, besides waf_dir and potentially given tooldirs
- """
- if tool == 'java':
- tool = 'javaw' # jython
- else:
- tool = tool.replace('++', 'xx')
-
- if not with_sys_path:
- back_path = sys.path
- sys.path = []
- try:
- if tooldir:
- assert isinstance(tooldir, list)
- sys.path = tooldir + sys.path
- try:
- __import__(tool)
- except ImportError as e:
- e.waf_sys_path = list(sys.path)
- raise
- finally:
- for d in tooldir:
- sys.path.remove(d)
- ret = sys.modules[tool]
- Context.tools[tool] = ret
- return ret
- else:
- if not with_sys_path:
- sys.path.insert(0, waf_dir)
- try:
- for x in ('waflib.Tools.%s', 'waflib.extras.%s', 'waflib.%s', '%s'):
- try:
- __import__(x % tool)
- break
- except ImportError:
- x = None
- else: # raise an exception
- __import__(tool)
- except ImportError as e:
- e.waf_sys_path = list(sys.path)
- raise
- finally:
- if not with_sys_path:
- sys.path.remove(waf_dir)
- ret = sys.modules[x % tool]
- Context.tools[tool] = ret
- return ret
- finally:
- if not with_sys_path:
- sys.path += back_path
-
diff --git a/waflib/Errors.py b/waflib/Errors.py
deleted file mode 100644
index bf75c1b..0000000
--- a/waflib/Errors.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010-2018 (ita)
-
-"""
-Exceptions used in the Waf code
-"""
-
-import traceback, sys
-
-class WafError(Exception):
- """Base class for all Waf errors"""
- def __init__(self, msg='', ex=None):
- """
- :param msg: error message
- :type msg: string
- :param ex: exception causing this error (optional)
- :type ex: exception
- """
- Exception.__init__(self)
- self.msg = msg
- assert not isinstance(msg, Exception)
-
- self.stack = []
- if ex:
- if not msg:
- self.msg = str(ex)
- if isinstance(ex, WafError):
- self.stack = ex.stack
- else:
- self.stack = traceback.extract_tb(sys.exc_info()[2])
- self.stack += traceback.extract_stack()[:-1]
- self.verbose_msg = ''.join(traceback.format_list(self.stack))
-
- def __str__(self):
- return str(self.msg)
-
-class BuildError(WafError):
- """Error raised during the build and install phases"""
- def __init__(self, error_tasks=[]):
- """
- :param error_tasks: tasks that could not complete normally
- :type error_tasks: list of task objects
- """
- self.tasks = error_tasks
- WafError.__init__(self, self.format_error())
-
- def format_error(self):
- """Formats the error messages from the tasks that failed"""
- lst = ['Build failed']
- for tsk in self.tasks:
- txt = tsk.format_error()
- if txt:
- lst.append(txt)
- return '\n'.join(lst)
-
-class ConfigurationError(WafError):
- """Configuration exception raised in particular by :py:meth:`waflib.Context.Context.fatal`"""
- pass
-
-class TaskRescan(WafError):
- """Task-specific exception type signalling required signature recalculations"""
- pass
-
-class TaskNotReady(WafError):
- """Task-specific exception type signalling that task signatures cannot be computed"""
- pass
-
diff --git a/waflib/Logs.py b/waflib/Logs.py
deleted file mode 100644
index 2a47516..0000000
--- a/waflib/Logs.py
+++ /dev/null
@@ -1,379 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-logging, colors, terminal width and pretty-print
-"""
-
-import os, re, traceback, sys
-from waflib import Utils, ansiterm
-
-if not os.environ.get('NOSYNC', False):
- # synchronized output is nearly mandatory to prevent garbled output
- if sys.stdout.isatty() and id(sys.stdout) == id(sys.__stdout__):
- sys.stdout = ansiterm.AnsiTerm(sys.stdout)
- if sys.stderr.isatty() and id(sys.stderr) == id(sys.__stderr__):
- sys.stderr = ansiterm.AnsiTerm(sys.stderr)
-
-# import the logging module after since it holds a reference on sys.stderr
-# in case someone uses the root logger
-import logging
-
-LOG_FORMAT = os.environ.get('WAF_LOG_FORMAT', '%(asctime)s %(c1)s%(zone)s%(c2)s %(message)s')
-HOUR_FORMAT = os.environ.get('WAF_HOUR_FORMAT', '%H:%M:%S')
-
-zones = []
-"""
-See :py:class:`waflib.Logs.log_filter`
-"""
-
-verbose = 0
-"""
-Global verbosity level, see :py:func:`waflib.Logs.debug` and :py:func:`waflib.Logs.error`
-"""
-
-colors_lst = {
-'USE' : True,
-'BOLD' :'\x1b[01;1m',
-'RED' :'\x1b[01;31m',
-'GREEN' :'\x1b[32m',
-'YELLOW':'\x1b[33m',
-'PINK' :'\x1b[35m',
-'BLUE' :'\x1b[01;34m',
-'CYAN' :'\x1b[36m',
-'GREY' :'\x1b[37m',
-'NORMAL':'\x1b[0m',
-'cursor_on' :'\x1b[?25h',
-'cursor_off' :'\x1b[?25l',
-}
-
-indicator = '\r\x1b[K%s%s%s'
-
-try:
- unicode
-except NameError:
- unicode = None
-
-def enable_colors(use):
- """
- If *1* is given, then the system will perform a few verifications
- before enabling colors, such as checking whether the interpreter
- is running in a terminal. A value of zero will disable colors,
- and a value above *1* will force colors.
-
- :param use: whether to enable colors or not
- :type use: integer
- """
- if use == 1:
- if not (sys.stderr.isatty() or sys.stdout.isatty()):
- use = 0
- if Utils.is_win32 and os.name != 'java':
- term = os.environ.get('TERM', '') # has ansiterm
- else:
- term = os.environ.get('TERM', 'dumb')
-
- if term in ('dumb', 'emacs'):
- use = 0
-
- if use >= 1:
- os.environ['TERM'] = 'vt100'
-
- colors_lst['USE'] = use
-
-# If console packages are available, replace the dummy function with a real
-# implementation
-try:
- get_term_cols = ansiterm.get_term_cols
-except AttributeError:
- def get_term_cols():
- return 80
-
-get_term_cols.__doc__ = """
- Returns the console width in characters.
-
- :return: the number of characters per line
- :rtype: int
- """
-
-def get_color(cl):
- """
- Returns the ansi sequence corresponding to the given color name.
- An empty string is returned when coloring is globally disabled.
-
- :param cl: color name in capital letters
- :type cl: string
- """
- if colors_lst['USE']:
- return colors_lst.get(cl, '')
- return ''
-
-class color_dict(object):
- """attribute-based color access, eg: colors.PINK"""
- def __getattr__(self, a):
- return get_color(a)
- def __call__(self, a):
- return get_color(a)
-
-colors = color_dict()
-
-re_log = re.compile(r'(\w+): (.*)', re.M)
-class log_filter(logging.Filter):
- """
- Waf logs are of the form 'name: message', and can be filtered by 'waf --zones=name'.
- For example, the following::
-
- from waflib import Logs
- Logs.debug('test: here is a message')
-
- Will be displayed only when executing::
-
- $ waf --zones=test
- """
- def __init__(self, name=''):
- logging.Filter.__init__(self, name)
-
- def filter(self, rec):
- """
- Filters log records by zone and by logging level
-
- :param rec: log entry
- """
- rec.zone = rec.module
- if rec.levelno >= logging.INFO:
- return True
-
- m = re_log.match(rec.msg)
- if m:
- rec.zone = m.group(1)
- rec.msg = m.group(2)
-
- if zones:
- return getattr(rec, 'zone', '') in zones or '*' in zones
- elif not verbose > 2:
- return False
- return True
-
-class log_handler(logging.StreamHandler):
- """Dispatches messages to stderr/stdout depending on the severity level"""
- def emit(self, record):
- """
- Delegates the functionality to :py:meth:`waflib.Log.log_handler.emit_override`
- """
- # default implementation
- try:
- try:
- self.stream = record.stream
- except AttributeError:
- if record.levelno >= logging.WARNING:
- record.stream = self.stream = sys.stderr
- else:
- record.stream = self.stream = sys.stdout
- self.emit_override(record)
- self.flush()
- except (KeyboardInterrupt, SystemExit):
- raise
- except: # from the python library -_-
- self.handleError(record)
-
- def emit_override(self, record, **kw):
- """
- Writes the log record to the desired stream (stderr/stdout)
- """
- self.terminator = getattr(record, 'terminator', '\n')
- stream = self.stream
- if unicode:
- # python2
- msg = self.formatter.format(record)
- fs = '%s' + self.terminator
- try:
- if (isinstance(msg, unicode) and getattr(stream, 'encoding', None)):
- fs = fs.decode(stream.encoding)
- try:
- stream.write(fs % msg)
- except UnicodeEncodeError:
- stream.write((fs % msg).encode(stream.encoding))
- else:
- stream.write(fs % msg)
- except UnicodeError:
- stream.write((fs % msg).encode('utf-8'))
- else:
- logging.StreamHandler.emit(self, record)
-
-class formatter(logging.Formatter):
- """Simple log formatter which handles colors"""
- def __init__(self):
- logging.Formatter.__init__(self, LOG_FORMAT, HOUR_FORMAT)
-
- def format(self, rec):
- """
- Formats records and adds colors as needed. The records do not get
- a leading hour format if the logging level is above *INFO*.
- """
- try:
- msg = rec.msg.decode('utf-8')
- except Exception:
- msg = rec.msg
-
- use = colors_lst['USE']
- if (use == 1 and rec.stream.isatty()) or use == 2:
-
- c1 = getattr(rec, 'c1', None)
- if c1 is None:
- c1 = ''
- if rec.levelno >= logging.ERROR:
- c1 = colors.RED
- elif rec.levelno >= logging.WARNING:
- c1 = colors.YELLOW
- elif rec.levelno >= logging.INFO:
- c1 = colors.GREEN
- c2 = getattr(rec, 'c2', colors.NORMAL)
- msg = '%s%s%s' % (c1, msg, c2)
- else:
- # remove single \r that make long lines in text files
- # and other terminal commands
- msg = re.sub(r'\r(?!\n)|\x1B\[(K|.*?(m|h|l))', '', msg)
-
- if rec.levelno >= logging.INFO:
- # the goal of this is to format without the leading "Logs, hour" prefix
- if rec.args:
- return msg % rec.args
- return msg
-
- rec.msg = msg
- rec.c1 = colors.PINK
- rec.c2 = colors.NORMAL
- return logging.Formatter.format(self, rec)
-
-log = None
-"""global logger for Logs.debug, Logs.error, etc"""
-
-def debug(*k, **kw):
- """
- Wraps logging.debug and discards messages if the verbosity level :py:attr:`waflib.Logs.verbose` ≤ 0
- """
- if verbose:
- k = list(k)
- k[0] = k[0].replace('\n', ' ')
- log.debug(*k, **kw)
-
-def error(*k, **kw):
- """
- Wrap logging.errors, adds the stack trace when the verbosity level :py:attr:`waflib.Logs.verbose` ≥ 2
- """
- log.error(*k, **kw)
- if verbose > 2:
- st = traceback.extract_stack()
- if st:
- st = st[:-1]
- buf = []
- for filename, lineno, name, line in st:
- buf.append(' File %r, line %d, in %s' % (filename, lineno, name))
- if line:
- buf.append(' %s' % line.strip())
- if buf:
- log.error('\n'.join(buf))
-
-def warn(*k, **kw):
- """
- Wraps logging.warn
- """
- log.warn(*k, **kw)
-
-def info(*k, **kw):
- """
- Wraps logging.info
- """
- log.info(*k, **kw)
-
-def init_log():
- """
- Initializes the logger :py:attr:`waflib.Logs.log`
- """
- global log
- log = logging.getLogger('waflib')
- log.handlers = []
- log.filters = []
- hdlr = log_handler()
- hdlr.setFormatter(formatter())
- log.addHandler(hdlr)
- log.addFilter(log_filter())
- log.setLevel(logging.DEBUG)
-
-def make_logger(path, name):
- """
- Creates a simple logger, which is often used to redirect the context command output::
-
- from waflib import Logs
- bld.logger = Logs.make_logger('test.log', 'build')
- bld.check(header_name='sadlib.h', features='cxx cprogram', mandatory=False)
-
- # have the file closed immediately
- Logs.free_logger(bld.logger)
-
- # stop logging
- bld.logger = None
-
- The method finalize() of the command will try to free the logger, if any
-
- :param path: file name to write the log output to
- :type path: string
- :param name: logger name (loggers are reused)
- :type name: string
- """
- logger = logging.getLogger(name)
- if sys.hexversion > 0x3000000:
- encoding = sys.stdout.encoding
- else:
- encoding = None
- hdlr = logging.FileHandler(path, 'w', encoding=encoding)
- formatter = logging.Formatter('%(message)s')
- hdlr.setFormatter(formatter)
- logger.addHandler(hdlr)
- logger.setLevel(logging.DEBUG)
- return logger
-
-def make_mem_logger(name, to_log, size=8192):
- """
- Creates a memory logger to avoid writing concurrently to the main logger
- """
- from logging.handlers import MemoryHandler
- logger = logging.getLogger(name)
- hdlr = MemoryHandler(size, target=to_log)
- formatter = logging.Formatter('%(message)s')
- hdlr.setFormatter(formatter)
- logger.addHandler(hdlr)
- logger.memhandler = hdlr
- logger.setLevel(logging.DEBUG)
- return logger
-
-def free_logger(logger):
- """
- Frees the resources held by the loggers created through make_logger or make_mem_logger.
- This is used for file cleanup and for handler removal (logger objects are re-used).
- """
- try:
- for x in logger.handlers:
- x.close()
- logger.removeHandler(x)
- except Exception:
- pass
-
-def pprint(col, msg, label='', sep='\n'):
- """
- Prints messages in color immediately on stderr::
-
- from waflib import Logs
- Logs.pprint('RED', 'Something bad just happened')
-
- :param col: color name to use in :py:const:`Logs.colors_lst`
- :type col: string
- :param msg: message to display
- :type msg: string or a value that can be printed by %s
- :param label: a message to add after the colored output
- :type label: string
- :param sep: a string to append at the end (line separator)
- :type sep: string
- """
- info('%s%s%s %s', colors(col), msg, colors.NORMAL, label, extra={'terminator':sep})
-
diff --git a/waflib/Node.py b/waflib/Node.py
deleted file mode 100644
index 4ac1ea8..0000000
--- a/waflib/Node.py
+++ /dev/null
@@ -1,970 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Node: filesystem structure
-
-#. Each file/folder is represented by exactly one node.
-
-#. Some potential class properties are stored on :py:class:`waflib.Build.BuildContext` : nodes to depend on, etc.
- Unused class members can increase the `.wafpickle` file size sensibly.
-
-#. Node objects should never be created directly, use
- the methods :py:func:`Node.make_node` or :py:func:`Node.find_node` for the low-level operations
-
-#. The methods :py:func:`Node.find_resource`, :py:func:`Node.find_dir` :py:func:`Node.find_or_declare` must be
- used when a build context is present
-
-#. Each instance of :py:class:`waflib.Context.Context` has a unique :py:class:`Node` subclass required for serialization.
- (:py:class:`waflib.Node.Nod3`, see the :py:class:`waflib.Context.Context` initializer). A reference to the context
- owning a node is held as *self.ctx*
-"""
-
-import os, re, sys, shutil
-from waflib import Utils, Errors
-
-exclude_regs = '''
-**/*~
-**/#*#
-**/.#*
-**/%*%
-**/._*
-**/*.swp
-**/CVS
-**/CVS/**
-**/.cvsignore
-**/SCCS
-**/SCCS/**
-**/vssver.scc
-**/.svn
-**/.svn/**
-**/BitKeeper
-**/.git
-**/.git/**
-**/.gitignore
-**/.bzr
-**/.bzrignore
-**/.bzr/**
-**/.hg
-**/.hg/**
-**/_MTN
-**/_MTN/**
-**/.arch-ids
-**/{arch}
-**/_darcs
-**/_darcs/**
-**/.intlcache
-**/.DS_Store'''
-"""
-Ant patterns for files and folders to exclude while doing the
-recursive traversal in :py:meth:`waflib.Node.Node.ant_glob`
-"""
-
-def ant_matcher(s, ignorecase):
- reflags = re.I if ignorecase else 0
- ret = []
- for x in Utils.to_list(s):
- x = x.replace('\\', '/').replace('//', '/')
- if x.endswith('/'):
- x += '**'
- accu = []
- for k in x.split('/'):
- if k == '**':
- accu.append(k)
- else:
- k = k.replace('.', '[.]').replace('*','.*').replace('?', '.').replace('+', '\\+')
- k = '^%s$' % k
- try:
- exp = re.compile(k, flags=reflags)
- except Exception as e:
- raise Errors.WafError('Invalid pattern: %s' % k, e)
- else:
- accu.append(exp)
- ret.append(accu)
- return ret
-
-def ant_sub_filter(name, nn):
- ret = []
- for lst in nn:
- if not lst:
- pass
- elif lst[0] == '**':
- ret.append(lst)
- if len(lst) > 1:
- if lst[1].match(name):
- ret.append(lst[2:])
- else:
- ret.append([])
- elif lst[0].match(name):
- ret.append(lst[1:])
- return ret
-
-def ant_sub_matcher(name, pats):
- nacc = ant_sub_filter(name, pats[0])
- nrej = ant_sub_filter(name, pats[1])
- if [] in nrej:
- nacc = []
- return [nacc, nrej]
-
-class Node(object):
- """
- This class is organized in two parts:
-
- * The basic methods meant for filesystem access (compute paths, create folders, etc)
- * The methods bound to a :py:class:`waflib.Build.BuildContext` (require ``bld.srcnode`` and ``bld.bldnode``)
- """
-
- dict_class = dict
- """
- Subclasses can provide a dict class to enable case insensitivity for example.
- """
-
- __slots__ = ('name', 'parent', 'children', 'cache_abspath', 'cache_isdir')
- def __init__(self, name, parent):
- """
- .. note:: Use :py:func:`Node.make_node` or :py:func:`Node.find_node` instead of calling this constructor
- """
- self.name = name
- self.parent = parent
- if parent:
- if name in parent.children:
- raise Errors.WafError('node %s exists in the parent files %r already' % (name, parent))
- parent.children[name] = self
-
- def __setstate__(self, data):
- "Deserializes node information, used for persistence"
- self.name = data[0]
- self.parent = data[1]
- if data[2] is not None:
- # Issue 1480
- self.children = self.dict_class(data[2])
-
- def __getstate__(self):
- "Serializes node information, used for persistence"
- return (self.name, self.parent, getattr(self, 'children', None))
-
- def __str__(self):
- """
- String representation (abspath), for debugging purposes
-
- :rtype: string
- """
- return self.abspath()
-
- def __repr__(self):
- """
- String representation (abspath), for debugging purposes
-
- :rtype: string
- """
- return self.abspath()
-
- def __copy__(self):
- """
- Provided to prevent nodes from being copied
-
- :raises: :py:class:`waflib.Errors.WafError`
- """
- raise Errors.WafError('nodes are not supposed to be copied')
-
- def read(self, flags='r', encoding='latin-1'):
- """
- Reads and returns the contents of the file represented by this node, see :py:func:`waflib.Utils.readf`::
-
- def build(bld):
- bld.path.find_node('wscript').read()
-
- :param flags: Open mode
- :type flags: string
- :param encoding: encoding value for Python3
- :type encoding: string
- :rtype: string or bytes
- :return: File contents
- """
- return Utils.readf(self.abspath(), flags, encoding)
-
- def write(self, data, flags='w', encoding='latin-1'):
- """
- Writes data to the file represented by this node, see :py:func:`waflib.Utils.writef`::
-
- def build(bld):
- bld.path.make_node('foo.txt').write('Hello, world!')
-
- :param data: data to write
- :type data: string
- :param flags: Write mode
- :type flags: string
- :param encoding: encoding value for Python3
- :type encoding: string
- """
- Utils.writef(self.abspath(), data, flags, encoding)
-
- def read_json(self, convert=True, encoding='utf-8'):
- """
- Reads and parses the contents of this node as JSON (Python ≥ 2.6)::
-
- def build(bld):
- bld.path.find_node('abc.json').read_json()
-
- Note that this by default automatically decodes unicode strings on Python2, unlike what the Python JSON module does.
-
- :type convert: boolean
- :param convert: Prevents decoding of unicode strings on Python2
- :type encoding: string
- :param encoding: The encoding of the file to read. This default to UTF8 as per the JSON standard
- :rtype: object
- :return: Parsed file contents
- """
- import json # Python 2.6 and up
- object_pairs_hook = None
- if convert and sys.hexversion < 0x3000000:
- try:
- _type = unicode
- except NameError:
- _type = str
-
- def convert(value):
- if isinstance(value, list):
- return [convert(element) for element in value]
- elif isinstance(value, _type):
- return str(value)
- else:
- return value
-
- def object_pairs(pairs):
- return dict((str(pair[0]), convert(pair[1])) for pair in pairs)
-
- object_pairs_hook = object_pairs
-
- return json.loads(self.read(encoding=encoding), object_pairs_hook=object_pairs_hook)
-
- def write_json(self, data, pretty=True):
- """
- Writes a python object as JSON to disk (Python ≥ 2.6) as UTF-8 data (JSON standard)::
-
- def build(bld):
- bld.path.find_node('xyz.json').write_json(199)
-
- :type data: object
- :param data: The data to write to disk
- :type pretty: boolean
- :param pretty: Determines if the JSON will be nicely space separated
- """
- import json # Python 2.6 and up
- indent = 2
- separators = (',', ': ')
- sort_keys = pretty
- newline = os.linesep
- if not pretty:
- indent = None
- separators = (',', ':')
- newline = ''
- output = json.dumps(data, indent=indent, separators=separators, sort_keys=sort_keys) + newline
- self.write(output, encoding='utf-8')
-
- def exists(self):
- """
- Returns whether the Node is present on the filesystem
-
- :rtype: bool
- """
- return os.path.exists(self.abspath())
-
- def isdir(self):
- """
- Returns whether the Node represents a folder
-
- :rtype: bool
- """
- return os.path.isdir(self.abspath())
-
- def chmod(self, val):
- """
- Changes the file/dir permissions::
-
- def build(bld):
- bld.path.chmod(493) # 0755
- """
- os.chmod(self.abspath(), val)
-
- def delete(self, evict=True):
- """
- Removes the file/folder from the filesystem (equivalent to `rm -rf`), and remove this object from the Node tree.
- Do not use this object after calling this method.
- """
- try:
- try:
- if os.path.isdir(self.abspath()):
- shutil.rmtree(self.abspath())
- else:
- os.remove(self.abspath())
- except OSError:
- if os.path.exists(self.abspath()):
- raise
- finally:
- if evict:
- self.evict()
-
- def evict(self):
- """
- Removes this node from the Node tree
- """
- del self.parent.children[self.name]
-
- def suffix(self):
- """
- Returns the file rightmost extension, for example `a.b.c.d → .d`
-
- :rtype: string
- """
- k = max(0, self.name.rfind('.'))
- return self.name[k:]
-
- def height(self):
- """
- Returns the depth in the folder hierarchy from the filesystem root or from all the file drives
-
- :returns: filesystem depth
- :rtype: integer
- """
- d = self
- val = -1
- while d:
- d = d.parent
- val += 1
- return val
-
- def listdir(self):
- """
- Lists the folder contents
-
- :returns: list of file/folder names ordered alphabetically
- :rtype: list of string
- """
- lst = Utils.listdir(self.abspath())
- lst.sort()
- return lst
-
- def mkdir(self):
- """
- Creates a folder represented by this node. Intermediate folders are created as needed.
-
- :raises: :py:class:`waflib.Errors.WafError` when the folder is missing
- """
- if self.isdir():
- return
-
- try:
- self.parent.mkdir()
- except OSError:
- pass
-
- if self.name:
- try:
- os.makedirs(self.abspath())
- except OSError:
- pass
-
- if not self.isdir():
- raise Errors.WafError('Could not create the directory %r' % self)
-
- try:
- self.children
- except AttributeError:
- self.children = self.dict_class()
-
- def find_node(self, lst):
- """
- Finds a node on the file system (files or folders), and creates the corresponding Node objects if it exists
-
- :param lst: relative path
- :type lst: string or list of string
- :returns: The corresponding Node object or None if no entry was found on the filesystem
- :rtype: :py:class:´waflib.Node.Node´
- """
-
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- if lst and lst[0].startswith('\\\\') and not self.parent:
- node = self.ctx.root.make_node(lst[0])
- node.cache_isdir = True
- return node.find_node(lst[1:])
-
- cur = self
- for x in lst:
- if x == '..':
- cur = cur.parent or cur
- continue
-
- try:
- ch = cur.children
- except AttributeError:
- cur.children = self.dict_class()
- else:
- try:
- cur = ch[x]
- continue
- except KeyError:
- pass
-
- # optimistic: create the node first then look if it was correct to do so
- cur = self.__class__(x, cur)
- if not cur.exists():
- cur.evict()
- return None
-
- if not cur.exists():
- cur.evict()
- return None
-
- return cur
-
- def make_node(self, lst):
- """
- Returns or creates a Node object corresponding to the input path without considering the filesystem.
-
- :param lst: relative path
- :type lst: string or list of string
- :rtype: :py:class:´waflib.Node.Node´
- """
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- cur = self
- for x in lst:
- if x == '..':
- cur = cur.parent or cur
- continue
-
- try:
- cur = cur.children[x]
- except AttributeError:
- cur.children = self.dict_class()
- except KeyError:
- pass
- else:
- continue
- cur = self.__class__(x, cur)
- return cur
-
- def search_node(self, lst):
- """
- Returns a Node previously defined in the data structure. The filesystem is not considered.
-
- :param lst: relative path
- :type lst: string or list of string
- :rtype: :py:class:´waflib.Node.Node´ or None if there is no entry in the Node datastructure
- """
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- cur = self
- for x in lst:
- if x == '..':
- cur = cur.parent or cur
- else:
- try:
- cur = cur.children[x]
- except (AttributeError, KeyError):
- return None
- return cur
-
- def path_from(self, node):
- """
- Path of this node seen from the other::
-
- def build(bld):
- n1 = bld.path.find_node('foo/bar/xyz.txt')
- n2 = bld.path.find_node('foo/stuff/')
- n1.path_from(n2) # '../bar/xyz.txt'
-
- :param node: path to use as a reference
- :type node: :py:class:`waflib.Node.Node`
- :returns: a relative path or an absolute one if that is better
- :rtype: string
- """
- c1 = self
- c2 = node
-
- c1h = c1.height()
- c2h = c2.height()
-
- lst = []
- up = 0
-
- while c1h > c2h:
- lst.append(c1.name)
- c1 = c1.parent
- c1h -= 1
-
- while c2h > c1h:
- up += 1
- c2 = c2.parent
- c2h -= 1
-
- while not c1 is c2:
- lst.append(c1.name)
- up += 1
-
- c1 = c1.parent
- c2 = c2.parent
-
- if c1.parent:
- lst.extend(['..'] * up)
- lst.reverse()
- return os.sep.join(lst) or '.'
- else:
- return self.abspath()
-
	def abspath(self):
		"""
		Returns the absolute path. The result is cached on the node itself
		as ``cache_abspath``.

		:rtype: string
		"""
		try:
			# fast path: previously computed value
			return self.cache_abspath
		except AttributeError:
			pass
		# think twice before touching this (performance + complexity + correctness)

		if not self.parent:
			# filesystem root
			val = os.sep
		elif not self.parent.name:
			# direct child of the root: avoid doubling the separator
			val = os.sep + self.name
		else:
			val = self.parent.abspath() + os.sep + self.name
		self.cache_abspath = val
		return val
-
	if Utils.is_win32:
		# class-body override: on Windows there is no single filesystem root;
		# an unnamed pseudo-root sits above the drive letters (see get_bld,
		# which special-cases two-character 'X:' components)
		def abspath(self):
			try:
				return self.cache_abspath
			except AttributeError:
				pass
			if not self.parent:
				# unnamed pseudo-root above the drives
				val = ''
			elif not self.parent.name:
				# drive letter node, e.g. 'C:' + separator
				val = self.name + os.sep
			else:
				val = self.parent.abspath().rstrip(os.sep) + os.sep + self.name
			self.cache_abspath = val
			return val
-
- def is_child_of(self, node):
- """
- Returns whether the object belongs to a subtree of the input node::
-
- def build(bld):
- node = bld.path.find_node('wscript')
- node.is_child_of(bld.path) # True
-
- :param node: path to use as a reference
- :type node: :py:class:`waflib.Node.Node`
- :rtype: bool
- """
- p = self
- diff = self.height() - node.height()
- while diff > 0:
- diff -= 1
- p = p.parent
- return p is node
-
	def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
		"""
		Recursive generator used by :py:meth:`waflib.Node.ant_glob`.

		:param accept: function used for accepting/rejecting a node, returns the patterns that can be still accepted in recursion
		:type accept: function
		:param maxdepth: maximum depth in the filesystem (25)
		:type maxdepth: int
		:param pats: list of patterns to accept and list of patterns to exclude
		:type pats: tuple
		:param dir: return folders too (False by default)
		:type dir: bool
		:param src: return files (True by default)
		:type src: bool
		:param remove: remove files/folders that do not exist (True by default)
		:type remove: bool
		:param quiet: disable build directory traversal warnings (verbose mode)
		:type quiet: bool
		:returns: A generator object to iterate from
		:rtype: iterator
		"""
		# NOTE(review): the mutable default ``pats=[]`` is never mutated here
		# (``accept`` returns new pattern tuples), so it is harmless in practice
		dircont = self.listdir()
		dircont.sort()

		try:
			lst = set(self.children.keys())
		except AttributeError:
			self.children = self.dict_class()
		else:
			if remove:
				# evict cached child nodes whose file/folder no longer exists
				for x in lst - set(dircont):
					self.children[x].evict()

		for name in dircont:
			npats = accept(name, pats)
			if npats and npats[0]:
				# presumably a fully-consumed include pattern ([]) marks a full
				# match — see the ant_matcher/ant_sub_matcher helpers
				accepted = [] in npats[0]

				node = self.make_node([name])

				isdir = node.isdir()
				if accepted:
					if isdir:
						if dir:
							yield node
					elif src:
						yield node

				if isdir:
					node.cache_isdir = True
					if maxdepth:
						# recurse with the remaining (narrowed) patterns
						for k in node.ant_iter(accept=accept, maxdepth=maxdepth - 1, pats=npats, dir=dir, src=src, remove=remove, quiet=quiet):
							yield k
-
- def ant_glob(self, *k, **kw):
- """
- Finds files across folders and returns Node objects:
-
- * ``**/*`` find all files recursively
- * ``**/*.class`` find all files ending by .class
- * ``..`` find files having two dot characters
-
- For example::
-
- def configure(cfg):
- # find all .cpp files
- cfg.path.ant_glob('**/*.cpp')
- # find particular files from the root filesystem (can be slow)
- cfg.root.ant_glob('etc/*.txt')
- # simple exclusion rule example
- cfg.path.ant_glob('*.c*', excl=['*.c'], src=True, dir=False)
-
- For more information about the patterns, consult http://ant.apache.org/manual/dirtasks.html
- Please remember that the '..' sequence does not represent the parent directory::
-
- def configure(cfg):
- cfg.path.ant_glob('../*.h') # incorrect
- cfg.path.parent.ant_glob('*.h') # correct
-
- The Node structure is itself a filesystem cache, so certain precautions must
- be taken while matching files in the build or installation phases.
- Nodes objects that do have a corresponding file or folder are garbage-collected by default.
- This garbage collection is usually required to prevent returning files that do not
- exist anymore. Yet, this may also remove Node objects of files that are yet-to-be built.
-
- This typically happens when trying to match files in the build directory,
- but there are also cases when files are created in the source directory.
- Run ``waf -v`` to display any warnings, and try consider passing ``remove=False``
- when matching files in the build directory.
-
- Since ant_glob can traverse both source and build folders, it is a best practice
- to call this method only from the most specific build node::
-
- def build(bld):
- # traverses the build directory, may need ``remove=False``:
- bld.path.ant_glob('project/dir/**/*.h')
- # better, no accidental build directory traversal:
- bld.path.find_node('project/dir').ant_glob('**/*.h') # best
-
- In addition, files and folders are listed immediately. When matching files in the
- build folders, consider passing ``generator=True`` so that the generator object
- returned can defer computation to a later stage. For example::
-
- def build(bld):
- bld(rule='tar xvf ${SRC}', source='arch.tar')
- bld.add_group()
- gen = bld.bldnode.ant_glob("*.h", generator=True, remove=True)
- # files will be listed only after the arch.tar is unpacked
- bld(rule='ls ${SRC}', source=gen, name='XYZ')
-
-
- :param incl: ant patterns or list of patterns to include
- :type incl: string or list of strings
- :param excl: ant patterns or list of patterns to exclude
- :type excl: string or list of strings
- :param dir: return folders too (False by default)
- :type dir: bool
- :param src: return files (True by default)
- :type src: bool
- :param maxdepth: maximum depth of recursion
- :type maxdepth: int
- :param ignorecase: ignore case while matching (False by default)
- :type ignorecase: bool
- :param generator: Whether to evaluate the Nodes lazily
- :type generator: bool
- :param remove: remove files/folders that do not exist (True by default)
- :type remove: bool
- :param quiet: disable build directory traversal warnings (verbose mode)
- :type quiet: bool
- :returns: The corresponding Node objects as a list or as a generator object (generator=True)
- :rtype: by default, list of :py:class:`waflib.Node.Node` instances
- """
- src = kw.get('src', True)
- dir = kw.get('dir')
- excl = kw.get('excl', exclude_regs)
- incl = k and k[0] or kw.get('incl', '**')
- remove = kw.get('remove', True)
- maxdepth = kw.get('maxdepth', 25)
- ignorecase = kw.get('ignorecase', False)
- quiet = kw.get('quiet', False)
- pats = (ant_matcher(incl, ignorecase), ant_matcher(excl, ignorecase))
-
- if kw.get('generator'):
- return Utils.lazy_generator(self.ant_iter, (ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet))
-
- it = self.ant_iter(ant_sub_matcher, maxdepth, pats, dir, src, remove, quiet)
- if kw.get('flat'):
- # returns relative paths as a space-delimited string
- # prefer Node objects whenever possible
- return ' '.join(x.path_from(self) for x in it)
- return list(it)
-
- # ----------------------------------------------------------------------------
- # the methods below require the source/build folders (bld.srcnode/bld.bldnode)
-
- def is_src(self):
- """
- Returns True if the node is below the source directory. Note that ``!is_src() ≠ is_bld()``
-
- :rtype: bool
- """
- cur = self
- x = self.ctx.srcnode
- y = self.ctx.bldnode
- while cur.parent:
- if cur is y:
- return False
- if cur is x:
- return True
- cur = cur.parent
- return False
-
- def is_bld(self):
- """
- Returns True if the node is below the build directory. Note that ``!is_bld() ≠ is_src()``
-
- :rtype: bool
- """
- cur = self
- y = self.ctx.bldnode
- while cur.parent:
- if cur is y:
- return True
- cur = cur.parent
- return False
-
- def get_src(self):
- """
- Returns the corresponding Node object in the source directory (or self if already
- under the source directory). Use this method only if the purpose is to create
- a Node object (this is common with folders but not with files, see ticket 1937)
-
- :rtype: :py:class:`waflib.Node.Node`
- """
- cur = self
- x = self.ctx.srcnode
- y = self.ctx.bldnode
- lst = []
- while cur.parent:
- if cur is y:
- lst.reverse()
- return x.make_node(lst)
- if cur is x:
- return self
- lst.append(cur.name)
- cur = cur.parent
- return self
-
- def get_bld(self):
- """
- Return the corresponding Node object in the build directory (or self if already
- under the build directory). Use this method only if the purpose is to create
- a Node object (this is common with folders but not with files, see ticket 1937)
-
- :rtype: :py:class:`waflib.Node.Node`
- """
- cur = self
- x = self.ctx.srcnode
- y = self.ctx.bldnode
- lst = []
- while cur.parent:
- if cur is y:
- return self
- if cur is x:
- lst.reverse()
- return self.ctx.bldnode.make_node(lst)
- lst.append(cur.name)
- cur = cur.parent
- # the file is external to the current project, make a fake root in the current build directory
- lst.reverse()
- if lst and Utils.is_win32 and len(lst[0]) == 2 and lst[0].endswith(':'):
- lst[0] = lst[0][0]
- return self.ctx.bldnode.make_node(['__root__'] + lst)
-
- def find_resource(self, lst):
- """
- Use this method in the build phase to find source files corresponding to the relative path given.
-
- First it looks up the Node data structure to find any declared Node object in the build directory.
- If None is found, it then considers the filesystem in the source directory.
-
- :param lst: relative path
- :type lst: string or list of string
- :returns: the corresponding Node object or None
- :rtype: :py:class:`waflib.Node.Node`
- """
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- node = self.get_bld().search_node(lst)
- if not node:
- node = self.get_src().find_node(lst)
- if node and node.isdir():
- return None
- return node
-
- def find_or_declare(self, lst):
- """
- Use this method in the build phase to declare output files which
- are meant to be written in the build directory.
-
- This method creates the Node object and its parent folder
- as needed.
-
- :param lst: relative path
- :type lst: string or list of string
- """
- if isinstance(lst, str) and os.path.isabs(lst):
- node = self.ctx.root.make_node(lst)
- else:
- node = self.get_bld().make_node(lst)
- node.parent.mkdir()
- return node
-
- def find_dir(self, lst):
- """
- Searches for a folder on the filesystem (see :py:meth:`waflib.Node.Node.find_node`)
-
- :param lst: relative path
- :type lst: string or list of string
- :returns: The corresponding Node object or None if there is no such folder
- :rtype: :py:class:`waflib.Node.Node`
- """
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- node = self.find_node(lst)
- if node and not node.isdir():
- return None
- return node
-
- # helpers for building things
- def change_ext(self, ext, ext_in=None):
- """
- Declares a build node with a distinct extension; this is uses :py:meth:`waflib.Node.Node.find_or_declare`
-
- :return: A build node of the same path, but with a different extension
- :rtype: :py:class:`waflib.Node.Node`
- """
- name = self.name
- if ext_in is None:
- k = name.rfind('.')
- if k >= 0:
- name = name[:k] + ext
- else:
- name = name + ext
- else:
- name = name[:- len(ext_in)] + ext
-
- return self.parent.find_or_declare([name])
-
	def bldpath(self):
		"""
		Returns the relative path of this node seen from the build directory,
		e.g. ``src/foo.cpp``

		:rtype: string
		"""
		return self.path_from(self.ctx.bldnode)
-
	def srcpath(self):
		"""
		Returns the relative path of this node seen from the source directory,
		e.g. ``../src/foo.cpp``

		:rtype: string
		"""
		return self.path_from(self.ctx.srcnode)
-
- def relpath(self):
- """
- If a file in the build directory, returns :py:meth:`waflib.Node.Node.bldpath`,
- else returns :py:meth:`waflib.Node.Node.srcpath`
-
- :rtype: string
- """
- cur = self
- x = self.ctx.bldnode
- while cur.parent:
- if cur is x:
- return self.bldpath()
- cur = cur.parent
- return self.srcpath()
-
	def bld_dir(self):
		"""
		Returns the build directory path of this node's parent; equivalent
		to ``self.parent.bldpath()``

		:rtype: string
		"""
		return self.parent.bldpath()
-
	def h_file(self):
		"""
		Hashes the file contents of this node (see :py:func:`waflib.Utils.h_file`)

		:return: a hash representing the file contents
		:rtype: string or bytes
		"""
		return Utils.h_file(self.abspath())
-
	def get_bld_sig(self):
		"""
		Returns a signature (see :py:meth:`waflib.Node.Node.h_file`) for the purpose
		of build dependency calculation. This method uses a per-context cache.

		:return: a hash representing the object contents
		:rtype: string or bytes
		"""
		# previous behaviour can be set by returning self.ctx.node_sigs[self] when a build node
		try:
			cache = self.ctx.cache_sig
		except AttributeError:
			# first signature computed for this context: create the cache
			cache = self.ctx.cache_sig = {}
		try:
			ret = cache[self]
		except KeyError:
			p = self.abspath()
			try:
				ret = cache[self] = self.h_file()
			except EnvironmentError:
				if self.isdir():
					# allow folders as build nodes, do not use the creation time
					st = os.stat(p)
					ret = cache[self] = Utils.h_list([p, st.st_ino, st.st_mode])
					return ret
				# missing or unreadable file: propagate the original error
				raise
		return ret
-
pickle_lock = Utils.threading.Lock()
"""Lock mandatory for thread-safe node serialization"""

class Nod3(Node):
	"""Mandatory subclass for thread-safe node serialization"""
	# NOTE(review): the distinct class name is presumably looked up by the
	# (de)serialization code elsewhere — confirm before renaming or removing
	pass # do not remove
-
-
diff --git a/waflib/Options.py b/waflib/Options.py
deleted file mode 100644
index ad802d4..0000000
--- a/waflib/Options.py
+++ /dev/null
@@ -1,342 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Scott Newton, 2005 (scottn)
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Support for waf command-line options
-
-Provides default and command-line options, as well the command
-that reads the ``options`` wscript function.
-"""
-
-import os, tempfile, optparse, sys, re
-from waflib import Logs, Utils, Context, Errors
-
options = optparse.Values()
"""
A global dictionary representing user-provided command-line options::

	$ waf --foo=bar
"""

commands = []
"""
List of commands to execute extracted from the command-line. This list
is consumed during the execution by :py:func:`waflib.Scripting.run_commands`.
"""

envvars = []
"""
List of environment variable declarations placed after the Waf executable name.
These are detected by searching for "=" in the remaining arguments.
You probably do not want to use this.
"""

# the lock file name can be overridden through the WAFLOCK environment variable
lockfile = os.environ.get('WAFLOCK', '.lock-waf_%s_build' % sys.platform)
"""
Name of the lock file that marks a project as configured
"""
-
class opt_parser(optparse.OptionParser):
	"""
	Command-line options parser.
	"""
	def __init__(self, ctx, allow_unknown=False):
		# the built-in help option is disabled (OptionsContext registers its
		# own -h/--help), and the usage text is replaced by get_usage below
		optparse.OptionParser.__init__(self, conflict_handler='resolve', add_help_option=False,
			version='waf %s (%s)' % (Context.WAFVERSION, Context.WAFREVISION))
		self.formatter.width = Logs.get_term_cols()
		self.ctx = ctx
		self.allow_unknown = allow_unknown

	def _process_args(self, largs, rargs, values):
		"""
		Custom _process_args to allow unknown options according to the allow_unknown status
		"""
		while rargs:
			try:
				optparse.OptionParser._process_args(self,largs,rargs,values)
			except (optparse.BadOptionError, optparse.AmbiguousOptionError) as e:
				if self.allow_unknown:
					# keep the unrecognized token as a leftover argument
					largs.append(e.opt_str)
				else:
					self.error(str(e))

	def print_usage(self, file=None):
		# the usage text doubles as the full help message
		return self.print_help(file)

	def get_usage(self):
		"""
		Builds the message to print on ``waf --help``

		:rtype: string
		"""
		# collect one help line per command class and per wscript function
		cmds_str = {}
		for cls in Context.classes:
			if not cls.cmd or cls.cmd == 'options' or cls.cmd.startswith( '_' ):
				continue

			s = cls.__doc__ or ''
			cmds_str[cls.cmd] = s

		if Context.g_module:
			for (k, v) in Context.g_module.__dict__.items():
				if k in ('options', 'init', 'shutdown'):
					continue

				if type(v) is type(Context.create_context):
					if v.__doc__ and not k.startswith('_'):
						cmds_str[k] = v.__doc__

		# align the command names on the longest one
		just = 0
		for k in cmds_str:
			just = max(just, len(k))

		lst = ['  %s: %s' % (k.ljust(just), v) for (k, v) in cmds_str.items()]
		lst.sort()
		ret = '\n'.join(lst)

		return '''waf [commands] [options]

Main commands (example: ./waf build -j4)
%s
''' % ret
-
-
class OptionsContext(Context.Context):
	"""
	Collects custom options from wscript files and parses the command line.
	Sets the global :py:const:`waflib.Options.commands` and :py:const:`waflib.Options.options` values.
	"""
	cmd = 'options'
	fun = 'options'

	def __init__(self, **kw):
		super(OptionsContext, self).__init__(**kw)

		self.parser = opt_parser(self)
		"""Instance of :py:class:`waflib.Options.opt_parser`"""

		self.option_groups = {}

		jobs = self.jobs()
		p = self.add_option
		# color default honours the NOCOLOR and CLICOLOR/CLICOLOR_FORCE variables
		color = os.environ.get('NOCOLOR', '') and 'no' or 'auto'
		if os.environ.get('CLICOLOR', '') == '0':
			color = 'no'
		elif os.environ.get('CLICOLOR_FORCE', '') == '1':
			color = 'yes'
		p('-c', '--color', dest='colors', default=color, action='store', help='whether to use colors (yes/no/auto) [default: auto]', choices=('yes', 'no', 'auto'))
		p('-j', '--jobs', dest='jobs', default=jobs, type='int', help='amount of parallel jobs (%r)' % jobs)
		p('-k', '--keep', dest='keep', default=0, action='count', help='continue despite errors (-kk to try harder)')
		p('-v', '--verbose', dest='verbose', default=0, action='count', help='verbosity level -v -vv or -vvv [default: 0]')
		p('--zones', dest='zones', default='', action='store', help='debugging zones (task_gen, deps, tasks, etc)')
		p('--profile', dest='profile', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
		p('--pdb', dest='pdb', default=0, action='store_true', help=optparse.SUPPRESS_HELP)
		p('-h', '--help', dest='whelp', default=0, action='store_true', help="show this help message and exit")

		gr = self.add_option_group('Configuration options')
		self.option_groups['configure options'] = gr

		gr.add_option('-o', '--out', action='store', default='', help='build dir for the project', dest='out')
		gr.add_option('-t', '--top', action='store', default='', help='src dir for the project', dest='top')

		gr.add_option('--no-lock-in-run', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_run')
		gr.add_option('--no-lock-in-out', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_out')
		gr.add_option('--no-lock-in-top', action='store_true', default='', help=optparse.SUPPRESS_HELP, dest='no_lock_in_top')

		default_prefix = getattr(Context.g_module, 'default_prefix', os.environ.get('PREFIX'))
		if not default_prefix:
			if Utils.unversioned_sys_platform() == 'win32':
				d = tempfile.gettempdir()
				default_prefix = d[0].upper() + d[1:]
				# win32 preserves the case, but gettempdir does not
			else:
				default_prefix = '/usr/local/'
		gr.add_option('--prefix', dest='prefix', default=default_prefix, help='installation prefix [default: %r]' % default_prefix)
		gr.add_option('--bindir', dest='bindir', help='bindir')
		gr.add_option('--libdir', dest='libdir', help='libdir')

		gr = self.add_option_group('Build and installation options')
		self.option_groups['build and install options'] = gr
		gr.add_option('-p', '--progress', dest='progress_bar', default=0, action='count', help= '-p: progress bar; -pp: ide output')
		gr.add_option('--targets', dest='targets', default='', action='store', help='task generators, e.g. "target1,target2"')

		gr = self.add_option_group('Step options')
		self.option_groups['step options'] = gr
		gr.add_option('--files', dest='files', default='', action='store', help='files to process, by regexp, e.g. "*/main.c,*/test/main.o"')

		default_destdir = os.environ.get('DESTDIR', '')

		gr = self.add_option_group('Installation and uninstallation options')
		self.option_groups['install/uninstall options'] = gr
		gr.add_option('--destdir', help='installation root [default: %r]' % default_destdir, default=default_destdir, dest='destdir')
		gr.add_option('-f', '--force', dest='force', default=False, action='store_true', help='force file installation')
		gr.add_option('--distcheck-args', metavar='ARGS', help='arguments to pass to distcheck', default=None, action='store')

	def jobs(self):
		"""
		Finds the optimal amount of cpu cores to use for parallel jobs.
		At runtime the options can be obtained from :py:const:`waflib.Options.options` ::

			from waflib.Options import options
			njobs = options.jobs

		:return: the amount of cpu cores
		:rtype: int
		"""
		# the JOBS environment variable takes precedence over autodetection
		count = int(os.environ.get('JOBS', 0))
		if count < 1:
			if 'NUMBER_OF_PROCESSORS' in os.environ:
				# on Windows, use the NUMBER_OF_PROCESSORS environment variable
				count = int(os.environ.get('NUMBER_OF_PROCESSORS', 1))
			else:
				# on everything else, first try the POSIX sysconf values
				if hasattr(os, 'sysconf_names'):
					if 'SC_NPROCESSORS_ONLN' in os.sysconf_names:
						count = int(os.sysconf('SC_NPROCESSORS_ONLN'))
					elif 'SC_NPROCESSORS_CONF' in os.sysconf_names:
						count = int(os.sysconf('SC_NPROCESSORS_CONF'))
				if not count and os.name not in ('nt', 'java'):
					# last resort on BSD-like systems
					try:
						tmp = self.cmd_and_log(['sysctl', '-n', 'hw.ncpu'], quiet=0)
					except Errors.WafError:
						pass
					else:
						if re.match('^[0-9]+$', tmp):
							count = int(tmp)
		# clamp to a sane range
		if count < 1:
			count = 1
		elif count > 1024:
			count = 1024
		return count

	def add_option(self, *k, **kw):
		"""
		Wraps ``optparse.add_option``::

			def options(ctx):
				ctx.add_option('-u', '--use', dest='use', default=False,
					action='store_true', help='a boolean option')

		:rtype: optparse option object
		"""
		return self.parser.add_option(*k, **kw)

	def add_option_group(self, *k, **kw):
		"""
		Wraps ``optparse.add_option_group``::

			def options(ctx):
				gr = ctx.add_option_group('some options')
				gr.add_option('-u', '--use', dest='use', default=False, action='store_true')

		:rtype: optparse option group object
		"""
		try:
			# re-use an existing group of the same title
			gr = self.option_groups[k[0]]
		except KeyError:
			gr = self.parser.add_option_group(*k, **kw)
		self.option_groups[k[0]] = gr
		return gr

	def get_option_group(self, opt_str):
		"""
		Wraps ``optparse.get_option_group``::

			def options(ctx):
				gr = ctx.get_option_group('configure options')
				gr.add_option('-o', '--out', action='store', default='',
					help='build dir for the project', dest='out')

		:rtype: optparse option group object
		"""
		try:
			return self.option_groups[opt_str]
		except KeyError:
			# fall back to a lookup by group title
			for group in self.parser.option_groups:
				if group.title == opt_str:
					return group
			return None

	def sanitize_path(self, path, cwd=None):
		# normalize a user-supplied path into an absolute path anchored at
		# cwd (Context.launch_dir by default)
		if not cwd:
			cwd = Context.launch_dir
		p = os.path.expanduser(path)
		p = os.path.join(cwd, p)
		p = os.path.normpath(p)
		p = os.path.abspath(p)
		return p

	def parse_cmd_args(self, _args=None, cwd=None, allow_unknown=False):
		"""
		Just parse the arguments
		"""
		self.parser.allow_unknown = allow_unknown
		(options, leftover_args) = self.parser.parse_args(args=_args)
		envvars = []
		commands = []
		for arg in leftover_args:
			# leftover tokens with '=' are environment variable declarations,
			# everything else is a command name
			if '=' in arg:
				envvars.append(arg)
			elif arg != 'options':
				commands.append(arg)

		for name in 'top out destdir prefix bindir libdir'.split():
			# those paths are usually expanded from Context.launch_dir
			if getattr(options, name, None):
				path = self.sanitize_path(getattr(options, name), cwd)
				setattr(options, name, path)
		return options, commands, envvars

	def init_module_vars(self, arg_options, arg_commands, arg_envvars):
		# publish the parsed values through the module-level globals
		options.__dict__.clear()
		del commands[:]
		del envvars[:]

		options.__dict__.update(arg_options.__dict__)
		commands.extend(arg_commands)
		envvars.extend(arg_envvars)

		# apply the NAME=VALUE declarations to the process environment
		for var in envvars:
			(name, value) = var.split('=', 1)
			os.environ[name.strip()] = value

	def init_logs(self, options, commands, envvars):
		Logs.verbose = options.verbose
		if options.verbose >= 1:
			self.load('errcheck')

		colors = {'yes' : 2, 'auto' : 1, 'no' : 0}[options.colors]
		Logs.enable_colors(colors)

		# zones filter the debugging output; --zones implies at least -v
		if options.zones:
			Logs.zones = options.zones.split(',')
			if not Logs.verbose:
				Logs.verbose = 1
		elif Logs.verbose > 0:
			Logs.zones = ['runner']
		if Logs.verbose > 2:
			Logs.zones = ['*']

	def parse_args(self, _args=None):
		"""
		Parses arguments from a list which is not necessarily the command-line.
		Initializes the module variables options, commands and envvars
		If help is requested, prints it and exit the application

		:param _args: arguments
		:type _args: list of strings
		"""
		options, commands, envvars = self.parse_cmd_args()
		self.init_logs(options, commands, envvars)
		self.init_module_vars(options, commands, envvars)

	def execute(self):
		"""
		See :py:func:`waflib.Context.Context.execute`
		"""
		super(OptionsContext, self).execute()
		self.parse_args()
		Utils.alloc_process_pool(options.jobs)
-
diff --git a/waflib/README.md b/waflib/README.md
deleted file mode 100644
index c5361b9..0000000
--- a/waflib/README.md
+++ /dev/null
@@ -1,24 +0,0 @@
-Autowaf
-=======
-
-This is autowaf, a bundle of waf and a few extensions intended to be easy to
-use directly as source code in a project. Using this as a submodule or subtree
-named `waflib` in a project allows waf to be used without including binary
-encoded data in the waf script. This gets along with revision control and
-distributions better, among other advantages, without losing
-self-containedness.
-
-To use this in a project, add this repository as a directory named `waflib` in
-the top level of the project, and link or copy `waf` to the top level.
-
-Two waf extras are also included: `autowaf.py` and `lv2.py`.
-
The `autowaf.py` module is a kitchen sink of Python utilities for building
consistent packages, and can be imported in a wscript as
`waflib.extras.autowaf`.
-
-The `lv2.py` extra defines options for LV2 plugin installation paths. It can
-be used by calling `opt.load('lv2')` and `conf.load('lv2')` in the appropriate
-locations in a wscript.
-
- -- David Robillard <d@drobilla.net>
diff --git a/waflib/Runner.py b/waflib/Runner.py
deleted file mode 100644
index 261084d..0000000
--- a/waflib/Runner.py
+++ /dev/null
@@ -1,611 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Runner.py: Task scheduling and execution
-"""
-
-import heapq, traceback
-try:
- from queue import Queue, PriorityQueue
-except ImportError:
- from Queue import Queue
- try:
- from Queue import PriorityQueue
- except ImportError:
- class PriorityQueue(Queue):
- def _init(self, maxsize):
- self.maxsize = maxsize
- self.queue = []
- def _put(self, item):
- heapq.heappush(self.queue, item)
- def _get(self):
- return heapq.heappop(self.queue)
-
-from waflib import Utils, Task, Errors, Logs
-
# scheduling lookahead factor: see Parallel.refill_task_list, which drains
# results while more than GAP * numjobs tasks are in flight
GAP = 5
"""
Wait for at least ``GAP * njobs`` before trying to enqueue more tasks to run
"""
-
class PriorityTasks(object):
	"""Min-heap of tasks kept in priority order (smallest item popped first)."""
	def __init__(self):
		self.lst = []

	def __len__(self):
		return len(self.lst)

	def __iter__(self):
		# iteration follows the internal heap order, not sorted order
		return iter(self.lst)

	def clear(self):
		self.lst = []

	def append(self, task):
		heapq.heappush(self.lst, task)

	def appendleft(self, task):
		"Deprecated, do not use"
		heapq.heappush(self.lst, task)

	def pop(self):
		return heapq.heappop(self.lst)

	def extend(self, lst):
		if self.lst:
			for item in lst:
				heapq.heappush(self.lst, item)
		elif isinstance(lst, list):
			# adopt the list in place and heapify it
			self.lst = lst
			heapq.heapify(lst)
		else:
			# another PriorityTasks instance: share its heap storage
			self.lst = lst.lst
-
class Consumer(Utils.threading.Thread):
	"""
	Daemon thread object that executes a task. It shares a semaphore with
	the coordinator :py:class:`waflib.Runner.Spawner`. There is one
	instance per task to consume.
	"""
	def __init__(self, spawner, task):
		Utils.threading.Thread.__init__(self)
		self.task = task
		"""Task to execute"""
		self.spawner = spawner
		"""Coordinator object"""
		# NOTE(review): setDaemon is deprecated in Python 3.10+ in favour of
		# the ``daemon`` attribute; kept for Python 2 compatibility
		self.setDaemon(1)
		self.start()
	def run(self):
		"""
		Processes a single task
		"""
		try:
			if not self.spawner.master.stop:
				self.spawner.master.process_task(self.task)
		finally:
			# always release the semaphore slot (acquired in Spawner.loop) and
			# report the task back, even when process_task raises, so the
			# producer cannot deadlock waiting on either
			self.spawner.sem.release()
			self.spawner.master.out.put(self.task)
			# drop references to help garbage collection
			self.task = None
			self.spawner = None
-
class Spawner(Utils.threading.Thread):
	"""
	Daemon thread that consumes tasks from :py:class:`waflib.Runner.Parallel` producer and
	spawns a consuming thread :py:class:`waflib.Runner.Consumer` for each
	:py:class:`waflib.Task.Task` instance.
	"""
	def __init__(self, master):
		Utils.threading.Thread.__init__(self)
		self.master = master
		""":py:class:`waflib.Runner.Parallel` producer instance"""
		self.sem = Utils.threading.Semaphore(master.numjobs)
		"""Bounded semaphore that prevents spawning more than *n* concurrent consumers"""
		# NOTE(review): setDaemon is deprecated in Python 3.10+ in favour of
		# the ``daemon`` attribute; kept for Python 2 compatibility
		self.setDaemon(1)
		self.start()
	def run(self):
		"""
		Spawns new consumers to execute tasks by delegating to :py:meth:`waflib.Runner.Spawner.loop`
		"""
		try:
			self.loop()
		except Exception:
			# Python 2 prints unnecessary messages when shutting down
			# we also want to stop the thread properly
			pass
	def loop(self):
		"""
		Consumes task objects from the producer; ends when the producer has no more
		task to provide.
		"""
		master = self.master
		while 1:
			# block until a task is ready, then wait for a free consumer slot;
			# the matching release happens in Consumer.run
			task = master.ready.get()
			self.sem.acquire()
			if not master.stop:
				task.log_display(task.generator.bld)
			# the Consumer is created even when stopping so the semaphore slot
			# and the output queue entry are still released/filled
			Consumer(self, task)
-
-class Parallel(object):
- """
- Schedule the tasks obtained from the build context for execution.
- """
	def __init__(self, bld, j=2):
		"""
		The initialization requires a build context reference
		for computing the total number of jobs.
		"""

		self.numjobs = j
		"""
		Amount of parallel consumers to use
		"""

		self.bld = bld
		"""
		Instance of :py:class:`waflib.Build.BuildContext`
		"""

		self.outstanding = PriorityTasks()
		"""Heap of :py:class:`waflib.Task.Task` that may be ready to be executed"""

		self.postponed = PriorityTasks()
		"""Heap of :py:class:`waflib.Task.Task` which are not ready to run for non-DAG reasons"""

		self.incomplete = set()
		"""List of :py:class:`waflib.Task.Task` waiting for dependent tasks to complete (DAG)"""

		self.ready = PriorityQueue(0)
		"""List of :py:class:`waflib.Task.Task` ready to be executed by consumers"""

		self.out = Queue(0)
		"""List of :py:class:`waflib.Task.Task` returned by the task consumers"""

		self.count = 0
		"""Amount of tasks that may be processed by :py:class:`waflib.Runner.TaskConsumer`"""

		self.processed = 0
		"""Amount of tasks processed"""

		self.stop = False
		"""Error flag to stop the build"""

		self.error = []
		"""Tasks that could not be executed"""

		self.biter = None
		"""Task iterator which must give groups of parallelizable tasks when calling ``next()``"""

		self.dirty = False
		"""
		Flag that indicates that the build cache must be saved when a task was executed
		(calls :py:meth:`waflib.Build.BuildContext.store`)"""

		self.revdeps = Utils.defaultdict(set)
		"""
		The reverse dependency graph of dependencies obtained from Task.run_after
		"""

		# the scheduling daemon thread starts consuming immediately on construction
		self.spawner = Spawner(self)
		"""
		Coordinating daemon thread that spawns thread consumers
		"""
-
- def get_next_task(self):
- """
- Obtains the next Task instance to run
-
- :rtype: :py:class:`waflib.Task.Task`
- """
- if not self.outstanding:
- return None
- return self.outstanding.pop()
-
	def postpone(self, tsk):
		"""
		Adds the task to the list :py:attr:`waflib.Runner.Parallel.postponed`.
		The order is scrambled so as to consume as many tasks in parallel as possible.

		:param tsk: task instance
		:type tsk: :py:class:`waflib.Task.Task`
		"""
		# the ordering itself is handled by the PriorityTasks heap
		self.postponed.append(tsk)
-
	def refill_task_list(self):
		"""
		Pulls a next group of tasks to execute in :py:attr:`waflib.Runner.Parallel.outstanding`.
		Ensures that all tasks in the current build group are complete before processing the next one.
		"""
		# throttle: drain results while too many tasks are in flight
		while self.count > self.numjobs * GAP:
			self.get_out()

		while not self.outstanding:
			if self.count:
				# tasks are still running: collect results, they may unlock more work
				self.get_out()
				if self.outstanding:
					break
			elif self.postponed:
				try:
					cond = self.deadlock == self.processed
				except AttributeError:
					# first pass: self.deadlock not set yet
					pass
				else:
					if cond:
						# The most common reason is conflicting build order declaration
						# for example: "X run_after Y" and "Y run_after X"
						# Another can be changing "run_after" dependencies while the build is running
						# for example: updating "tsk.run_after" in the "runnable_status" method
						lst = []
						for tsk in self.postponed:
							deps = [id(x) for x in tsk.run_after if not x.hasrun]
							lst.append('%s\t-> %r' % (repr(tsk), deps))
							if not deps:
								lst.append('\n task %r dependencies are done, check its *runnable_status*?' % id(tsk))
						raise Errors.WafError('Deadlock detected: check the task build order%s' % ''.join(lst))
				# remember the progress counter: no progress until the next
				# visit here means a deadlock
				self.deadlock = self.processed

			if self.postponed:
				# retry the postponed tasks
				self.outstanding.extend(self.postponed)
				self.postponed.clear()
			elif not self.count:
				if self.incomplete:
					for x in self.incomplete:
						for k in x.run_after:
							if not k.hasrun:
								break
						else:
							# dependency added after the build started without updating revdeps
							self.incomplete.remove(x)
							self.outstanding.append(x)
							break
					else:
						raise Errors.WafError('Broken revdeps detected on %r' % self.incomplete)
				else:
					# advance to the next build group
					tasks = next(self.biter)
					ready, waiting = self.prio_and_split(tasks)
					self.outstanding.extend(ready)
					self.incomplete.update(waiting)
					self.total = self.bld.total()
					break
-
- def add_more_tasks(self, tsk):
- """
- If a task provides :py:attr:`waflib.Task.Task.more_tasks`, then the tasks contained
- in that list are added to the current build and will be processed before the next build group.
-
- The priorities for dependent tasks are not re-calculated globally
-
- :param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.Task`
- """
- if getattr(tsk, 'more_tasks', None):
- more = set(tsk.more_tasks)
- groups_done = set()
- def iteri(a, b):
- for x in a:
- yield x
- for x in b:
- yield x
-
- # Update the dependency tree
- # this assumes that task.run_after values were updated
- for x in iteri(self.outstanding, self.incomplete):
- for k in x.run_after:
- if isinstance(k, Task.TaskGroup):
- if k not in groups_done:
- groups_done.add(k)
- for j in k.prev & more:
- self.revdeps[j].add(k)
- elif k in more:
- self.revdeps[k].add(x)
-
- ready, waiting = self.prio_and_split(tsk.more_tasks)
- self.outstanding.extend(ready)
- self.incomplete.update(waiting)
- self.total += len(tsk.more_tasks)
-
- def mark_finished(self, tsk):
- def try_unfreeze(x):
- # DAG ancestors are likely to be in the incomplete set
- # This assumes that the run_after contents have not changed
- # after the build starts, else a deadlock may occur
- if x in self.incomplete:
- # TODO remove dependencies to free some memory?
- # x.run_after.remove(tsk)
- for k in x.run_after:
- if not k.hasrun:
- break
- else:
- self.incomplete.remove(x)
- self.outstanding.append(x)
-
- if tsk in self.revdeps:
- for x in self.revdeps[tsk]:
- if isinstance(x, Task.TaskGroup):
- x.prev.remove(tsk)
- if not x.prev:
- for k in x.next:
- # TODO necessary optimization?
- k.run_after.remove(x)
- try_unfreeze(k)
- # TODO necessary optimization?
- x.next = []
- else:
- try_unfreeze(x)
- del self.revdeps[tsk]
-
- if hasattr(tsk, 'semaphore'):
- sem = tsk.semaphore
- sem.release(tsk)
- while sem.waiting and not sem.is_locked():
- # take a frozen task, make it ready to run
- x = sem.waiting.pop()
- self._add_task(x)
-
- def get_out(self):
- """
- Waits for a Task that task consumers add to :py:attr:`waflib.Runner.Parallel.out` after execution.
- Adds more Tasks if necessary through :py:attr:`waflib.Runner.Parallel.add_more_tasks`.
-
- :rtype: :py:attr:`waflib.Task.Task`
- """
- tsk = self.out.get()
- if not self.stop:
- self.add_more_tasks(tsk)
- self.mark_finished(tsk)
-
- self.count -= 1
- self.dirty = True
- return tsk
-
- def add_task(self, tsk):
- """
- Enqueue a Task to :py:attr:`waflib.Runner.Parallel.ready` so that consumers can run them.
-
- :param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.Task`
- """
- # TODO change in waf 2.1
- self.ready.put(tsk)
-
- def _add_task(self, tsk):
- if hasattr(tsk, 'semaphore'):
- sem = tsk.semaphore
- try:
- sem.acquire(tsk)
- except IndexError:
- sem.waiting.add(tsk)
- return
-
- self.count += 1
- self.processed += 1
- if self.numjobs == 1:
- tsk.log_display(tsk.generator.bld)
- try:
- self.process_task(tsk)
- finally:
- self.out.put(tsk)
- else:
- self.add_task(tsk)
-
- def process_task(self, tsk):
- """
- Processes a task and attempts to stop the build in case of errors
- """
- tsk.process()
- if tsk.hasrun != Task.SUCCESS:
- self.error_handler(tsk)
-
- def skip(self, tsk):
- """
- Mark a task as skipped/up-to-date
- """
- tsk.hasrun = Task.SKIPPED
- self.mark_finished(tsk)
-
- def cancel(self, tsk):
- """
- Mark a task as failed because of unsatisfiable dependencies
- """
- tsk.hasrun = Task.CANCELED
- self.mark_finished(tsk)
-
- def error_handler(self, tsk):
- """
- Called when a task cannot be executed. The flag :py:attr:`waflib.Runner.Parallel.stop` is set,
- unless the build is executed with::
-
- $ waf build -k
-
- :param tsk: task instance
- :type tsk: :py:attr:`waflib.Task.Task`
- """
- if not self.bld.keep:
- self.stop = True
- self.error.append(tsk)
-
- def task_status(self, tsk):
- """
- Obtains the task status to decide whether to run it immediately or not.
-
- :return: the exit status, for example :py:attr:`waflib.Task.ASK_LATER`
- :rtype: integer
- """
- try:
- return tsk.runnable_status()
- except Exception:
- self.processed += 1
- tsk.err_msg = traceback.format_exc()
- if not self.stop and self.bld.keep:
- self.skip(tsk)
- if self.bld.keep == 1:
- # if -k stop on the first exception, if -kk try to go as far as possible
- if Logs.verbose > 1 or not self.error:
- self.error.append(tsk)
- self.stop = True
- else:
- if Logs.verbose > 1:
- self.error.append(tsk)
- return Task.EXCEPTION
-
- tsk.hasrun = Task.EXCEPTION
- self.error_handler(tsk)
-
- return Task.EXCEPTION
-
- def start(self):
- """
- Obtains Task instances from the BuildContext instance and adds the ones that need to be executed to
- :py:class:`waflib.Runner.Parallel.ready` so that the :py:class:`waflib.Runner.Spawner` consumer thread
- has them executed. Obtains the executed Tasks back from :py:class:`waflib.Runner.Parallel.out`
- and marks the build as failed by setting the ``stop`` flag.
- If only one job is used, then executes the tasks one by one, without consumers.
- """
- self.total = self.bld.total()
-
- while not self.stop:
-
- self.refill_task_list()
-
- # consider the next task
- tsk = self.get_next_task()
- if not tsk:
- if self.count:
- # tasks may add new ones after they are run
- continue
- else:
- # no tasks to run, no tasks running, time to exit
- break
-
- if tsk.hasrun:
- # if the task is marked as "run", just skip it
- self.processed += 1
- continue
-
- if self.stop: # stop immediately after a failure is detected
- break
-
- st = self.task_status(tsk)
- if st == Task.RUN_ME:
- self._add_task(tsk)
- elif st == Task.ASK_LATER:
- self.postpone(tsk)
- elif st == Task.SKIP_ME:
- self.processed += 1
- self.skip(tsk)
- self.add_more_tasks(tsk)
- elif st == Task.CANCEL_ME:
- # A dependency problem has occurred, and the
- # build is most likely run with `waf -k`
- if Logs.verbose > 1:
- self.error.append(tsk)
- self.processed += 1
- self.cancel(tsk)
-
- # self.count represents the tasks that have been made available to the consumer threads
- # collect all the tasks after an error else the message may be incomplete
- while self.error and self.count:
- self.get_out()
-
- self.ready.put(None)
- if not self.stop:
- assert not self.count
- assert not self.postponed
- assert not self.incomplete
-
- def prio_and_split(self, tasks):
- """
- Label input tasks with priority values, and return a pair containing
- the tasks that are ready to run and the tasks that are necessarily
- waiting for other tasks to complete.
-
- The priority system is really meant as an optional layer for optimization:
- dependency cycles are found quickly, and builds should be more efficient.
- A high priority number means that a task is processed first.
-
- This method can be overridden to disable the priority system::
-
- def prio_and_split(self, tasks):
- return tasks, []
-
- :return: A pair of task lists
- :rtype: tuple
- """
- # to disable:
- #return tasks, []
- for x in tasks:
- x.visited = 0
-
- reverse = self.revdeps
-
- groups_done = set()
- for x in tasks:
- for k in x.run_after:
- if isinstance(k, Task.TaskGroup):
- if k not in groups_done:
- groups_done.add(k)
- for j in k.prev:
- reverse[j].add(k)
- else:
- reverse[k].add(x)
-
- # the priority number is not the tree depth
- def visit(n):
- if isinstance(n, Task.TaskGroup):
- return sum(visit(k) for k in n.next)
-
- if n.visited == 0:
- n.visited = 1
-
- if n in reverse:
- rev = reverse[n]
- n.prio_order = n.tree_weight + len(rev) + sum(visit(k) for k in rev)
- else:
- n.prio_order = n.tree_weight
-
- n.visited = 2
- elif n.visited == 1:
- raise Errors.WafError('Dependency cycle found!')
- return n.prio_order
-
- for x in tasks:
- if x.visited != 0:
- # must visit all to detect cycles
- continue
- try:
- visit(x)
- except Errors.WafError:
- self.debug_cycles(tasks, reverse)
-
- ready = []
- waiting = []
- for x in tasks:
- for k in x.run_after:
- if not k.hasrun:
- waiting.append(x)
- break
- else:
- ready.append(x)
- return (ready, waiting)
-
- def debug_cycles(self, tasks, reverse):
- tmp = {}
- for x in tasks:
- tmp[x] = 0
-
- def visit(n, acc):
- if isinstance(n, Task.TaskGroup):
- for k in n.next:
- visit(k, acc)
- return
- if tmp[n] == 0:
- tmp[n] = 1
- for k in reverse.get(n, []):
- visit(k, [n] + acc)
- tmp[n] = 2
- elif tmp[n] == 1:
- lst = []
- for tsk in acc:
- lst.append(repr(tsk))
- if tsk is n:
- # exclude prior nodes, we want the minimum cycle
- break
- raise Errors.WafError('Task dependency cycle in "run_after" constraints: %s' % ''.join(lst))
- for x in tasks:
- visit(x, [])
-
diff --git a/waflib/Scripting.py b/waflib/Scripting.py
deleted file mode 100644
index 749d4f2..0000000
--- a/waflib/Scripting.py
+++ /dev/null
@@ -1,614 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"Module called for configuring, compiling and installing targets"
-
-from __future__ import with_statement
-
-import os, shlex, shutil, traceback, errno, sys, stat
-from waflib import Utils, Configure, Logs, Options, ConfigSet, Context, Errors, Build, Node
-
-build_dir_override = None
-
-no_climb_commands = ['configure']
-
-default_cmd = "build"
-
-def waf_entry_point(current_directory, version, wafdir):
- """
- This is the main entry point, all Waf execution starts here.
-
- :param current_directory: absolute path representing the current directory
- :type current_directory: string
- :param version: version number
- :type version: string
- :param wafdir: absolute path representing the directory of the waf library
- :type wafdir: string
- """
- Logs.init_log()
-
- if Context.WAFVERSION != version:
- Logs.error('Waf script %r and library %r do not match (directory %r)', version, Context.WAFVERSION, wafdir)
- sys.exit(1)
-
- # Store current directory before any chdir
- Context.waf_dir = wafdir
- Context.run_dir = Context.launch_dir = current_directory
- start_dir = current_directory
- no_climb = os.environ.get('NOCLIMB')
-
- if len(sys.argv) > 1:
- # os.path.join handles absolute paths
- # if sys.argv[1] is not an absolute path, then it is relative to the current working directory
- potential_wscript = os.path.join(current_directory, sys.argv[1])
- if os.path.basename(potential_wscript) == Context.WSCRIPT_FILE and os.path.isfile(potential_wscript):
- # need to explicitly normalize the path, as it may contain extra '/.'
- path = os.path.normpath(os.path.dirname(potential_wscript))
- start_dir = os.path.abspath(path)
- no_climb = True
- sys.argv.pop(1)
-
- ctx = Context.create_context('options')
- (options, commands, env) = ctx.parse_cmd_args(allow_unknown=True)
- if options.top:
- start_dir = Context.run_dir = Context.top_dir = options.top
- no_climb = True
- if options.out:
- Context.out_dir = options.out
-
- # if 'configure' is in the commands, do not search any further
- if not no_climb:
- for k in no_climb_commands:
- for y in commands:
- if y.startswith(k):
- no_climb = True
- break
-
- # try to find a lock file (if the project was configured)
- # at the same time, store the first wscript file seen
- cur = start_dir
- while cur:
- try:
- lst = os.listdir(cur)
- except OSError:
- lst = []
- Logs.error('Directory %r is unreadable!', cur)
- if Options.lockfile in lst:
- env = ConfigSet.ConfigSet()
- try:
- env.load(os.path.join(cur, Options.lockfile))
- ino = os.stat(cur)[stat.ST_INO]
- except EnvironmentError:
- pass
- else:
- # check if the folder was not moved
- for x in (env.run_dir, env.top_dir, env.out_dir):
- if not x:
- continue
- if Utils.is_win32:
- if cur == x:
- load = True
- break
- else:
- # if the filesystem features symlinks, compare the inode numbers
- try:
- ino2 = os.stat(x)[stat.ST_INO]
- except OSError:
- pass
- else:
- if ino == ino2:
- load = True
- break
- else:
- Logs.warn('invalid lock file in %s', cur)
- load = False
-
- if load:
- Context.run_dir = env.run_dir
- Context.top_dir = env.top_dir
- Context.out_dir = env.out_dir
- break
-
- if not Context.run_dir:
- if Context.WSCRIPT_FILE in lst:
- Context.run_dir = cur
-
- next = os.path.dirname(cur)
- if next == cur:
- break
- cur = next
-
- if no_climb:
- break
-
- wscript = os.path.normpath(os.path.join(Context.run_dir, Context.WSCRIPT_FILE))
- if not os.path.exists(wscript):
- if options.whelp:
- Logs.warn('These are the generic options (no wscript/project found)')
- ctx.parser.print_help()
- sys.exit(0)
- Logs.error('Waf: Run from a folder containing a %r file (or try -h for the generic options)', Context.WSCRIPT_FILE)
- sys.exit(1)
-
- try:
- os.chdir(Context.run_dir)
- except OSError:
- Logs.error('Waf: The folder %r is unreadable', Context.run_dir)
- sys.exit(1)
-
- try:
- set_main_module(wscript)
- except Errors.WafError as e:
- Logs.pprint('RED', e.verbose_msg)
- Logs.error(str(e))
- sys.exit(1)
- except Exception as e:
- Logs.error('Waf: The wscript in %r is unreadable', Context.run_dir)
- traceback.print_exc(file=sys.stdout)
- sys.exit(2)
-
- if options.profile:
- import cProfile, pstats
- cProfile.runctx('from waflib import Scripting; Scripting.run_commands()', {}, {}, 'profi.txt')
- p = pstats.Stats('profi.txt')
- p.sort_stats('time').print_stats(75) # or 'cumulative'
- else:
- try:
- try:
- run_commands()
- except:
- if options.pdb:
- import pdb
- type, value, tb = sys.exc_info()
- traceback.print_exc()
- pdb.post_mortem(tb)
- else:
- raise
- except Errors.WafError as e:
- if Logs.verbose > 1:
- Logs.pprint('RED', e.verbose_msg)
- Logs.error(e.msg)
- sys.exit(1)
- except SystemExit:
- raise
- except Exception as e:
- traceback.print_exc(file=sys.stdout)
- sys.exit(2)
- except KeyboardInterrupt:
- Logs.pprint('RED', 'Interrupted')
- sys.exit(68)
-
-def set_main_module(file_path):
- """
- Read the main wscript file into :py:const:`waflib.Context.Context.g_module` and
- bind default functions such as ``init``, ``dist``, ``distclean`` if not defined.
- Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
-
- :param file_path: absolute path representing the top-level wscript file
- :type file_path: string
- """
- Context.g_module = Context.load_module(file_path)
- Context.g_module.root_path = file_path
-
- # note: to register the module globally, use the following:
- # sys.modules['wscript_main'] = g_module
-
- def set_def(obj):
- name = obj.__name__
- if not name in Context.g_module.__dict__:
- setattr(Context.g_module, name, obj)
- for k in (dist, distclean, distcheck):
- set_def(k)
- # add dummy init and shutdown functions if they're not defined
- if not 'init' in Context.g_module.__dict__:
- Context.g_module.init = Utils.nada
- if not 'shutdown' in Context.g_module.__dict__:
- Context.g_module.shutdown = Utils.nada
- if not 'options' in Context.g_module.__dict__:
- Context.g_module.options = Utils.nada
-
-def parse_options():
- """
- Parses the command-line options and initialize the logging system.
- Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization.
- """
- ctx = Context.create_context('options')
- ctx.execute()
- if not Options.commands:
- Options.commands.append(default_cmd)
- if Options.options.whelp:
- ctx.parser.print_help()
- sys.exit(0)
-
-def run_command(cmd_name):
- """
- Executes a single Waf command. Called by :py:func:`waflib.Scripting.run_commands`.
-
- :param cmd_name: command to execute, like ``build``
- :type cmd_name: string
- """
- ctx = Context.create_context(cmd_name)
- ctx.log_timer = Utils.Timer()
- ctx.options = Options.options # provided for convenience
- ctx.cmd = cmd_name
- try:
- ctx.execute()
- finally:
- # Issue 1374
- ctx.finalize()
- return ctx
-
-def run_commands():
- """
- Execute the Waf commands that were given on the command-line, and the other options
- Called by :py:func:`waflib.Scripting.waf_entry_point` during the initialization, and executed
- after :py:func:`waflib.Scripting.parse_options`.
- """
- parse_options()
- run_command('init')
- while Options.commands:
- cmd_name = Options.commands.pop(0)
- ctx = run_command(cmd_name)
- Logs.info('%r finished successfully (%s)', cmd_name, ctx.log_timer)
- run_command('shutdown')
-
-###########################################################################################
-
-def distclean_dir(dirname):
- """
- Distclean function called in the particular case when::
-
- top == out
-
- :param dirname: absolute path of the folder to clean
- :type dirname: string
- """
- for (root, dirs, files) in os.walk(dirname):
- for f in files:
- if f.endswith(('.o', '.moc', '.exe')):
- fname = os.path.join(root, f)
- try:
- os.remove(fname)
- except OSError:
- Logs.warn('Could not remove %r', fname)
-
- for x in (Context.DBFILE, 'config.log'):
- try:
- os.remove(x)
- except OSError:
- pass
-
- try:
- shutil.rmtree('c4che')
- except OSError:
- pass
-
-def distclean(ctx):
- '''removes build folders and data'''
-
- def remove_and_log(k, fun):
- try:
- fun(k)
- except EnvironmentError as e:
- if e.errno != errno.ENOENT:
- Logs.warn('Could not remove %r', k)
-
- # remove waf cache folders on the top-level
- if not Options.commands:
- for k in os.listdir('.'):
- for x in '.waf-2 waf-2 .waf3-2 waf3-2'.split():
- if k.startswith(x):
- remove_and_log(k, shutil.rmtree)
-
- # remove a build folder, if any
- cur = '.'
- if ctx.options.no_lock_in_top:
- cur = ctx.options.out
-
- try:
- lst = os.listdir(cur)
- except OSError:
- Logs.warn('Could not read %r', cur)
- return
-
- if Options.lockfile in lst:
- f = os.path.join(cur, Options.lockfile)
- try:
- env = ConfigSet.ConfigSet(f)
- except EnvironmentError:
- Logs.warn('Could not read %r', f)
- return
-
- if not env.out_dir or not env.top_dir:
- Logs.warn('Invalid lock file %r', f)
- return
-
- if env.out_dir == env.top_dir:
- distclean_dir(env.out_dir)
- else:
- remove_and_log(env.out_dir, shutil.rmtree)
-
- for k in (env.out_dir, env.top_dir, env.run_dir):
- p = os.path.join(k, Options.lockfile)
- remove_and_log(p, os.remove)
-
-class Dist(Context.Context):
- '''creates an archive containing the project source code'''
- cmd = 'dist'
- fun = 'dist'
- algo = 'tar.bz2'
- ext_algo = {}
-
- def execute(self):
- """
- See :py:func:`waflib.Context.Context.execute`
- """
- self.recurse([os.path.dirname(Context.g_module.root_path)])
- self.archive()
-
- def archive(self):
- """
- Creates the source archive.
- """
- import tarfile
-
- arch_name = self.get_arch_name()
-
- try:
- self.base_path
- except AttributeError:
- self.base_path = self.path
-
- node = self.base_path.make_node(arch_name)
- try:
- node.delete()
- except OSError:
- pass
-
- files = self.get_files()
-
- if self.algo.startswith('tar.'):
- tar = tarfile.open(node.abspath(), 'w:' + self.algo.replace('tar.', ''))
-
- for x in files:
- self.add_tar_file(x, tar)
- tar.close()
- elif self.algo == 'zip':
- import zipfile
- zip = zipfile.ZipFile(node.abspath(), 'w', compression=zipfile.ZIP_DEFLATED)
-
- for x in files:
- archive_name = self.get_base_name() + '/' + x.path_from(self.base_path)
- zip.write(x.abspath(), archive_name, zipfile.ZIP_DEFLATED)
- zip.close()
- else:
- self.fatal('Valid algo types are tar.bz2, tar.gz, tar.xz or zip')
-
- try:
- from hashlib import sha256
- except ImportError:
- digest = ''
- else:
- digest = ' (sha256=%r)' % sha256(node.read(flags='rb')).hexdigest()
-
- Logs.info('New archive created: %s%s', self.arch_name, digest)
-
- def get_tar_path(self, node):
- """
- Return the path to use for a node in the tar archive, the purpose of this
- is to let subclases resolve symbolic links or to change file names
-
- :return: absolute path
- :rtype: string
- """
- return node.abspath()
-
- def add_tar_file(self, x, tar):
- """
- Adds a file to the tar archive. Symlinks are not verified.
-
- :param x: file path
- :param tar: tar file object
- """
- p = self.get_tar_path(x)
- tinfo = tar.gettarinfo(name=p, arcname=self.get_tar_prefix() + '/' + x.path_from(self.base_path))
- tinfo.uid = 0
- tinfo.gid = 0
- tinfo.uname = 'root'
- tinfo.gname = 'root'
-
- if os.path.isfile(p):
- with open(p, 'rb') as f:
- tar.addfile(tinfo, fileobj=f)
- else:
- tar.addfile(tinfo)
-
- def get_tar_prefix(self):
- """
- Returns the base path for files added into the archive tar file
-
- :rtype: string
- """
- try:
- return self.tar_prefix
- except AttributeError:
- return self.get_base_name()
-
- def get_arch_name(self):
- """
- Returns the archive file name.
- Set the attribute *arch_name* to change the default value::
-
- def dist(ctx):
- ctx.arch_name = 'ctx.tar.bz2'
-
- :rtype: string
- """
- try:
- self.arch_name
- except AttributeError:
- self.arch_name = self.get_base_name() + '.' + self.ext_algo.get(self.algo, self.algo)
- return self.arch_name
-
- def get_base_name(self):
- """
- Returns the default name of the main directory in the archive, which is set to *appname-version*.
- Set the attribute *base_name* to change the default value::
-
- def dist(ctx):
- ctx.base_name = 'files'
-
- :rtype: string
- """
- try:
- self.base_name
- except AttributeError:
- appname = getattr(Context.g_module, Context.APPNAME, 'noname')
- version = getattr(Context.g_module, Context.VERSION, '1.0')
- self.base_name = appname + '-' + version
- return self.base_name
-
- def get_excl(self):
- """
- Returns the patterns to exclude for finding the files in the top-level directory.
- Set the attribute *excl* to change the default value::
-
- def dist(ctx):
- ctx.excl = 'build **/*.o **/*.class'
-
- :rtype: string
- """
- try:
- return self.excl
- except AttributeError:
- self.excl = Node.exclude_regs + ' **/waf-2.* **/.waf-2.* **/waf3-2.* **/.waf3-2.* **/*~ **/*.rej **/*.orig **/*.pyc **/*.pyo **/*.bak **/*.swp **/.lock-w*'
- if Context.out_dir:
- nd = self.root.find_node(Context.out_dir)
- if nd:
- self.excl += ' ' + nd.path_from(self.base_path)
- return self.excl
-
- def get_files(self):
- """
- Files to package are searched automatically by :py:func:`waflib.Node.Node.ant_glob`.
- Set *files* to prevent this behaviour::
-
- def dist(ctx):
- ctx.files = ctx.path.find_node('wscript')
-
- Files are also searched from the directory 'base_path', to change it, set::
-
- def dist(ctx):
- ctx.base_path = path
-
- :rtype: list of :py:class:`waflib.Node.Node`
- """
- try:
- files = self.files
- except AttributeError:
- files = self.base_path.ant_glob('**/*', excl=self.get_excl())
- return files
-
-def dist(ctx):
- '''makes a tarball for redistributing the sources'''
- pass
-
-class DistCheck(Dist):
- """creates an archive with dist, then tries to build it"""
- fun = 'distcheck'
- cmd = 'distcheck'
-
- def execute(self):
- """
- See :py:func:`waflib.Context.Context.execute`
- """
- self.recurse([os.path.dirname(Context.g_module.root_path)])
- self.archive()
- self.check()
-
- def make_distcheck_cmd(self, tmpdir):
- cfg = []
- if Options.options.distcheck_args:
- cfg = shlex.split(Options.options.distcheck_args)
- else:
- cfg = [x for x in sys.argv if x.startswith('-')]
- cmd = [sys.executable, sys.argv[0], 'configure', 'build', 'install', 'uninstall', '--destdir=' + tmpdir] + cfg
- return cmd
-
- def check(self):
- """
- Creates the archive, uncompresses it and tries to build the project
- """
- import tempfile, tarfile
-
- with tarfile.open(self.get_arch_name()) as t:
- for x in t:
- t.extract(x)
-
- instdir = tempfile.mkdtemp('.inst', self.get_base_name())
- cmd = self.make_distcheck_cmd(instdir)
- ret = Utils.subprocess.Popen(cmd, cwd=self.get_base_name()).wait()
- if ret:
- raise Errors.WafError('distcheck failed with code %r' % ret)
-
- if os.path.exists(instdir):
- raise Errors.WafError('distcheck succeeded, but files were left in %s' % instdir)
-
- shutil.rmtree(self.get_base_name())
-
-
-def distcheck(ctx):
- '''checks if the project compiles (tarball from 'dist')'''
- pass
-
-def autoconfigure(execute_method):
- """
- Decorator that enables context commands to run *configure* as needed.
- """
- def execute(self):
- """
- Wraps :py:func:`waflib.Context.Context.execute` on the context class
- """
- if not Configure.autoconfig:
- return execute_method(self)
-
- env = ConfigSet.ConfigSet()
- do_config = False
- try:
- env.load(os.path.join(Context.top_dir, Options.lockfile))
- except EnvironmentError:
- Logs.warn('Configuring the project')
- do_config = True
- else:
- if env.run_dir != Context.run_dir:
- do_config = True
- else:
- h = 0
- for f in env.files:
- try:
- h = Utils.h_list((h, Utils.readf(f, 'rb')))
- except EnvironmentError:
- do_config = True
- break
- else:
- do_config = h != env.hash
-
- if do_config:
- cmd = env.config_cmd or 'configure'
- if Configure.autoconfig == 'clobber':
- tmp = Options.options.__dict__
- if env.options:
- Options.options.__dict__ = env.options
- try:
- run_command(cmd)
- finally:
- Options.options.__dict__ = tmp
- else:
- run_command(cmd)
- run_command(self.cmd)
- else:
- return execute_method(self)
- return execute
-Build.BuildContext.execute = autoconfigure(Build.BuildContext.execute)
-
diff --git a/waflib/Task.py b/waflib/Task.py
deleted file mode 100644
index 0fc449d..0000000
--- a/waflib/Task.py
+++ /dev/null
@@ -1,1394 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Tasks represent atomic operations such as processes.
-"""
-
-import os, re, sys, tempfile, traceback
-from waflib import Utils, Logs, Errors
-
# task states, stored in Task.hasrun after execution
NOT_RUN = 0
"""The task was not executed yet"""

MISSING = 1
"""The task has been executed but the files have not been created"""

CRASHED = 2
"""The task execution returned a non-zero exit status"""

EXCEPTION = 3
"""An exception occurred in the task execution"""

CANCELED = 4
"""A dependency for the task is missing so it was cancelled"""

SKIPPED = 8
"""The task did not have to be executed"""

SUCCESS = 9
"""The task was successfully executed"""

# return codes of Task.runnable_status (transient, never persisted)
ASK_LATER = -1
"""The task is not ready to be executed"""

SKIP_ME = -2
"""The task does not need to be executed"""

RUN_ME = -3
"""The task must be executed"""

CANCEL_ME = -4
"""The task cannot be executed because of a dependency problem"""

# template for run_str commands executed through a shell; the first %s
# receives the command line, the second the tuple of substitution expressions
COMPILE_TEMPLATE_SHELL = '''
def f(tsk):
	env = tsk.env
	gen = tsk.generator
	bld = gen.bld
	cwdx = tsk.get_cwd()
	p = env.get_flat
	tsk.last_cmd = cmd = \'\'\' %s \'\'\' % s
	return tsk.exec_command(cmd, cwd=cwdx, env=env.env or None)
'''

# template for run_str commands executed without a shell; %s receives the
# sequence of 'lst.extend(...)' statements building the argument list
COMPILE_TEMPLATE_NOSHELL = '''
def f(tsk):
	env = tsk.env
	gen = tsk.generator
	bld = gen.bld
	cwdx = tsk.get_cwd()
	def to_list(xx):
		if isinstance(xx, str): return [xx]
		return xx
	def merge(lst1, lst2):
		if lst1 and lst2:
			return lst1[:-1] + [lst1[-1] + lst2[0]] + lst2[1:]
		return lst1 + lst2
	lst = []
	%s
	if '' in lst:
		lst = [x for x in lst if x]
	tsk.last_cmd = lst
	return tsk.exec_command(lst, cwd=cwdx, env=env.env or None)
'''

# template for generated sig_vars methods; %s receives statements appending
# evaluated scriptlet expressions to 'buf'
COMPILE_TEMPLATE_SIG_VARS = '''
def f(tsk):
	super(tsk.__class__, tsk).sig_vars()
	env = tsk.env
	gen = tsk.generator
	bld = gen.bld
	cwdx = tsk.get_cwd()
	p = env.get_flat
	buf = []
	%s
	tsk.m.update(repr(buf).encode())
'''

classes = {}
"""
The metaclass :py:class:`waflib.Task.store_task_type` stores all class tasks
created by user scripts or Waf tools to this dict. It maps class names to class objects.
"""
-
class store_task_type(type):
	"""
	Metaclass: store the task classes into the dict pointed by the
	class attribute 'register' which defaults to :py:const:`waflib.Task.classes`,

	The attribute 'run_str' is compiled into a method 'run' bound to the task class.
	"""
	def __init__(cls, name, bases, dict):
		# 'dict' shadows the builtin; kept for signature compatibility
		super(store_task_type, cls).__init__(name, bases, dict)
		name = cls.__name__

		# the two bootstrap classes below must not be registered or compiled
		if name != 'evil' and name != 'Task':
			if getattr(cls, 'run_str', None):
				# if a string is provided, convert it to a method
				(f, dvars) = compile_fun(cls.run_str, cls.shell)
				cls.hcode = Utils.h_cmd(cls.run_str)
				cls.orig_run_str = cls.run_str
				# change the name of run_str or it is impossible to subclass with a function
				cls.run_str = None
				cls.run = f
				# process variables
				cls.vars = list(set(cls.vars + dvars))
				cls.vars.sort()
				if cls.vars:
					fun = compile_sig_vars(cls.vars)
					if fun:
						cls.sig_vars = fun
			elif getattr(cls, 'run', None) and not 'hcode' in cls.__dict__:
				# getattr(cls, 'hcode') would look in the upper classes
				cls.hcode = Utils.h_cmd(cls.run)

		# be creative
		getattr(cls, 'register', classes)[name] = cls

evil = store_task_type('evil', (object,), {})
"Base class provided to avoid writing a metaclass, so the code can run in python 2.6 and 3.x unmodified"
-
class Task(evil):
	"""
	Task objects represents actions to perform such as commands to execute by calling the `run` method.

	Detecting when to execute a task occurs in the method :py:meth:`waflib.Task.Task.runnable_status`.

	Detecting which tasks to execute is performed through a hash value returned by
	:py:meth:`waflib.Task.Task.signature`. The task signature is persistent from build to build.
	"""
	vars = []
	"""ConfigSet variables that should trigger a rebuild (class attribute used for :py:meth:`waflib.Task.Task.sig_vars`)"""

	always_run = False
	"""Specify whether task instances must always be executed or not (class attribute)"""

	shell = False
	"""Execute the command with the shell (class attribute)"""

	color = 'GREEN'
	"""Color for the console display, see :py:const:`waflib.Logs.colors_lst`"""

	ext_in = []
	"""File extensions that objects of this task class may use"""

	ext_out = []
	"""File extensions that objects of this task class may create"""

	before = []
	"""List of task class names to execute before instances of this class"""

	after = []
	"""List of task class names to execute after instances of this class"""

	hcode = Utils.SIG_NIL
	"""String representing an additional hash for the class representation"""

	keep_last_cmd = False
	"""Whether to keep the last command executed on the instance after execution.
	This may be useful for certain extensions but it can use a lot of memory.
	"""

	weight = 0
	"""Optional weight to tune the priority for task instances.
	The higher, the earlier. The weight only applies to single task objects."""

	tree_weight = 0
	"""Optional weight to tune the priority of task instances and whole subtrees.
	The higher, the earlier."""

	prio_order = 0
	"""Priority order set by the scheduler on instances during the build phase.
	You most likely do not need to set it.
	"""

	__slots__ = ('hasrun', 'generator', 'env', 'inputs', 'outputs', 'dep_nodes', 'run_after')
-
	def __init__(self, *k, **kw):
		"""
		:param generator: task generator owning this task; defaults to the task itself
		:param env: :py:class:`waflib.ConfigSet.ConfigSet` object (mandatory)
		:raises: KeyError if 'env' is not provided
		"""
		self.hasrun = NOT_RUN
		try:
			self.generator = kw['generator']
		except KeyError:
			self.generator = self

		self.env = kw['env']
		""":py:class:`waflib.ConfigSet.ConfigSet` object (make sure to provide one)"""

		self.inputs = []
		"""List of input nodes, which represent the files used by the task instance"""

		self.outputs = []
		"""List of output nodes, which represent the files created by the task instance"""

		self.dep_nodes = []
		"""List of additional nodes to depend on"""

		self.run_after = set()
		"""Set of tasks that must be executed before this one"""
-
- def __lt__(self, other):
- return self.priority() > other.priority()
- def __le__(self, other):
- return self.priority() >= other.priority()
- def __gt__(self, other):
- return self.priority() < other.priority()
- def __ge__(self, other):
- return self.priority() <= other.priority()
-
- def get_cwd(self):
- """
- :return: current working directory
- :rtype: :py:class:`waflib.Node.Node`
- """
- bld = self.generator.bld
- ret = getattr(self, 'cwd', None) or getattr(bld, 'cwd', bld.bldnode)
- if isinstance(ret, str):
- if os.path.isabs(ret):
- ret = bld.root.make_node(ret)
- else:
- ret = self.generator.path.make_node(ret)
- return ret
-
- def quote_flag(self, x):
- """
- Surround a process argument by quotes so that a list of arguments can be written to a file
-
- :param x: flag
- :type x: string
- :return: quoted flag
- :rtype: string
- """
- old = x
- if '\\' in x:
- x = x.replace('\\', '\\\\')
- if '"' in x:
- x = x.replace('"', '\\"')
- if old != x or ' ' in x or '\t' in x or "'" in x:
- x = '"%s"' % x
- return x
-
- def priority(self):
- """
- Priority of execution; the higher, the earlier
-
- :return: the priority value
- :rtype: a tuple of numeric values
- """
- return (self.weight + self.prio_order, - getattr(self.generator, 'tg_idx_count', 0))
-
- def split_argfile(self, cmd):
- """
- Splits a list of process commands into the executable part and its list of arguments
-
- :return: a tuple containing the executable first and then the rest of arguments
- :rtype: tuple
- """
- return ([cmd[0]], [self.quote_flag(x) for x in cmd[1:]])
-
	def exec_command(self, cmd, **kw):
		"""
		Wrapper for :py:meth:`waflib.Context.Context.exec_command`.
		This version set the current working directory (``build.variant_dir``),
		applies PATH settings (if self.env.PATH is provided), and can run long
		commands through a temporary ``@argfile``.

		:param cmd: process command to execute
		:type cmd: list of string (best) or string (process will use a shell)
		:return: the return code
		:rtype: int

		Optional parameters:

		#. cwd: current working directory (Node or string)
		#. stdout: set to None to prevent waf from capturing the process standard output
		#. stderr: set to None to prevent waf from capturing the process standard error
		#. timeout: timeout value (Python 3)
		"""
		if not 'cwd' in kw:
			kw['cwd'] = self.get_cwd()

		if hasattr(self, 'timeout'):
			kw['timeout'] = self.timeout

		if self.env.PATH:
			# merge the task PATH into the subprocess environment
			env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
			env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)

		if hasattr(self, 'stdout'):
			kw['stdout'] = self.stdout
		if hasattr(self, 'stderr'):
			kw['stderr'] = self.stderr

		# workaround for command line length limit:
		# http://support.microsoft.com/kb/830473
		if not isinstance(cmd, str) and (len(repr(cmd)) >= 8192 if Utils.is_win32 else len(cmd) > 200000):
			cmd, args = self.split_argfile(cmd)
			try:
				(fd, tmp) = tempfile.mkstemp()
				os.write(fd, '\r\n'.join(args).encode())
				os.close(fd)
				if Logs.verbose:
					Logs.debug('argfile: @%r -> %r', tmp, args)
				return self.generator.bld.exec_command(cmd + ['@' + tmp], **kw)
			finally:
				try:
					os.remove(tmp)
				except OSError:
					# anti-virus and indexers can keep files open -_-
					pass
		else:
			return self.generator.bld.exec_command(cmd, **kw)
-
	def process(self):
		"""
		Runs the task and handles errors

		Sets :py:attr:`hasrun` to one of SUCCESS/CRASHED/EXCEPTION and records
		``err_msg``/``err_code`` for later error reporting; never raises.

		:return: 0 or None if everything is fine
		:rtype: integer
		"""
		# remove the task signature immediately before it is executed
		# so that the task will be executed again in case of failure
		try:
			del self.generator.bld.task_sigs[self.uid()]
		except KeyError:
			pass

		try:
			ret = self.run()
		except Exception:
			self.err_msg = traceback.format_exc()
			self.hasrun = EXCEPTION
		else:
			if ret:
				# non-zero return value from run(): the command failed
				self.err_code = ret
				self.hasrun = CRASHED
			else:
				try:
					self.post_run()
				except Errors.WafError:
					# post_run already set hasrun/err_msg (e.g. MISSING)
					pass
				except Exception:
					self.err_msg = traceback.format_exc()
					self.hasrun = EXCEPTION
				else:
					self.hasrun = SUCCESS

		if self.hasrun != SUCCESS and self.scan:
			# rescan dependencies on next run
			try:
				del self.generator.bld.imp_sigs[self.uid()]
			except KeyError:
				pass
-
- def log_display(self, bld):
- "Writes the execution status on the context logger"
- if self.generator.bld.progress_bar == 3:
- return
-
- s = self.display()
- if s:
- if bld.logger:
- logger = bld.logger
- else:
- logger = Logs
-
- if self.generator.bld.progress_bar == 1:
- c1 = Logs.colors.cursor_off
- c2 = Logs.colors.cursor_on
- logger.info(s, extra={'stream': sys.stderr, 'terminator':'', 'c1': c1, 'c2' : c2})
- else:
- logger.info(s, extra={'terminator':'', 'c1': '', 'c2' : ''})
-
	def display(self):
		"""
		Returns an execution status for the console, the progress bar, or the IDE output.

		:rtype: string
		"""
		col1 = Logs.colors(self.color)
		col2 = Logs.colors.NORMAL
		master = self.generator.bld.producer

		def cur():
			# the current task position, computed as late as possible
			return master.processed - master.ready.qsize()

		if self.generator.bld.progress_bar == 1:
			return self.generator.bld.progress_line(cur(), master.total, col1, col2)

		if self.generator.bld.progress_bar == 2:
			# machine-readable pipe-separated format
			ela = str(self.generator.bld.timer)
			try:
				ins = ','.join([n.name for n in self.inputs])
			except AttributeError:
				ins = ''
			try:
				outs = ','.join([n.name for n in self.outputs])
			except AttributeError:
				outs = ''
			return '|Total %s|Current %s|Inputs %s|Outputs %s|Time %s|\n' % (master.total, cur(), ins, outs, ela)

		s = str(self)
		if not s:
			return None

		# default format: [ 12/100] Keyword path -> path
		total = master.total
		n = len(str(total))
		fs = '[%%%dd/%%%dd] %%s%%s%%s%%s\n' % (n, n)
		kw = self.keyword()
		if kw:
			kw += ' '
		return fs % (cur(), total, kw, col1, s, col2)
-
- def hash_constraints(self):
- """
- Identifies a task type for all the constraints relevant for the scheduler: precedence, file production
-
- :return: a hash value
- :rtype: string
- """
- return (tuple(self.before), tuple(self.after), tuple(self.ext_in), tuple(self.ext_out), self.__class__.__name__, self.hcode)
-
	def format_error(self):
		"""
		Returns an error message to display the build failure reasons

		The message is derived from :py:attr:`hasrun` and the ``err_msg``/
		``err_code`` attributes recorded by :py:meth:`process`.

		:rtype: string
		"""
		if Logs.verbose:
			msg = ': %r\n%r' % (self, getattr(self, 'last_cmd', ''))
		else:
			msg = ' (run with -v to display more information)'
		name = getattr(self.generator, 'name', '')
		if getattr(self, "err_msg", None):
			# an explicit message was recorded (exception traceback, missing file)
			return self.err_msg
		elif not self.hasrun:
			return 'task in %r was not executed for some reason: %r' % (name, self)
		elif self.hasrun == CRASHED:
			try:
				return ' -> task in %r failed with exit status %r%s' % (name, self.err_code, msg)
			except AttributeError:
				return ' -> task in %r failed%s' % (name, msg)
		elif self.hasrun == MISSING:
			return ' -> missing files in %r%s' % (name, msg)
		elif self.hasrun == CANCELED:
			return ' -> %r canceled because of missing dependencies' % name
		else:
			return 'invalid status for task in %r: %r' % (name, self.hasrun)
-
- def colon(self, var1, var2):
- """
- Enable scriptlet expressions of the form ${FOO_ST:FOO}
- If the first variable (FOO_ST) is empty, then an empty list is returned
-
- The results will be slightly different if FOO_ST is a list, for example::
-
- env.FOO = ['p1', 'p2']
- env.FOO_ST = '-I%s'
- # ${FOO_ST:FOO} returns
- ['-Ip1', '-Ip2']
-
- env.FOO_ST = ['-a', '-b']
- # ${FOO_ST:FOO} returns
- ['-a', '-b', 'p1', '-a', '-b', 'p2']
- """
- tmp = self.env[var1]
- if not tmp:
- return []
-
- if isinstance(var2, str):
- it = self.env[var2]
- else:
- it = var2
- if isinstance(tmp, str):
- return [tmp % x for x in it]
- else:
- lst = []
- for y in it:
- lst.extend(tmp)
- lst.append(y)
- return lst
-
	def __str__(self):
		"string to display to the user"
		name = self.__class__.__name__
		if self.outputs:
			# link-like tasks (or generators without inputs) show the first output only
			if name.endswith(('lib', 'program')) or not self.inputs:
				node = self.outputs[0]
				return node.path_from(node.ctx.launch_node())
		if not (self.inputs or self.outputs):
			return self.__class__.__name__
		if len(self.inputs) == 1:
			node = self.inputs[0]
			return node.path_from(node.ctx.launch_node())

		# general case: all inputs, then all outputs
		src_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.inputs])
		tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
		if self.outputs:
			sep = ' -> '
		else:
			sep = ''
		return '%s: %s%s%s' % (self.__class__.__name__, src_str, sep, tgt_str)
-
- def keyword(self):
- "Display keyword used to prettify the console outputs"
- name = self.__class__.__name__
- if name.endswith(('lib', 'program')):
- return 'Linking'
- if len(self.inputs) == 1 and len(self.outputs) == 1:
- return 'Compiling'
- if not self.inputs:
- if self.outputs:
- return 'Creating'
- else:
- return 'Running'
- return 'Processing'
-
- def __repr__(self):
- "for debugging purposes"
- try:
- ins = ",".join([x.name for x in self.inputs])
- outs = ",".join([x.name for x in self.outputs])
- except AttributeError:
- ins = ",".join([str(x) for x in self.inputs])
- outs = ",".join([str(x) for x in self.outputs])
- return "".join(['\n\t{task %r: ' % id(self), self.__class__.__name__, " ", ins, " -> ", outs, '}'])
-
	def uid(self):
		"""
		Returns an identifier used to determine if tasks are up-to-date. Since the
		identifier will be stored between executions, it must be:

		- unique for a task: no two tasks return the same value (for a given build context)
		- the same for a given task instance

		By default, the node paths, the class name, and the function are used
		as inputs to compute a hash.

		The pointer to the object (python built-in 'id') will change between build executions,
		and must be avoided in such hashes.

		On Python 3 this method is replaced at module level by a variant that
		encodes the strings to bytes (see the ``sys.hexversion`` block below).

		:return: hash value
		:rtype: string
		"""
		try:
			return self.uid_
		except AttributeError:
			# computed once, then cached on the instance
			m = Utils.md5(self.__class__.__name__)
			up = m.update
			for x in self.inputs + self.outputs:
				up(x.abspath())
			self.uid_ = m.digest()
			return self.uid_
-
- def set_inputs(self, inp):
- """
- Appends the nodes to the *inputs* list
-
- :param inp: input nodes
- :type inp: node or list of nodes
- """
- if isinstance(inp, list):
- self.inputs += inp
- else:
- self.inputs.append(inp)
-
- def set_outputs(self, out):
- """
- Appends the nodes to the *outputs* list
-
- :param out: output nodes
- :type out: node or list of nodes
- """
- if isinstance(out, list):
- self.outputs += out
- else:
- self.outputs.append(out)
-
	def set_run_after(self, task):
		"""
		Run this task only after the given *task*.

		Calling this method from :py:meth:`waflib.Task.Task.runnable_status` may cause
		build deadlocks; see :py:meth:`waflib.Tools.fc.fc.runnable_status` for details.

		:param task: task
		:type task: :py:class:`waflib.Task.Task`
		"""
		# note: stripped under python -O; internal consistency check only
		assert isinstance(task, Task)
		self.run_after.add(task)
-
	def signature(self):
		"""
		Task signatures are stored between build executions, they are use to track the changes
		made to the input nodes (not to the outputs!). The signature hashes data from various sources:

		* explicit dependencies: files listed in the inputs (list of node objects) :py:meth:`waflib.Task.Task.sig_explicit_deps`
		* implicit dependencies: list of nodes returned by scanner methods (when present) :py:meth:`waflib.Task.Task.sig_implicit_deps`
		* hashed data: variables/values read from task.vars/task.env :py:meth:`waflib.Task.Task.sig_vars`

		If the signature is expected to give a different result, clear the cache kept in ``self.cache_sig``::

			from waflib import Task
			class cls(Task.Task):
				def signature(self):
					sig = super(Task.Task, self).signature()
					delattr(self, 'cache_sig')
					return super(Task.Task, self).signature()

		:return: the signature value
		:rtype: string or bytes
		"""
		try:
			return self.cache_sig
		except AttributeError:
			pass

		# the hash accumulator is exposed as self.m so the sig_* methods can update it
		self.m = Utils.md5(self.hcode)

		# explicit deps
		self.sig_explicit_deps()

		# env vars
		self.sig_vars()

		# implicit deps / scanner results
		if self.scan:
			try:
				self.sig_implicit_deps()
			except Errors.TaskRescan:
				# the scanner detected stale results: recompute from scratch
				return self.signature()

		ret = self.cache_sig = self.m.digest()
		return ret
-
	def runnable_status(self):
		"""
		Returns the Task status

		:return: a task state in :py:const:`waflib.Task.RUN_ME`,
			:py:const:`waflib.Task.SKIP_ME`, :py:const:`waflib.Task.CANCEL_ME` or :py:const:`waflib.Task.ASK_LATER`.
		:rtype: int
		"""
		bld = self.generator.bld
		if bld.is_install < 0:
			# negative value: uninstallation run, nothing needs building
			return SKIP_ME

		for t in self.run_after:
			if not t.hasrun:
				return ASK_LATER
			elif t.hasrun < SKIPPED:
				# a dependency has an error
				return CANCEL_ME

		# first compute the signature
		try:
			new_sig = self.signature()
		except Errors.TaskNotReady:
			return ASK_LATER

		# compare the signature to a signature computed previously
		key = self.uid()
		try:
			prev_sig = bld.task_sigs[key]
		except KeyError:
			Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
			return RUN_ME

		if new_sig != prev_sig:
			Logs.debug('task: task %r must run: the task signature changed', self)
			return RUN_ME

		# compare the signatures of the outputs
		for node in self.outputs:
			sig = bld.node_sigs.get(node)
			if not sig:
				Logs.debug('task: task %r must run: an output node has no signature', self)
				return RUN_ME
			if sig != key:
				Logs.debug('task: task %r must run: an output node was produced by another task', self)
				return RUN_ME
			if not node.exists():
				Logs.debug('task: task %r must run: an output node does not exist', self)
				return RUN_ME

		return (self.always_run and RUN_ME) or SKIP_ME
-
	def post_run(self):
		"""
		Called after successful execution to record that the task has run by
		updating the entry in :py:attr:`waflib.Build.BuildContext.task_sigs`.

		:raises: :py:class:`waflib.Errors.WafError` if a declared output was not created
		"""
		bld = self.generator.bld
		for node in self.outputs:
			if not node.exists():
				self.hasrun = MISSING
				self.err_msg = '-> missing file: %r' % node.abspath()
				raise Errors.WafError(self.err_msg)
			bld.node_sigs[node] = self.uid() # make sure this task produced the files in question
		bld.task_sigs[self.uid()] = self.signature()
		if not self.keep_last_cmd:
			# free the memory held by the command representation
			try:
				del self.last_cmd
			except AttributeError:
				pass
-
	def sig_explicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.inputs`
		and :py:attr:`waflib.Task.Task.dep_nodes` signatures.
		"""
		bld = self.generator.bld
		upd = self.m.update

		# the inputs
		for x in self.inputs + self.dep_nodes:
			upd(x.get_bld_sig())

		# manual dependencies, they can slow down the builds
		if bld.deps_man:
			additional_deps = bld.deps_man
			for x in self.inputs + self.outputs:
				try:
					d = additional_deps[x]
				except KeyError:
					continue

				for v in d:
					try:
						v = v.get_bld_sig()
					except AttributeError:
						if hasattr(v, '__call__'):
							v = v() # dependency is a function, call it
					upd(v)
-
	def sig_deep_inputs(self):
		"""
		Enable rebuilds on input files task signatures. Not used by default.

		Example: hashes of output programs can be unchanged after being re-linked,
		despite the libraries being different. This method can thus prevent stale unit test
		results (waf_unit_test.py).

		Hashing input file timestamps is another possibility for the implementation.
		This may cause unnecessary rebuilds when input tasks are frequently executed.
		Here is an implementation example::

			lst = []
			for node in self.inputs + self.dep_nodes:
				st = os.stat(node.abspath())
				lst.append(st.st_mtime)
				lst.append(st.st_size)
			self.m.update(Utils.h_list(lst))

		The downside of the implementation is that it absolutely requires all build directory
		files to be declared within the current build.
		"""
		bld = self.generator.bld
		# hash the *task* signatures of the producers of our build-dir inputs
		lst = [bld.task_sigs[bld.node_sigs[node]] for node in (self.inputs + self.dep_nodes) if node.is_bld()]
		self.m.update(Utils.h_list(lst))
-
	def sig_vars(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes :py:attr:`waflib.Task.Task.env` variables/values
		When overriding this method, and if scriptlet expressions are used, make sure to follow
		the code in :py:meth:`waflib.Task.Task.compile_sig_vars` to enable dependencies on scriptlet results.
		"""
		# only the variables listed in self.vars participate in the hash
		sig = self.generator.bld.hash_env_vars(self.env, self.vars)
		self.m.update(sig)
-
	# scan is None by default: only task classes that define it get implicit
	# dependency scanning (see sig_implicit_deps)
	scan = None
	"""
	This method, when provided, returns a tuple containing:

	* a list of nodes corresponding to real files
	* a list of names for files not found in path_lst

	For example::

		from waflib.Task import Task
		class mytask(Task):
			def scan(self, node):
				return ([], [])

	The first and second lists in the tuple are stored in :py:attr:`waflib.Build.BuildContext.node_deps` and
	:py:attr:`waflib.Build.BuildContext.raw_deps` respectively.
	"""
-
	def sig_implicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.signature`; it hashes node signatures
		obtained by scanning for dependencies (:py:meth:`waflib.Task.Task.scan`).

		The exception :py:class:`waflib.Errors.TaskRescan` is thrown
		when a file has changed. In this case, the method :py:meth:`waflib.Task.Task.signature` is called
		once again, and return here to call :py:meth:`waflib.Task.Task.scan` and searching for dependencies.
		"""
		bld = self.generator.bld

		# get the task signatures from previous runs
		key = self.uid()
		prev = bld.imp_sigs.get(key, [])

		# for issue #379
		if prev:
			try:
				if prev == self.compute_sig_implicit_deps():
					# cached dependencies are still valid
					return prev
			except Errors.TaskNotReady:
				raise
			except EnvironmentError:
				# when a file was renamed, remove the stale nodes (headers in folders without source files)
				# this will break the order calculation for headers created during the build in the source directory (should be uncommon)
				# the behaviour will differ when top != out
				for x in bld.node_deps.get(self.uid(), []):
					if not x.is_bld() and not x.exists():
						try:
							del x.parent.children[x.name]
						except KeyError:
							pass
			del bld.imp_sigs[key]
			raise Errors.TaskRescan('rescan')

		# no previous run or the signature of the dependencies has changed, rescan the dependencies
		(bld.node_deps[key], bld.raw_deps[key]) = self.scan()
		if Logs.verbose:
			Logs.debug('deps: scanner for %s: %r; unresolved: %r', self, bld.node_deps[key], bld.raw_deps[key])

		# recompute the signature and return it
		try:
			bld.imp_sigs[key] = self.compute_sig_implicit_deps()
		except EnvironmentError:
			for k in bld.node_deps.get(self.uid(), []):
				if not k.exists():
					Logs.warn('Dependency %r for %r is missing: check the task declaration and the build order!', k, self)
			raise
-
	def compute_sig_implicit_deps(self):
		"""
		Used by :py:meth:`waflib.Task.Task.sig_implicit_deps` for computing the actual hash of the
		:py:class:`waflib.Node.Node` returned by the scanner.

		:return: a hash value for the implicit dependencies
		:rtype: string or bytes
		"""
		upd = self.m.update
		# establish the build order first; may raise TaskNotReady
		self.are_implicit_nodes_ready()

		# scanner returns a node that does not have a signature
		# just *ignore* the error and let them figure out from the compiler output
		# waf -k behaviour
		for k in self.generator.bld.node_deps.get(self.uid(), []):
			upd(k.get_bld_sig())
		return self.m.digest()
-
	def are_implicit_nodes_ready(self):
		"""
		For each node returned by the scanner, see if there is a task that creates it,
		and infer the build order

		This has a low performance impact on null builds (1.86s->1.66s) thanks to caching (28s->1.86s)

		:raises: :py:class:`waflib.Errors.TaskNotReady` if a producing task has not run yet
		"""
		bld = self.generator.bld
		try:
			cache = bld.dct_implicit_nodes
		except AttributeError:
			bld.dct_implicit_nodes = cache = {}

		# one cache per build group
		try:
			dct = cache[bld.current_group]
		except KeyError:
			# first call in this group: map every output node to its producer
			dct = cache[bld.current_group] = {}
			for tsk in bld.cur_tasks:
				for x in tsk.outputs:
					dct[x] = tsk

		modified = False
		for x in bld.node_deps.get(self.uid(), []):
			if x in dct:
				self.run_after.add(dct[x])
				modified = True

		if modified:
			for tsk in self.run_after:
				if not tsk.hasrun:
					#print "task is not ready..."
					raise Errors.TaskNotReady('not ready')
if sys.hexversion > 0x3000000:
	# Python 3: md5 operates on bytes, so replace Task.uid with a variant that
	# encodes the class name and node paths before hashing them
	def uid(self):
		try:
			return self.uid_
		except AttributeError:
			m = Utils.md5(self.__class__.__name__.encode('latin-1', 'xmlcharrefreplace'))
			up = m.update
			for x in self.inputs + self.outputs:
				up(x.abspath().encode('latin-1', 'xmlcharrefreplace'))
			self.uid_ = m.digest()
			return self.uid_
	uid.__doc__ = Task.uid.__doc__
	Task.uid = uid
-
def is_before(t1, t2):
	"""
	Returns a non-zero value if task t1 is to be executed before task t2::

		t1.ext_out = '.h'
		t2.ext_in = '.h'
		t2.after = ['t1']
		t1.before = ['t2']
		waflib.Task.is_before(t1, t2) # True

	:param t1: Task object
	:type t1: :py:class:`waflib.Task.Task`
	:param t2: Task object
	:type t2: :py:class:`waflib.Task.Task`
	"""
	to_list = Utils.to_list

	# extension-based ordering: t1 produces what t2 consumes
	if any(ext in to_list(t1.ext_out) for ext in to_list(t2.ext_in)):
		return 1

	# explicit class-name ordering
	if t1.__class__.__name__ in to_list(t2.after):
		return 1
	if t2.__class__.__name__ in to_list(t1.before):
		return 1

	return 0
-
def set_file_constraints(tasks):
	"""
	Updates the ``run_after`` attribute of all tasks based on the task inputs and outputs

	:param tasks: tasks
	:type tasks: list of :py:class:`waflib.Task.Task`
	"""
	consumers = Utils.defaultdict(set)
	producers = Utils.defaultdict(set)
	for tsk in tasks:
		for node in tsk.inputs:
			consumers[node].add(tsk)
		for node in tsk.dep_nodes:
			consumers[node].add(tsk)
		for node in tsk.outputs:
			producers[node].add(tsk)

	# nodes both produced and consumed impose an ordering constraint
	for node in set(consumers.keys()).intersection(producers.keys()):
		for tsk in consumers[node]:
			tsk.run_after.update(producers[node])
-
-
class TaskGroup(object):
	"""
	Wrap nxm task order constraints into a single object
	to prevent the creation of large list/set objects

	This is an optimization
	"""
	def __init__(self, prev, next):
		# 'prev' are the predecessor tasks, 'next' the successors
		self.prev = prev
		self.next = next
		self.done = False

	def get_hasrun(self):
		# the group counts as run only once every predecessor has run
		if all(k.hasrun for k in self.prev):
			return SUCCESS
		return NOT_RUN

	hasrun = property(get_hasrun, None)
-
def set_precedence_constraints(tasks):
	"""
	Updates the ``run_after`` attribute of all tasks based on the after/before/ext_out/ext_in attributes

	:param tasks: tasks
	:type tasks: list of :py:class:`waflib.Task.Task`
	"""
	# group the tasks by constraint signature so that ordering is computed
	# once per task *class*, not once per instance
	cstr_groups = Utils.defaultdict(list)
	for x in tasks:
		h = x.hash_constraints()
		cstr_groups[h].append(x)

	keys = list(cstr_groups.keys())
	maxi = len(keys)

	# this list should be short
	for i in range(maxi):
		t1 = cstr_groups[keys[i]][0]
		for j in range(i + 1, maxi):
			t2 = cstr_groups[keys[j]][0]

			# add the constraints based on the comparisons
			if is_before(t1, t2):
				a = i
				b = j
			elif is_before(t2, t1):
				a = j
				b = i
			else:
				continue

			a = cstr_groups[keys[a]]
			b = cstr_groups[keys[b]]

			if len(a) < 2 or len(b) < 2:
				# small groups: direct task-to-task edges are cheap enough
				for x in b:
					x.run_after.update(a)
			else:
				# large groups: a single TaskGroup object replaces n*m edges
				group = TaskGroup(set(a), set(b))
				for x in b:
					x.run_after.add(group)
-
def funex(c):
	"""
	Compiles a scriptlet expression into a Python function

	:param c: function to compile
	:type c: string
	:return: the function 'f' declared in the input string
	:rtype: function
	"""
	namespace = {}
	exec(c, namespace)
	return namespace['f']
-
# Scriptlet parsing expressions. All three patterns are raw strings: without
# the r prefix, sequences such as \w rely on Python passing unknown string
# escapes through unchanged, which raises DeprecationWarning (and eventually
# SyntaxError) on modern interpreters. The fixed pattern is byte-identical.
#
# re_cond: variable names, '|' (or) and '&' (and) inside ${VAR?COND} conditions
re_cond = re.compile(r'(?P<var>\w+)|(?P<or>\|)|(?P<and>&)')
# re_novar: ${FOO:SRC...}/${FOO:TGT...} tails that are expressions, not variables
re_novar = re.compile(r'^(SRC|TGT)\W+.*?$')
# reg_act: splits run_str lines on backslashes, '$$' escapes and ${VAR...} substitutions
reg_act = re.compile(r'(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})', re.M)
def compile_fun_shell(line):
	"""
	Creates a compiled function to execute a process through a sub-shell

	:param line: scriptlet expression such as '${CC} ${SRC} -o ${TGT}'
	:type line: string
	:return: a pair of (function usable as Task.run, list of variable names to hash)
	:rtype: tuple
	"""
	extr = []
	def repl(match):
		# replace each scriptlet expression by a %s formatting marker,
		# collecting the (variable, code) pairs along the way
		g = match.group
		if g('dollar'):
			return "$"
		elif g('backslash'):
			return '\\\\'
		elif g('subst'):
			extr.append((g('var'), g('code')))
			return "%s"
		return None
	line = reg_act.sub(repl, line) or line
	dvars = []
	def add_dvar(x):
		# record a variable name for dependency tracking, without duplicates
		if x not in dvars:
			dvars.append(x)

	def replc(m):
		# performs substitutions and populates dvars
		if m.group('and'):
			return ' and '
		elif m.group('or'):
			return ' or '
		else:
			x = m.group('var')
			add_dvar(x)
			return 'env[%r]' % x

	parm = []
	app = parm.append
	for (var, meth) in extr:
		if var == 'SRC':
			if meth:
				app('tsk.inputs%s' % meth)
			else:
				app('" ".join([a.path_from(cwdx) for a in tsk.inputs])')
		elif var == 'TGT':
			if meth:
				app('tsk.outputs%s' % meth)
			else:
				app('" ".join([a.path_from(cwdx) for a in tsk.outputs])')
		elif meth:
			if meth.startswith(':'):
				# a composed variable such as ${FOO_ST:FOO}
				add_dvar(var)
				m = meth[1:]
				if m == 'SRC':
					m = '[a.path_from(cwdx) for a in tsk.inputs]'
				elif m == 'TGT':
					m = '[a.path_from(cwdx) for a in tsk.outputs]'
				elif re_novar.match(m):
					# bugfix: this used to be two identical 'elif re_novar.match(m)'
					# branches, so TGT expressions were wrongly routed to tsk.inputs;
					# dispatch on the actual prefix instead
					if m.startswith('SRC'):
						m = '[tsk.inputs%s]' % m[3:]
					else:
						m = '[tsk.outputs%s]' % m[3:]
				else:
					add_dvar(m)
					if m[:3] not in ('tsk', 'gen', 'bld'):
						m = '%r' % m
				app('" ".join(tsk.colon(%r, %s))' % (var, m))
			elif meth.startswith('?'):
				# In A?B|C output env.A if one of env.B or env.C is non-empty
				expr = re_cond.sub(replc, meth[1:])
				app('p(%r) if (%s) else ""' % (var, expr))
			else:
				# plain code such as ${tsk.inputs[0].abspath()}
				call = '%s%s' % (var, meth)
				add_dvar(call)
				app(call)
		else:
			# a plain variable such as ${AR}
			add_dvar(var)
			app("p('%s')" % var)
	if parm:
		parm = "%% (%s) " % (',\n\t\t'.join(parm))
	else:
		parm = ''

	c = COMPILE_TEMPLATE_SHELL % (line, parm)
	Logs.debug('action: %s', c.strip().splitlines())
	return (funex(c), dvars)
-
# tokenizes run_str lines into whitespace, ${VAR...} substitutions and literal text
reg_act_noshell = re.compile(r"(?P<space>\s+)|(?P<subst>\$\{(?P<var>\w+)(?P<code>.*?)\})|(?P<text>([^$ \t\n\r\f\v]|\$\$)+)", re.M)
def compile_fun_noshell(line):
	"""
	Creates a compiled function to execute a process without a sub-shell

	:param line: scriptlet expression such as '${CC} ${SRC} -o ${TGT}'
	:type line: string
	:return: a pair of (function usable as Task.run, list of variable names to hash)
	:rtype: tuple
	"""
	buf = []
	dvars = []
	merge = False
	app = buf.append

	def add_dvar(x):
		# record a variable name for dependency tracking, without duplicates
		if x not in dvars:
			dvars.append(x)

	def replc(m):
		# performs substitutions and populates dvars
		if m.group('and'):
			return ' and '
		elif m.group('or'):
			return ' or '
		else:
			x = m.group('var')
			add_dvar(x)
			return 'env[%r]' % x

	for m in reg_act_noshell.finditer(line):
		if m.group('space'):
			# whitespace separates arguments: stop merging adjacent tokens
			merge = False
			continue
		elif m.group('text'):
			app('[%r]' % m.group('text').replace('$$', '$'))
		elif m.group('subst'):
			var = m.group('var')
			code = m.group('code')
			if var == 'SRC':
				if code:
					app('[tsk.inputs%s]' % code)
				else:
					app('[a.path_from(cwdx) for a in tsk.inputs]')
			elif var == 'TGT':
				if code:
					app('[tsk.outputs%s]' % code)
				else:
					app('[a.path_from(cwdx) for a in tsk.outputs]')
			elif code:
				if code.startswith(':'):
					# a composed variable ${FOO:OUT}
					add_dvar(var)
					m = code[1:]
					if m == 'SRC':
						m = '[a.path_from(cwdx) for a in tsk.inputs]'
					elif m == 'TGT':
						m = '[a.path_from(cwdx) for a in tsk.outputs]'
					elif re_novar.match(m):
						# bugfix: this used to be two identical 'elif re_novar.match(m)'
						# branches, so TGT expressions were wrongly routed to tsk.inputs;
						# dispatch on the actual prefix instead
						if m.startswith('SRC'):
							m = '[tsk.inputs%s]' % m[3:]
						else:
							m = '[tsk.outputs%s]' % m[3:]
					else:
						add_dvar(m)
						if m[:3] not in ('tsk', 'gen', 'bld'):
							m = '%r' % m
					app('tsk.colon(%r, %s)' % (var, m))
				elif code.startswith('?'):
					# In A?B|C output env.A if one of env.B or env.C is non-empty
					expr = re_cond.sub(replc, code[1:])
					app('to_list(env[%r] if (%s) else [])' % (var, expr))
				else:
					# plain code such as ${tsk.inputs[0].abspath()}
					call = '%s%s' % (var, code)
					add_dvar(call)
					app('gen.to_list(%s)' % call)
			else:
				# a plain variable such as ${AR}
				app('to_list(env[%r])' % var)
				add_dvar(var)
		if merge:
			# adjacent tokens with no whitespace in between form one argument
			tmp = 'merge(%s, %s)' % (buf[-2], buf[-1])
			del buf[-1]
			buf[-1] = tmp
		merge = True # next turn

	buf = ['lst.extend(%s)' % x for x in buf]
	fun = COMPILE_TEMPLATE_NOSHELL % "\n\t".join(buf)
	Logs.debug('action: %s', fun.strip().splitlines())
	return (funex(fun), dvars)
-
-def compile_fun(line, shell=False):
- """
- Parses a string expression such as '${CC} ${SRC} -o ${TGT}' and returns a pair containing:
-
- * The function created (compiled) for use as :py:meth:`waflib.Task.Task.run`
- * The list of variables that must cause rebuilds when *env* data is modified
-
- for example::
-
- from waflib.Task import compile_fun
- compile_fun('cxx', '${CXX} -o ${TGT[0]} ${SRC} -I ${SRC[0].parent.bldpath()}')
-
- def build(bld):
- bld(source='wscript', rule='echo "foo\\${SRC[0].name}\\bar"')
-
- The env variables (CXX, ..) on the task must not hold dicts so as to preserve a consistent order.
- The reserved keywords ``TGT`` and ``SRC`` represent the task input and output nodes
-
- """
- if isinstance(line, str):
- if line.find('<') > 0 or line.find('>') > 0 or line.find('&&') > 0:
- shell = True
- else:
- dvars_lst = []
- funs_lst = []
- for x in line:
- if isinstance(x, str):
- fun, dvars = compile_fun(x, shell)
- dvars_lst += dvars
- funs_lst.append(fun)
- else:
- # assume a function to let through
- funs_lst.append(x)
- def composed_fun(task):
- for x in funs_lst:
- ret = x(task)
- if ret:
- return ret
- return None
- return composed_fun, dvars_lst
- if shell:
- return compile_fun_shell(line)
- else:
- return compile_fun_noshell(line)
-
-def compile_sig_vars(vars):
- """
- This method produces a sig_vars method suitable for subclasses that provide
- scriptlet code in their run_str code.
- If no such method can be created, this method returns None.
-
- The purpose of the sig_vars method returned is to ensures
- that rebuilds occur whenever the contents of the expression changes.
- This is the case B below::
-
- import time
- # case A: regular variables
- tg = bld(rule='echo ${FOO}')
- tg.env.FOO = '%s' % time.time()
- # case B
- bld(rule='echo ${gen.foo}', foo='%s' % time.time())
-
- :param vars: env variables such as CXXFLAGS or gen.foo
- :type vars: list of string
- :return: A sig_vars method relevant for dependencies if adequate, else None
- :rtype: A function, or None in most cases
- """
- buf = []
- for x in sorted(vars):
- if x[:3] in ('tsk', 'gen', 'bld'):
- buf.append('buf.append(%s)' % x)
- if buf:
- return funex(COMPILE_TEMPLATE_SIG_VARS % '\n\t'.join(buf))
- return None
-
-def task_factory(name, func=None, vars=None, color='GREEN', ext_in=[], ext_out=[], before=[], after=[], shell=False, scan=None):
- """
- Returns a new task subclass with the function ``run`` compiled from the line given.
-
- :param func: method run
- :type func: string or function
- :param vars: list of variables to hash
- :type vars: list of string
- :param color: color to use
- :type color: string
- :param shell: when *func* is a string, enable/disable the use of the shell
- :type shell: bool
- :param scan: method scan
- :type scan: function
- :rtype: :py:class:`waflib.Task.Task`
- """
-
- params = {
- 'vars': vars or [], # function arguments are static, and this one may be modified by the class
- 'color': color,
- 'name': name,
- 'shell': shell,
- 'scan': scan,
- }
-
- if isinstance(func, str) or isinstance(func, tuple):
- params['run_str'] = func
- else:
- params['run'] = func
-
- cls = type(Task)(name, (Task,), params)
- classes[name] = cls
-
- if ext_in:
- cls.ext_in = Utils.to_list(ext_in)
- if ext_out:
- cls.ext_out = Utils.to_list(ext_out)
- if before:
- cls.before = Utils.to_list(before)
- if after:
- cls.after = Utils.to_list(after)
-
- return cls
-
-def deep_inputs(cls):
- """
- Task class decorator to enable rebuilds on input files task signatures
- """
- def sig_explicit_deps(self):
- Task.sig_explicit_deps(self)
- Task.sig_deep_inputs(self)
- cls.sig_explicit_deps = sig_explicit_deps
- return cls
-
-TaskBase = Task
-"Provided for compatibility reasons, TaskBase should not be used"
-
-class TaskSemaphore(object):
- """
- Task semaphores provide a simple and efficient way of throttling the amount of
- a particular task to run concurrently. The throttling value is capped
- by the amount of maximum jobs, so for example, a `TaskSemaphore(10)`
- has no effect in a `-j2` build.
-
- Task semaphores are typically specified on the task class level::
-
- class compile(waflib.Task.Task):
- semaphore = waflib.Task.TaskSemaphore(2)
- run_str = 'touch ${TGT}'
-
- Task semaphores are meant to be used by the build scheduler in the main
- thread, so there are no guarantees of thread safety.
- """
- def __init__(self, num):
- """
- :param num: maximum value of concurrent tasks
- :type num: int
- """
- self.num = num
- self.locking = set()
- self.waiting = set()
-
- def is_locked(self):
- """Returns True if this semaphore cannot be acquired by more tasks"""
- return len(self.locking) >= self.num
-
- def acquire(self, tsk):
- """
- Mark the semaphore as used by the given task (not re-entrant).
-
- :param tsk: task object
- :type tsk: :py:class:`waflib.Task.Task`
- :raises: :py:class:`IndexError` in case the resource is already acquired
- """
- if self.is_locked():
- raise IndexError('Cannot lock more %r' % self.locking)
- self.locking.add(tsk)
-
- def release(self, tsk):
- """
- Mark the semaphore as unused by the given task.
-
- :param tsk: task object
- :type tsk: :py:class:`waflib.Task.Task`
- :raises: :py:class:`KeyError` in case the resource is not acquired by the task
- """
- self.locking.remove(tsk)
-
diff --git a/waflib/TaskGen.py b/waflib/TaskGen.py
deleted file mode 100644
index a74e643..0000000
--- a/waflib/TaskGen.py
+++ /dev/null
@@ -1,917 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Task generators
-
-The class :py:class:`waflib.TaskGen.task_gen` encapsulates the creation of task objects (low-level code)
-The instances can have various parameters, but the creation of task nodes (Task.py)
-is deferred. To achieve this, various methods are called from the method "apply"
-"""
-
-import copy, re, os, functools
-from waflib import Task, Utils, Logs, Errors, ConfigSet, Node
-
-feats = Utils.defaultdict(set)
-"""remember the methods declaring features"""
-
-HEADER_EXTS = ['.h', '.hpp', '.hxx', '.hh']
-
-class task_gen(object):
- """
- Instances of this class create :py:class:`waflib.Task.Task` when
- calling the method :py:meth:`waflib.TaskGen.task_gen.post` from the main thread.
- A few notes:
-
- * The methods to call (*self.meths*) can be specified dynamically (removing, adding, ..)
- * The 'features' are used to add methods to self.meths and then execute them
- * The attribute 'path' is a node representing the location of the task generator
- * The tasks created are added to the attribute *tasks*
- * The attribute 'idx' is a counter of task generators in the same path
- """
-
- mappings = Utils.ordered_iter_dict()
- """Mappings are global file extension mappings that are retrieved in the order of definition"""
-
- prec = Utils.defaultdict(set)
- """Dict that holds the precedence execution rules for task generator methods"""
-
- def __init__(self, *k, **kw):
- """
- Task generator objects predefine various attributes (source, target) for possible
- processing by process_rule (make-like rules) or process_source (extensions, misc methods)
-
- Tasks are stored on the attribute 'tasks'. They are created by calling methods
- listed in ``self.meths`` or referenced in the attribute ``features``
- A topological sort is performed to execute the methods in correct order.
-
- The extra key/value elements passed in ``kw`` are set as attributes
- """
- self.source = []
- self.target = ''
-
- self.meths = []
- """
- List of method names to execute (internal)
- """
-
- self.features = []
- """
- List of feature names for bringing new methods in
- """
-
- self.tasks = []
- """
- Tasks created are added to this list
- """
-
- if not 'bld' in kw:
- # task generators without a build context :-/
- self.env = ConfigSet.ConfigSet()
- self.idx = 0
- self.path = None
- else:
- self.bld = kw['bld']
- self.env = self.bld.env.derive()
- self.path = self.bld.path # emulate chdir when reading scripts
-
- # Provide a unique index per folder
- # This is part of a measure to prevent output file name collisions
- path = self.path.abspath()
- try:
- self.idx = self.bld.idx[path] = self.bld.idx.get(path, 0) + 1
- except AttributeError:
- self.bld.idx = {}
- self.idx = self.bld.idx[path] = 1
-
- # Record the global task generator count
- try:
- self.tg_idx_count = self.bld.tg_idx_count = self.bld.tg_idx_count + 1
- except AttributeError:
- self.tg_idx_count = self.bld.tg_idx_count = 1
-
- for key, val in kw.items():
- setattr(self, key, val)
-
- def __str__(self):
- """Debugging helper"""
- return "<task_gen %r declared in %s>" % (self.name, self.path.abspath())
-
- def __repr__(self):
- """Debugging helper"""
- lst = []
- for x in self.__dict__:
- if x not in ('env', 'bld', 'compiled_tasks', 'tasks'):
- lst.append("%s=%s" % (x, repr(getattr(self, x))))
- return "bld(%s) in %s" % (", ".join(lst), self.path.abspath())
-
- def get_cwd(self):
- """
- Current working directory for the task generator, defaults to the build directory.
- This is still used in a few places but it should disappear at some point as the classes
- define their own working directory.
-
- :rtype: :py:class:`waflib.Node.Node`
- """
- return self.bld.bldnode
-
- def get_name(self):
- """
- If the attribute ``name`` is not set on the instance,
- the name is computed from the target name::
-
- def build(bld):
- x = bld(name='foo')
- x.get_name() # foo
- y = bld(target='bar')
- y.get_name() # bar
-
- :rtype: string
- :return: name of this task generator
- """
- try:
- return self._name
- except AttributeError:
- if isinstance(self.target, list):
- lst = [str(x) for x in self.target]
- name = self._name = ','.join(lst)
- else:
- name = self._name = str(self.target)
- return name
- def set_name(self, name):
- self._name = name
-
- name = property(get_name, set_name)
-
- def to_list(self, val):
- """
- Ensures that a parameter is a list, see :py:func:`waflib.Utils.to_list`
-
- :type val: string or list of string
- :param val: input to return as a list
- :rtype: list
- """
- if isinstance(val, str):
- return val.split()
- else:
- return val
-
- def post(self):
- """
- Creates tasks for this task generators. The following operations are performed:
-
- #. The body of this method is called only once and sets the attribute ``posted``
- #. The attribute ``features`` is used to add more methods in ``self.meths``
- #. The methods are sorted by the precedence table ``self.prec`` or `:waflib:attr:waflib.TaskGen.task_gen.prec`
- #. The methods are then executed in order
- #. The tasks created are added to :py:attr:`waflib.TaskGen.task_gen.tasks`
- """
- if getattr(self, 'posted', None):
- return False
- self.posted = True
-
- keys = set(self.meths)
- keys.update(feats['*'])
-
- # add the methods listed in the features
- self.features = Utils.to_list(self.features)
- for x in self.features:
- st = feats[x]
- if st:
- keys.update(st)
- elif not x in Task.classes:
- Logs.warn('feature %r does not exist - bind at least one method to it?', x)
-
- # copy the precedence table
- prec = {}
- prec_tbl = self.prec
- for x in prec_tbl:
- if x in keys:
- prec[x] = prec_tbl[x]
-
- # elements disconnected
- tmp = []
- for a in keys:
- for x in prec.values():
- if a in x:
- break
- else:
- tmp.append(a)
-
- tmp.sort(reverse=True)
-
- # topological sort
- out = []
- while tmp:
- e = tmp.pop()
- if e in keys:
- out.append(e)
- try:
- nlst = prec[e]
- except KeyError:
- pass
- else:
- del prec[e]
- for x in nlst:
- for y in prec:
- if x in prec[y]:
- break
- else:
- tmp.append(x)
- tmp.sort(reverse=True)
-
- if prec:
- buf = ['Cycle detected in the method execution:']
- for k, v in prec.items():
- buf.append('- %s after %s' % (k, [x for x in v if x in prec]))
- raise Errors.WafError('\n'.join(buf))
- self.meths = out
-
- # then we run the methods in order
- Logs.debug('task_gen: posting %s %d', self, id(self))
- for x in out:
- try:
- v = getattr(self, x)
- except AttributeError:
- raise Errors.WafError('%r is not a valid task generator method' % x)
- Logs.debug('task_gen: -> %s (%d)', x, id(self))
- v()
-
- Logs.debug('task_gen: posted %s', self.name)
- return True
-
- def get_hook(self, node):
- """
- Returns the ``@extension`` method to call for a Node of a particular extension.
-
- :param node: Input file to process
- :type node: :py:class:`waflib.Tools.Node.Node`
- :return: A method able to process the input node by looking at the extension
- :rtype: function
- """
- name = node.name
- for k in self.mappings:
- try:
- if name.endswith(k):
- return self.mappings[k]
- except TypeError:
- # regexps objects
- if k.match(name):
- return self.mappings[k]
- keys = list(self.mappings.keys())
- raise Errors.WafError("File %r has no mapping in %r (load a waf tool?)" % (node, keys))
-
- def create_task(self, name, src=None, tgt=None, **kw):
- """
- Creates task instances.
-
- :param name: task class name
- :type name: string
- :param src: input nodes
- :type src: list of :py:class:`waflib.Tools.Node.Node`
- :param tgt: output nodes
- :type tgt: list of :py:class:`waflib.Tools.Node.Node`
- :return: A task object
- :rtype: :py:class:`waflib.Task.Task`
- """
- task = Task.classes[name](env=self.env.derive(), generator=self)
- if src:
- task.set_inputs(src)
- if tgt:
- task.set_outputs(tgt)
- task.__dict__.update(kw)
- self.tasks.append(task)
- return task
-
- def clone(self, env):
- """
- Makes a copy of a task generator. Once the copy is made, it is necessary to ensure that the
- it does not create the same output files as the original, or the same files may
- be compiled several times.
-
- :param env: A configuration set
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :return: A copy
- :rtype: :py:class:`waflib.TaskGen.task_gen`
- """
- newobj = self.bld()
- for x in self.__dict__:
- if x in ('env', 'bld'):
- continue
- elif x in ('path', 'features'):
- setattr(newobj, x, getattr(self, x))
- else:
- setattr(newobj, x, copy.copy(getattr(self, x)))
-
- newobj.posted = False
- if isinstance(env, str):
- newobj.env = self.bld.all_envs[env].derive()
- else:
- newobj.env = env.derive()
-
- return newobj
-
-def declare_chain(name='', rule=None, reentrant=None, color='BLUE',
- ext_in=[], ext_out=[], before=[], after=[], decider=None, scan=None, install_path=None, shell=False):
- """
- Creates a new mapping and a task class for processing files by extension.
- See Tools/flex.py for an example.
-
- :param name: name for the task class
- :type name: string
- :param rule: function to execute or string to be compiled in a function
- :type rule: string or function
- :param reentrant: re-inject the output file in the process (done automatically, set to 0 to disable)
- :type reentrant: int
- :param color: color for the task output
- :type color: string
- :param ext_in: execute the task only after the files of such extensions are created
- :type ext_in: list of string
- :param ext_out: execute the task only before files of such extensions are processed
- :type ext_out: list of string
- :param before: execute instances of this task before classes of the given names
- :type before: list of string
- :param after: execute instances of this task after classes of the given names
- :type after: list of string
- :param decider: if present, function that returns a list of output file extensions (overrides ext_out for output files, but not for the build order)
- :type decider: function
- :param scan: scanner function for the task
- :type scan: function
- :param install_path: installation path for the output nodes
- :type install_path: string
- """
- ext_in = Utils.to_list(ext_in)
- ext_out = Utils.to_list(ext_out)
- if not name:
- name = rule
- cls = Task.task_factory(name, rule, color=color, ext_in=ext_in, ext_out=ext_out, before=before, after=after, scan=scan, shell=shell)
-
- def x_file(self, node):
- if ext_in:
- _ext_in = ext_in[0]
-
- tsk = self.create_task(name, node)
- cnt = 0
-
- ext = decider(self, node) if decider else cls.ext_out
- for x in ext:
- k = node.change_ext(x, ext_in=_ext_in)
- tsk.outputs.append(k)
-
- if reentrant != None:
- if cnt < int(reentrant):
- self.source.append(k)
- else:
- # reinject downstream files into the build
- for y in self.mappings: # ~ nfile * nextensions :-/
- if k.name.endswith(y):
- self.source.append(k)
- break
- cnt += 1
-
- if install_path:
- self.install_task = self.add_install_files(install_to=install_path, install_from=tsk.outputs)
- return tsk
-
- for x in cls.ext_in:
- task_gen.mappings[x] = x_file
- return x_file
-
-def taskgen_method(func):
- """
- Decorator that registers method as a task generator method.
- The function must accept a task generator as first parameter::
-
- from waflib.TaskGen import taskgen_method
- @taskgen_method
- def mymethod(self):
- pass
-
- :param func: task generator method to add
- :type func: function
- :rtype: function
- """
- setattr(task_gen, func.__name__, func)
- return func
-
-def feature(*k):
- """
- Decorator that registers a task generator method that will be executed when the
- object attribute ``feature`` contains the corresponding key(s)::
-
- from waflib.Task import feature
- @feature('myfeature')
- def myfunction(self):
- print('that is my feature!')
- def build(bld):
- bld(features='myfeature')
-
- :param k: feature names
- :type k: list of string
- """
- def deco(func):
- setattr(task_gen, func.__name__, func)
- for name in k:
- feats[name].update([func.__name__])
- return func
- return deco
-
-def before_method(*k):
- """
- Decorator that registera task generator method which will be executed
- before the functions of given name(s)::
-
- from waflib.TaskGen import feature, before
- @feature('myfeature')
- @before_method('fun2')
- def fun1(self):
- print('feature 1!')
- @feature('myfeature')
- def fun2(self):
- print('feature 2!')
- def build(bld):
- bld(features='myfeature')
-
- :param k: method names
- :type k: list of string
- """
- def deco(func):
- setattr(task_gen, func.__name__, func)
- for fun_name in k:
- task_gen.prec[func.__name__].add(fun_name)
- return func
- return deco
-before = before_method
-
-def after_method(*k):
- """
- Decorator that registers a task generator method which will be executed
- after the functions of given name(s)::
-
- from waflib.TaskGen import feature, after
- @feature('myfeature')
- @after_method('fun2')
- def fun1(self):
- print('feature 1!')
- @feature('myfeature')
- def fun2(self):
- print('feature 2!')
- def build(bld):
- bld(features='myfeature')
-
- :param k: method names
- :type k: list of string
- """
- def deco(func):
- setattr(task_gen, func.__name__, func)
- for fun_name in k:
- task_gen.prec[fun_name].add(func.__name__)
- return func
- return deco
-after = after_method
-
-def extension(*k):
- """
- Decorator that registers a task generator method which will be invoked during
- the processing of source files for the extension given::
-
- from waflib import Task
- class mytask(Task):
- run_str = 'cp ${SRC} ${TGT}'
- @extension('.moo')
- def create_maa_file(self, node):
- self.create_task('mytask', node, node.change_ext('.maa'))
- def build(bld):
- bld(source='foo.moo')
- """
- def deco(func):
- setattr(task_gen, func.__name__, func)
- for x in k:
- task_gen.mappings[x] = func
- return func
- return deco
-
-@taskgen_method
-def to_nodes(self, lst, path=None):
- """
- Flatten the input list of string/nodes/lists into a list of nodes.
-
- It is used by :py:func:`waflib.TaskGen.process_source` and :py:func:`waflib.TaskGen.process_rule`.
- It is designed for source files, for folders, see :py:func:`waflib.Tools.ccroot.to_incnodes`:
-
- :param lst: input list
- :type lst: list of string and nodes
- :param path: path from which to search the nodes (by default, :py:attr:`waflib.TaskGen.task_gen.path`)
- :type path: :py:class:`waflib.Tools.Node.Node`
- :rtype: list of :py:class:`waflib.Tools.Node.Node`
- """
- tmp = []
- path = path or self.path
- find = path.find_resource
-
- if isinstance(lst, Node.Node):
- lst = [lst]
-
- for x in Utils.to_list(lst):
- if isinstance(x, str):
- node = find(x)
- elif hasattr(x, 'name'):
- node = x
- else:
- tmp.extend(self.to_nodes(x))
- continue
- if not node:
- raise Errors.WafError('source not found: %r in %r' % (x, self))
- tmp.append(node)
- return tmp
-
-@feature('*')
-def process_source(self):
- """
- Processes each element in the attribute ``source`` by extension.
-
- #. The *source* list is converted through :py:meth:`waflib.TaskGen.to_nodes` to a list of :py:class:`waflib.Node.Node` first.
- #. File extensions are mapped to methods having the signature: ``def meth(self, node)`` by :py:meth:`waflib.TaskGen.extension`
- #. The method is retrieved through :py:meth:`waflib.TaskGen.task_gen.get_hook`
- #. When called, the methods may modify self.source to append more source to process
- #. The mappings can map an extension or a filename (see the code below)
- """
- self.source = self.to_nodes(getattr(self, 'source', []))
- for node in self.source:
- self.get_hook(node)(self, node)
-
-@feature('*')
-@before_method('process_source')
-def process_rule(self):
- """
- Processes the attribute ``rule``. When present, :py:meth:`waflib.TaskGen.process_source` is disabled::
-
- def build(bld):
- bld(rule='cp ${SRC} ${TGT}', source='wscript', target='bar.txt')
-
- Main attributes processed:
-
- * rule: command to execute, it can be a tuple of strings for multiple commands
- * chmod: permissions for the resulting files (integer value such as Utils.O755)
- * shell: set to False to execute the command directly (default is True to use a shell)
- * scan: scanner function
- * vars: list of variables to trigger rebuilds, such as CFLAGS
- * cls_str: string to display when executing the task
- * cls_keyword: label to display when executing the task
- * cache_rule: by default, try to re-use similar classes, set to False to disable
- * source: list of Node or string objects representing the source files required by this task
- * target: list of Node or string objects representing the files that this task creates
- * cwd: current working directory (Node or string)
- * stdout: standard output, set to None to prevent waf from capturing the text
- * stderr: standard error, set to None to prevent waf from capturing the text
- * timeout: timeout for command execution (Python 3)
- * always: whether to always run the command (False by default)
- * deep_inputs: whether the task must depend on the input file tasks too (False by default)
- """
- if not getattr(self, 'rule', None):
- return
-
- # create the task class
- name = str(getattr(self, 'name', None) or self.target or getattr(self.rule, '__name__', self.rule))
-
- # or we can put the class in a cache for performance reasons
- try:
- cache = self.bld.cache_rule_attr
- except AttributeError:
- cache = self.bld.cache_rule_attr = {}
-
- chmod = getattr(self, 'chmod', None)
- shell = getattr(self, 'shell', True)
- color = getattr(self, 'color', 'BLUE')
- scan = getattr(self, 'scan', None)
- _vars = getattr(self, 'vars', [])
- cls_str = getattr(self, 'cls_str', None)
- cls_keyword = getattr(self, 'cls_keyword', None)
- use_cache = getattr(self, 'cache_rule', 'True')
- deep_inputs = getattr(self, 'deep_inputs', False)
-
- scan_val = has_deps = hasattr(self, 'deps')
- if scan:
- scan_val = id(scan)
-
- key = Utils.h_list((name, self.rule, chmod, shell, color, cls_str, cls_keyword, scan_val, _vars, deep_inputs))
-
- cls = None
- if use_cache:
- try:
- cls = cache[key]
- except KeyError:
- pass
- if not cls:
- rule = self.rule
- if chmod is not None:
- def chmod_fun(tsk):
- for x in tsk.outputs:
- os.chmod(x.abspath(), tsk.generator.chmod)
- if isinstance(rule, tuple):
- rule = list(rule)
- rule.append(chmod_fun)
- rule = tuple(rule)
- else:
- rule = (rule, chmod_fun)
-
- cls = Task.task_factory(name, rule, _vars, shell=shell, color=color)
-
- if cls_str:
- setattr(cls, '__str__', self.cls_str)
-
- if cls_keyword:
- setattr(cls, 'keyword', self.cls_keyword)
-
- if deep_inputs:
- Task.deep_inputs(cls)
-
- if scan:
- cls.scan = self.scan
- elif has_deps:
- def scan(self):
- nodes = []
- for x in self.generator.to_list(getattr(self.generator, 'deps', None)):
- node = self.generator.path.find_resource(x)
- if not node:
- self.generator.bld.fatal('Could not find %r (was it declared?)' % x)
- nodes.append(node)
- return [nodes, []]
- cls.scan = scan
-
- if use_cache:
- cache[key] = cls
-
- # now create one instance
- tsk = self.create_task(name)
-
- for x in ('after', 'before', 'ext_in', 'ext_out'):
- setattr(tsk, x, getattr(self, x, []))
-
- if hasattr(self, 'stdout'):
- tsk.stdout = self.stdout
-
- if hasattr(self, 'stderr'):
- tsk.stderr = self.stderr
-
- if getattr(self, 'timeout', None):
- tsk.timeout = self.timeout
-
- if getattr(self, 'always', None):
- tsk.always_run = True
-
- if getattr(self, 'target', None):
- if isinstance(self.target, str):
- self.target = self.target.split()
- if not isinstance(self.target, list):
- self.target = [self.target]
- for x in self.target:
- if isinstance(x, str):
- tsk.outputs.append(self.path.find_or_declare(x))
- else:
- x.parent.mkdir() # if a node was given, create the required folders
- tsk.outputs.append(x)
- if getattr(self, 'install_path', None):
- self.install_task = self.add_install_files(install_to=self.install_path,
- install_from=tsk.outputs, chmod=getattr(self, 'chmod', Utils.O644))
-
- if getattr(self, 'source', None):
- tsk.inputs = self.to_nodes(self.source)
- # bypass the execution of process_source by setting the source to an empty list
- self.source = []
-
- if getattr(self, 'cwd', None):
- tsk.cwd = self.cwd
-
- if isinstance(tsk.run, functools.partial):
- # Python documentation says: "partial objects defined in classes
- # behave like static methods and do not transform into bound
- # methods during instance attribute look-up."
- tsk.run = functools.partial(tsk.run, tsk)
-
-@feature('seq')
-def sequence_order(self):
- """
- Adds a strict sequential constraint between the tasks generated by task generators.
- It works because task generators are posted in order.
- It will not post objects which belong to other folders.
-
- Example::
-
- bld(features='javac seq')
- bld(features='jar seq')
-
- To start a new sequence, set the attribute seq_start, for example::
-
- obj = bld(features='seq')
- obj.seq_start = True
-
- Note that the method is executed in last position. This is more an
- example than a widely-used solution.
- """
- if self.meths and self.meths[-1] != 'sequence_order':
- self.meths.append('sequence_order')
- return
-
- if getattr(self, 'seq_start', None):
- return
-
- # all the tasks previously declared must be run before these
- if getattr(self.bld, 'prev', None):
- self.bld.prev.post()
- for x in self.bld.prev.tasks:
- for y in self.tasks:
- y.set_run_after(x)
-
- self.bld.prev = self
-
-
-re_m4 = re.compile('@(\w+)@', re.M)
-
-class subst_pc(Task.Task):
- """
- Creates *.pc* files from *.pc.in*. The task is executed whenever an input variable used
- in the substitution changes.
- """
-
- def force_permissions(self):
- "Private for the time being, we will probably refactor this into run_str=[run1,chmod]"
- if getattr(self.generator, 'chmod', None):
- for x in self.outputs:
- os.chmod(x.abspath(), self.generator.chmod)
-
- def run(self):
- "Substitutes variables in a .in file"
-
- if getattr(self.generator, 'is_copy', None):
- for i, x in enumerate(self.outputs):
- x.write(self.inputs[i].read('rb'), 'wb')
- stat = os.stat(self.inputs[i].abspath()) # Preserve mtime of the copy
- os.utime(self.outputs[i].abspath(), (stat.st_atime, stat.st_mtime))
- self.force_permissions()
- return None
-
- if getattr(self.generator, 'fun', None):
- ret = self.generator.fun(self)
- if not ret:
- self.force_permissions()
- return ret
-
- code = self.inputs[0].read(encoding=getattr(self.generator, 'encoding', 'latin-1'))
- if getattr(self.generator, 'subst_fun', None):
- code = self.generator.subst_fun(self, code)
- if code is not None:
- self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
- self.force_permissions()
- return None
-
- # replace all % by %% to prevent errors by % signs
- code = code.replace('%', '%%')
-
- # extract the vars foo into lst and replace @foo@ by %(foo)s
- lst = []
- def repl(match):
- g = match.group
- if g(1):
- lst.append(g(1))
- return "%%(%s)s" % g(1)
- return ''
- code = getattr(self.generator, 're_m4', re_m4).sub(repl, code)
-
- try:
- d = self.generator.dct
- except AttributeError:
- d = {}
- for x in lst:
- tmp = getattr(self.generator, x, '') or self.env[x] or self.env[x.upper()]
- try:
- tmp = ''.join(tmp)
- except TypeError:
- tmp = str(tmp)
- d[x] = tmp
-
- code = code % d
- self.outputs[0].write(code, encoding=getattr(self.generator, 'encoding', 'latin-1'))
- self.generator.bld.raw_deps[self.uid()] = lst
-
- # make sure the signature is updated
- try:
- delattr(self, 'cache_sig')
- except AttributeError:
- pass
-
- self.force_permissions()
-
- def sig_vars(self):
- """
- Compute a hash (signature) of the variables used in the substitution
- """
- bld = self.generator.bld
- env = self.env
- upd = self.m.update
-
- if getattr(self.generator, 'fun', None):
- upd(Utils.h_fun(self.generator.fun).encode())
- if getattr(self.generator, 'subst_fun', None):
- upd(Utils.h_fun(self.generator.subst_fun).encode())
-
- # raw_deps: persistent custom values returned by the scanner
- vars = self.generator.bld.raw_deps.get(self.uid(), [])
-
- # hash both env vars and task generator attributes
- act_sig = bld.hash_env_vars(env, vars)
- upd(act_sig)
-
- lst = [getattr(self.generator, x, '') for x in vars]
- upd(Utils.h_list(lst))
-
- return self.m.digest()
-
-@extension('.pc.in')
-def add_pcfile(self, node):
- """
- Processes *.pc.in* files to *.pc*. Installs the results to ``${PREFIX}/lib/pkgconfig/`` by default
-
- def build(bld):
- bld(source='foo.pc.in', install_path='${LIBDIR}/pkgconfig/')
- """
- tsk = self.create_task('subst_pc', node, node.change_ext('.pc', '.pc.in'))
- self.install_task = self.add_install_files(
- install_to=getattr(self, 'install_path', '${LIBDIR}/pkgconfig/'), install_from=tsk.outputs)
-
-class subst(subst_pc):
- pass
-
-@feature('subst')
-@before_method('process_source', 'process_rule')
-def process_subst(self):
- """
- Defines a transformation that substitutes the contents of *source* files to *target* files::
-
- def build(bld):
- bld(
- features='subst',
- source='foo.c.in',
- target='foo.c',
- install_path='${LIBDIR}/pkgconfig',
- VAR = 'val'
- )
-
- The input files are supposed to contain macros of the form *@VAR@*, where *VAR* is an argument
- of the task generator object.
-
- This method overrides the processing by :py:meth:`waflib.TaskGen.process_source`.
- """
-
- src = Utils.to_list(getattr(self, 'source', []))
- if isinstance(src, Node.Node):
- src = [src]
- tgt = Utils.to_list(getattr(self, 'target', []))
- if isinstance(tgt, Node.Node):
- tgt = [tgt]
- if len(src) != len(tgt):
- raise Errors.WafError('invalid number of source/target for %r' % self)
-
- for x, y in zip(src, tgt):
- if not x or not y:
- raise Errors.WafError('null source or target for %r' % self)
- a, b = None, None
-
- if isinstance(x, str) and isinstance(y, str) and x == y:
- a = self.path.find_node(x)
- b = self.path.get_bld().make_node(y)
- if not os.path.isfile(b.abspath()):
- b.parent.mkdir()
- else:
- if isinstance(x, str):
- a = self.path.find_resource(x)
- elif isinstance(x, Node.Node):
- a = x
- if isinstance(y, str):
- b = self.path.find_or_declare(y)
- elif isinstance(y, Node.Node):
- b = y
-
- if not a:
- raise Errors.WafError('could not find %r for %r' % (x, self))
-
- tsk = self.create_task('subst', a, b)
- for k in ('after', 'before', 'ext_in', 'ext_out'):
- val = getattr(self, k, None)
- if val:
- setattr(tsk, k, val)
-
- # paranoid safety measure for the general case foo.in->foo.h with ambiguous dependencies
- for xt in HEADER_EXTS:
- if b.name.endswith(xt):
- tsk.ext_in = tsk.ext_in + ['.h']
- break
-
- inst_to = getattr(self, 'install_path', None)
- if inst_to:
- self.install_task = self.add_install_files(install_to=inst_to,
- install_from=b, chmod=getattr(self, 'chmod', Utils.O644))
-
- self.source = []
-
diff --git a/waflib/Tools/__init__.py b/waflib/Tools/__init__.py
deleted file mode 100644
index 079df35..0000000
--- a/waflib/Tools/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
diff --git a/waflib/Tools/ar.py b/waflib/Tools/ar.py
deleted file mode 100644
index b39b645..0000000
--- a/waflib/Tools/ar.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-
-"""
-The **ar** program creates static libraries. This tool is almost always loaded
-from others (C, C++, D, etc) for static library support.
-"""
-
-from waflib.Configure import conf
-
-@conf
-def find_ar(conf):
- """Configuration helper used by C/C++ tools to enable the support for static libraries"""
- conf.load('ar')
-
-def configure(conf):
- """Finds the ar program and sets the default flags in ``conf.env.ARFLAGS``"""
- conf.find_program('ar', var='AR')
- conf.add_os_flags('ARFLAGS')
- if not conf.env.ARFLAGS:
- conf.env.ARFLAGS = ['rcs']
-
diff --git a/waflib/Tools/asm.py b/waflib/Tools/asm.py
deleted file mode 100644
index b6f26fb..0000000
--- a/waflib/Tools/asm.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Assembly support, used by tools such as gas and nasm
-
-To declare targets using assembly::
-
- def configure(conf):
- conf.load('gcc gas')
-
- def build(bld):
- bld(
- features='c cstlib asm',
- source = 'test.S',
- target = 'asmtest')
-
- bld(
- features='asm asmprogram',
- source = 'test.S',
- target = 'asmtest')
-
-Support for pure asm programs and libraries should also work::
-
- def configure(conf):
- conf.load('nasm')
- conf.find_program('ld', 'ASLINK')
-
- def build(bld):
- bld(
- features='asm asmprogram',
- source = 'test.S',
- target = 'asmtest')
-"""
-
-from waflib import Task
-from waflib.Tools.ccroot import link_task, stlink_task
-from waflib.TaskGen import extension
-
-class asm(Task.Task):
- """
- Compiles asm files by gas/nasm/yasm/...
- """
- color = 'BLUE'
- run_str = '${AS} ${ASFLAGS} ${ASMPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${AS_SRC_F}${SRC} ${AS_TGT_F}${TGT}'
-
-@extension('.s', '.S', '.asm', '.ASM', '.spp', '.SPP')
-def asm_hook(self, node):
- """
- Binds the asm extension to the asm task
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- return self.create_compiled_task('asm', node)
-
-class asmprogram(link_task):
- "Links object files into a c program"
- run_str = '${ASLINK} ${ASLINKFLAGS} ${ASLNK_TGT_F}${TGT} ${ASLNK_SRC_F}${SRC}'
- ext_out = ['.bin']
- inst_to = '${BINDIR}'
-
-class asmshlib(asmprogram):
- "Links object files into a c shared library"
- inst_to = '${LIBDIR}'
-
-class asmstlib(stlink_task):
- "Links object files into a c static library"
- pass # do not remove
-
-def configure(conf):
- conf.env.ASMPATH_ST = '-I%s'
diff --git a/waflib/Tools/bison.py b/waflib/Tools/bison.py
deleted file mode 100644
index eef56dc..0000000
--- a/waflib/Tools/bison.py
+++ /dev/null
@@ -1,49 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy 2009-2018 (ita)
-
-"""
-The **bison** program is a code generator which creates C or C++ files.
-The generated files are compiled into object files.
-"""
-
-from waflib import Task
-from waflib.TaskGen import extension
-
-class bison(Task.Task):
- """Compiles bison files"""
- color = 'BLUE'
- run_str = '${BISON} ${BISONFLAGS} ${SRC[0].abspath()} -o ${TGT[0].name}'
- ext_out = ['.h'] # just to make sure
-
-@extension('.y', '.yc', '.yy')
-def big_bison(self, node):
- """
- Creates a bison task, which must be executed from the directory of the output file.
- """
- has_h = '-d' in self.env.BISONFLAGS
-
- outs = []
- if node.name.endswith('.yc'):
- outs.append(node.change_ext('.tab.cc'))
- if has_h:
- outs.append(node.change_ext('.tab.hh'))
- else:
- outs.append(node.change_ext('.tab.c'))
- if has_h:
- outs.append(node.change_ext('.tab.h'))
-
- tsk = self.create_task('bison', node, outs)
- tsk.cwd = node.parent.get_bld()
-
- # and the c/cxx file must be compiled too
- self.source.append(outs[0])
-
-def configure(conf):
- """
- Detects the *bison* program
- """
- conf.find_program('bison', var='BISON')
- conf.env.BISONFLAGS = ['-d']
-
diff --git a/waflib/Tools/c.py b/waflib/Tools/c.py
deleted file mode 100644
index effd6b6..0000000
--- a/waflib/Tools/c.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"Base for c programs/libraries"
-
-from waflib import TaskGen, Task
-from waflib.Tools import c_preproc
-from waflib.Tools.ccroot import link_task, stlink_task
-
-@TaskGen.extension('.c')
-def c_hook(self, node):
- "Binds the c file extensions create :py:class:`waflib.Tools.c.c` instances"
- if not self.env.CC and self.env.CXX:
- return self.create_compiled_task('cxx', node)
- return self.create_compiled_task('c', node)
-
-class c(Task.Task):
- "Compiles C files into object files"
- run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CC_SRC_F}${SRC} ${CC_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
- vars = ['CCDEPS'] # unused variable to depend on, just in case
- ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
- scan = c_preproc.scan
-
-class cprogram(link_task):
- "Links object files into c programs"
- run_str = '${LINK_CC} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
- ext_out = ['.bin']
- vars = ['LINKDEPS']
- inst_to = '${BINDIR}'
-
-class cshlib(cprogram):
- "Links object files into c shared libraries"
- inst_to = '${LIBDIR}'
-
-class cstlib(stlink_task):
- "Links object files into a c static libraries"
- pass # do not remove
-
diff --git a/waflib/Tools/c_aliases.py b/waflib/Tools/c_aliases.py
deleted file mode 100644
index c9d5369..0000000
--- a/waflib/Tools/c_aliases.py
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2015 (ita)
-
-"base for all c/c++ programs and libraries"
-
-from waflib import Utils, Errors
-from waflib.Configure import conf
-
-def get_extensions(lst):
- """
- Returns the file extensions for the list of files given as input
-
- :param lst: files to process
- :list lst: list of string or :py:class:`waflib.Node.Node`
- :return: list of file extensions
- :rtype: list of string
- """
- ret = []
- for x in Utils.to_list(lst):
- if not isinstance(x, str):
- x = x.name
- ret.append(x[x.rfind('.') + 1:])
- return ret
-
-def sniff_features(**kw):
- """
- Computes and returns the features required for a task generator by
- looking at the file extensions. This aimed for C/C++ mainly::
-
- snif_features(source=['foo.c', 'foo.cxx'], type='shlib')
- # returns ['cxx', 'c', 'cxxshlib', 'cshlib']
-
- :param source: source files to process
- :type source: list of string or :py:class:`waflib.Node.Node`
- :param type: object type in *program*, *shlib* or *stlib*
- :type type: string
- :return: the list of features for a task generator processing the source files
- :rtype: list of string
- """
- exts = get_extensions(kw['source'])
- typ = kw['typ']
- feats = []
-
- # watch the order, cxx will have the precedence
- for x in 'cxx cpp c++ cc C'.split():
- if x in exts:
- feats.append('cxx')
- break
-
- if 'c' in exts or 'vala' in exts or 'gs' in exts:
- feats.append('c')
-
- for x in 'f f90 F F90 for FOR'.split():
- if x in exts:
- feats.append('fc')
- break
-
- if 'd' in exts:
- feats.append('d')
-
- if 'java' in exts:
- feats.append('java')
- return 'java'
-
- if typ in ('program', 'shlib', 'stlib'):
- will_link = False
- for x in feats:
- if x in ('cxx', 'd', 'fc', 'c'):
- feats.append(x + typ)
- will_link = True
- if not will_link and not kw.get('features', []):
- raise Errors.WafError('Cannot link from %r, try passing eg: features="c cprogram"?' % kw)
- return feats
-
-def set_features(kw, typ):
- """
- Inserts data in the input dict *kw* based on existing data and on the type of target
- required (typ).
-
- :param kw: task generator parameters
- :type kw: dict
- :param typ: type of target
- :type typ: string
- """
- kw['typ'] = typ
- kw['features'] = Utils.to_list(kw.get('features', [])) + Utils.to_list(sniff_features(**kw))
-
-@conf
-def program(bld, *k, **kw):
- """
- Alias for creating programs by looking at the file extensions::
-
- def build(bld):
- bld.program(source='foo.c', target='app')
- # equivalent to:
- # bld(features='c cprogram', source='foo.c', target='app')
-
- """
- set_features(kw, 'program')
- return bld(*k, **kw)
-
-@conf
-def shlib(bld, *k, **kw):
- """
- Alias for creating shared libraries by looking at the file extensions::
-
- def build(bld):
- bld.shlib(source='foo.c', target='app')
- # equivalent to:
- # bld(features='c cshlib', source='foo.c', target='app')
-
- """
- set_features(kw, 'shlib')
- return bld(*k, **kw)
-
-@conf
-def stlib(bld, *k, **kw):
- """
- Alias for creating static libraries by looking at the file extensions::
-
- def build(bld):
- bld.stlib(source='foo.cpp', target='app')
- # equivalent to:
- # bld(features='cxx cxxstlib', source='foo.cpp', target='app')
-
- """
- set_features(kw, 'stlib')
- return bld(*k, **kw)
-
-@conf
-def objects(bld, *k, **kw):
- """
- Alias for creating object files by looking at the file extensions::
-
- def build(bld):
- bld.objects(source='foo.c', target='app')
- # equivalent to:
- # bld(features='c', source='foo.c', target='app')
-
- """
- set_features(kw, 'objects')
- return bld(*k, **kw)
-
diff --git a/waflib/Tools/c_config.py b/waflib/Tools/c_config.py
deleted file mode 100644
index d2b3c0d..0000000
--- a/waflib/Tools/c_config.py
+++ /dev/null
@@ -1,1351 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-C/C++/D configuration helpers
-"""
-
-from __future__ import with_statement
-
-import os, re, shlex
-from waflib import Build, Utils, Task, Options, Logs, Errors, Runner
-from waflib.TaskGen import after_method, feature
-from waflib.Configure import conf
-
-WAF_CONFIG_H = 'config.h'
-"""default name for the config.h file"""
-
-DEFKEYS = 'define_key'
-INCKEYS = 'include_key'
-
-SNIP_EMPTY_PROGRAM = '''
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return 0;
-}
-'''
-
-MACRO_TO_DESTOS = {
-'__linux__' : 'linux',
-'__GNU__' : 'gnu', # hurd
-'__FreeBSD__' : 'freebsd',
-'__NetBSD__' : 'netbsd',
-'__OpenBSD__' : 'openbsd',
-'__sun' : 'sunos',
-'__hpux' : 'hpux',
-'__sgi' : 'irix',
-'_AIX' : 'aix',
-'__CYGWIN__' : 'cygwin',
-'__MSYS__' : 'cygwin',
-'_UWIN' : 'uwin',
-'_WIN64' : 'win32',
-'_WIN32' : 'win32',
-# Note about darwin: this is also tested with 'defined __APPLE__ && defined __MACH__' somewhere below in this file.
-'__ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__' : 'darwin',
-'__ENVIRONMENT_IPHONE_OS_VERSION_MIN_REQUIRED__' : 'darwin', # iphone
-'__QNX__' : 'qnx',
-'__native_client__' : 'nacl' # google native client platform
-}
-
-MACRO_TO_DEST_CPU = {
-'__x86_64__' : 'x86_64',
-'__amd64__' : 'x86_64',
-'__i386__' : 'x86',
-'__ia64__' : 'ia',
-'__mips__' : 'mips',
-'__sparc__' : 'sparc',
-'__alpha__' : 'alpha',
-'__aarch64__' : 'aarch64',
-'__thumb__' : 'thumb',
-'__arm__' : 'arm',
-'__hppa__' : 'hppa',
-'__powerpc__' : 'powerpc',
-'__ppc__' : 'powerpc',
-'__convex__' : 'convex',
-'__m68k__' : 'm68k',
-'__s390x__' : 's390x',
-'__s390__' : 's390',
-'__sh__' : 'sh',
-'__xtensa__' : 'xtensa',
-}
-
-@conf
-def parse_flags(self, line, uselib_store, env=None, force_static=False, posix=None):
- """
- Parses flags from the input lines, and adds them to the relevant use variables::
-
- def configure(conf):
- conf.parse_flags('-O3', 'FOO')
- # conf.env.CXXFLAGS_FOO = ['-O3']
- # conf.env.CFLAGS_FOO = ['-O3']
-
- :param line: flags
- :type line: string
- :param uselib_store: where to add the flags
- :type uselib_store: string
- :param env: config set or conf.env by default
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- """
-
- assert(isinstance(line, str))
-
- env = env or self.env
-
- # Issue 811 and 1371
- if posix is None:
- posix = True
- if '\\' in line:
- posix = ('\\ ' in line) or ('\\\\' in line)
-
- lex = shlex.shlex(line, posix=posix)
- lex.whitespace_split = True
- lex.commenters = ''
- lst = list(lex)
-
- # append_unique is not always possible
- # for example, apple flags may require both -arch i386 and -arch ppc
- uselib = uselib_store
- def app(var, val):
- env.append_value('%s_%s' % (var, uselib), val)
- def appu(var, val):
- env.append_unique('%s_%s' % (var, uselib), val)
- static = False
- while lst:
- x = lst.pop(0)
- st = x[:2]
- ot = x[2:]
-
- if st == '-I' or st == '/I':
- if not ot:
- ot = lst.pop(0)
- appu('INCLUDES', ot)
- elif st == '-i':
- tmp = [x, lst.pop(0)]
- app('CFLAGS', tmp)
- app('CXXFLAGS', tmp)
- elif st == '-D' or (env.CXX_NAME == 'msvc' and st == '/D'): # not perfect but..
- if not ot:
- ot = lst.pop(0)
- app('DEFINES', ot)
- elif st == '-l':
- if not ot:
- ot = lst.pop(0)
- prefix = 'STLIB' if (force_static or static) else 'LIB'
- app(prefix, ot)
- elif st == '-L':
- if not ot:
- ot = lst.pop(0)
- prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
- appu(prefix, ot)
- elif x.startswith('/LIBPATH:'):
- prefix = 'STLIBPATH' if (force_static or static) else 'LIBPATH'
- appu(prefix, x.replace('/LIBPATH:', ''))
- elif x.startswith('-std='):
- prefix = 'CXXFLAGS' if '++' in x else 'CFLAGS'
- app(prefix, x)
- elif x.startswith('+') or x in ('-pthread', '-fPIC', '-fpic', '-fPIE', '-fpie'):
- app('CFLAGS', x)
- app('CXXFLAGS', x)
- app('LINKFLAGS', x)
- elif x == '-framework':
- appu('FRAMEWORK', lst.pop(0))
- elif x.startswith('-F'):
- appu('FRAMEWORKPATH', x[2:])
- elif x == '-Wl,-rpath' or x == '-Wl,-R':
- app('RPATH', lst.pop(0).lstrip('-Wl,'))
- elif x.startswith('-Wl,-R,'):
- app('RPATH', x[7:])
- elif x.startswith('-Wl,-R'):
- app('RPATH', x[6:])
- elif x.startswith('-Wl,-rpath,'):
- app('RPATH', x[11:])
- elif x == '-Wl,-Bstatic' or x == '-Bstatic':
- static = True
- elif x == '-Wl,-Bdynamic' or x == '-Bdynamic':
- static = False
- elif x.startswith('-Wl') or x in ('-rdynamic', '-pie'):
- app('LINKFLAGS', x)
- elif x.startswith(('-m', '-f', '-dynamic', '-O', '-g')):
- # Adding the -W option breaks python builds on Openindiana
- app('CFLAGS', x)
- app('CXXFLAGS', x)
- elif x.startswith('-bundle'):
- app('LINKFLAGS', x)
- elif x.startswith(('-undefined', '-Xlinker')):
- arg = lst.pop(0)
- app('LINKFLAGS', [x, arg])
- elif x.startswith(('-arch', '-isysroot')):
- tmp = [x, lst.pop(0)]
- app('CFLAGS', tmp)
- app('CXXFLAGS', tmp)
- app('LINKFLAGS', tmp)
- elif x.endswith(('.a', '.so', '.dylib', '.lib')):
- appu('LINKFLAGS', x) # not cool, #762
- else:
- self.to_log('Unhandled flag %r' % x)
-
-@conf
-def validate_cfg(self, kw):
- """
- Searches for the program *pkg-config* if missing, and validates the
- parameters to pass to :py:func:`waflib.Tools.c_config.exec_cfg`.
-
- :param path: the **-config program to use** (default is *pkg-config*)
- :type path: list of string
- :param msg: message to display to describe the test executed
- :type msg: string
- :param okmsg: message to display when the test is successful
- :type okmsg: string
- :param errmsg: message to display in case of error
- :type errmsg: string
- """
- if not 'path' in kw:
- if not self.env.PKGCONFIG:
- self.find_program('pkg-config', var='PKGCONFIG')
- kw['path'] = self.env.PKGCONFIG
-
- # verify that exactly one action is requested
- s = ('atleast_pkgconfig_version' in kw) + ('modversion' in kw) + ('package' in kw)
- if s != 1:
- raise ValueError('exactly one of atleast_pkgconfig_version, modversion and package must be set')
- if not 'msg' in kw:
- if 'atleast_pkgconfig_version' in kw:
- kw['msg'] = 'Checking for pkg-config version >= %r' % kw['atleast_pkgconfig_version']
- elif 'modversion' in kw:
- kw['msg'] = 'Checking for %r version' % kw['modversion']
- else:
- kw['msg'] = 'Checking for %r' %(kw['package'])
-
- # let the modversion check set the okmsg to the detected version
- if not 'okmsg' in kw and not 'modversion' in kw:
- kw['okmsg'] = 'yes'
- if not 'errmsg' in kw:
- kw['errmsg'] = 'not found'
-
- # pkg-config version
- if 'atleast_pkgconfig_version' in kw:
- pass
- elif 'modversion' in kw:
- if not 'uselib_store' in kw:
- kw['uselib_store'] = kw['modversion']
- if not 'define_name' in kw:
- kw['define_name'] = '%s_VERSION' % Utils.quote_define_name(kw['uselib_store'])
- else:
- if not 'uselib_store' in kw:
- kw['uselib_store'] = Utils.to_list(kw['package'])[0].upper()
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(kw['uselib_store'])
-
-@conf
-def exec_cfg(self, kw):
- """
- Executes ``pkg-config`` or other ``-config`` applications to collect configuration flags:
-
- * if atleast_pkgconfig_version is given, check that pkg-config has the version n and return
- * if modversion is given, then return the module version
- * else, execute the *-config* program with the *args* and *variables* given, and set the flags on the *conf.env.FLAGS_name* variable
-
- :param atleast_pkgconfig_version: minimum pkg-config version to use (disable other tests)
- :type atleast_pkgconfig_version: string
- :param package: package name, for example *gtk+-2.0*
- :type package: string
- :param uselib_store: if the test is successful, define HAVE\_*name*. It is also used to define *conf.env.FLAGS_name* variables.
- :type uselib_store: string
- :param modversion: if provided, return the version of the given module and define *name*\_VERSION
- :type modversion: string
- :param args: arguments to give to *package* when retrieving flags
- :type args: list of string
- :param variables: return the values of particular variables
- :type variables: list of string
- :param define_variable: additional variables to define (also in conf.env.PKG_CONFIG_DEFINES)
- :type define_variable: dict(string: string)
- """
-
- path = Utils.to_list(kw['path'])
- env = self.env.env or None
- if kw.get('pkg_config_path'):
- if not env:
- env = dict(self.environ)
- env['PKG_CONFIG_PATH'] = kw['pkg_config_path']
-
- def define_it():
- define_name = kw['define_name']
- # by default, add HAVE_X to the config.h, else provide DEFINES_X for use=X
- if kw.get('global_define', 1):
- self.define(define_name, 1, False)
- else:
- self.env.append_unique('DEFINES_%s' % kw['uselib_store'], "%s=1" % define_name)
-
- if kw.get('add_have_to_env', 1):
- self.env[define_name] = 1
-
- # pkg-config version
- if 'atleast_pkgconfig_version' in kw:
- cmd = path + ['--atleast-pkgconfig-version=%s' % kw['atleast_pkgconfig_version']]
- self.cmd_and_log(cmd, env=env)
- return
-
- # single version for a module
- if 'modversion' in kw:
- version = self.cmd_and_log(path + ['--modversion', kw['modversion']], env=env).strip()
- if not 'okmsg' in kw:
- kw['okmsg'] = version
- self.define(kw['define_name'], version)
- return version
-
- lst = [] + path
-
- defi = kw.get('define_variable')
- if not defi:
- defi = self.env.PKG_CONFIG_DEFINES or {}
- for key, val in defi.items():
- lst.append('--define-variable=%s=%s' % (key, val))
-
- static = kw.get('force_static', False)
- if 'args' in kw:
- args = Utils.to_list(kw['args'])
- if '--static' in args or '--static-libs' in args:
- static = True
- lst += args
-
- # tools like pkgconf expect the package argument after the -- ones -_-
- lst.extend(Utils.to_list(kw['package']))
-
- # retrieving variables of a module
- if 'variables' in kw:
- v_env = kw.get('env', self.env)
- vars = Utils.to_list(kw['variables'])
- for v in vars:
- val = self.cmd_and_log(lst + ['--variable=' + v], env=env).strip()
- var = '%s_%s' % (kw['uselib_store'], v)
- v_env[var] = val
- return
-
- # so we assume the command-line will output flags to be parsed afterwards
- ret = self.cmd_and_log(lst, env=env)
-
- define_it()
- self.parse_flags(ret, kw['uselib_store'], kw.get('env', self.env), force_static=static, posix=kw.get('posix'))
- return ret
-
-@conf
-def check_cfg(self, *k, **kw):
- """
- Checks for configuration flags using a **-config**-like program (pkg-config, sdl-config, etc).
- This wraps internal calls to :py:func:`waflib.Tools.c_config.validate_cfg` and :py:func:`waflib.Tools.c_config.exec_cfg`
-
- A few examples::
-
- def configure(conf):
- conf.load('compiler_c')
- conf.check_cfg(package='glib-2.0', args='--libs --cflags')
- conf.check_cfg(package='pango')
- conf.check_cfg(package='pango', uselib_store='MYPANGO', args=['--cflags', '--libs'])
- conf.check_cfg(package='pango',
- args=['pango >= 0.1.0', 'pango < 9.9.9', '--cflags', '--libs'],
- msg="Checking for 'pango 0.1.0'")
- conf.check_cfg(path='sdl-config', args='--cflags --libs', package='', uselib_store='SDL')
- conf.check_cfg(path='mpicc', args='--showme:compile --showme:link',
- package='', uselib_store='OPEN_MPI', mandatory=False)
- # variables
- conf.check_cfg(package='gtk+-2.0', variables=['includedir', 'prefix'], uselib_store='FOO')
- print(conf.env.FOO_includedir)
- """
- self.validate_cfg(kw)
- if 'msg' in kw:
- self.start_msg(kw['msg'], **kw)
- ret = None
- try:
- ret = self.exec_cfg(kw)
- except self.errors.WafError as e:
- if 'errmsg' in kw:
- self.end_msg(kw['errmsg'], 'YELLOW', **kw)
- if Logs.verbose > 1:
- self.to_log('Command failure: %s' % e)
- self.fatal('The configuration failed')
- else:
- if not ret:
- ret = True
- kw['success'] = ret
- if 'okmsg' in kw:
- self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
-
- return ret
-
-def build_fun(bld):
- """
- Build function that is used for running configuration tests with ``conf.check()``
- """
- if bld.kw['compile_filename']:
- node = bld.srcnode.make_node(bld.kw['compile_filename'])
- node.write(bld.kw['code'])
-
- o = bld(features=bld.kw['features'], source=bld.kw['compile_filename'], target='testprog')
-
- for k, v in bld.kw.items():
- setattr(o, k, v)
-
- if not bld.kw.get('quiet'):
- bld.conf.to_log("==>\n%s\n<==" % bld.kw['code'])
-
-@conf
-def validate_c(self, kw):
- """
- Pre-checks the parameters that will be given to :py:func:`waflib.Configure.run_build`
-
- :param compiler: c or cxx (tries to guess what is best)
- :type compiler: string
- :param type: cprogram, cshlib, cstlib - not required if *features are given directly*
- :type type: binary to create
- :param feature: desired features for the task generator that will execute the test, for example ``cxx cxxstlib``
- :type feature: list of string
- :param fragment: provide a piece of code for the test (default is to let the system create one)
- :type fragment: string
- :param uselib_store: define variables after the test is executed (IMPORTANT!)
- :type uselib_store: string
- :param use: parameters to use for building (just like the normal *use* keyword)
- :type use: list of string
- :param define_name: define to set when the check is over
- :type define_name: string
- :param execute: execute the resulting binary
- :type execute: bool
- :param define_ret: if execute is set to True, use the execution output in both the define and the return value
- :type define_ret: bool
- :param header_name: check for a particular header
- :type header_name: string
- :param auto_add_header_name: if header_name was set, add the headers in env.INCKEYS so the next tests will include these headers
- :type auto_add_header_name: bool
- """
- for x in ('type_name', 'field_name', 'function_name'):
- if x in kw:
- Logs.warn('Invalid argument %r in test' % x)
-
- if not 'build_fun' in kw:
- kw['build_fun'] = build_fun
-
- if not 'env' in kw:
- kw['env'] = self.env.derive()
- env = kw['env']
-
- if not 'compiler' in kw and not 'features' in kw:
- kw['compiler'] = 'c'
- if env.CXX_NAME and Task.classes.get('cxx'):
- kw['compiler'] = 'cxx'
- if not self.env.CXX:
- self.fatal('a c++ compiler is required')
- else:
- if not self.env.CC:
- self.fatal('a c compiler is required')
-
- if not 'compile_mode' in kw:
- kw['compile_mode'] = 'c'
- if 'cxx' in Utils.to_list(kw.get('features', [])) or kw.get('compiler') == 'cxx':
- kw['compile_mode'] = 'cxx'
-
- if not 'type' in kw:
- kw['type'] = 'cprogram'
-
- if not 'features' in kw:
- if not 'header_name' in kw or kw.get('link_header_test', True):
- kw['features'] = [kw['compile_mode'], kw['type']] # "c ccprogram"
- else:
- kw['features'] = [kw['compile_mode']]
- else:
- kw['features'] = Utils.to_list(kw['features'])
-
- if not 'compile_filename' in kw:
- kw['compile_filename'] = 'test.c' + ((kw['compile_mode'] == 'cxx') and 'pp' or '')
-
- def to_header(dct):
- if 'header_name' in dct:
- dct = Utils.to_list(dct['header_name'])
- return ''.join(['#include <%s>\n' % x for x in dct])
- return ''
-
- if 'framework_name' in kw:
- # OSX, not sure this is used anywhere
- fwkname = kw['framework_name']
- if not 'uselib_store' in kw:
- kw['uselib_store'] = fwkname.upper()
- if not kw.get('no_header'):
- fwk = '%s/%s.h' % (fwkname, fwkname)
- if kw.get('remove_dot_h'):
- fwk = fwk[:-2]
- val = kw.get('header_name', [])
- kw['header_name'] = Utils.to_list(val) + [fwk]
- kw['msg'] = 'Checking for framework %s' % fwkname
- kw['framework'] = fwkname
-
- elif 'header_name' in kw:
- if not 'msg' in kw:
- kw['msg'] = 'Checking for header %s' % kw['header_name']
-
- l = Utils.to_list(kw['header_name'])
- assert len(l), 'list of headers in header_name is empty'
-
- kw['code'] = to_header(kw) + SNIP_EMPTY_PROGRAM
- if not 'uselib_store' in kw:
- kw['uselib_store'] = l[0].upper()
- if not 'define_name' in kw:
- kw['define_name'] = self.have_define(l[0])
-
- if 'lib' in kw:
- if not 'msg' in kw:
- kw['msg'] = 'Checking for library %s' % kw['lib']
- if not 'uselib_store' in kw:
- kw['uselib_store'] = kw['lib'].upper()
-
- if 'stlib' in kw:
- if not 'msg' in kw:
- kw['msg'] = 'Checking for static library %s' % kw['stlib']
- if not 'uselib_store' in kw:
- kw['uselib_store'] = kw['stlib'].upper()
-
- if 'fragment' in kw:
- # an additional code fragment may be provided to replace the predefined code
- # in custom headers
- kw['code'] = kw['fragment']
- if not 'msg' in kw:
- kw['msg'] = 'Checking for code snippet'
- if not 'errmsg' in kw:
- kw['errmsg'] = 'no'
-
- for (flagsname,flagstype) in (('cxxflags','compiler'), ('cflags','compiler'), ('linkflags','linker')):
- if flagsname in kw:
- if not 'msg' in kw:
- kw['msg'] = 'Checking for %s flags %s' % (flagstype, kw[flagsname])
- if not 'errmsg' in kw:
- kw['errmsg'] = 'no'
-
- if not 'execute' in kw:
- kw['execute'] = False
- if kw['execute']:
- kw['features'].append('test_exec')
- kw['chmod'] = Utils.O755
-
- if not 'errmsg' in kw:
- kw['errmsg'] = 'not found'
-
- if not 'okmsg' in kw:
- kw['okmsg'] = 'yes'
-
- if not 'code' in kw:
- kw['code'] = SNIP_EMPTY_PROGRAM
-
- # if there are headers to append automatically to the next tests
- if self.env[INCKEYS]:
- kw['code'] = '\n'.join(['#include <%s>' % x for x in self.env[INCKEYS]]) + '\n' + kw['code']
-
- # in case defines lead to very long command-lines
- if kw.get('merge_config_header') or env.merge_config_header:
- kw['code'] = '%s\n\n%s' % (self.get_config_header(), kw['code'])
- env.DEFINES = [] # modify the copy
-
- if not kw.get('success'):
- kw['success'] = None
-
- if 'define_name' in kw:
- self.undefine(kw['define_name'])
- if not 'msg' in kw:
- self.fatal('missing "msg" in conf.check(...)')
-
-@conf
-def post_check(self, *k, **kw):
- """
- Sets the variables after a test executed in
- :py:func:`waflib.Tools.c_config.check` was run successfully
- """
- is_success = 0
- if kw['execute']:
- if kw['success'] is not None:
- if kw.get('define_ret'):
- is_success = kw['success']
- else:
- is_success = (kw['success'] == 0)
- else:
- is_success = (kw['success'] == 0)
-
- if kw.get('define_name'):
- comment = kw.get('comment', '')
- define_name = kw['define_name']
- if kw['execute'] and kw.get('define_ret') and isinstance(is_success, str):
- if kw.get('global_define', 1):
- self.define(define_name, is_success, quote=kw.get('quote', 1), comment=comment)
- else:
- if kw.get('quote', 1):
- succ = '"%s"' % is_success
- else:
- succ = int(is_success)
- val = '%s=%s' % (define_name, succ)
- var = 'DEFINES_%s' % kw['uselib_store']
- self.env.append_value(var, val)
- else:
- if kw.get('global_define', 1):
- self.define_cond(define_name, is_success, comment=comment)
- else:
- var = 'DEFINES_%s' % kw['uselib_store']
- self.env.append_value(var, '%s=%s' % (define_name, int(is_success)))
-
- # define conf.env.HAVE_X to 1
- if kw.get('add_have_to_env', 1):
- if kw.get('uselib_store'):
- self.env[self.have_define(kw['uselib_store'])] = 1
- elif kw['execute'] and kw.get('define_ret'):
- self.env[define_name] = is_success
- else:
- self.env[define_name] = int(is_success)
-
- if 'header_name' in kw:
- if kw.get('auto_add_header_name'):
- self.env.append_value(INCKEYS, Utils.to_list(kw['header_name']))
-
- if is_success and 'uselib_store' in kw:
- from waflib.Tools import ccroot
- # See get_uselib_vars in ccroot.py
- _vars = set()
- for x in kw['features']:
- if x in ccroot.USELIB_VARS:
- _vars |= ccroot.USELIB_VARS[x]
-
- for k in _vars:
- x = k.lower()
- if x in kw:
- self.env.append_value(k + '_' + kw['uselib_store'], kw[x])
- return is_success
-
-@conf
-def check(self, *k, **kw):
- """
- Performs a configuration test by calling :py:func:`waflib.Configure.run_build`.
- For the complete list of parameters, see :py:func:`waflib.Tools.c_config.validate_c`.
- To force a specific compiler, pass ``compiler='c'`` or ``compiler='cxx'`` to the list of arguments
-
- Besides build targets, complete builds can be given through a build function. All files will
- be written to a temporary directory::
-
- def build(bld):
- lib_node = bld.srcnode.make_node('libdir/liblc1.c')
- lib_node.parent.mkdir()
- lib_node.write('#include <stdio.h>\\nint lib_func(void) { FILE *f = fopen("foo", "r");}\\n', 'w')
- bld(features='c cshlib', source=[lib_node], linkflags=conf.env.EXTRA_LDFLAGS, target='liblc')
- conf.check(build_fun=build, msg=msg)
- """
- self.validate_c(kw)
- self.start_msg(kw['msg'], **kw)
- ret = None
- try:
- ret = self.run_build(*k, **kw)
- except self.errors.ConfigurationError:
- self.end_msg(kw['errmsg'], 'YELLOW', **kw)
- if Logs.verbose > 1:
- raise
- else:
- self.fatal('The configuration failed')
- else:
- kw['success'] = ret
-
- ret = self.post_check(*k, **kw)
- if not ret:
- self.end_msg(kw['errmsg'], 'YELLOW', **kw)
- self.fatal('The configuration failed %r' % ret)
- else:
- self.end_msg(self.ret_msg(kw['okmsg'], kw), **kw)
- return ret
-
-class test_exec(Task.Task):
- """
- A task that runs programs after they are built. See :py:func:`waflib.Tools.c_config.test_exec_fun`.
- """
- color = 'PINK'
- def run(self):
- if getattr(self.generator, 'rpath', None):
- if getattr(self.generator, 'define_ret', False):
- self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()])
- else:
- self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()])
- else:
- env = self.env.env or {}
- env.update(dict(os.environ))
- for var in ('LD_LIBRARY_PATH', 'DYLD_LIBRARY_PATH', 'PATH'):
- env[var] = self.inputs[0].parent.abspath() + os.path.pathsep + env.get(var, '')
- if getattr(self.generator, 'define_ret', False):
- self.generator.bld.retval = self.generator.bld.cmd_and_log([self.inputs[0].abspath()], env=env)
- else:
- self.generator.bld.retval = self.generator.bld.exec_command([self.inputs[0].abspath()], env=env)
-
-@feature('test_exec')
-@after_method('apply_link')
-def test_exec_fun(self):
- """
- The feature **test_exec** is used to create a task that will to execute the binary
- created (link task output) during the build. The exit status will be set
- on the build context, so only one program may have the feature *test_exec*.
- This is used by configuration tests::
-
- def configure(conf):
- conf.check(execute=True)
- """
- self.create_task('test_exec', self.link_task.outputs[0])
-
-@conf
-def check_cxx(self, *k, **kw):
- """
- Runs a test with a task generator of the form::
-
- conf.check(features='cxx cxxprogram', ...)
- """
- kw['compiler'] = 'cxx'
- return self.check(*k, **kw)
-
-@conf
-def check_cc(self, *k, **kw):
- """
- Runs a test with a task generator of the form::
-
- conf.check(features='c cprogram', ...)
- """
- kw['compiler'] = 'c'
- return self.check(*k, **kw)
-
-@conf
-def set_define_comment(self, key, comment):
- """
- Sets a comment that will appear in the configuration header
-
- :type key: string
- :type comment: string
- """
- coms = self.env.DEFINE_COMMENTS
- if not coms:
- coms = self.env.DEFINE_COMMENTS = {}
- coms[key] = comment or ''
-
-@conf
-def get_define_comment(self, key):
- """
- Returns the comment associated to a define
-
- :type key: string
- """
- coms = self.env.DEFINE_COMMENTS or {}
- return coms.get(key, '')
-
-@conf
-def define(self, key, val, quote=True, comment=''):
- """
- Stores a single define and its state into ``conf.env.DEFINES``. The value is cast to an integer (0/1).
-
- :param key: define name
- :type key: string
- :param val: value
- :type val: int or string
- :param quote: enclose strings in quotes (yes by default)
- :type quote: bool
- """
- assert isinstance(key, str)
- if not key:
- return
- if val is True:
- val = 1
- elif val in (False, None):
- val = 0
-
- if isinstance(val, int) or isinstance(val, float):
- s = '%s=%s'
- else:
- s = quote and '%s="%s"' or '%s=%s'
- app = s % (key, str(val))
-
- ban = key + '='
- lst = self.env.DEFINES
- for x in lst:
- if x.startswith(ban):
- lst[lst.index(x)] = app
- break
- else:
- self.env.append_value('DEFINES', app)
-
- self.env.append_unique(DEFKEYS, key)
- self.set_define_comment(key, comment)
-
-@conf
-def undefine(self, key, comment=''):
- """
- Removes a global define from ``conf.env.DEFINES``
-
- :param key: define name
- :type key: string
- """
- assert isinstance(key, str)
- if not key:
- return
- ban = key + '='
- lst = [x for x in self.env.DEFINES if not x.startswith(ban)]
- self.env.DEFINES = lst
- self.env.append_unique(DEFKEYS, key)
- self.set_define_comment(key, comment)
-
-@conf
-def define_cond(self, key, val, comment=''):
- """
- Conditionally defines a name::
-
- def configure(conf):
- conf.define_cond('A', True)
- # equivalent to:
- # if val: conf.define('A', 1)
- # else: conf.undefine('A')
-
- :param key: define name
- :type key: string
- :param val: value
- :type val: int or string
- """
- assert isinstance(key, str)
- if not key:
- return
- if val:
- self.define(key, 1, comment=comment)
- else:
- self.undefine(key, comment=comment)
-
-@conf
-def is_defined(self, key):
- """
- Indicates whether a particular define is globally set in ``conf.env.DEFINES``.
-
- :param key: define name
- :type key: string
- :return: True if the define is set
- :rtype: bool
- """
- assert key and isinstance(key, str)
-
- ban = key + '='
- for x in self.env.DEFINES:
- if x.startswith(ban):
- return True
- return False
-
-@conf
-def get_define(self, key):
- """
- Returns the value of an existing define, or None if not found
-
- :param key: define name
- :type key: string
- :rtype: string
- """
- assert key and isinstance(key, str)
-
- ban = key + '='
- for x in self.env.DEFINES:
- if x.startswith(ban):
- return x[len(ban):]
- return None
-
-@conf
-def have_define(self, key):
- """
- Returns a variable suitable for command-line or header use by removing invalid characters
- and prefixing it with ``HAVE_``
-
- :param key: define name
- :type key: string
- :return: the input key prefixed by *HAVE_* and substitute any invalid characters.
- :rtype: string
- """
- return (self.env.HAVE_PAT or 'HAVE_%s') % Utils.quote_define_name(key)
-
-@conf
-def write_config_header(self, configfile='', guard='', top=False, defines=True, headers=False, remove=True, define_prefix=''):
- """
- Writes a configuration header containing defines and includes::
-
- def configure(cnf):
- cnf.define('A', 1)
- cnf.write_config_header('config.h')
-
- This function only adds include guards (if necessary), consult
- :py:func:`waflib.Tools.c_config.get_config_header` for details on the body.
-
- :param configfile: path to the file to create (relative or absolute)
- :type configfile: string
- :param guard: include guard name to add, by default it is computed from the file name
- :type guard: string
- :param top: write the configuration header from the build directory (default is from the current path)
- :type top: bool
- :param defines: add the defines (yes by default)
- :type defines: bool
- :param headers: add #include in the file
- :type headers: bool
- :param remove: remove the defines after they are added (yes by default, works like in autoconf)
- :type remove: bool
- :type define_prefix: string
- :param define_prefix: prefix all the defines in the file with a particular prefix
- """
- if not configfile:
- configfile = WAF_CONFIG_H
- waf_guard = guard or 'W_%s_WAF' % Utils.quote_define_name(configfile)
-
- node = top and self.bldnode or self.path.get_bld()
- node = node.make_node(configfile)
- node.parent.mkdir()
-
- lst = ['/* WARNING! All changes made to this file will be lost! */\n']
- lst.append('#ifndef %s\n#define %s\n' % (waf_guard, waf_guard))
- lst.append(self.get_config_header(defines, headers, define_prefix=define_prefix))
- lst.append('\n#endif /* %s */\n' % waf_guard)
-
- node.write('\n'.join(lst))
-
- # config files must not be removed on "waf clean"
- self.env.append_unique(Build.CFG_FILES, [node.abspath()])
-
- if remove:
- for key in self.env[DEFKEYS]:
- self.undefine(key)
- self.env[DEFKEYS] = []
-
-@conf
-def get_config_header(self, defines=True, headers=False, define_prefix=''):
- """
- Creates the contents of a ``config.h`` file from the defines and includes
- set in conf.env.define_key / conf.env.include_key. No include guards are added.
-
- A prelude will be added from the variable env.WAF_CONFIG_H_PRELUDE if provided. This
- can be used to insert complex macros or include guards::
-
- def configure(conf):
- conf.env.WAF_CONFIG_H_PRELUDE = '#include <unistd.h>\\n'
- conf.write_config_header('config.h')
-
- :param defines: write the defines values
- :type defines: bool
- :param headers: write include entries for each element in self.env.INCKEYS
- :type headers: bool
- :type define_prefix: string
- :param define_prefix: prefix all the defines with a particular prefix
- :return: the contents of a ``config.h`` file
- :rtype: string
- """
- lst = []
-
- if self.env.WAF_CONFIG_H_PRELUDE:
- lst.append(self.env.WAF_CONFIG_H_PRELUDE)
-
- if headers:
- for x in self.env[INCKEYS]:
- lst.append('#include <%s>' % x)
-
- if defines:
- tbl = {}
- for k in self.env.DEFINES:
- a, _, b = k.partition('=')
- tbl[a] = b
-
- for k in self.env[DEFKEYS]:
- caption = self.get_define_comment(k)
- if caption:
- caption = ' /* %s */' % caption
- try:
- txt = '#define %s%s %s%s' % (define_prefix, k, tbl[k], caption)
- except KeyError:
- txt = '/* #undef %s%s */%s' % (define_prefix, k, caption)
- lst.append(txt)
- return "\n".join(lst)
-
-@conf
-def cc_add_flags(conf):
- """
- Adds CFLAGS / CPPFLAGS from os.environ to conf.env
- """
- conf.add_os_flags('CPPFLAGS', dup=False)
- conf.add_os_flags('CFLAGS', dup=False)
-
-@conf
-def cxx_add_flags(conf):
- """
- Adds CXXFLAGS / CPPFLAGS from os.environ to conf.env
- """
- conf.add_os_flags('CPPFLAGS', dup=False)
- conf.add_os_flags('CXXFLAGS', dup=False)
-
-@conf
-def link_add_flags(conf):
- """
- Adds LINKFLAGS / LDFLAGS from os.environ to conf.env
- """
- conf.add_os_flags('LINKFLAGS', dup=False)
- conf.add_os_flags('LDFLAGS', dup=False)
-
-@conf
-def cc_load_tools(conf):
- """
- Loads the Waf c extensions
- """
- if not conf.env.DEST_OS:
- conf.env.DEST_OS = Utils.unversioned_sys_platform()
- conf.load('c')
-
-@conf
-def cxx_load_tools(conf):
- """
- Loads the Waf c++ extensions
- """
- if not conf.env.DEST_OS:
- conf.env.DEST_OS = Utils.unversioned_sys_platform()
- conf.load('cxx')
-
-@conf
-def get_cc_version(conf, cc, gcc=False, icc=False, clang=False):
- """
- Runs the preprocessor to determine the gcc/icc/clang version
-
- The variables CC_VERSION, DEST_OS, DEST_BINFMT and DEST_CPU will be set in *conf.env*
-
- :raise: :py:class:`waflib.Errors.ConfigurationError`
- """
- cmd = cc + ['-dM', '-E', '-']
- env = conf.env.env or None
- try:
- out, err = conf.cmd_and_log(cmd, output=0, input='\n'.encode(), env=env)
- except Errors.WafError:
- conf.fatal('Could not determine the compiler version %r' % cmd)
-
- if gcc:
- if out.find('__INTEL_COMPILER') >= 0:
- conf.fatal('The intel compiler pretends to be gcc')
- if out.find('__GNUC__') < 0 and out.find('__clang__') < 0:
- conf.fatal('Could not determine the compiler type')
-
- if icc and out.find('__INTEL_COMPILER') < 0:
- conf.fatal('Not icc/icpc')
-
- if clang and out.find('__clang__') < 0:
- conf.fatal('Not clang/clang++')
- if not clang and out.find('__clang__') >= 0:
- conf.fatal('Could not find gcc/g++ (only Clang), if renamed try eg: CC=gcc48 CXX=g++48 waf configure')
-
- k = {}
- if icc or gcc or clang:
- out = out.splitlines()
- for line in out:
- lst = shlex.split(line)
- if len(lst)>2:
- key = lst[1]
- val = lst[2]
- k[key] = val
-
- def isD(var):
- return var in k
-
- # Some documentation is available at http://predef.sourceforge.net
- # The names given to DEST_OS must match what Utils.unversioned_sys_platform() returns.
- if not conf.env.DEST_OS:
- conf.env.DEST_OS = ''
- for i in MACRO_TO_DESTOS:
- if isD(i):
- conf.env.DEST_OS = MACRO_TO_DESTOS[i]
- break
- else:
- if isD('__APPLE__') and isD('__MACH__'):
- conf.env.DEST_OS = 'darwin'
- elif isD('__unix__'): # unix must be tested last as it's a generic fallback
- conf.env.DEST_OS = 'generic'
-
- if isD('__ELF__'):
- conf.env.DEST_BINFMT = 'elf'
- elif isD('__WINNT__') or isD('__CYGWIN__') or isD('_WIN32'):
- conf.env.DEST_BINFMT = 'pe'
- if not conf.env.IMPLIBDIR:
- conf.env.IMPLIBDIR = conf.env.LIBDIR # for .lib or .dll.a files
- conf.env.LIBDIR = conf.env.BINDIR
- elif isD('__APPLE__'):
- conf.env.DEST_BINFMT = 'mac-o'
-
- if not conf.env.DEST_BINFMT:
- # Infer the binary format from the os name.
- conf.env.DEST_BINFMT = Utils.destos_to_binfmt(conf.env.DEST_OS)
-
- for i in MACRO_TO_DEST_CPU:
- if isD(i):
- conf.env.DEST_CPU = MACRO_TO_DEST_CPU[i]
- break
-
- Logs.debug('ccroot: dest platform: ' + ' '.join([conf.env[x] or '?' for x in ('DEST_OS', 'DEST_BINFMT', 'DEST_CPU')]))
- if icc:
- ver = k['__INTEL_COMPILER']
- conf.env.CC_VERSION = (ver[:-2], ver[-2], ver[-1])
- else:
- if isD('__clang__') and isD('__clang_major__'):
- conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
- else:
- # older clang versions and gcc
- conf.env.CC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k.get('__GNUC_PATCHLEVEL__', '0'))
- return k
-
-@conf
-def get_xlc_version(conf, cc):
- """
- Returns the Aix compiler version
-
- :raise: :py:class:`waflib.Errors.ConfigurationError`
- """
- cmd = cc + ['-qversion']
- try:
- out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError:
- conf.fatal('Could not find xlc %r' % cmd)
-
- # the intention is to catch the 8.0 in "IBM XL C/C++ Enterprise Edition V8.0 for AIX..."
- for v in (r"IBM XL C/C\+\+.* V(?P<major>\d*)\.(?P<minor>\d*)",):
- version_re = re.compile(v, re.I).search
- match = version_re(out or err)
- if match:
- k = match.groupdict()
- conf.env.CC_VERSION = (k['major'], k['minor'])
- break
- else:
- conf.fatal('Could not determine the XLC version.')
-
-@conf
-def get_suncc_version(conf, cc):
- """
- Returns the Sun compiler version
-
- :raise: :py:class:`waflib.Errors.ConfigurationError`
- """
- cmd = cc + ['-V']
- try:
- out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError as e:
- # Older versions of the compiler exit with non-zero status when reporting their version
- if not (hasattr(e, 'returncode') and hasattr(e, 'stdout') and hasattr(e, 'stderr')):
- conf.fatal('Could not find suncc %r' % cmd)
- out = e.stdout
- err = e.stderr
-
- version = (out or err)
- version = version.splitlines()[0]
-
- # cc: Sun C 5.10 SunOS_i386 2009/06/03
- # cc: Studio 12.5 Sun C++ 5.14 SunOS_sparc Beta 2015/11/17
- # cc: WorkShop Compilers 5.0 98/12/15 C 5.0
- version_re = re.compile(r'cc: (studio.*?|\s+)?(sun\s+(c\+\+|c)|(WorkShop\s+Compilers))?\s+(?P<major>\d*)\.(?P<minor>\d*)', re.I).search
- match = version_re(version)
- if match:
- k = match.groupdict()
- conf.env.CC_VERSION = (k['major'], k['minor'])
- else:
- conf.fatal('Could not determine the suncc version.')
-
-# ============ the --as-needed flag should added during the configuration, not at runtime =========
-
-@conf
-def add_as_needed(self):
- """
- Adds ``--as-needed`` to the *LINKFLAGS*
- On some platforms, it is a default flag. In some cases (e.g., in NS-3) it is necessary to explicitly disable this feature with `-Wl,--no-as-needed` flag.
- """
- if self.env.DEST_BINFMT == 'elf' and 'gcc' in (self.env.CXX_NAME, self.env.CC_NAME):
- self.env.append_unique('LINKFLAGS', '-Wl,--as-needed')
-
-# ============ parallel configuration
-
-class cfgtask(Task.Task):
- """
- A task that executes build configuration tests (calls conf.check)
-
- Make sure to use locks if concurrent access to the same conf.env data is necessary.
- """
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.run_after = set()
-
- def display(self):
- return ''
-
- def runnable_status(self):
- for x in self.run_after:
- if not x.hasrun:
- return Task.ASK_LATER
- return Task.RUN_ME
-
- def uid(self):
- return Utils.SIG_NIL
-
- def signature(self):
- return Utils.SIG_NIL
-
- def run(self):
- conf = self.conf
- bld = Build.BuildContext(top_dir=conf.srcnode.abspath(), out_dir=conf.bldnode.abspath())
- bld.env = conf.env
- bld.init_dirs()
- bld.in_msg = 1 # suppress top-level start_msg
- bld.logger = self.logger
- bld.multicheck_task = self
- args = self.args
- try:
- if 'func' in args:
- bld.test(build_fun=args['func'],
- msg=args.get('msg', ''),
- okmsg=args.get('okmsg', ''),
- errmsg=args.get('errmsg', ''),
- )
- else:
- args['multicheck_mandatory'] = args.get('mandatory', True)
- args['mandatory'] = True
- try:
- bld.check(**args)
- finally:
- args['mandatory'] = args['multicheck_mandatory']
- except Exception:
- return 1
-
- def process(self):
- Task.Task.process(self)
- if 'msg' in self.args:
- with self.generator.bld.multicheck_lock:
- self.conf.start_msg(self.args['msg'])
- if self.hasrun == Task.NOT_RUN:
- self.conf.end_msg('test cancelled', 'YELLOW')
- elif self.hasrun != Task.SUCCESS:
- self.conf.end_msg(self.args.get('errmsg', 'no'), 'YELLOW')
- else:
- self.conf.end_msg(self.args.get('okmsg', 'yes'), 'GREEN')
-
-@conf
-def multicheck(self, *k, **kw):
- """
- Runs configuration tests in parallel; results are printed sequentially at the end of the build
- but each test must provide its own msg value to display a line::
-
- def test_build(ctx):
- ctx.in_msg = True # suppress console outputs
- ctx.check_large_file(mandatory=False)
-
- conf.multicheck(
- {'header_name':'stdio.h', 'msg':'... stdio', 'uselib_store':'STDIO', 'global_define':False},
- {'header_name':'xyztabcd.h', 'msg':'... optional xyztabcd.h', 'mandatory': False},
- {'header_name':'stdlib.h', 'msg':'... stdlib', 'okmsg': 'aye', 'errmsg': 'nope'},
- {'func': test_build, 'msg':'... testing an arbitrary build function', 'okmsg':'ok'},
- msg = 'Checking for headers in parallel',
- mandatory = True, # mandatory tests raise an error at the end
- run_all_tests = True, # try running all tests
- )
-
- The configuration tests may modify the values in conf.env in any order, and the define
- values can affect configuration tests being executed. It is hence recommended
- to provide `uselib_store` values with `global_define=False` to prevent such issues.
- """
- self.start_msg(kw.get('msg', 'Executing %d configuration tests' % len(k)), **kw)
-
- # Force a copy so that threads append to the same list at least
- # no order is guaranteed, but the values should not disappear at least
- for var in ('DEFINES', DEFKEYS):
- self.env.append_value(var, [])
- self.env.DEFINE_COMMENTS = self.env.DEFINE_COMMENTS or {}
-
- # define a task object that will execute our tests
- class par(object):
- def __init__(self):
- self.keep = False
- self.task_sigs = {}
- self.progress_bar = 0
- def total(self):
- return len(tasks)
- def to_log(self, *k, **kw):
- return
-
- bld = par()
- bld.keep = kw.get('run_all_tests', True)
- bld.imp_sigs = {}
- tasks = []
-
- id_to_task = {}
- for dct in k:
- x = Task.classes['cfgtask'](bld=bld, env=None)
- tasks.append(x)
- x.args = dct
- x.bld = bld
- x.conf = self
- x.args = dct
-
- # bind a logger that will keep the info in memory
- x.logger = Logs.make_mem_logger(str(id(x)), self.logger)
-
- if 'id' in dct:
- id_to_task[dct['id']] = x
-
- # second pass to set dependencies with after_test/before_test
- for x in tasks:
- for key in Utils.to_list(x.args.get('before_tests', [])):
- tsk = id_to_task[key]
- if not tsk:
- raise ValueError('No test named %r' % key)
- tsk.run_after.add(x)
- for key in Utils.to_list(x.args.get('after_tests', [])):
- tsk = id_to_task[key]
- if not tsk:
- raise ValueError('No test named %r' % key)
- x.run_after.add(tsk)
-
- def it():
- yield tasks
- while 1:
- yield []
- bld.producer = p = Runner.Parallel(bld, Options.options.jobs)
- bld.multicheck_lock = Utils.threading.Lock()
- p.biter = it()
-
- self.end_msg('started')
- p.start()
-
- # flush the logs in order into the config.log
- for x in tasks:
- x.logger.memhandler.flush()
-
- self.start_msg('-> processing test results')
- if p.error:
- for x in p.error:
- if getattr(x, 'err_msg', None):
- self.to_log(x.err_msg)
- self.end_msg('fail', color='RED')
- raise Errors.WafError('There is an error in the library, read config.log for more information')
-
- failure_count = 0
- for x in tasks:
- if x.hasrun not in (Task.SUCCESS, Task.NOT_RUN):
- failure_count += 1
-
- if failure_count:
- self.end_msg(kw.get('errmsg', '%s test failed' % failure_count), color='YELLOW', **kw)
- else:
- self.end_msg('all ok', **kw)
-
- for x in tasks:
- if x.hasrun != Task.SUCCESS:
- if x.args.get('mandatory', True):
- self.fatal(kw.get('fatalmsg') or 'One of the tests has failed, read config.log for more information')
-
-@conf
-def check_gcc_o_space(self, mode='c'):
- if int(self.env.CC_VERSION[0]) > 4:
- # this is for old compilers
- return
- self.env.stash()
- if mode == 'c':
- self.env.CCLNK_TGT_F = ['-o', '']
- elif mode == 'cxx':
- self.env.CXXLNK_TGT_F = ['-o', '']
- features = '%s %sshlib' % (mode, mode)
- try:
- self.check(msg='Checking if the -o link must be split from arguments', fragment=SNIP_EMPTY_PROGRAM, features=features)
- except self.errors.ConfigurationError:
- self.env.revert()
- else:
- self.env.commit()
-
diff --git a/waflib/Tools/c_osx.py b/waflib/Tools/c_osx.py
deleted file mode 100644
index f70b128..0000000
--- a/waflib/Tools/c_osx.py
+++ /dev/null
@@ -1,193 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2008-2018 (ita)
-
-"""
-MacOSX related tools
-"""
-
-import os, shutil, platform
-from waflib import Task, Utils
-from waflib.TaskGen import taskgen_method, feature, after_method, before_method
-
-app_info = '''
-<?xml version="1.0" encoding="UTF-8"?>
-<!DOCTYPE plist SYSTEM "file://localhost/System/Library/DTDs/PropertyList.dtd">
-<plist version="0.9">
-<dict>
- <key>CFBundlePackageType</key>
- <string>APPL</string>
- <key>CFBundleGetInfoString</key>
- <string>Created by Waf</string>
- <key>CFBundleSignature</key>
- <string>????</string>
- <key>NOTE</key>
- <string>THIS IS A GENERATED FILE, DO NOT MODIFY</string>
- <key>CFBundleExecutable</key>
- <string>{app_name}</string>
-</dict>
-</plist>
-'''
-"""
-plist template
-"""
-
-@feature('c', 'cxx')
-def set_macosx_deployment_target(self):
- """
- see WAF issue 285 and also and also http://trac.macports.org/ticket/17059
- """
- if self.env.MACOSX_DEPLOYMENT_TARGET:
- os.environ['MACOSX_DEPLOYMENT_TARGET'] = self.env.MACOSX_DEPLOYMENT_TARGET
- elif 'MACOSX_DEPLOYMENT_TARGET' not in os.environ:
- if Utils.unversioned_sys_platform() == 'darwin':
- os.environ['MACOSX_DEPLOYMENT_TARGET'] = '.'.join(platform.mac_ver()[0].split('.')[:2])
-
-@taskgen_method
-def create_bundle_dirs(self, name, out):
- """
- Creates bundle folders, used by :py:func:`create_task_macplist` and :py:func:`create_task_macapp`
- """
- dir = out.parent.find_or_declare(name)
- dir.mkdir()
- macos = dir.find_or_declare(['Contents', 'MacOS'])
- macos.mkdir()
- return dir
-
-def bundle_name_for_output(out):
- name = out.name
- k = name.rfind('.')
- if k >= 0:
- name = name[:k] + '.app'
- else:
- name = name + '.app'
- return name
-
-@feature('cprogram', 'cxxprogram')
-@after_method('apply_link')
-def create_task_macapp(self):
- """
- To compile an executable into a Mac application (a .app), set its *mac_app* attribute::
-
- def build(bld):
- bld.shlib(source='a.c', target='foo', mac_app=True)
-
- To force *all* executables to be transformed into Mac applications::
-
- def build(bld):
- bld.env.MACAPP = True
- bld.shlib(source='a.c', target='foo')
- """
- if self.env.MACAPP or getattr(self, 'mac_app', False):
- out = self.link_task.outputs[0]
-
- name = bundle_name_for_output(out)
- dir = self.create_bundle_dirs(name, out)
-
- n1 = dir.find_or_declare(['Contents', 'MacOS', out.name])
-
- self.apptask = self.create_task('macapp', self.link_task.outputs, n1)
- inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/MacOS/' % name
- self.add_install_files(install_to=inst_to, install_from=n1, chmod=Utils.O755)
-
- if getattr(self, 'mac_files', None):
- # this only accepts files; they will be installed as seen from mac_files_root
- mac_files_root = getattr(self, 'mac_files_root', None)
- if isinstance(mac_files_root, str):
- mac_files_root = self.path.find_node(mac_files_root)
- if not mac_files_root:
- self.bld.fatal('Invalid mac_files_root %r' % self.mac_files_root)
- res_dir = n1.parent.parent.make_node('Resources')
- inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Resources' % name
- for node in self.to_nodes(self.mac_files):
- relpath = node.path_from(mac_files_root or node.parent)
- self.create_task('macapp', node, res_dir.make_node(relpath))
- self.add_install_as(install_to=os.path.join(inst_to, relpath), install_from=node)
-
- if getattr(self.bld, 'is_install', None):
- # disable regular binary installation
- self.install_task.hasrun = Task.SKIP_ME
-
-@feature('cprogram', 'cxxprogram')
-@after_method('apply_link')
-def create_task_macplist(self):
- """
- Creates a :py:class:`waflib.Tools.c_osx.macplist` instance.
- """
- if self.env.MACAPP or getattr(self, 'mac_app', False):
- out = self.link_task.outputs[0]
-
- name = bundle_name_for_output(out)
-
- dir = self.create_bundle_dirs(name, out)
- n1 = dir.find_or_declare(['Contents', 'Info.plist'])
- self.plisttask = plisttask = self.create_task('macplist', [], n1)
- plisttask.context = {
- 'app_name': self.link_task.outputs[0].name,
- 'env': self.env
- }
-
- plist_ctx = getattr(self, 'plist_context', None)
- if (plist_ctx):
- plisttask.context.update(plist_ctx)
-
- if getattr(self, 'mac_plist', False):
- node = self.path.find_resource(self.mac_plist)
- if node:
- plisttask.inputs.append(node)
- else:
- plisttask.code = self.mac_plist
- else:
- plisttask.code = app_info
-
- inst_to = getattr(self, 'install_path', '/Applications') + '/%s/Contents/' % name
- self.add_install_files(install_to=inst_to, install_from=n1)
-
-@feature('cshlib', 'cxxshlib')
-@before_method('apply_link', 'propagate_uselib_vars')
-def apply_bundle(self):
- """
- To make a bundled shared library (a ``.bundle``), set the *mac_bundle* attribute::
-
- def build(bld):
- bld.shlib(source='a.c', target='foo', mac_bundle = True)
-
- To force *all* executables to be transformed into bundles::
-
- def build(bld):
- bld.env.MACBUNDLE = True
- bld.shlib(source='a.c', target='foo')
- """
- if self.env.MACBUNDLE or getattr(self, 'mac_bundle', False):
- self.env.LINKFLAGS_cshlib = self.env.LINKFLAGS_cxxshlib = [] # disable the '-dynamiclib' flag
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN
- use = self.use = self.to_list(getattr(self, 'use', []))
- if not 'MACBUNDLE' in use:
- use.append('MACBUNDLE')
-
-app_dirs = ['Contents', 'Contents/MacOS', 'Contents/Resources']
-
-class macapp(Task.Task):
- """
- Creates mac applications
- """
- color = 'PINK'
- def run(self):
- self.outputs[0].parent.mkdir()
- shutil.copy2(self.inputs[0].srcpath(), self.outputs[0].abspath())
-
-class macplist(Task.Task):
- """
- Creates plist files
- """
- color = 'PINK'
- ext_in = ['.bin']
- def run(self):
- if getattr(self, 'code', None):
- txt = self.code
- else:
- txt = self.inputs[0].read()
- context = getattr(self, 'context', {})
- txt = txt.format(**context)
- self.outputs[0].write(txt)
-
diff --git a/waflib/Tools/c_preproc.py b/waflib/Tools/c_preproc.py
deleted file mode 100644
index 7e04b4a..0000000
--- a/waflib/Tools/c_preproc.py
+++ /dev/null
@@ -1,1091 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-C/C++ preprocessor for finding dependencies
-
-Reasons for using the Waf preprocessor by default
-
-#. Some c/c++ extensions (Qt) require a custom preprocessor for obtaining the dependencies (.moc files)
-#. Not all compilers provide .d files for obtaining the dependencies (portability)
-#. A naive file scanner will not catch the constructs such as "#include foo()"
-#. A naive file scanner will catch unnecessary dependencies (change an unused header -> recompile everything)
-
-Regarding the speed concerns:
-
-* the preprocessing is performed only when files must be compiled
-* the macros are evaluated only for #if/#elif/#include
-* system headers are not scanned by default
-
-Now if you do not want the Waf preprocessor, the tool +gccdeps* uses the .d files produced
-during the compilation to track the dependencies (useful when used with the boost libraries).
-It only works with gcc >= 4.4 though.
-
-A dumb preprocessor is also available in the tool *c_dumbpreproc*
-"""
-# TODO: more varargs, pragma once
-
-import re, string, traceback
-from waflib import Logs, Utils, Errors
-
-class PreprocError(Errors.WafError):
- pass
-
-FILE_CACHE_SIZE = 100000
-LINE_CACHE_SIZE = 100000
-
-POPFILE = '-'
-"Constant representing a special token used in :py:meth:`waflib.Tools.c_preproc.c_parser.start` iteration to switch to a header read previously"
-
-recursion_limit = 150
-"Limit on the amount of files to read in the dependency scanner"
-
-go_absolute = False
-"Set to True to track headers on files in /usr/include, else absolute paths are ignored (but it becomes very slow)"
-
-standard_includes = ['/usr/local/include', '/usr/include']
-if Utils.is_win32:
- standard_includes = []
-
-use_trigraphs = 0
-"""Apply trigraph rules (False by default)"""
-
-# obsolete, do not use
-strict_quotes = 0
-
-g_optrans = {
-'not':'!',
-'not_eq':'!',
-'and':'&&',
-'and_eq':'&=',
-'or':'||',
-'or_eq':'|=',
-'xor':'^',
-'xor_eq':'^=',
-'bitand':'&',
-'bitor':'|',
-'compl':'~',
-}
-"""Operators such as and/or/xor for c++. Set an empty dict to disable."""
-
-# ignore #warning and #error
-re_lines = re.compile(
- '^[ \t]*(?:#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*)\r*$',
- re.IGNORECASE | re.MULTILINE)
-"""Match #include lines"""
-
-re_mac = re.compile("^[a-zA-Z_]\w*")
-"""Match macro definitions"""
-
-re_fun = re.compile('^[a-zA-Z_][a-zA-Z0-9_]*[(]')
-"""Match macro functions"""
-
-re_pragma_once = re.compile('^\s*once\s*', re.IGNORECASE)
-"""Match #pragma once statements"""
-
-re_nl = re.compile('\\\\\r*\n', re.MULTILINE)
-"""Match newlines"""
-
-re_cpp = re.compile(r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', re.DOTALL | re.MULTILINE )
-"""Filter C/C++ comments"""
-
-trig_def = [('??'+a, b) for a, b in zip("=-/!'()<>", r'#~\|^[]{}')]
-"""Trigraph definitions"""
-
-chr_esc = {'0':0, 'a':7, 'b':8, 't':9, 'n':10, 'f':11, 'v':12, 'r':13, '\\':92, "'":39}
-"""Escape characters"""
-
-NUM = 'i'
-"""Number token"""
-
-OP = 'O'
-"""Operator token"""
-
-IDENT = 'T'
-"""Identifier token"""
-
-STR = 's'
-"""String token"""
-
-CHAR = 'c'
-"""Character token"""
-
-tok_types = [NUM, STR, IDENT, OP]
-"""Token types"""
-
-exp_types = [
- r"""0[xX](?P<hex>[a-fA-F0-9]+)(?P<qual1>[uUlL]*)|L*?'(?P<char>(\\.|[^\\'])+)'|(?P<n1>\d+)[Ee](?P<exp0>[+-]*?\d+)(?P<float0>[fFlL]*)|(?P<n2>\d*\.\d+)([Ee](?P<exp1>[+-]*?\d+))?(?P<float1>[fFlL]*)|(?P<n4>\d+\.\d*)([Ee](?P<exp2>[+-]*?\d+))?(?P<float2>[fFlL]*)|(?P<oct>0*)(?P<n0>\d+)(?P<qual2>[uUlL]*)""",
- r'L?"([^"\\]|\\.)*"',
- r'[a-zA-Z_]\w*',
- r'%:%:|<<=|>>=|\.\.\.|<<|<%|<:|<=|>>|>=|\+\+|\+=|--|->|-=|\*=|/=|%:|%=|%>|==|&&|&=|\|\||\|=|\^=|:>|!=|##|[\(\)\{\}\[\]<>\?\|\^\*\+&=:!#;,%/\-\?\~\.]',
-]
-"""Expression types"""
-
-re_clexer = re.compile('|'.join(["(?P<%s>%s)" % (name, part) for name, part in zip(tok_types, exp_types)]), re.M)
-"""Match expressions into tokens"""
-
-accepted = 'a'
-"""Parser state is *accepted*"""
-
-ignored = 'i'
-"""Parser state is *ignored*, for example preprocessor lines in an #if 0 block"""
-
-undefined = 'u'
-"""Parser state is *undefined* at the moment"""
-
-skipped = 's'
-"""Parser state is *skipped*, for example preprocessor lines in a #elif 0 block"""
-
-def repl(m):
- """Replace function used with :py:attr:`waflib.Tools.c_preproc.re_cpp`"""
- s = m.group()
- if s[0] == '/':
- return ' '
- return s
-
-prec = {}
-"""
-Operator precedence rules required for parsing expressions of the form::
-
- #if 1 && 2 != 0
-"""
-ops = ['* / %', '+ -', '<< >>', '< <= >= >', '== !=', '& | ^', '&& ||', ',']
-for x, syms in enumerate(ops):
- for u in syms.split():
- prec[u] = x
-
-def reduce_nums(val_1, val_2, val_op):
- """
- Apply arithmetic rules to compute a result
-
- :param val1: input parameter
- :type val1: int or string
- :param val2: input parameter
- :type val2: int or string
- :param val_op: C operator in *+*, */*, *-*, etc
- :type val_op: string
- :rtype: int
- """
- #print val_1, val_2, val_op
-
- # now perform the operation, make certain a and b are numeric
- try:
- a = 0 + val_1
- except TypeError:
- a = int(val_1)
- try:
- b = 0 + val_2
- except TypeError:
- b = int(val_2)
-
- d = val_op
- if d == '%':
- c = a % b
- elif d=='+':
- c = a + b
- elif d=='-':
- c = a - b
- elif d=='*':
- c = a * b
- elif d=='/':
- c = a / b
- elif d=='^':
- c = a ^ b
- elif d=='==':
- c = int(a == b)
- elif d=='|' or d == 'bitor':
- c = a | b
- elif d=='||' or d == 'or' :
- c = int(a or b)
- elif d=='&' or d == 'bitand':
- c = a & b
- elif d=='&&' or d == 'and':
- c = int(a and b)
- elif d=='!=' or d == 'not_eq':
- c = int(a != b)
- elif d=='^' or d == 'xor':
- c = int(a^b)
- elif d=='<=':
- c = int(a <= b)
- elif d=='<':
- c = int(a < b)
- elif d=='>':
- c = int(a > b)
- elif d=='>=':
- c = int(a >= b)
- elif d=='<<':
- c = a << b
- elif d=='>>':
- c = a >> b
- else:
- c = 0
- return c
-
-def get_num(lst):
- """
- Try to obtain a number from a list of tokens. The token types are defined in :py:attr:`waflib.Tools.ccroot.tok_types`.
-
- :param lst: list of preprocessor tokens
- :type lst: list of tuple (tokentype, value)
- :return: a pair containing the number and the rest of the list
- :rtype: tuple(value, list)
- """
- if not lst:
- raise PreprocError('empty list for get_num')
- (p, v) = lst[0]
- if p == OP:
- if v == '(':
- count_par = 1
- i = 1
- while i < len(lst):
- (p, v) = lst[i]
-
- if p == OP:
- if v == ')':
- count_par -= 1
- if count_par == 0:
- break
- elif v == '(':
- count_par += 1
- i += 1
- else:
- raise PreprocError('rparen expected %r' % lst)
-
- (num, _) = get_term(lst[1:i])
- return (num, lst[i+1:])
-
- elif v == '+':
- return get_num(lst[1:])
- elif v == '-':
- num, lst = get_num(lst[1:])
- return (reduce_nums('-1', num, '*'), lst)
- elif v == '!':
- num, lst = get_num(lst[1:])
- return (int(not int(num)), lst)
- elif v == '~':
- num, lst = get_num(lst[1:])
- return (~ int(num), lst)
- else:
- raise PreprocError('Invalid op token %r for get_num' % lst)
- elif p == NUM:
- return v, lst[1:]
- elif p == IDENT:
- # all macros should have been replaced, remaining identifiers eval to 0
- return 0, lst[1:]
- else:
- raise PreprocError('Invalid token %r for get_num' % lst)
-
-def get_term(lst):
- """
- Evaluate an expression recursively, for example::
-
- 1+1+1 -> 2+1 -> 3
-
- :param lst: list of tokens
- :type lst: list of tuple(token, value)
- :return: the value and the remaining tokens
- :rtype: value, list
- """
-
- if not lst:
- raise PreprocError('empty list for get_term')
- num, lst = get_num(lst)
- if not lst:
- return (num, [])
- (p, v) = lst[0]
- if p == OP:
- if v == ',':
- # skip
- return get_term(lst[1:])
- elif v == '?':
- count_par = 0
- i = 1
- while i < len(lst):
- (p, v) = lst[i]
-
- if p == OP:
- if v == ')':
- count_par -= 1
- elif v == '(':
- count_par += 1
- elif v == ':':
- if count_par == 0:
- break
- i += 1
- else:
- raise PreprocError('rparen expected %r' % lst)
-
- if int(num):
- return get_term(lst[1:i])
- else:
- return get_term(lst[i+1:])
-
- else:
- num2, lst = get_num(lst[1:])
-
- if not lst:
- # no more tokens to process
- num2 = reduce_nums(num, num2, v)
- return get_term([(NUM, num2)] + lst)
-
- # operator precedence
- p2, v2 = lst[0]
- if p2 != OP:
- raise PreprocError('op expected %r' % lst)
-
- if prec[v2] >= prec[v]:
- num2 = reduce_nums(num, num2, v)
- return get_term([(NUM, num2)] + lst)
- else:
- num3, lst = get_num(lst[1:])
- num3 = reduce_nums(num2, num3, v2)
- return get_term([(NUM, num), (p, v), (NUM, num3)] + lst)
-
-
- raise PreprocError('cannot reduce %r' % lst)
-
-def reduce_eval(lst):
- """
- Take a list of tokens and output true or false for #if/#elif conditions.
-
- :param lst: a list of tokens
- :type lst: list of tuple(token, value)
- :return: a token
- :rtype: tuple(NUM, int)
- """
- num, lst = get_term(lst)
- return (NUM, num)
-
-def stringize(lst):
- """
- Merge a list of tokens into a string
-
- :param lst: a list of tokens
- :type lst: list of tuple(token, value)
- :rtype: string
- """
- lst = [str(v2) for (p2, v2) in lst]
- return "".join(lst)
-
-def paste_tokens(t1, t2):
- """
- Token pasting works between identifiers, particular operators, and identifiers and numbers::
-
- a ## b -> ab
- > ## = -> >=
- a ## 2 -> a2
-
- :param t1: token
- :type t1: tuple(type, value)
- :param t2: token
- :type t2: tuple(type, value)
- """
- p1 = None
- if t1[0] == OP and t2[0] == OP:
- p1 = OP
- elif t1[0] == IDENT and (t2[0] == IDENT or t2[0] == NUM):
- p1 = IDENT
- elif t1[0] == NUM and t2[0] == NUM:
- p1 = NUM
- if not p1:
- raise PreprocError('tokens do not make a valid paste %r and %r' % (t1, t2))
- return (p1, t1[1] + t2[1])
-
-def reduce_tokens(lst, defs, ban=[]):
- """
- Replace the tokens in lst, using the macros provided in defs, and a list of macros that cannot be re-applied
-
- :param lst: list of tokens
- :type lst: list of tuple(token, value)
- :param defs: macro definitions
- :type defs: dict
- :param ban: macros that cannot be substituted (recursion is not allowed)
- :type ban: list of string
- :return: the new list of tokens
- :rtype: value, list
- """
-
- i = 0
- while i < len(lst):
- (p, v) = lst[i]
-
- if p == IDENT and v == "defined":
- del lst[i]
- if i < len(lst):
- (p2, v2) = lst[i]
- if p2 == IDENT:
- if v2 in defs:
- lst[i] = (NUM, 1)
- else:
- lst[i] = (NUM, 0)
- elif p2 == OP and v2 == '(':
- del lst[i]
- (p2, v2) = lst[i]
- del lst[i] # remove the ident, and change the ) for the value
- if v2 in defs:
- lst[i] = (NUM, 1)
- else:
- lst[i] = (NUM, 0)
- else:
- raise PreprocError('Invalid define expression %r' % lst)
-
- elif p == IDENT and v in defs:
-
- if isinstance(defs[v], str):
- a, b = extract_macro(defs[v])
- defs[v] = b
- macro_def = defs[v]
- to_add = macro_def[1]
-
- if isinstance(macro_def[0], list):
- # macro without arguments
- del lst[i]
- accu = to_add[:]
- reduce_tokens(accu, defs, ban+[v])
- for tmp in accu:
- lst.insert(i, tmp)
- i += 1
- else:
- # collect the arguments for the funcall
-
- args = []
- del lst[i]
-
- if i >= len(lst):
- raise PreprocError('expected ( after %r (got nothing)' % v)
-
- (p2, v2) = lst[i]
- if p2 != OP or v2 != '(':
- raise PreprocError('expected ( after %r' % v)
-
- del lst[i]
-
- one_param = []
- count_paren = 0
- while i < len(lst):
- p2, v2 = lst[i]
-
- del lst[i]
- if p2 == OP and count_paren == 0:
- if v2 == '(':
- one_param.append((p2, v2))
- count_paren += 1
- elif v2 == ')':
- if one_param:
- args.append(one_param)
- break
- elif v2 == ',':
- if not one_param:
- raise PreprocError('empty param in funcall %r' % v)
- args.append(one_param)
- one_param = []
- else:
- one_param.append((p2, v2))
- else:
- one_param.append((p2, v2))
- if v2 == '(':
- count_paren += 1
- elif v2 == ')':
- count_paren -= 1
- else:
- raise PreprocError('malformed macro')
-
- # substitute the arguments within the define expression
- accu = []
- arg_table = macro_def[0]
- j = 0
- while j < len(to_add):
- (p2, v2) = to_add[j]
-
- if p2 == OP and v2 == '#':
- # stringize is for arguments only
- if j+1 < len(to_add) and to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
- toks = args[arg_table[to_add[j+1][1]]]
- accu.append((STR, stringize(toks)))
- j += 1
- else:
- accu.append((p2, v2))
- elif p2 == OP and v2 == '##':
- # token pasting, how can man invent such a complicated system?
- if accu and j+1 < len(to_add):
- # we have at least two tokens
-
- t1 = accu[-1]
-
- if to_add[j+1][0] == IDENT and to_add[j+1][1] in arg_table:
- toks = args[arg_table[to_add[j+1][1]]]
-
- if toks:
- accu[-1] = paste_tokens(t1, toks[0]) #(IDENT, accu[-1][1] + toks[0][1])
- accu.extend(toks[1:])
- else:
- # error, case "a##"
- accu.append((p2, v2))
- accu.extend(toks)
- elif to_add[j+1][0] == IDENT and to_add[j+1][1] == '__VA_ARGS__':
- # first collect the tokens
- va_toks = []
- st = len(macro_def[0])
- pt = len(args)
- for x in args[pt-st+1:]:
- va_toks.extend(x)
- va_toks.append((OP, ','))
- if va_toks:
- va_toks.pop() # extra comma
- if len(accu)>1:
- (p3, v3) = accu[-1]
- (p4, v4) = accu[-2]
- if v3 == '##':
- # remove the token paste
- accu.pop()
- if v4 == ',' and pt < st:
- # remove the comma
- accu.pop()
- accu += va_toks
- else:
- accu[-1] = paste_tokens(t1, to_add[j+1])
-
- j += 1
- else:
- # Invalid paste, case "##a" or "b##"
- accu.append((p2, v2))
-
- elif p2 == IDENT and v2 in arg_table:
- toks = args[arg_table[v2]]
- reduce_tokens(toks, defs, ban+[v])
- accu.extend(toks)
- else:
- accu.append((p2, v2))
-
- j += 1
-
-
- reduce_tokens(accu, defs, ban+[v])
-
- for x in range(len(accu)-1, -1, -1):
- lst.insert(i, accu[x])
-
- i += 1
-
-
-def eval_macro(lst, defs):
- """
- Reduce the tokens by :py:func:`waflib.Tools.c_preproc.reduce_tokens` and try to return a 0/1 result by :py:func:`waflib.Tools.c_preproc.reduce_eval`.
-
- :param lst: list of tokens
- :type lst: list of tuple(token, value)
- :param defs: macro definitions
- :type defs: dict
- :rtype: int
- """
- reduce_tokens(lst, defs, [])
- if not lst:
- raise PreprocError('missing tokens to evaluate')
-
- if lst:
- p, v = lst[0]
- if p == IDENT and v not in defs:
- raise PreprocError('missing macro %r' % lst)
-
- p, v = reduce_eval(lst)
- return int(v) != 0
-
-def extract_macro(txt):
- """
- Process a macro definition of the form::
- #define f(x, y) x * y
-
- into a function or a simple macro without arguments
-
- :param txt: expression to exact a macro definition from
- :type txt: string
- :return: a tuple containing the name, the list of arguments and the replacement
- :rtype: tuple(string, [list, list])
- """
- t = tokenize(txt)
- if re_fun.search(txt):
- p, name = t[0]
-
- p, v = t[1]
- if p != OP:
- raise PreprocError('expected (')
-
- i = 1
- pindex = 0
- params = {}
- prev = '('
-
- while 1:
- i += 1
- p, v = t[i]
-
- if prev == '(':
- if p == IDENT:
- params[v] = pindex
- pindex += 1
- prev = p
- elif p == OP and v == ')':
- break
- else:
- raise PreprocError('unexpected token (3)')
- elif prev == IDENT:
- if p == OP and v == ',':
- prev = v
- elif p == OP and v == ')':
- break
- else:
- raise PreprocError('comma or ... expected')
- elif prev == ',':
- if p == IDENT:
- params[v] = pindex
- pindex += 1
- prev = p
- elif p == OP and v == '...':
- raise PreprocError('not implemented (1)')
- else:
- raise PreprocError('comma or ... expected (2)')
- elif prev == '...':
- raise PreprocError('not implemented (2)')
- else:
- raise PreprocError('unexpected else')
-
- #~ print (name, [params, t[i+1:]])
- return (name, [params, t[i+1:]])
- else:
- (p, v) = t[0]
- if len(t) > 1:
- return (v, [[], t[1:]])
- else:
- # empty define, assign an empty token
- return (v, [[], [('T','')]])
-
-re_include = re.compile('^\s*(<(?:.*)>|"(?:.*)")')
-def extract_include(txt, defs):
- """
- Process a line in the form::
-
- #include foo
-
- :param txt: include line to process
- :type txt: string
- :param defs: macro definitions
- :type defs: dict
- :return: the file name
- :rtype: string
- """
- m = re_include.search(txt)
- if m:
- txt = m.group(1)
- return txt[0], txt[1:-1]
-
- # perform preprocessing and look at the result, it must match an include
- toks = tokenize(txt)
- reduce_tokens(toks, defs, ['waf_include'])
-
- if not toks:
- raise PreprocError('could not parse include %r' % txt)
-
- if len(toks) == 1:
- if toks[0][0] == STR:
- return '"', toks[0][1]
- else:
- if toks[0][1] == '<' and toks[-1][1] == '>':
- ret = '<', stringize(toks).lstrip('<').rstrip('>')
- return ret
-
- raise PreprocError('could not parse include %r' % txt)
-
-def parse_char(txt):
- """
- Parse a c character
-
- :param txt: character to parse
- :type txt: string
- :return: a character literal
- :rtype: string
- """
-
- if not txt:
- raise PreprocError('attempted to parse a null char')
- if txt[0] != '\\':
- return ord(txt)
- c = txt[1]
- if c == 'x':
- if len(txt) == 4 and txt[3] in string.hexdigits:
- return int(txt[2:], 16)
- return int(txt[2:], 16)
- elif c.isdigit():
- if c == '0' and len(txt)==2:
- return 0
- for i in 3, 2, 1:
- if len(txt) > i and txt[1:1+i].isdigit():
- return (1+i, int(txt[1:1+i], 8))
- else:
- try:
- return chr_esc[c]
- except KeyError:
- raise PreprocError('could not parse char literal %r' % txt)
-
-def tokenize(s):
- """
- Convert a string into a list of tokens (shlex.split does not apply to c/c++/d)
-
- :param s: input to tokenize
- :type s: string
- :return: a list of tokens
- :rtype: list of tuple(token, value)
- """
- return tokenize_private(s)[:] # force a copy of the results
-
-def tokenize_private(s):
- ret = []
- for match in re_clexer.finditer(s):
- m = match.group
- for name in tok_types:
- v = m(name)
- if v:
- if name == IDENT:
- if v in g_optrans:
- name = OP
- elif v.lower() == "true":
- v = 1
- name = NUM
- elif v.lower() == "false":
- v = 0
- name = NUM
- elif name == NUM:
- if m('oct'):
- v = int(v, 8)
- elif m('hex'):
- v = int(m('hex'), 16)
- elif m('n0'):
- v = m('n0')
- else:
- v = m('char')
- if v:
- v = parse_char(v)
- else:
- v = m('n2') or m('n4')
- elif name == OP:
- if v == '%:':
- v = '#'
- elif v == '%:%:':
- v = '##'
- elif name == STR:
- # remove the quotes around the string
- v = v[1:-1]
- ret.append((name, v))
- break
- return ret
-
-def format_defines(lst):
- ret = []
- for y in lst:
- if y:
- pos = y.find('=')
- if pos == -1:
- # "-DFOO" should give "#define FOO 1"
- ret.append(y)
- elif pos > 0:
- # all others are assumed to be -DX=Y
- ret.append('%s %s' % (y[:pos], y[pos+1:]))
- else:
- raise ValueError('Invalid define expression %r' % y)
- return ret
-
-class c_parser(object):
- """
- Used by :py:func:`waflib.Tools.c_preproc.scan` to parse c/h files. Note that by default,
- only project headers are parsed.
- """
- def __init__(self, nodepaths=None, defines=None):
- self.lines = []
- """list of lines read"""
-
- if defines is None:
- self.defs = {}
- else:
- self.defs = dict(defines) # make a copy
- self.state = []
-
- self.count_files = 0
- self.currentnode_stack = []
-
- self.nodepaths = nodepaths or []
- """Include paths"""
-
- self.nodes = []
- """List of :py:class:`waflib.Node.Node` found so far"""
-
- self.names = []
- """List of file names that could not be matched by any file"""
-
- self.curfile = ''
- """Current file"""
-
- self.ban_includes = set()
- """Includes that must not be read (#pragma once)"""
-
- self.listed = set()
- """Include nodes/names already listed to avoid duplicates in self.nodes/self.names"""
-
- def cached_find_resource(self, node, filename):
- """
- Find a file from the input directory
-
- :param node: directory
- :type node: :py:class:`waflib.Node.Node`
- :param filename: header to find
- :type filename: string
- :return: the node if found, or None
- :rtype: :py:class:`waflib.Node.Node`
- """
- try:
- cache = node.ctx.preproc_cache_node
- except AttributeError:
- cache = node.ctx.preproc_cache_node = Utils.lru_cache(FILE_CACHE_SIZE)
-
- key = (node, filename)
- try:
- return cache[key]
- except KeyError:
- ret = node.find_resource(filename)
- if ret:
- if getattr(ret, 'children', None):
- ret = None
- elif ret.is_child_of(node.ctx.bldnode):
- tmp = node.ctx.srcnode.search_node(ret.path_from(node.ctx.bldnode))
- if tmp and getattr(tmp, 'children', None):
- ret = None
- cache[key] = ret
- return ret
-
- def tryfind(self, filename, kind='"', env=None):
- """
- Try to obtain a node from the filename based from the include paths. Will add
- the node found to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes` or the file name to
- :py:attr:`waflib.Tools.c_preproc.c_parser.names` if no corresponding file is found. Called by
- :py:attr:`waflib.Tools.c_preproc.c_parser.start`.
-
- :param filename: header to find
- :type filename: string
- :return: the node if found
- :rtype: :py:class:`waflib.Node.Node`
- """
- if filename.endswith('.moc'):
- # we could let the qt4 module use a subclass, but then the function "scan" below must be duplicated
- # in the qt4 and in the qt5 classes. So we have two lines here and it is sufficient.
- self.names.append(filename)
- return None
-
- self.curfile = filename
-
- found = None
- if kind == '"':
- if env.MSVC_VERSION:
- for n in reversed(self.currentnode_stack):
- found = self.cached_find_resource(n, filename)
- if found:
- break
- else:
- found = self.cached_find_resource(self.currentnode_stack[-1], filename)
-
- if not found:
- for n in self.nodepaths:
- found = self.cached_find_resource(n, filename)
- if found:
- break
-
- listed = self.listed
- if found and not found in self.ban_includes:
- if found not in listed:
- listed.add(found)
- self.nodes.append(found)
- self.addlines(found)
- else:
- if filename not in listed:
- listed.add(filename)
- self.names.append(filename)
- return found
-
- def filter_comments(self, node):
- """
- Filter the comments from a c/h file, and return the preprocessor lines.
- The regexps :py:attr:`waflib.Tools.c_preproc.re_cpp`, :py:attr:`waflib.Tools.c_preproc.re_nl` and :py:attr:`waflib.Tools.c_preproc.re_lines` are used internally.
-
- :return: the preprocessor directives as a list of (keyword, line)
- :rtype: a list of string pairs
- """
- # return a list of tuples : keyword, line
- code = node.read()
- if use_trigraphs:
- for (a, b) in trig_def:
- code = code.split(a).join(b)
- code = re_nl.sub('', code)
- code = re_cpp.sub(repl, code)
- return re_lines.findall(code)
-
- def parse_lines(self, node):
- try:
- cache = node.ctx.preproc_cache_lines
- except AttributeError:
- cache = node.ctx.preproc_cache_lines = Utils.lru_cache(LINE_CACHE_SIZE)
- try:
- return cache[node]
- except KeyError:
- cache[node] = lines = self.filter_comments(node)
- lines.append((POPFILE, ''))
- lines.reverse()
- return lines
-
- def addlines(self, node):
- """
- Add the lines from a header in the list of preprocessor lines to parse
-
- :param node: header
- :type node: :py:class:`waflib.Node.Node`
- """
-
- self.currentnode_stack.append(node.parent)
-
- self.count_files += 1
- if self.count_files > recursion_limit:
- # issue #812
- raise PreprocError('recursion limit exceeded')
-
- if Logs.verbose:
- Logs.debug('preproc: reading file %r', node)
- try:
- lines = self.parse_lines(node)
- except EnvironmentError:
- raise PreprocError('could not read the file %r' % node)
- except Exception:
- if Logs.verbose > 0:
- Logs.error('parsing %r failed %s', node, traceback.format_exc())
- else:
- self.lines.extend(lines)
-
- def start(self, node, env):
- """
- Preprocess a source file to obtain the dependencies, which are accumulated to :py:attr:`waflib.Tools.c_preproc.c_parser.nodes`
- and :py:attr:`waflib.Tools.c_preproc.c_parser.names`.
-
- :param node: source file
- :type node: :py:class:`waflib.Node.Node`
- :param env: config set containing additional defines to take into account
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- """
- Logs.debug('preproc: scanning %s (in %s)', node.name, node.parent.name)
-
- self.current_file = node
- self.addlines(node)
-
- # macros may be defined on the command-line, so they must be parsed as if they were part of the file
- if env.DEFINES:
- lst = format_defines(env.DEFINES)
- lst.reverse()
- self.lines.extend([('define', x) for x in lst])
-
- while self.lines:
- (token, line) = self.lines.pop()
- if token == POPFILE:
- self.count_files -= 1
- self.currentnode_stack.pop()
- continue
-
- try:
- state = self.state
-
- # make certain we define the state if we are about to enter in an if block
- if token[:2] == 'if':
- state.append(undefined)
- elif token == 'endif':
- state.pop()
-
- # skip lines when in a dead 'if' branch, wait for the endif
- if token[0] != 'e':
- if skipped in self.state or ignored in self.state:
- continue
-
- if token == 'if':
- ret = eval_macro(tokenize(line), self.defs)
- if ret:
- state[-1] = accepted
- else:
- state[-1] = ignored
- elif token == 'ifdef':
- m = re_mac.match(line)
- if m and m.group() in self.defs:
- state[-1] = accepted
- else:
- state[-1] = ignored
- elif token == 'ifndef':
- m = re_mac.match(line)
- if m and m.group() in self.defs:
- state[-1] = ignored
- else:
- state[-1] = accepted
- elif token == 'include' or token == 'import':
- (kind, inc) = extract_include(line, self.defs)
- self.current_file = self.tryfind(inc, kind, env)
- if token == 'import':
- self.ban_includes.add(self.current_file)
- elif token == 'elif':
- if state[-1] == accepted:
- state[-1] = skipped
- elif state[-1] == ignored:
- if eval_macro(tokenize(line), self.defs):
- state[-1] = accepted
- elif token == 'else':
- if state[-1] == accepted:
- state[-1] = skipped
- elif state[-1] == ignored:
- state[-1] = accepted
- elif token == 'define':
- try:
- self.defs[self.define_name(line)] = line
- except AttributeError:
- raise PreprocError('Invalid define line %r' % line)
- elif token == 'undef':
- m = re_mac.match(line)
- if m and m.group() in self.defs:
- self.defs.__delitem__(m.group())
- #print "undef %s" % name
- elif token == 'pragma':
- if re_pragma_once.match(line.lower()):
- self.ban_includes.add(self.current_file)
- except Exception as e:
- if Logs.verbose:
- Logs.debug('preproc: line parsing failed (%s): %s %s', e, line, traceback.format_exc())
-
- def define_name(self, line):
- """
- :param line: define line
- :type line: string
- :rtype: string
- :return: the define name
- """
- return re_mac.match(line).group()
-
-def scan(task):
- """
- Get the dependencies using a c/c++ preprocessor, this is required for finding dependencies of the kind::
-
- #include some_macro()
-
- This function is bound as a task method on :py:class:`waflib.Tools.c.c` and :py:class:`waflib.Tools.cxx.cxx` for example
- """
- try:
- incn = task.generator.includes_nodes
- except AttributeError:
- raise Errors.WafError('%r is missing a feature such as "c", "cxx" or "includes": ' % task.generator)
-
- if go_absolute:
- nodepaths = incn + [task.generator.bld.root.find_dir(x) for x in standard_includes]
- else:
- nodepaths = [x for x in incn if x.is_child_of(x.ctx.srcnode) or x.is_child_of(x.ctx.bldnode)]
-
- tmp = c_parser(nodepaths)
- tmp.start(task.inputs[0], task.env)
- return (tmp.nodes, tmp.names)
diff --git a/waflib/Tools/c_tests.py b/waflib/Tools/c_tests.py
deleted file mode 100644
index f858df5..0000000
--- a/waflib/Tools/c_tests.py
+++ /dev/null
@@ -1,229 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-"""
-Various configuration tests.
-"""
-
-from waflib import Task
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method, after_method
-
-LIB_CODE = '''
-#ifdef _MSC_VER
-#define testEXPORT __declspec(dllexport)
-#else
-#define testEXPORT
-#endif
-testEXPORT int lib_func(void) { return 9; }
-'''
-
-MAIN_CODE = '''
-#ifdef _MSC_VER
-#define testEXPORT __declspec(dllimport)
-#else
-#define testEXPORT
-#endif
-testEXPORT int lib_func(void);
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return !(lib_func() == 9);
-}
-'''
-
-@feature('link_lib_test')
-@before_method('process_source')
-def link_lib_test_fun(self):
- """
- The configuration test :py:func:`waflib.Configure.run_build` declares a unique task generator,
- so we need to create other task generators from here to check if the linker is able to link libraries.
- """
- def write_test_file(task):
- task.outputs[0].write(task.generator.code)
-
- rpath = []
- if getattr(self, 'add_rpath', False):
- rpath = [self.bld.path.get_bld().abspath()]
-
- mode = self.mode
- m = '%s %s' % (mode, mode)
- ex = self.test_exec and 'test_exec' or ''
- bld = self.bld
- bld(rule=write_test_file, target='test.' + mode, code=LIB_CODE)
- bld(rule=write_test_file, target='main.' + mode, code=MAIN_CODE)
- bld(features='%sshlib' % m, source='test.' + mode, target='test')
- bld(features='%sprogram %s' % (m, ex), source='main.' + mode, target='app', use='test', rpath=rpath)
-
-@conf
-def check_library(self, mode=None, test_exec=True):
- """
- Checks if libraries can be linked with the current linker. Uses :py:func:`waflib.Tools.c_tests.link_lib_test_fun`.
-
- :param mode: c or cxx or d
- :type mode: string
- """
- if not mode:
- mode = 'c'
- if self.env.CXX:
- mode = 'cxx'
- self.check(
- compile_filename = [],
- features = 'link_lib_test',
- msg = 'Checking for libraries',
- mode = mode,
- test_exec = test_exec)
-
-########################################################################################
-
-INLINE_CODE = '''
-typedef int foo_t;
-static %s foo_t static_foo () {return 0; }
-%s foo_t foo () {
- return 0;
-}
-'''
-INLINE_VALUES = ['inline', '__inline__', '__inline']
-
-@conf
-def check_inline(self, **kw):
- """
- Checks for the right value for inline macro.
- Define INLINE_MACRO to 1 if the define is found.
- If the inline macro is not 'inline', add a define to the ``config.h`` (#define inline __inline__)
-
- :param define_name: define INLINE_MACRO by default to 1 if the macro is defined
- :type define_name: string
- :param features: by default *c* or *cxx* depending on the compiler present
- :type features: list of string
- """
- self.start_msg('Checking for inline')
-
- if not 'define_name' in kw:
- kw['define_name'] = 'INLINE_MACRO'
- if not 'features' in kw:
- if self.env.CXX:
- kw['features'] = ['cxx']
- else:
- kw['features'] = ['c']
-
- for x in INLINE_VALUES:
- kw['fragment'] = INLINE_CODE % (x, x)
-
- try:
- self.check(**kw)
- except self.errors.ConfigurationError:
- continue
- else:
- self.end_msg(x)
- if x != 'inline':
- self.define('inline', x, quote=False)
- return x
- self.fatal('could not use inline functions')
-
-########################################################################################
-
-LARGE_FRAGMENT = '''#include <unistd.h>
-int main(int argc, char **argv) {
- (void)argc; (void)argv;
- return !(sizeof(off_t) >= 8);
-}
-'''
-
-@conf
-def check_large_file(self, **kw):
- """
- Checks for large file support and define the macro HAVE_LARGEFILE
- The test is skipped on win32 systems (DEST_BINFMT == pe).
-
- :param define_name: define to set, by default *HAVE_LARGEFILE*
- :type define_name: string
- :param execute: execute the test (yes by default)
- :type execute: bool
- """
- if not 'define_name' in kw:
- kw['define_name'] = 'HAVE_LARGEFILE'
- if not 'execute' in kw:
- kw['execute'] = True
-
- if not 'features' in kw:
- if self.env.CXX:
- kw['features'] = ['cxx', 'cxxprogram']
- else:
- kw['features'] = ['c', 'cprogram']
-
- kw['fragment'] = LARGE_FRAGMENT
-
- kw['msg'] = 'Checking for large file support'
- ret = True
- try:
- if self.env.DEST_BINFMT != 'pe':
- ret = self.check(**kw)
- except self.errors.ConfigurationError:
- pass
- else:
- if ret:
- return True
-
- kw['msg'] = 'Checking for -D_FILE_OFFSET_BITS=64'
- kw['defines'] = ['_FILE_OFFSET_BITS=64']
- try:
- ret = self.check(**kw)
- except self.errors.ConfigurationError:
- pass
- else:
- self.define('_FILE_OFFSET_BITS', 64)
- return ret
-
- self.fatal('There is no support for large files')
-
-########################################################################################
-
-ENDIAN_FRAGMENT = '''
-short int ascii_mm[] = { 0x4249, 0x4765, 0x6E44, 0x6961, 0x6E53, 0x7953, 0 };
-short int ascii_ii[] = { 0x694C, 0x5454, 0x656C, 0x6E45, 0x6944, 0x6E61, 0 };
-int use_ascii (int i) {
- return ascii_mm[i] + ascii_ii[i];
-}
-short int ebcdic_ii[] = { 0x89D3, 0xE3E3, 0x8593, 0x95C5, 0x89C4, 0x9581, 0 };
-short int ebcdic_mm[] = { 0xC2C9, 0xC785, 0x95C4, 0x8981, 0x95E2, 0xA8E2, 0 };
-int use_ebcdic (int i) {
- return ebcdic_mm[i] + ebcdic_ii[i];
-}
-extern int foo;
-'''
-
-class grep_for_endianness(Task.Task):
- """
- Task that reads a binary and tries to determine the endianness
- """
- color = 'PINK'
- def run(self):
- txt = self.inputs[0].read(flags='rb').decode('latin-1')
- if txt.find('LiTTleEnDian') > -1:
- self.generator.tmp.append('little')
- elif txt.find('BIGenDianSyS') > -1:
- self.generator.tmp.append('big')
- else:
- return -1
-
-@feature('grep_for_endianness')
-@after_method('process_source')
-def grep_for_endianness_fun(self):
- """
- Used by the endianness configuration test
- """
- self.create_task('grep_for_endianness', self.compiled_tasks[0].outputs[0])
-
-@conf
-def check_endianness(self):
- """
- Executes a configuration test to determine the endianness
- """
- tmp = []
- def check_msg(self):
- return tmp[0]
- self.check(fragment=ENDIAN_FRAGMENT, features='c grep_for_endianness',
- msg='Checking for endianness', define='ENDIANNESS', tmp=tmp, okmsg=check_msg)
- return tmp[0]
-
diff --git a/waflib/Tools/ccroot.py b/waflib/Tools/ccroot.py
deleted file mode 100644
index cfef8bf..0000000
--- a/waflib/Tools/ccroot.py
+++ /dev/null
@@ -1,776 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Classes and methods shared by tools providing support for C-like language such
-as C/C++/D/Assembly/Go (this support module is almost never used alone).
-"""
-
-import os, re
-from waflib import Task, Utils, Node, Errors, Logs
-from waflib.TaskGen import after_method, before_method, feature, taskgen_method, extension
-from waflib.Tools import c_aliases, c_preproc, c_config, c_osx, c_tests
-from waflib.Configure import conf
-
-SYSTEM_LIB_PATHS = ['/usr/lib64', '/usr/lib', '/usr/local/lib64', '/usr/local/lib']
-
-USELIB_VARS = Utils.defaultdict(set)
-"""
-Mapping for features to :py:class:`waflib.ConfigSet.ConfigSet` variables. See :py:func:`waflib.Tools.ccroot.propagate_uselib_vars`.
-"""
-
-USELIB_VARS['c'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CCDEPS', 'CFLAGS', 'ARCH'])
-USELIB_VARS['cxx'] = set(['INCLUDES', 'FRAMEWORKPATH', 'DEFINES', 'CPPFLAGS', 'CXXDEPS', 'CXXFLAGS', 'ARCH'])
-USELIB_VARS['d'] = set(['INCLUDES', 'DFLAGS'])
-USELIB_VARS['includes'] = set(['INCLUDES', 'FRAMEWORKPATH', 'ARCH'])
-
-USELIB_VARS['cprogram'] = USELIB_VARS['cxxprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
-USELIB_VARS['cshlib'] = USELIB_VARS['cxxshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS', 'FRAMEWORK', 'FRAMEWORKPATH', 'ARCH', 'LDFLAGS'])
-USELIB_VARS['cstlib'] = USELIB_VARS['cxxstlib'] = set(['ARFLAGS', 'LINKDEPS'])
-
-USELIB_VARS['dprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-USELIB_VARS['dshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-USELIB_VARS['dstlib'] = set(['ARFLAGS', 'LINKDEPS'])
-
-USELIB_VARS['asm'] = set(['ASFLAGS'])
-
-# =================================================================================================
-
-@taskgen_method
-def create_compiled_task(self, name, node):
- """
- Create the compilation task: c, cxx, asm, etc. The output node is created automatically (object file with a typical **.o** extension).
- The task is appended to the list *compiled_tasks* which is then used by :py:func:`waflib.Tools.ccroot.apply_link`
-
- :param name: name of the task class
- :type name: string
- :param node: the file to compile
- :type node: :py:class:`waflib.Node.Node`
- :return: The task created
- :rtype: :py:class:`waflib.Task.Task`
- """
- out = '%s.%d.o' % (node.name, self.idx)
- task = self.create_task(name, node, node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- return task
-
-@taskgen_method
-def to_incnodes(self, inlst):
- """
- Task generator method provided to convert a list of string/nodes into a list of includes folders.
-
- The paths are assumed to be relative to the task generator path, except if they begin by **#**
- in which case they are searched from the top-level directory (``bld.srcnode``).
- The folders are simply assumed to be existing.
-
- The node objects in the list are returned in the output list. The strings are converted
- into node objects if possible. The node is searched from the source directory, and if a match is found,
- the equivalent build directory is created and added to the returned list too. When a folder cannot be found, it is ignored.
-
- :param inlst: list of folders
- :type inlst: space-delimited string or a list of string/nodes
- :rtype: list of :py:class:`waflib.Node.Node`
- :return: list of include folders as nodes
- """
- lst = []
- seen = set()
- for x in self.to_list(inlst):
- if x in seen or not x:
- continue
- seen.add(x)
-
- # with a real lot of targets, it is sometimes interesting to cache the results below
- if isinstance(x, Node.Node):
- lst.append(x)
- else:
- if os.path.isabs(x):
- lst.append(self.bld.root.make_node(x) or x)
- else:
- if x[0] == '#':
- p = self.bld.bldnode.make_node(x[1:])
- v = self.bld.srcnode.make_node(x[1:])
- else:
- p = self.path.get_bld().make_node(x)
- v = self.path.make_node(x)
- if p.is_child_of(self.bld.bldnode):
- p.mkdir()
- lst.append(p)
- lst.append(v)
- return lst
-
-@feature('c', 'cxx', 'd', 'asm', 'fc', 'includes')
-@after_method('propagate_uselib_vars', 'process_source')
-def apply_incpaths(self):
- """
- Task generator method that processes the attribute *includes*::
-
- tg = bld(features='includes', includes='.')
-
- The folders only need to be relative to the current directory, the equivalent build directory is
- added automatically (for headers created in the build directory). This enable using a build directory
- or not (``top == out``).
-
- This method will add a list of nodes read by :py:func:`waflib.Tools.ccroot.to_incnodes` in ``tg.env.INCPATHS``,
- and the list of include paths in ``tg.env.INCLUDES``.
- """
-
- lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env.INCLUDES)
- self.includes_nodes = lst
- cwd = self.get_cwd()
- self.env.INCPATHS = [x.path_from(cwd) for x in lst]
-
-class link_task(Task.Task):
- """
- Base class for all link tasks. A task generator is supposed to have at most one link task bound in the attribute *link_task*. See :py:func:`waflib.Tools.ccroot.apply_link`.
-
- .. inheritance-diagram:: waflib.Tools.ccroot.stlink_task waflib.Tools.c.cprogram waflib.Tools.c.cshlib waflib.Tools.cxx.cxxstlib waflib.Tools.cxx.cxxprogram waflib.Tools.cxx.cxxshlib waflib.Tools.d.dprogram waflib.Tools.d.dshlib waflib.Tools.d.dstlib waflib.Tools.ccroot.fake_shlib waflib.Tools.ccroot.fake_stlib waflib.Tools.asm.asmprogram waflib.Tools.asm.asmshlib waflib.Tools.asm.asmstlib
- """
- color = 'YELLOW'
-
- weight = 3
- """Try to process link tasks as early as possible"""
-
- inst_to = None
- """Default installation path for the link task outputs, or None to disable"""
-
- chmod = Utils.O755
- """Default installation mode for the link task outputs"""
-
- def add_target(self, target):
- """
- Process the *target* attribute to add the platform-specific prefix/suffix such as *.so* or *.exe*.
- The settings are retrieved from ``env.clsname_PATTERN``
- """
- if isinstance(target, str):
- base = self.generator.path
- if target.startswith('#'):
- # for those who like flat structures
- target = target[1:]
- base = self.generator.bld.bldnode
-
- pattern = self.env[self.__class__.__name__ + '_PATTERN']
- if not pattern:
- pattern = '%s'
- folder, name = os.path.split(target)
-
- if self.__class__.__name__.find('shlib') > 0 and getattr(self.generator, 'vnum', None):
- nums = self.generator.vnum.split('.')
- if self.env.DEST_BINFMT == 'pe':
- # include the version in the dll file name,
- # the import lib file name stays unversioned.
- name = name + '-' + nums[0]
- elif self.env.DEST_OS == 'openbsd':
- pattern = '%s.%s' % (pattern, nums[0])
- if len(nums) >= 2:
- pattern += '.%s' % nums[1]
-
- if folder:
- tmp = folder + os.sep + pattern % name
- else:
- tmp = pattern % name
- target = base.find_or_declare(tmp)
- self.set_outputs(target)
-
- def exec_command(self, *k, **kw):
- ret = super(link_task, self).exec_command(*k, **kw)
- if not ret and self.env.DO_MANIFEST:
- ret = self.exec_mf()
- return ret
-
- def exec_mf(self):
- """
- Create manifest files for VS-like compilers (msvc, ifort, ...)
- """
- if not self.env.MT:
- return 0
-
- manifest = None
- for out_node in self.outputs:
- if out_node.name.endswith('.manifest'):
- manifest = out_node.abspath()
- break
- else:
- # Should never get here. If we do, it means the manifest file was
- # never added to the outputs list, thus we don't have a manifest file
- # to embed, so we just return.
- return 0
-
- # embedding mode. Different for EXE's and DLL's.
- # see: http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx
- mode = ''
- for x in Utils.to_list(self.generator.features):
- if x in ('cprogram', 'cxxprogram', 'fcprogram', 'fcprogram_test'):
- mode = 1
- elif x in ('cshlib', 'cxxshlib', 'fcshlib'):
- mode = 2
-
- Logs.debug('msvc: embedding manifest in mode %r', mode)
-
- lst = [] + self.env.MT
- lst.extend(Utils.to_list(self.env.MTFLAGS))
- lst.extend(['-manifest', manifest])
- lst.append('-outputresource:%s;%s' % (self.outputs[0].abspath(), mode))
-
- return super(link_task, self).exec_command(lst)
-
-class stlink_task(link_task):
- """
- Base for static link tasks, which use *ar* most of the time.
- The target is always removed before being written.
- """
- run_str = '${AR} ${ARFLAGS} ${AR_TGT_F}${TGT} ${AR_SRC_F}${SRC}'
-
- chmod = Utils.O644
- """Default installation mode for the static libraries"""
-
-def rm_tgt(cls):
- old = cls.run
- def wrap(self):
- try:
- os.remove(self.outputs[0].abspath())
- except OSError:
- pass
- return old(self)
- setattr(cls, 'run', wrap)
-rm_tgt(stlink_task)
-
-@feature('c', 'cxx', 'd', 'fc', 'asm')
-@after_method('process_source')
-def apply_link(self):
- """
- Collect the tasks stored in ``compiled_tasks`` (created by :py:func:`waflib.Tools.ccroot.create_compiled_task`), and
- use the outputs for a new instance of :py:class:`waflib.Tools.ccroot.link_task`. The class to use is the first link task
- matching a name from the attribute *features*, for example::
-
- def build(bld):
- tg = bld(features='cxx cxxprogram cprogram', source='main.c', target='app')
-
- will create the task ``tg.link_task`` as a new instance of :py:class:`waflib.Tools.cxx.cxxprogram`
- """
-
- for x in self.features:
- if x == 'cprogram' and 'cxx' in self.features: # limited compat
- x = 'cxxprogram'
- elif x == 'cshlib' and 'cxx' in self.features:
- x = 'cxxshlib'
-
- if x in Task.classes:
- if issubclass(Task.classes[x], link_task):
- link = x
- break
- else:
- return
-
- objs = [t.outputs[0] for t in getattr(self, 'compiled_tasks', [])]
- self.link_task = self.create_task(link, objs)
- self.link_task.add_target(self.target)
-
- # remember that the install paths are given by the task generators
- try:
- inst_to = self.install_path
- except AttributeError:
- inst_to = self.link_task.inst_to
- if inst_to:
- # install a copy of the node list we have at this moment (implib not added)
- self.install_task = self.add_install_files(
- install_to=inst_to, install_from=self.link_task.outputs[:],
- chmod=self.link_task.chmod, task=self.link_task)
-
-@taskgen_method
-def use_rec(self, name, **kw):
- """
- Processes the ``use`` keyword recursively. This method is kind of private and only meant to be used from ``process_use``
- """
-
- if name in self.tmp_use_not or name in self.tmp_use_seen:
- return
-
- try:
- y = self.bld.get_tgen_by_name(name)
- except Errors.WafError:
- self.uselib.append(name)
- self.tmp_use_not.add(name)
- return
-
- self.tmp_use_seen.append(name)
- y.post()
-
- # bind temporary attributes on the task generator
- y.tmp_use_objects = objects = kw.get('objects', True)
- y.tmp_use_stlib = stlib = kw.get('stlib', True)
- try:
- link_task = y.link_task
- except AttributeError:
- y.tmp_use_var = ''
- else:
- objects = False
- if not isinstance(link_task, stlink_task):
- stlib = False
- y.tmp_use_var = 'LIB'
- else:
- y.tmp_use_var = 'STLIB'
-
- p = self.tmp_use_prec
- for x in self.to_list(getattr(y, 'use', [])):
- if self.env["STLIB_" + x]:
- continue
- try:
- p[x].append(name)
- except KeyError:
- p[x] = [name]
- self.use_rec(x, objects=objects, stlib=stlib)
-
-@feature('c', 'cxx', 'd', 'use', 'fc')
-@before_method('apply_incpaths', 'propagate_uselib_vars')
-@after_method('apply_link', 'process_source')
-def process_use(self):
- """
- Process the ``use`` attribute which contains a list of task generator names::
-
- def build(bld):
- bld.shlib(source='a.c', target='lib1')
- bld.program(source='main.c', target='app', use='lib1')
-
- See :py:func:`waflib.Tools.ccroot.use_rec`.
- """
-
- use_not = self.tmp_use_not = set()
- self.tmp_use_seen = [] # we would like an ordered set
- use_prec = self.tmp_use_prec = {}
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- self.includes = self.to_list(getattr(self, 'includes', []))
- names = self.to_list(getattr(self, 'use', []))
-
- for x in names:
- self.use_rec(x)
-
- for x in use_not:
- if x in use_prec:
- del use_prec[x]
-
- # topological sort
- out = self.tmp_use_sorted = []
- tmp = []
- for x in self.tmp_use_seen:
- for k in use_prec.values():
- if x in k:
- break
- else:
- tmp.append(x)
-
- while tmp:
- e = tmp.pop()
- out.append(e)
- try:
- nlst = use_prec[e]
- except KeyError:
- pass
- else:
- del use_prec[e]
- for x in nlst:
- for y in use_prec:
- if x in use_prec[y]:
- break
- else:
- tmp.append(x)
- if use_prec:
- raise Errors.WafError('Cycle detected in the use processing %r' % use_prec)
- out.reverse()
-
- link_task = getattr(self, 'link_task', None)
- for x in out:
- y = self.bld.get_tgen_by_name(x)
- var = y.tmp_use_var
- if var and link_task:
- if var == 'LIB' or y.tmp_use_stlib or x in names:
- self.env.append_value(var, [y.target[y.target.rfind(os.sep) + 1:]])
- self.link_task.dep_nodes.extend(y.link_task.outputs)
- tmp_path = y.link_task.outputs[0].parent.path_from(self.get_cwd())
- self.env.append_unique(var + 'PATH', [tmp_path])
- else:
- if y.tmp_use_objects:
- self.add_objects_from_tgen(y)
-
- if getattr(y, 'export_includes', None):
- # self.includes may come from a global variable #2035
- self.includes = self.includes + y.to_incnodes(y.export_includes)
-
- if getattr(y, 'export_defines', None):
- self.env.append_value('DEFINES', self.to_list(y.export_defines))
-
-
- # and finally, add the use variables (no recursion needed)
- for x in names:
- try:
- y = self.bld.get_tgen_by_name(x)
- except Errors.WafError:
- if not self.env['STLIB_' + x] and not x in self.uselib:
- self.uselib.append(x)
- else:
- for k in self.to_list(getattr(y, 'use', [])):
- if not self.env['STLIB_' + k] and not k in self.uselib:
- self.uselib.append(k)
-
-@taskgen_method
-def accept_node_to_link(self, node):
- """
- PRIVATE INTERNAL USE ONLY
- """
- return not node.name.endswith('.pdb')
-
-@taskgen_method
-def add_objects_from_tgen(self, tg):
- """
- Add the objects from the depending compiled tasks as link task inputs.
-
- Some objects are filtered: for instance, .pdb files are added
- to the compiled tasks but not to the link tasks (to avoid errors)
- PRIVATE INTERNAL USE ONLY
- """
- try:
- link_task = self.link_task
- except AttributeError:
- pass
- else:
- for tsk in getattr(tg, 'compiled_tasks', []):
- for x in tsk.outputs:
- if self.accept_node_to_link(x):
- link_task.inputs.append(x)
-
-@taskgen_method
-def get_uselib_vars(self):
- """
- :return: the *uselib* variables associated to the *features* attribute (see :py:attr:`waflib.Tools.ccroot.USELIB_VARS`)
- :rtype: list of string
- """
- _vars = set()
- for x in self.features:
- if x in USELIB_VARS:
- _vars |= USELIB_VARS[x]
- return _vars
-
-@feature('c', 'cxx', 'd', 'fc', 'javac', 'cs', 'uselib', 'asm')
-@after_method('process_use')
-def propagate_uselib_vars(self):
- """
- Process uselib variables for adding flags. For example, the following target::
-
- def build(bld):
- bld.env.AFLAGS_aaa = ['bar']
- from waflib.Tools.ccroot import USELIB_VARS
- USELIB_VARS['aaa'] = ['AFLAGS']
-
- tg = bld(features='aaa', aflags='test')
-
- The *aflags* attribute will be processed and this method will set::
-
- tg.env.AFLAGS = ['bar', 'test']
- """
- _vars = self.get_uselib_vars()
- env = self.env
- app = env.append_value
- feature_uselib = self.features + self.to_list(getattr(self, 'uselib', []))
- for var in _vars:
- y = var.lower()
- val = getattr(self, y, [])
- if val:
- app(var, self.to_list(val))
-
- for x in feature_uselib:
- val = env['%s_%s' % (var, x)]
- if val:
- app(var, val)
-
-# ============ the code above must not know anything about import libs ==========
-
-@feature('cshlib', 'cxxshlib', 'fcshlib')
-@after_method('apply_link')
-def apply_implib(self):
- """
- Handle dlls and their import libs on Windows-like systems.
-
- A ``.dll.a`` file called *import library* is generated.
- It must be installed as it is required for linking the library.
- """
- if not self.env.DEST_BINFMT == 'pe':
- return
-
- dll = self.link_task.outputs[0]
- if isinstance(self.target, Node.Node):
- name = self.target.name
- else:
- name = os.path.split(self.target)[1]
- implib = self.env.implib_PATTERN % name
- implib = dll.parent.find_or_declare(implib)
- self.env.append_value('LINKFLAGS', self.env.IMPLIB_ST % implib.bldpath())
- self.link_task.outputs.append(implib)
-
- if getattr(self, 'defs', None) and self.env.DEST_BINFMT == 'pe':
- node = self.path.find_resource(self.defs)
- if not node:
- raise Errors.WafError('invalid def file %r' % self.defs)
- if self.env.def_PATTERN:
- self.env.append_value('LINKFLAGS', self.env.def_PATTERN % node.path_from(self.get_cwd()))
- self.link_task.dep_nodes.append(node)
- else:
- # gcc for windows takes *.def file as input without any special flag
- self.link_task.inputs.append(node)
-
- # where to put the import library
- if getattr(self, 'install_task', None):
- try:
- # user has given a specific installation path for the import library
- inst_to = self.install_path_implib
- except AttributeError:
- try:
- # user has given an installation path for the main library, put the import library in it
- inst_to = self.install_path
- except AttributeError:
- # else, put the library in BINDIR and the import library in LIBDIR
- inst_to = '${IMPLIBDIR}'
- self.install_task.install_to = '${BINDIR}'
- if not self.env.IMPLIBDIR:
- self.env.IMPLIBDIR = self.env.LIBDIR
- self.implib_install_task = self.add_install_files(install_to=inst_to, install_from=implib,
- chmod=self.link_task.chmod, task=self.link_task)
-
-# ============ the code above must not know anything about vnum processing on unix platforms =========
-
-re_vnum = re.compile('^([1-9]\\d*|0)([.]([1-9]\\d*|0)){0,2}?$')
-@feature('cshlib', 'cxxshlib', 'dshlib', 'fcshlib', 'vnum')
-@after_method('apply_link', 'propagate_uselib_vars')
-def apply_vnum(self):
- """
- Enforce version numbering on shared libraries. The valid version numbers must have either zero or two dots::
-
- def build(bld):
- bld.shlib(source='a.c', target='foo', vnum='14.15.16')
-
- In this example on Linux platform, ``libfoo.so`` is installed as ``libfoo.so.14.15.16``, and the following symbolic links are created:
-
- * ``libfoo.so → libfoo.so.14.15.16``
- * ``libfoo.so.14 → libfoo.so.14.15.16``
-
- By default, the library will be assigned SONAME ``libfoo.so.14``, effectively declaring ABI compatibility between all minor and patch releases for the major version of the library. When necessary, the compatibility can be explicitly defined using `cnum` parameter:
-
- def build(bld):
- bld.shlib(source='a.c', target='foo', vnum='14.15.16', cnum='14.15')
-
- In this case, the assigned SONAME will be ``libfoo.so.14.15`` with ABI compatibility only between path releases for a specific major and minor version of the library.
-
- On OS X platform, install-name parameter will follow the above logic for SONAME with exception that it also specifies an absolute path (based on install_path) of the library.
- """
- if not getattr(self, 'vnum', '') or os.name != 'posix' or self.env.DEST_BINFMT not in ('elf', 'mac-o'):
- return
-
- link = self.link_task
- if not re_vnum.match(self.vnum):
- raise Errors.WafError('Invalid vnum %r for target %r' % (self.vnum, getattr(self, 'name', self)))
- nums = self.vnum.split('.')
- node = link.outputs[0]
-
- cnum = getattr(self, 'cnum', str(nums[0]))
- cnums = cnum.split('.')
- if len(cnums)>len(nums) or nums[0:len(cnums)] != cnums:
- raise Errors.WafError('invalid compatibility version %s' % cnum)
-
- libname = node.name
- if libname.endswith('.dylib'):
- name3 = libname.replace('.dylib', '.%s.dylib' % self.vnum)
- name2 = libname.replace('.dylib', '.%s.dylib' % cnum)
- else:
- name3 = libname + '.' + self.vnum
- name2 = libname + '.' + cnum
-
- # add the so name for the ld linker - to disable, just unset env.SONAME_ST
- if self.env.SONAME_ST:
- v = self.env.SONAME_ST % name2
- self.env.append_value('LINKFLAGS', v.split())
-
- # the following task is just to enable execution from the build dir :-/
- if self.env.DEST_OS != 'openbsd':
- outs = [node.parent.make_node(name3)]
- if name2 != name3:
- outs.append(node.parent.make_node(name2))
- self.create_task('vnum', node, outs)
-
- if getattr(self, 'install_task', None):
- self.install_task.hasrun = Task.SKIPPED
- self.install_task.no_errcheck_out = True
- path = self.install_task.install_to
- if self.env.DEST_OS == 'openbsd':
- libname = self.link_task.outputs[0].name
- t1 = self.add_install_as(install_to='%s/%s' % (path, libname), install_from=node, chmod=self.link_task.chmod)
- self.vnum_install_task = (t1,)
- else:
- t1 = self.add_install_as(install_to=path + os.sep + name3, install_from=node, chmod=self.link_task.chmod)
- t3 = self.add_symlink_as(install_to=path + os.sep + libname, install_from=name3)
- if name2 != name3:
- t2 = self.add_symlink_as(install_to=path + os.sep + name2, install_from=name3)
- self.vnum_install_task = (t1, t2, t3)
- else:
- self.vnum_install_task = (t1, t3)
-
- if '-dynamiclib' in self.env.LINKFLAGS:
- # this requires after(propagate_uselib_vars)
- try:
- inst_to = self.install_path
- except AttributeError:
- inst_to = self.link_task.inst_to
- if inst_to:
- p = Utils.subst_vars(inst_to, self.env)
- path = os.path.join(p, name2)
- self.env.append_value('LINKFLAGS', ['-install_name', path])
- self.env.append_value('LINKFLAGS', '-Wl,-compatibility_version,%s' % cnum)
- self.env.append_value('LINKFLAGS', '-Wl,-current_version,%s' % self.vnum)
-
-class vnum(Task.Task):
- """
- Create the symbolic links for a versioned shared library. Instances are created by :py:func:`waflib.Tools.ccroot.apply_vnum`
- """
- color = 'CYAN'
- ext_in = ['.bin']
- def keyword(self):
- return 'Symlinking'
- def run(self):
- for x in self.outputs:
- path = x.abspath()
- try:
- os.remove(path)
- except OSError:
- pass
-
- try:
- os.symlink(self.inputs[0].name, path)
- except OSError:
- return 1
-
-class fake_shlib(link_task):
- """
- Task used for reading a system library and adding the dependency on it
- """
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- return Task.SKIP_ME
-
-class fake_stlib(stlink_task):
- """
- Task used for reading a system library and adding the dependency on it
- """
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- return Task.SKIP_ME
-
-@conf
-def read_shlib(self, name, paths=[], export_includes=[], export_defines=[]):
- """
- Read a system shared library, enabling its use as a local library. Will trigger a rebuild if the file changes::
-
- def build(bld):
- bld.read_shlib('m')
- bld.program(source='main.c', use='m')
- """
- return self(name=name, features='fake_lib', lib_paths=paths, lib_type='shlib', export_includes=export_includes, export_defines=export_defines)
-
-@conf
-def read_stlib(self, name, paths=[], export_includes=[], export_defines=[]):
- """
- Read a system static library, enabling a use as a local library. Will trigger a rebuild if the file changes.
- """
- return self(name=name, features='fake_lib', lib_paths=paths, lib_type='stlib', export_includes=export_includes, export_defines=export_defines)
-
-lib_patterns = {
- 'shlib' : ['lib%s.so', '%s.so', 'lib%s.dylib', 'lib%s.dll', '%s.dll'],
- 'stlib' : ['lib%s.a', '%s.a', 'lib%s.dll', '%s.dll', 'lib%s.lib', '%s.lib'],
-}
-
-@feature('fake_lib')
-def process_lib(self):
- """
- Find the location of a foreign library. Used by :py:class:`waflib.Tools.ccroot.read_shlib` and :py:class:`waflib.Tools.ccroot.read_stlib`.
- """
- node = None
-
- names = [x % self.name for x in lib_patterns[self.lib_type]]
- for x in self.lib_paths + [self.path] + SYSTEM_LIB_PATHS:
- if not isinstance(x, Node.Node):
- x = self.bld.root.find_node(x) or self.path.find_node(x)
- if not x:
- continue
-
- for y in names:
- node = x.find_node(y)
- if node:
- try:
- Utils.h_file(node.abspath())
- except EnvironmentError:
- raise ValueError('Could not read %r' % y)
- break
- else:
- continue
- break
- else:
- raise Errors.WafError('could not find library %r' % self.name)
- self.link_task = self.create_task('fake_%s' % self.lib_type, [], [node])
- self.target = self.name
-
-
-class fake_o(Task.Task):
- def runnable_status(self):
- return Task.SKIP_ME
-
-@extension('.o', '.obj')
-def add_those_o_files(self, node):
- tsk = self.create_task('fake_o', [], node)
- try:
- self.compiled_tasks.append(tsk)
- except AttributeError:
- self.compiled_tasks = [tsk]
-
-@feature('fake_obj')
-@before_method('process_source')
-def process_objs(self):
- """
- Puts object files in the task generator outputs
- """
- for node in self.to_nodes(self.source):
- self.add_those_o_files(node)
- self.source = []
-
-@conf
-def read_object(self, obj):
- """
- Read an object file, enabling injection in libs/programs. Will trigger a rebuild if the file changes.
-
- :param obj: object file path, as string or Node
- """
- if not isinstance(obj, self.path.__class__):
- obj = self.path.find_resource(obj)
- return self(features='fake_obj', source=obj, name=obj.name)
-
-@feature('cxxprogram', 'cprogram')
-@after_method('apply_link', 'process_use')
-def set_full_paths_hpux(self):
- """
- On hp-ux, extend the libpaths and static library paths to absolute paths
- """
- if self.env.DEST_OS != 'hp-ux':
- return
- base = self.bld.bldnode.abspath()
- for var in ['LIBPATH', 'STLIBPATH']:
- lst = []
- for x in self.env[var]:
- if x.startswith('/'):
- lst.append(x)
- else:
- lst.append(os.path.normpath(os.path.join(base, x)))
- self.env[var] = lst
-
diff --git a/waflib/Tools/clang.py b/waflib/Tools/clang.py
deleted file mode 100644
index 3828e39..0000000
--- a/waflib/Tools/clang.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Krzysztof Kosiński 2014
-
-"""
-Detect the Clang C compiler
-"""
-
-from waflib.Tools import ccroot, ar, gcc
-from waflib.Configure import conf
-
-@conf
-def find_clang(conf):
- """
- Finds the program clang and executes it to ensure it really is clang
- """
- cc = conf.find_program('clang', var='CC')
- conf.get_cc_version(cc, clang=True)
- conf.env.CC_NAME = 'clang'
-
-def configure(conf):
- conf.find_clang()
- conf.find_program(['llvm-ar', 'ar'], var='AR')
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/clangxx.py b/waflib/Tools/clangxx.py
deleted file mode 100644
index 152013c..0000000
--- a/waflib/Tools/clangxx.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2009-2018 (ita)
-
-"""
-Detect the Clang++ C++ compiler
-"""
-
-from waflib.Tools import ccroot, ar, gxx
-from waflib.Configure import conf
-
-@conf
-def find_clangxx(conf):
- """
- Finds the program clang++, and executes it to ensure it really is clang++
- """
- cxx = conf.find_program('clang++', var='CXX')
- conf.get_cc_version(cxx, clang=True)
- conf.env.CXX_NAME = 'clang'
-
-def configure(conf):
- conf.find_clangxx()
- conf.find_program(['llvm-ar', 'ar'], var='AR')
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/compiler_c.py b/waflib/Tools/compiler_c.py
deleted file mode 100644
index 2dba3f8..0000000
--- a/waflib/Tools/compiler_c.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn jahn dôt matthias ât freenet dôt de, 2007 (pmarat)
-
-"""
-Try to detect a C compiler from the list of supported compilers (gcc, msvc, etc)::
-
- def options(opt):
- opt.load('compiler_c')
- def configure(cnf):
- cnf.load('compiler_c')
- def build(bld):
- bld.program(source='main.c', target='app')
-
-The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_c.c_compiler`. To register
-a new C compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
-
- from waflib.Tools.compiler_c import c_compiler
- c_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
-
- def options(opt):
- opt.load('compiler_c')
- def configure(cnf):
- cnf.load('compiler_c')
- def build(bld):
- bld.program(source='main.c', target='app')
-
-Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
-
- $ CC=clang waf configure
-"""
-
-import re
-from waflib.Tools import ccroot
-from waflib import Utils
-from waflib.Logs import debug
-
-c_compiler = {
-'win32': ['msvc', 'gcc', 'clang'],
-'cygwin': ['gcc'],
-'darwin': ['clang', 'gcc'],
-'aix': ['xlc', 'gcc', 'clang'],
-'linux': ['gcc', 'clang', 'icc'],
-'sunos': ['suncc', 'gcc'],
-'irix': ['gcc', 'irixcc'],
-'hpux': ['gcc'],
-'osf1V': ['gcc'],
-'gnu': ['gcc', 'clang'],
-'java': ['gcc', 'msvc', 'clang', 'icc'],
-'default':['clang', 'gcc'],
-}
-"""
-Dict mapping platform names to Waf tools finding specific C compilers::
-
- from waflib.Tools.compiler_c import c_compiler
- c_compiler['linux'] = ['gcc', 'icc', 'suncc']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = c_compiler.get(build_platform, c_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable C compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_c_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_c')")
-
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (C compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- debug('compiler_c: %r', e)
- else:
- if conf.env.CC:
- conf.end_msg(conf.env.get_flat('CC'))
- conf.env.COMPILER_CC = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a C compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-c-compiler=gcc
- """
- test_for_compiler = default_compilers()
- opt.load_special_tools('c_*.py', ban=['c_dumbpreproc.py'])
- cc_compiler_opts = opt.add_option_group('Configuration options')
- cc_compiler_opts.add_option('--check-c-compiler', default=None,
- help='list of C compilers to try [%s]' % test_for_compiler,
- dest="check_c_compiler")
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/compiler_cxx.py b/waflib/Tools/compiler_cxx.py
deleted file mode 100644
index 1af65a2..0000000
--- a/waflib/Tools/compiler_cxx.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Matthias Jahn jahn dôt matthias ât freenet dôt de 2007 (pmarat)
-
-"""
-Try to detect a C++ compiler from the list of supported compilers (g++, msvc, etc)::
-
- def options(opt):
- opt.load('compiler_cxx')
- def configure(cnf):
- cnf.load('compiler_cxx')
- def build(bld):
- bld.program(source='main.cpp', target='app')
-
-The compilers are associated to platforms in :py:attr:`waflib.Tools.compiler_cxx.cxx_compiler`. To register
-a new C++ compiler named *cfoo* (assuming the tool ``waflib/extras/cfoo.py`` exists), use::
-
- from waflib.Tools.compiler_cxx import cxx_compiler
- cxx_compiler['win32'] = ['cfoo', 'msvc', 'gcc']
-
- def options(opt):
- opt.load('compiler_cxx')
- def configure(cnf):
- cnf.load('compiler_cxx')
- def build(bld):
- bld.program(source='main.c', target='app')
-
-Not all compilers need to have a specific tool. For example, the clang compilers can be detected by the gcc tools when using::
-
- $ CXX=clang waf configure
-"""
-
-
-import re
-from waflib.Tools import ccroot
-from waflib import Utils
-from waflib.Logs import debug
-
-cxx_compiler = {
-'win32': ['msvc', 'g++', 'clang++'],
-'cygwin': ['g++'],
-'darwin': ['clang++', 'g++'],
-'aix': ['xlc++', 'g++', 'clang++'],
-'linux': ['g++', 'clang++', 'icpc'],
-'sunos': ['sunc++', 'g++'],
-'irix': ['g++'],
-'hpux': ['g++'],
-'osf1V': ['g++'],
-'gnu': ['g++', 'clang++'],
-'java': ['g++', 'msvc', 'clang++', 'icpc'],
-'default': ['clang++', 'g++']
-}
-"""
-Dict mapping the platform names to Waf tools finding specific C++ compilers::
-
- from waflib.Tools.compiler_cxx import cxx_compiler
- cxx_compiler['linux'] = ['gxx', 'icpc', 'suncxx']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = cxx_compiler.get(build_platform, cxx_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable C++ compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_cxx_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_cxx')")
-
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (C++ compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- debug('compiler_cxx: %r', e)
- else:
- if conf.env.CXX:
- conf.end_msg(conf.env.get_flat('CXX'))
- conf.env.COMPILER_CXX = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a C++ compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-cxx-compiler=gxx
- """
- test_for_compiler = default_compilers()
- opt.load_special_tools('cxx_*.py')
- cxx_compiler_opts = opt.add_option_group('Configuration options')
- cxx_compiler_opts.add_option('--check-cxx-compiler', default=None,
- help='list of C++ compilers to try [%s]' % test_for_compiler,
- dest="check_cxx_compiler")
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/compiler_d.py b/waflib/Tools/compiler_d.py
deleted file mode 100644
index 43bb1f6..0000000
--- a/waflib/Tools/compiler_d.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2016-2018 (ita)
-
-"""
-Try to detect a D compiler from the list of supported compilers::
-
- def options(opt):
- opt.load('compiler_d')
- def configure(cnf):
- cnf.load('compiler_d')
- def build(bld):
- bld.program(source='main.d', target='app')
-
-Only three D compilers are really present at the moment:
-
-* gdc
-* dmd, the ldc compiler having a very similar command-line interface
-* ldc2
-"""
-
-import re
-from waflib import Utils, Logs
-
-d_compiler = {
-'default' : ['gdc', 'dmd', 'ldc2']
-}
-"""
-Dict mapping the platform names to lists of names of D compilers to try, in order of preference::
-
- from waflib.Tools.compiler_d import d_compiler
- d_compiler['default'] = ['gdc', 'dmd', 'ldc2']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = d_compiler.get(build_platform, d_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable D compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_d_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_d')")
-
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (D compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_d: %r', e)
- else:
- if conf.env.D:
- conf.end_msg(conf.env.get_flat('D'))
- conf.env.COMPILER_D = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a D compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-d-compiler=dmd
- """
- test_for_compiler = default_compilers()
- d_compiler_opts = opt.add_option_group('Configuration options')
- d_compiler_opts.add_option('--check-d-compiler', default=None,
- help='list of D compilers to try [%s]' % test_for_compiler, dest='check_d_compiler')
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/compiler_fc.py b/waflib/Tools/compiler_fc.py
deleted file mode 100644
index 96b58e7..0000000
--- a/waflib/Tools/compiler_fc.py
+++ /dev/null
@@ -1,73 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-import re
-from waflib import Utils, Logs
-from waflib.Tools import fc
-
-fc_compiler = {
- 'win32' : ['gfortran','ifort'],
- 'darwin' : ['gfortran', 'g95', 'ifort'],
- 'linux' : ['gfortran', 'g95', 'ifort'],
- 'java' : ['gfortran', 'g95', 'ifort'],
- 'default': ['gfortran'],
- 'aix' : ['gfortran']
-}
-"""
-Dict mapping the platform names to lists of names of Fortran compilers to try, in order of preference::
-
- from waflib.Tools.compiler_c import c_compiler
- c_compiler['linux'] = ['gfortran', 'g95', 'ifort']
-"""
-
-def default_compilers():
- build_platform = Utils.unversioned_sys_platform()
- possible_compiler_list = fc_compiler.get(build_platform, fc_compiler['default'])
- return ' '.join(possible_compiler_list)
-
-def configure(conf):
- """
- Detects a suitable Fortran compiler
-
- :raises: :py:class:`waflib.Errors.ConfigurationError` when no suitable compiler is found
- """
- try:
- test_for_compiler = conf.options.check_fortran_compiler or default_compilers()
- except AttributeError:
- conf.fatal("Add options(opt): opt.load('compiler_fc')")
- for compiler in re.split('[ ,]+', test_for_compiler):
- conf.env.stash()
- conf.start_msg('Checking for %r (Fortran compiler)' % compiler)
- try:
- conf.load(compiler)
- except conf.errors.ConfigurationError as e:
- conf.env.revert()
- conf.end_msg(False)
- Logs.debug('compiler_fortran: %r', e)
- else:
- if conf.env.FC:
- conf.end_msg(conf.env.get_flat('FC'))
- conf.env.COMPILER_FORTRAN = compiler
- conf.env.commit()
- break
- conf.env.revert()
- conf.end_msg(False)
- else:
- conf.fatal('could not configure a Fortran compiler!')
-
-def options(opt):
- """
- This is how to provide compiler preferences on the command-line::
-
- $ waf configure --check-fortran-compiler=ifort
- """
- test_for_compiler = default_compilers()
- opt.load_special_tools('fc_*.py')
- fortran_compiler_opts = opt.add_option_group('Configuration options')
- fortran_compiler_opts.add_option('--check-fortran-compiler', default=None,
- help='list of Fortran compiler to try [%s]' % test_for_compiler,
- dest="check_fortran_compiler")
-
- for x in test_for_compiler.split():
- opt.load('%s' % x)
-
diff --git a/waflib/Tools/cs.py b/waflib/Tools/cs.py
deleted file mode 100644
index aecca6d..0000000
--- a/waflib/Tools/cs.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-C# support. A simple example::
-
- def configure(conf):
- conf.load('cs')
- def build(bld):
- bld(features='cs', source='main.cs', gen='foo')
-
-Note that the configuration may compile C# snippets::
-
- FRAG = '''
- namespace Moo {
- public class Test { public static int Main(string[] args) { return 0; } }
- }'''
- def configure(conf):
- conf.check(features='cs', fragment=FRAG, compile_filename='test.cs', gen='test.exe',
- bintype='exe', csflags=['-pkg:gtk-sharp-2.0'], msg='Checking for Gtksharp support')
-"""
-
-from waflib import Utils, Task, Options, Errors
-from waflib.TaskGen import before_method, after_method, feature
-from waflib.Tools import ccroot
-from waflib.Configure import conf
-
-ccroot.USELIB_VARS['cs'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
-ccroot.lib_patterns['csshlib'] = ['%s']
-
-@feature('cs')
-@before_method('process_source')
-def apply_cs(self):
- """
- Create a C# task bound to the attribute *cs_task*. There can be only one C# task by task generator.
- """
- cs_nodes = []
- no_nodes = []
- for x in self.to_nodes(self.source):
- if x.name.endswith('.cs'):
- cs_nodes.append(x)
- else:
- no_nodes.append(x)
- self.source = no_nodes
-
- bintype = getattr(self, 'bintype', self.gen.endswith('.dll') and 'library' or 'exe')
- self.cs_task = tsk = self.create_task('mcs', cs_nodes, self.path.find_or_declare(self.gen))
- tsk.env.CSTYPE = '/target:%s' % bintype
- tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
- self.env.append_value('CSFLAGS', '/platform:%s' % getattr(self, 'platform', 'anycpu'))
-
- inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
- if inst_to:
- # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
- mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
- self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
-
-@feature('cs')
-@after_method('apply_cs')
-def use_cs(self):
- """
- C# applications honor the **use** keyword::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', name='mylib')
- bld(features='cs', source='Hi.cs', includes='.', bintype='exe', gen='hi.exe', use='mylib', name='hi')
- """
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.env.append_value('CSFLAGS', '/reference:%s' % x)
- continue
- y.post()
-
- tsk = getattr(y, 'cs_task', None) or getattr(y, 'link_task', None)
- if not tsk:
- self.bld.fatal('cs task has no link task for use %r' % self)
- self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
- self.cs_task.set_run_after(tsk) # order (redundant, the order is inferred from the nodes inputs/outputs)
- self.env.append_value('CSFLAGS', '/reference:%s' % tsk.outputs[0].abspath())
-
-@feature('cs')
-@after_method('apply_cs', 'use_cs')
-def debug_cs(self):
- """
- The C# targets may create .mdb or .pdb files::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdebug='full')
- # csdebug is a value in (True, 'full', 'pdbonly')
- """
- csdebug = getattr(self, 'csdebug', self.env.CSDEBUG)
- if not csdebug:
- return
-
- node = self.cs_task.outputs[0]
- if self.env.CS_NAME == 'mono':
- out = node.parent.find_or_declare(node.name + '.mdb')
- else:
- out = node.change_ext('.pdb')
- self.cs_task.outputs.append(out)
-
- if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=out)
-
- if csdebug == 'pdbonly':
- val = ['/debug+', '/debug:pdbonly']
- elif csdebug == 'full':
- val = ['/debug+', '/debug:full']
- else:
- val = ['/debug-']
- self.env.append_value('CSFLAGS', val)
-
-@feature('cs')
-@after_method('debug_cs')
-def doc_cs(self):
- """
- The C# targets may create .xml documentation files::
-
- def build(bld):
- bld(features='cs', source='My.cs', bintype='library', gen='my.dll', csdoc=True)
- # csdoc is a boolean value
- """
- csdoc = getattr(self, 'csdoc', self.env.CSDOC)
- if not csdoc:
- return
-
- node = self.cs_task.outputs[0]
- out = node.change_ext('.xml')
- self.cs_task.outputs.append(out)
-
- if getattr(self, 'install_task', None):
- self.doc_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=out)
-
- self.env.append_value('CSFLAGS', '/doc:%s' % out.abspath())
-
-class mcs(Task.Task):
- """
- Compile C# files
- """
- color = 'YELLOW'
- run_str = '${MCS} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
-
- def split_argfile(self, cmd):
- inline = [cmd[0]]
- infile = []
- for x in cmd[1:]:
- # csc doesn't want /noconfig in @file
- if x.lower() == '/noconfig':
- inline.append(x)
- else:
- infile.append(self.quote_flag(x))
- return (inline, infile)
-
-def configure(conf):
- """
- Find a C# compiler, set the variable MCS for the compiler and CS_NAME (mono or csc)
- """
- csc = getattr(Options.options, 'cscbinary', None)
- if csc:
- conf.env.MCS = csc
- conf.find_program(['csc', 'mcs', 'gmcs'], var='MCS')
- conf.env.ASS_ST = '/r:%s'
- conf.env.RES_ST = '/resource:%s'
-
- conf.env.CS_NAME = 'csc'
- if str(conf.env.MCS).lower().find('mcs') > -1:
- conf.env.CS_NAME = 'mono'
-
-def options(opt):
- """
- Add a command-line option for the configuration::
-
- $ waf configure --with-csc-binary=/foo/bar/mcs
- """
- opt.add_option('--with-csc-binary', type='string', dest='cscbinary')
-
-class fake_csshlib(Task.Task):
- """
- Task used for reading a foreign .net assembly and adding the dependency on it
- """
- color = 'YELLOW'
- inst_to = None
-
- def runnable_status(self):
- return Task.SKIP_ME
-
-@conf
-def read_csshlib(self, name, paths=[]):
- """
- Read a foreign .net assembly for the *use* system::
-
- def build(bld):
- bld.read_csshlib('ManagedLibrary.dll', paths=[bld.env.mylibrarypath])
- bld(features='cs', source='Hi.cs', bintype='exe', gen='hi.exe', use='ManagedLibrary.dll')
-
- :param name: Name of the library
- :type name: string
- :param paths: Folders in which the library may be found
- :type paths: list of string
- :return: A task generator having the feature *fake_lib* which will call :py:func:`waflib.Tools.ccroot.process_lib`
- :rtype: :py:class:`waflib.TaskGen.task_gen`
- """
- return self(name=name, features='fake_lib', lib_paths=paths, lib_type='csshlib')
-
diff --git a/waflib/Tools/cxx.py b/waflib/Tools/cxx.py
deleted file mode 100644
index 194fad7..0000000
--- a/waflib/Tools/cxx.py
+++ /dev/null
@@ -1,40 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"Base for c++ programs and libraries"
-
-from waflib import TaskGen, Task
-from waflib.Tools import c_preproc
-from waflib.Tools.ccroot import link_task, stlink_task
-
-@TaskGen.extension('.cpp','.cc','.cxx','.C','.c++')
-def cxx_hook(self, node):
- "Binds c++ file extensions to create :py:class:`waflib.Tools.cxx.cxx` instances"
- return self.create_compiled_task('cxx', node)
-
-if not '.c' in TaskGen.task_gen.mappings:
- TaskGen.task_gen.mappings['.c'] = TaskGen.task_gen.mappings['.cpp']
-
-class cxx(Task.Task):
- "Compiles C++ files into object files"
- run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXX_SRC_F}${SRC} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
- vars = ['CXXDEPS'] # unused variable to depend on, just in case
- ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
- scan = c_preproc.scan
-
-class cxxprogram(link_task):
- "Links object files into c++ programs"
- run_str = '${LINK_CXX} ${LINKFLAGS} ${CXXLNK_SRC_F}${SRC} ${CXXLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LDFLAGS}'
- vars = ['LINKDEPS']
- ext_out = ['.bin']
- inst_to = '${BINDIR}'
-
-class cxxshlib(cxxprogram):
- "Links object files into c++ shared libraries"
- inst_to = '${LIBDIR}'
-
-class cxxstlib(stlink_task):
- "Links object files into c++ static libraries"
- pass # do not remove
-
diff --git a/waflib/Tools/d.py b/waflib/Tools/d.py
deleted file mode 100644
index e4cf73b..0000000
--- a/waflib/Tools/d.py
+++ /dev/null
@@ -1,97 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2007-2018 (ita)
-
-from waflib import Utils, Task, Errors
-from waflib.TaskGen import taskgen_method, feature, extension
-from waflib.Tools import d_scan, d_config
-from waflib.Tools.ccroot import link_task, stlink_task
-
-class d(Task.Task):
- "Compile a d file into an object file"
- color = 'GREEN'
- run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_SRC_F:SRC} ${D_TGT_F:TGT}'
- scan = d_scan.scan
-
-class d_with_header(d):
- "Compile a d file and generate a header"
- run_str = '${D} ${DFLAGS} ${DINC_ST:INCPATHS} ${D_HDR_F:tgt.outputs[1].bldpath()} ${D_SRC_F:SRC} ${D_TGT_F:tgt.outputs[0].bldpath()}'
-
-class d_header(Task.Task):
- "Compile d headers"
- color = 'BLUE'
- run_str = '${D} ${D_HEADER} ${SRC}'
-
-class dprogram(link_task):
- "Link object files into a d program"
- run_str = '${D_LINKER} ${LINKFLAGS} ${DLNK_SRC_F}${SRC} ${DLNK_TGT_F:TGT} ${RPATH_ST:RPATH} ${DSTLIB_MARKER} ${DSTLIBPATH_ST:STLIBPATH} ${DSTLIB_ST:STLIB} ${DSHLIB_MARKER} ${DLIBPATH_ST:LIBPATH} ${DSHLIB_ST:LIB}'
- inst_to = '${BINDIR}'
-
-class dshlib(dprogram):
- "Link object files into a d shared library"
- inst_to = '${LIBDIR}'
-
-class dstlib(stlink_task):
- "Link object files into a d static library"
- pass # do not remove
-
-@extension('.d', '.di', '.D')
-def d_hook(self, node):
- """
- Compile *D* files. To get .di files as well as .o files, set the following::
-
- def build(bld):
- bld.program(source='foo.d', target='app', generate_headers=True)
-
- """
- ext = Utils.destos_to_binfmt(self.env.DEST_OS) == 'pe' and 'obj' or 'o'
- out = '%s.%d.%s' % (node.name, self.idx, ext)
- def create_compiled_task(self, name, node):
- task = self.create_task(name, node, node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- return task
-
- if getattr(self, 'generate_headers', None):
- tsk = create_compiled_task(self, 'd_with_header', node)
- tsk.outputs.append(node.change_ext(self.env.DHEADER_ext))
- else:
- tsk = create_compiled_task(self, 'd', node)
- return tsk
-
-@taskgen_method
-def generate_header(self, filename):
- """
- See feature request #104::
-
- def build(bld):
- tg = bld.program(source='foo.d', target='app')
- tg.generate_header('blah.d')
- # is equivalent to:
- #tg = bld.program(source='foo.d', target='app', header_lst='blah.d')
-
- :param filename: header to create
- :type filename: string
- """
- try:
- self.header_lst.append([filename, self.install_path])
- except AttributeError:
- self.header_lst = [[filename, self.install_path]]
-
-@feature('d')
-def process_header(self):
- """
- Process the attribute 'header_lst' to create the d header compilation tasks::
-
- def build(bld):
- bld.program(source='foo.d', target='app', header_lst='blah.d')
- """
- for i in getattr(self, 'header_lst', []):
- node = self.path.find_resource(i[0])
- if not node:
- raise Errors.WafError('file %r not found on d obj' % i[0])
- self.create_task('d_header', node, node.change_ext('.di'))
-
diff --git a/waflib/Tools/d_config.py b/waflib/Tools/d_config.py
deleted file mode 100644
index 6637556..0000000
--- a/waflib/Tools/d_config.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-from waflib import Utils
-from waflib.Configure import conf
-
-@conf
-def d_platform_flags(self):
- """
- Sets the extensions dll/so for d programs and libraries
- """
- v = self.env
- if not v.DEST_OS:
- v.DEST_OS = Utils.unversioned_sys_platform()
- binfmt = Utils.destos_to_binfmt(self.env.DEST_OS)
- if binfmt == 'pe':
- v.dprogram_PATTERN = '%s.exe'
- v.dshlib_PATTERN = 'lib%s.dll'
- v.dstlib_PATTERN = 'lib%s.a'
- elif binfmt == 'mac-o':
- v.dprogram_PATTERN = '%s'
- v.dshlib_PATTERN = 'lib%s.dylib'
- v.dstlib_PATTERN = 'lib%s.a'
- else:
- v.dprogram_PATTERN = '%s'
- v.dshlib_PATTERN = 'lib%s.so'
- v.dstlib_PATTERN = 'lib%s.a'
-
-DLIB = '''
-version(D_Version2) {
- import std.stdio;
- int main() {
- writefln("phobos2");
- return 0;
- }
-} else {
- version(Tango) {
- import tango.stdc.stdio;
- int main() {
- printf("tango");
- return 0;
- }
- } else {
- import std.stdio;
- int main() {
- writefln("phobos1");
- return 0;
- }
- }
-}
-'''
-"""Detection string for the D standard library"""
-
-@conf
-def check_dlibrary(self, execute=True):
- """
- Detects the kind of standard library that comes with the compiler,
- and sets conf.env.DLIBRARY to tango, phobos1 or phobos2
- """
- ret = self.check_cc(features='d dprogram', fragment=DLIB, compile_filename='test.d', execute=execute, define_ret=True)
- if execute:
- self.env.DLIBRARY = ret.strip()
-
diff --git a/waflib/Tools/d_scan.py b/waflib/Tools/d_scan.py
deleted file mode 100644
index 14c6c31..0000000
--- a/waflib/Tools/d_scan.py
+++ /dev/null
@@ -1,211 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-"""
-Provide a scanner for finding dependencies on d files
-"""
-
-import re
-from waflib import Utils
-
-def filter_comments(filename):
- """
- :param filename: d file name
- :type filename: string
- :rtype: list
- :return: a list of characters
- """
- txt = Utils.readf(filename)
- i = 0
- buf = []
- max = len(txt)
- begin = 0
- while i < max:
- c = txt[i]
- if c == '"' or c == "'": # skip a string or character literal
- buf.append(txt[begin:i])
- delim = c
- i += 1
- while i < max:
- c = txt[i]
- if c == delim:
- break
- elif c == '\\': # skip the character following backslash
- i += 1
- i += 1
- i += 1
- begin = i
- elif c == '/': # try to replace a comment with whitespace
- buf.append(txt[begin:i])
- i += 1
- if i == max:
- break
- c = txt[i]
- if c == '+': # eat nesting /+ +/ comment
- i += 1
- nesting = 1
- c = None
- while i < max:
- prev = c
- c = txt[i]
- if prev == '/' and c == '+':
- nesting += 1
- c = None
- elif prev == '+' and c == '/':
- nesting -= 1
- if nesting == 0:
- break
- c = None
- i += 1
- elif c == '*': # eat /* */ comment
- i += 1
- c = None
- while i < max:
- prev = c
- c = txt[i]
- if prev == '*' and c == '/':
- break
- i += 1
- elif c == '/': # eat // comment
- i += 1
- while i < max and txt[i] != '\n':
- i += 1
- else: # no comment
- begin = i - 1
- continue
- i += 1
- begin = i
- buf.append(' ')
- else:
- i += 1
- buf.append(txt[begin:])
- return buf
-
-class d_parser(object):
- """
- Parser for d files
- """
- def __init__(self, env, incpaths):
- #self.code = ''
- #self.module = ''
- #self.imports = []
-
- self.allnames = []
-
- self.re_module = re.compile("module\s+([^;]+)")
- self.re_import = re.compile("import\s+([^;]+)")
- self.re_import_bindings = re.compile("([^:]+):(.*)")
- self.re_import_alias = re.compile("[^=]+=(.+)")
-
- self.env = env
-
- self.nodes = []
- self.names = []
-
- self.incpaths = incpaths
-
- def tryfind(self, filename):
- """
- Search file a file matching an module/import directive
-
- :param filename: file to read
- :type filename: string
- """
- found = 0
- for n in self.incpaths:
- found = n.find_resource(filename.replace('.', '/') + '.d')
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
-
- def get_strings(self, code):
- """
- :param code: d code to parse
- :type code: string
- :return: the modules that the code uses
- :rtype: a list of match objects
- """
- #self.imports = []
- self.module = ''
- lst = []
-
- # get the module name (if present)
-
- mod_name = self.re_module.search(code)
- if mod_name:
- self.module = re.sub('\s+', '', mod_name.group(1)) # strip all whitespaces
-
- # go through the code, have a look at all import occurrences
-
- # first, lets look at anything beginning with "import" and ending with ";"
- import_iterator = self.re_import.finditer(code)
- if import_iterator:
- for import_match in import_iterator:
- import_match_str = re.sub('\s+', '', import_match.group(1)) # strip all whitespaces
-
- # does this end with an import bindings declaration?
- # (import bindings always terminate the list of imports)
- bindings_match = self.re_import_bindings.match(import_match_str)
- if bindings_match:
- import_match_str = bindings_match.group(1)
- # if so, extract the part before the ":" (since the module declaration(s) is/are located there)
-
- # split the matching string into a bunch of strings, separated by a comma
- matches = import_match_str.split(',')
-
- for match in matches:
- alias_match = self.re_import_alias.match(match)
- if alias_match:
- # is this an alias declaration? (alias = module name) if so, extract the module name
- match = alias_match.group(1)
-
- lst.append(match)
- return lst
-
- def start(self, node):
- """
- The parsing starts here
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- self.waiting = [node]
- # while the stack is not empty, add the dependencies
- while self.waiting:
- nd = self.waiting.pop(0)
- self.iter(nd)
-
- def iter(self, node):
- """
- Find all the modules that a file depends on, uses :py:meth:`waflib.Tools.d_scan.d_parser.tryfind` to process dependent files
-
- :param node: input file
- :type node: :py:class:`waflib.Node.Node`
- """
- path = node.abspath() # obtain the absolute path
- code = "".join(filter_comments(path)) # read the file and filter the comments
- names = self.get_strings(code) # obtain the import strings
- for x in names:
- # optimization
- if x in self.allnames:
- continue
- self.allnames.append(x)
-
- # for each name, see if it is like a node or not
- self.tryfind(x)
-
-def scan(self):
- "look for .d/.di used by a d file"
- env = self.env
- gruik = d_parser(env, self.generator.includes_nodes)
- node = self.inputs[0]
- gruik.start(node)
- nodes = gruik.nodes
- names = gruik.names
- return (nodes, names)
-
diff --git a/waflib/Tools/dbus.py b/waflib/Tools/dbus.py
deleted file mode 100644
index d520f1c..0000000
--- a/waflib/Tools/dbus.py
+++ /dev/null
@@ -1,70 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-"""
-Compiles dbus files with **dbus-binding-tool**
-
-Typical usage::
-
- def options(opt):
- opt.load('compiler_c dbus')
- def configure(conf):
- conf.load('compiler_c dbus')
- def build(bld):
- tg = bld.program(
- includes = '.',
- source = bld.path.ant_glob('*.c'),
- target = 'gnome-hello')
- tg.add_dbus_file('test.xml', 'test_prefix', 'glib-server')
-"""
-
-from waflib import Task, Errors
-from waflib.TaskGen import taskgen_method, before_method
-
-@taskgen_method
-def add_dbus_file(self, filename, prefix, mode):
- """
- Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
-
- :param filename: xml file to compile
- :type filename: string
- :param prefix: dbus binding tool prefix (--prefix=prefix)
- :type prefix: string
- :param mode: dbus binding tool mode (--mode=mode)
- :type mode: string
- """
- if not hasattr(self, 'dbus_lst'):
- self.dbus_lst = []
- if not 'process_dbus' in self.meths:
- self.meths.append('process_dbus')
- self.dbus_lst.append([filename, prefix, mode])
-
-@before_method('process_source')
-def process_dbus(self):
- """
- Processes the dbus files stored in the attribute *dbus_lst* to create :py:class:`waflib.Tools.dbus.dbus_binding_tool` instances.
- """
- for filename, prefix, mode in getattr(self, 'dbus_lst', []):
- node = self.path.find_resource(filename)
- if not node:
- raise Errors.WafError('file not found ' + filename)
- tsk = self.create_task('dbus_binding_tool', node, node.change_ext('.h'))
- tsk.env.DBUS_BINDING_TOOL_PREFIX = prefix
- tsk.env.DBUS_BINDING_TOOL_MODE = mode
-
-class dbus_binding_tool(Task.Task):
- """
- Compiles a dbus file
- """
- color = 'BLUE'
- ext_out = ['.h']
- run_str = '${DBUS_BINDING_TOOL} --prefix=${DBUS_BINDING_TOOL_PREFIX} --mode=${DBUS_BINDING_TOOL_MODE} --output=${TGT} ${SRC}'
- shell = True # temporary workaround for #795
-
-def configure(conf):
- """
- Detects the program dbus-binding-tool and sets ``conf.env.DBUS_BINDING_TOOL``
- """
- conf.find_program('dbus-binding-tool', var='DBUS_BINDING_TOOL')
-
diff --git a/waflib/Tools/dmd.py b/waflib/Tools/dmd.py
deleted file mode 100644
index 8917ca1..0000000
--- a/waflib/Tools/dmd.py
+++ /dev/null
@@ -1,80 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-# Thomas Nagy, 2008-2018 (ita)
-
-import sys
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_dmd(conf):
- """
- Finds the program *dmd*, *dmd2*, or *ldc* and set the variable *D*
- """
- conf.find_program(['dmd', 'dmd2', 'ldc'], var='D')
-
- # make sure that we're dealing with dmd1, dmd2, or ldc(1)
- out = conf.cmd_and_log(conf.env.D + ['--help'])
- if out.find("D Compiler v") == -1:
- out = conf.cmd_and_log(conf.env.D + ['-version'])
- if out.find("based on DMD v1.") == -1:
- conf.fatal("detected compiler is not dmd/ldc")
-
-@conf
-def common_flags_ldc(conf):
- """
- Sets the D flags required by *ldc*
- """
- v = conf.env
- v.DFLAGS = ['-d-version=Posix']
- v.LINKFLAGS = []
- v.DFLAGS_dshlib = ['-relocation-model=pic']
-
-@conf
-def common_flags_dmd(conf):
- """
- Set the flags required by *dmd* or *dmd2*
- """
- v = conf.env
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-of%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-of%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'
-
- v.LINKFLAGS_dprogram= ['-quiet']
-
- v.DFLAGS_dshlib = ['-fPIC']
- v.LINKFLAGS_dshlib = ['-L-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = ['-H', '-Hf']
- v.D_HDR_F = '%s'
-
-def configure(conf):
- """
- Configuration for *dmd*, *dmd2*, and *ldc*
- """
- conf.find_dmd()
-
- if sys.platform == 'win32':
- out = conf.cmd_and_log(conf.env.D + ['--help'])
- if out.find('D Compiler v2.') > -1:
- conf.fatal('dmd2 on Windows is not supported, use gdc or ldc2 instead')
-
- conf.load('ar')
- conf.load('d')
- conf.common_flags_dmd()
- conf.d_platform_flags()
-
- if str(conf.env.D).find('ldc') > -1:
- conf.common_flags_ldc()
-
diff --git a/waflib/Tools/errcheck.py b/waflib/Tools/errcheck.py
deleted file mode 100644
index de8d75a..0000000
--- a/waflib/Tools/errcheck.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Common mistakes highlighting.
-
-There is a performance impact, so this tool is only loaded when running ``waf -v``
-"""
-
-typos = {
-'feature':'features',
-'sources':'source',
-'targets':'target',
-'include':'includes',
-'export_include':'export_includes',
-'define':'defines',
-'importpath':'includes',
-'installpath':'install_path',
-'iscopy':'is_copy',
-'uses':'use',
-}
-
-meths_typos = ['__call__', 'program', 'shlib', 'stlib', 'objects']
-
-import sys
-from waflib import Logs, Build, Node, Task, TaskGen, ConfigSet, Errors, Utils
-from waflib.Tools import ccroot
-
-def check_same_targets(self):
- mp = Utils.defaultdict(list)
- uids = {}
-
- def check_task(tsk):
- if not isinstance(tsk, Task.Task):
- return
- if hasattr(tsk, 'no_errcheck_out'):
- return
-
- for node in tsk.outputs:
- mp[node].append(tsk)
- try:
- uids[tsk.uid()].append(tsk)
- except KeyError:
- uids[tsk.uid()] = [tsk]
-
- for g in self.groups:
- for tg in g:
- try:
- for tsk in tg.tasks:
- check_task(tsk)
- except AttributeError:
- # raised if not a task generator, which should be uncommon
- check_task(tg)
-
- dupe = False
- for (k, v) in mp.items():
- if len(v) > 1:
- dupe = True
- msg = '* Node %r is created more than once%s. The task generators are:' % (k, Logs.verbose == 1 and " (full message on 'waf -v -v')" or "")
- Logs.error(msg)
- for x in v:
- if Logs.verbose > 1:
- Logs.error(' %d. %r', 1 + v.index(x), x.generator)
- else:
- Logs.error(' %d. %r in %r', 1 + v.index(x), x.generator.name, getattr(x.generator, 'path', None))
- Logs.error('If you think that this is an error, set no_errcheck_out on the task instance')
-
- if not dupe:
- for (k, v) in uids.items():
- if len(v) > 1:
- Logs.error('* Several tasks use the same identifier. Please check the information on\n https://waf.io/apidocs/Task.html?highlight=uid#waflib.Task.Task.uid')
- tg_details = tsk.generator.name
- if Logs.verbose > 2:
- tg_details = tsk.generator
- for tsk in v:
- Logs.error(' - object %r (%r) defined in %r', tsk.__class__.__name__, tsk, tg_details)
-
-def check_invalid_constraints(self):
- feat = set()
- for x in list(TaskGen.feats.values()):
- feat.union(set(x))
- for (x, y) in TaskGen.task_gen.prec.items():
- feat.add(x)
- feat.union(set(y))
- ext = set()
- for x in TaskGen.task_gen.mappings.values():
- ext.add(x.__name__)
- invalid = ext & feat
- if invalid:
- Logs.error('The methods %r have invalid annotations: @extension <-> @feature/@before_method/@after_method', list(invalid))
-
- # the build scripts have been read, so we can check for invalid after/before attributes on task classes
- for cls in list(Task.classes.values()):
- if sys.hexversion > 0x3000000 and issubclass(cls, Task.Task) and isinstance(cls.hcode, str):
- raise Errors.WafError('Class %r has hcode value %r of type <str>, expecting <bytes> (use Utils.h_cmd() ?)' % (cls, cls.hcode))
-
- for x in ('before', 'after'):
- for y in Utils.to_list(getattr(cls, x, [])):
- if not Task.classes.get(y):
- Logs.error('Erroneous order constraint %r=%r on task class %r', x, y, cls.__name__)
- if getattr(cls, 'rule', None):
- Logs.error('Erroneous attribute "rule" on task class %r (rename to "run_str")', cls.__name__)
-
-def replace(m):
- """
- Replaces existing BuildContext methods to verify parameter names,
- for example ``bld(source=)`` has no ending *s*
- """
- oldcall = getattr(Build.BuildContext, m)
- def call(self, *k, **kw):
- ret = oldcall(self, *k, **kw)
- for x in typos:
- if x in kw:
- if x == 'iscopy' and 'subst' in getattr(self, 'features', ''):
- continue
- Logs.error('Fix the typo %r -> %r on %r', x, typos[x], ret)
- return ret
- setattr(Build.BuildContext, m, call)
-
-def enhance_lib():
- """
- Modifies existing classes and methods to enable error verification
- """
- for m in meths_typos:
- replace(m)
-
- # catch '..' in ant_glob patterns
- def ant_glob(self, *k, **kw):
- if k:
- lst = Utils.to_list(k[0])
- for pat in lst:
- sp = pat.split('/')
- if '..' in sp:
- Logs.error("In ant_glob pattern %r: '..' means 'two dots', not 'parent directory'", k[0])
- if '.' in sp:
- Logs.error("In ant_glob pattern %r: '.' means 'one dot', not 'current directory'", k[0])
- return self.old_ant_glob(*k, **kw)
- Node.Node.old_ant_glob = Node.Node.ant_glob
- Node.Node.ant_glob = ant_glob
-
- # catch ant_glob on build folders
- def ant_iter(self, accept=None, maxdepth=25, pats=[], dir=False, src=True, remove=True, quiet=False):
- if remove:
- try:
- if self.is_child_of(self.ctx.bldnode) and not quiet:
- quiet = True
- Logs.error('Calling ant_glob on build folders (%r) is dangerous: add quiet=True / remove=False', self)
- except AttributeError:
- pass
- return self.old_ant_iter(accept, maxdepth, pats, dir, src, remove, quiet)
- Node.Node.old_ant_iter = Node.Node.ant_iter
- Node.Node.ant_iter = ant_iter
-
- # catch conflicting ext_in/ext_out/before/after declarations
- old = Task.is_before
- def is_before(t1, t2):
- ret = old(t1, t2)
- if ret and old(t2, t1):
- Logs.error('Contradictory order constraints in classes %r %r', t1, t2)
- return ret
- Task.is_before = is_before
-
- # check for bld(feature='cshlib') where no 'c' is given - this can be either a mistake or on purpose
- # so we only issue a warning
- def check_err_features(self):
- lst = self.to_list(self.features)
- if 'shlib' in lst:
- Logs.error('feature shlib -> cshlib, dshlib or cxxshlib')
- for x in ('c', 'cxx', 'd', 'fc'):
- if not x in lst and lst and lst[0] in [x+y for y in ('program', 'shlib', 'stlib')]:
- Logs.error('%r features is probably missing %r', self, x)
- TaskGen.feature('*')(check_err_features)
-
- # check for erroneous order constraints
- def check_err_order(self):
- if not hasattr(self, 'rule') and not 'subst' in Utils.to_list(self.features):
- for x in ('before', 'after', 'ext_in', 'ext_out'):
- if hasattr(self, x):
- Logs.warn('Erroneous order constraint %r on non-rule based task generator %r', x, self)
- else:
- for x in ('before', 'after'):
- for y in self.to_list(getattr(self, x, [])):
- if not Task.classes.get(y):
- Logs.error('Erroneous order constraint %s=%r on %r (no such class)', x, y, self)
- TaskGen.feature('*')(check_err_order)
-
- # check for @extension used with @feature/@before_method/@after_method
- def check_compile(self):
- check_invalid_constraints(self)
- try:
- ret = self.orig_compile()
- finally:
- check_same_targets(self)
- return ret
- Build.BuildContext.orig_compile = Build.BuildContext.compile
- Build.BuildContext.compile = check_compile
-
- # check for invalid build groups #914
- def use_rec(self, name, **kw):
- try:
- y = self.bld.get_tgen_by_name(name)
- except Errors.WafError:
- pass
- else:
- idx = self.bld.get_group_idx(self)
- odx = self.bld.get_group_idx(y)
- if odx > idx:
- msg = "Invalid 'use' across build groups:"
- if Logs.verbose > 1:
- msg += '\n target %r\n uses:\n %r' % (self, y)
- else:
- msg += " %r uses %r (try 'waf -v -v' for the full error)" % (self.name, name)
- raise Errors.WafError(msg)
- self.orig_use_rec(name, **kw)
- TaskGen.task_gen.orig_use_rec = TaskGen.task_gen.use_rec
- TaskGen.task_gen.use_rec = use_rec
-
- # check for env.append
- def _getattr(self, name, default=None):
- if name == 'append' or name == 'add':
- raise Errors.WafError('env.append and env.add do not exist: use env.append_value/env.append_unique')
- elif name == 'prepend':
- raise Errors.WafError('env.prepend does not exist: use env.prepend_value')
- if name in self.__slots__:
- return super(ConfigSet.ConfigSet, self).__getattr__(name, default)
- else:
- return self[name]
- ConfigSet.ConfigSet.__getattr__ = _getattr
-
-
-def options(opt):
- """
- Error verification can be enabled by default (not just on ``waf -v``) by adding to the user script options
- """
- enhance_lib()
-
diff --git a/waflib/Tools/fc.py b/waflib/Tools/fc.py
deleted file mode 100644
index d9e8d8c..0000000
--- a/waflib/Tools/fc.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Fortran support
-"""
-
-from waflib import Utils, Task, Errors
-from waflib.Tools import ccroot, fc_config, fc_scan
-from waflib.TaskGen import extension
-from waflib.Configure import conf
-
-ccroot.USELIB_VARS['fc'] = set(['FCFLAGS', 'DEFINES', 'INCLUDES', 'FCPPFLAGS'])
-ccroot.USELIB_VARS['fcprogram_test'] = ccroot.USELIB_VARS['fcprogram'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-ccroot.USELIB_VARS['fcshlib'] = set(['LIB', 'STLIB', 'LIBPATH', 'STLIBPATH', 'LINKFLAGS', 'RPATH', 'LINKDEPS'])
-ccroot.USELIB_VARS['fcstlib'] = set(['ARFLAGS', 'LINKDEPS'])
-
-@extension('.f','.F','.f90','.F90','.for','.FOR','.f95','.F95','.f03','.F03','.f08','.F08')
-def fc_hook(self, node):
- "Binds the Fortran file extensions create :py:class:`waflib.Tools.fc.fc` instances"
- return self.create_compiled_task('fc', node)
-
-@conf
-def modfile(conf, name):
- """
- Turns a module name into the right module file name.
- Defaults to all lower case.
- """
- return {'lower' :name.lower() + '.mod',
- 'lower.MOD' :name.lower() + '.MOD',
- 'UPPER.mod' :name.upper() + '.mod',
- 'UPPER' :name.upper() + '.MOD'}[conf.env.FC_MOD_CAPITALIZATION or 'lower']
-
-def get_fortran_tasks(tsk):
- """
- Obtains all fortran tasks from the same build group. Those tasks must not have
- the attribute 'nomod' or 'mod_fortran_done'
-
- :return: a list of :py:class:`waflib.Tools.fc.fc` instances
- """
- bld = tsk.generator.bld
- tasks = bld.get_tasks_group(bld.get_group_idx(tsk.generator))
- return [x for x in tasks if isinstance(x, fc) and not getattr(x, 'nomod', None) and not getattr(x, 'mod_fortran_done', None)]
-
-class fc(Task.Task):
- """
- Fortran tasks can only run when all fortran tasks in a current task group are ready to be executed
- This may cause a deadlock if some fortran task is waiting for something that cannot happen (circular dependency)
- Should this ever happen, set the 'nomod=True' on those tasks instances to break the loop
- """
- color = 'GREEN'
- run_str = '${FC} ${FCFLAGS} ${FCINCPATH_ST:INCPATHS} ${FCDEFINES_ST:DEFINES} ${_FCMODOUTFLAGS} ${FC_TGT_F}${TGT[0].abspath()} ${FC_SRC_F}${SRC[0].abspath()} ${FCPPFLAGS}'
- vars = ["FORTRANMODPATHFLAG"]
-
- def scan(self):
- """Fortran dependency scanner"""
- tmp = fc_scan.fortran_parser(self.generator.includes_nodes)
- tmp.task = self
- tmp.start(self.inputs[0])
- return (tmp.nodes, tmp.names)
-
- def runnable_status(self):
- """
- Sets the mod file outputs and the dependencies on the mod files over all Fortran tasks
- executed by the main thread so there are no concurrency issues
- """
- if getattr(self, 'mod_fortran_done', None):
- return super(fc, self).runnable_status()
-
- # now, if we reach this part it is because this fortran task is the first in the list
- bld = self.generator.bld
-
- # obtain the fortran tasks
- lst = get_fortran_tasks(self)
-
- # disable this method for other tasks
- for tsk in lst:
- tsk.mod_fortran_done = True
-
- # wait for all the .f tasks to be ready for execution
- # and ensure that the scanners are called at least once
- for tsk in lst:
- ret = tsk.runnable_status()
- if ret == Task.ASK_LATER:
- # we have to wait for one of the other fortran tasks to be ready
- # this may deadlock if there are dependencies between fortran tasks
- # but this should not happen (we are setting them here!)
- for x in lst:
- x.mod_fortran_done = None
-
- return Task.ASK_LATER
-
- ins = Utils.defaultdict(set)
- outs = Utils.defaultdict(set)
-
- # the .mod files to create
- for tsk in lst:
- key = tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('MOD@'):
- name = bld.modfile(x.replace('MOD@', ''))
- node = bld.srcnode.find_or_declare(name)
- tsk.set_outputs(node)
- outs[node].add(tsk)
-
- # the .mod files to use
- for tsk in lst:
- key = tsk.uid()
- for x in bld.raw_deps[key]:
- if x.startswith('USE@'):
- name = bld.modfile(x.replace('USE@', ''))
- node = bld.srcnode.find_resource(name)
- if node and node not in tsk.outputs:
- if not node in bld.node_deps[key]:
- bld.node_deps[key].append(node)
- ins[node].add(tsk)
-
- # if the intersection matches, set the order
- for k in ins.keys():
- for a in ins[k]:
- a.run_after.update(outs[k])
- for x in outs[k]:
- self.generator.bld.producer.revdeps[x].add(a)
-
- # the scanner cannot output nodes, so we have to set them
- # ourselves as task.dep_nodes (additional input nodes)
- tmp = []
- for t in outs[k]:
- tmp.extend(t.outputs)
- a.dep_nodes.extend(tmp)
- a.dep_nodes.sort(key=lambda x: x.abspath())
-
- # the task objects have changed: clear the signature cache
- for tsk in lst:
- try:
- delattr(tsk, 'cache_sig')
- except AttributeError:
- pass
-
- return super(fc, self).runnable_status()
-
-class fcprogram(ccroot.link_task):
- """Links Fortran programs"""
- color = 'YELLOW'
- run_str = '${FC} ${LINKFLAGS} ${FCLNK_SRC_F}${SRC} ${FCLNK_TGT_F}${TGT[0].abspath()} ${RPATH_ST:RPATH} ${FCSTLIB_MARKER} ${FCSTLIBPATH_ST:STLIBPATH} ${FCSTLIB_ST:STLIB} ${FCSHLIB_MARKER} ${FCLIBPATH_ST:LIBPATH} ${FCLIB_ST:LIB} ${LDFLAGS}'
- inst_to = '${BINDIR}'
-
-class fcshlib(fcprogram):
- """Links Fortran libraries"""
- inst_to = '${LIBDIR}'
-
-class fcstlib(ccroot.stlink_task):
- """Links Fortran static libraries (uses ar by default)"""
- pass # do not remove the pass statement
-
-class fcprogram_test(fcprogram):
- """Custom link task to obtain compiler outputs for Fortran configuration tests"""
-
- def runnable_status(self):
- """This task is always executed"""
- ret = super(fcprogram_test, self).runnable_status()
- if ret == Task.SKIP_ME:
- ret = Task.RUN_ME
- return ret
-
- def exec_command(self, cmd, **kw):
- """Stores the compiler std our/err onto the build context, to bld.out + bld.err"""
- bld = self.generator.bld
-
- kw['shell'] = isinstance(cmd, str)
- kw['stdout'] = kw['stderr'] = Utils.subprocess.PIPE
- kw['cwd'] = self.get_cwd()
- bld.out = bld.err = ''
-
- bld.to_log('command: %s\n' % cmd)
-
- kw['output'] = 0
- try:
- (bld.out, bld.err) = bld.cmd_and_log(cmd, **kw)
- except Errors.WafError:
- return -1
-
- if bld.out:
- bld.to_log('out: %s\n' % bld.out)
- if bld.err:
- bld.to_log('err: %s\n' % bld.err)
-
diff --git a/waflib/Tools/fc_config.py b/waflib/Tools/fc_config.py
deleted file mode 100644
index 222f3a5..0000000
--- a/waflib/Tools/fc_config.py
+++ /dev/null
@@ -1,488 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Fortran configuration helpers
-"""
-
-import re, os, sys, shlex
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method
-
-FC_FRAGMENT = ' program main\n end program main\n'
-FC_FRAGMENT2 = ' PROGRAM MAIN\n END\n' # what's the actual difference between these?
-
-@conf
-def fc_flags(conf):
- """
- Defines common fortran configuration flags and file extensions
- """
- v = conf.env
-
- v.FC_SRC_F = []
- v.FC_TGT_F = ['-c', '-o']
- v.FCINCPATH_ST = '-I%s'
- v.FCDEFINES_ST = '-D%s'
-
- if not v.LINK_FC:
- v.LINK_FC = v.FC
-
- v.FCLNK_SRC_F = []
- v.FCLNK_TGT_F = ['-o']
-
- v.FCFLAGS_fcshlib = ['-fpic']
- v.LINKFLAGS_fcshlib = ['-shared']
- v.fcshlib_PATTERN = 'lib%s.so'
-
- v.fcstlib_PATTERN = 'lib%s.a'
-
- v.FCLIB_ST = '-l%s'
- v.FCLIBPATH_ST = '-L%s'
- v.FCSTLIB_ST = '-l%s'
- v.FCSTLIBPATH_ST = '-L%s'
- v.FCSTLIB_MARKER = '-Wl,-Bstatic'
- v.FCSHLIB_MARKER = '-Wl,-Bdynamic'
-
- v.SONAME_ST = '-Wl,-h,%s'
-
-@conf
-def fc_add_flags(conf):
- """
- Adds FCFLAGS / LDFLAGS / LINKFLAGS from os.environ to conf.env
- """
- conf.add_os_flags('FCPPFLAGS', dup=False)
- conf.add_os_flags('FCFLAGS', dup=False)
- conf.add_os_flags('LINKFLAGS', dup=False)
- conf.add_os_flags('LDFLAGS', dup=False)
-
-@conf
-def check_fortran(self, *k, **kw):
- """
- Compiles a Fortran program to ensure that the settings are correct
- """
- self.check_cc(
- fragment = FC_FRAGMENT,
- compile_filename = 'test.f',
- features = 'fc fcprogram',
- msg = 'Compiling a simple fortran app')
-
-@conf
-def check_fc(self, *k, **kw):
- """
- Same as :py:func:`waflib.Tools.c_config.check` but defaults to the *Fortran* programming language
- (this overrides the C defaults in :py:func:`waflib.Tools.c_config.validate_c`)
- """
- kw['compiler'] = 'fc'
- if not 'compile_mode' in kw:
- kw['compile_mode'] = 'fc'
- if not 'type' in kw:
- kw['type'] = 'fcprogram'
- if not 'compile_filename' in kw:
- kw['compile_filename'] = 'test.f90'
- if not 'code' in kw:
- kw['code'] = FC_FRAGMENT
- return self.check(*k, **kw)
-
-# ------------------------------------------------------------------------
-# --- These are the default platform modifiers, refactored here for
-# convenience. gfortran and g95 have much overlap.
-# ------------------------------------------------------------------------
-
-@conf
-def fortran_modifier_darwin(conf):
- """
- Defines Fortran flags and extensions for OSX systems
- """
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.LINKFLAGS_fcshlib = ['-dynamiclib']
- v.fcshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
-
- v.LINKFLAGS_fcstlib = []
-
- v.FCSHLIB_MARKER = ''
- v.FCSTLIB_MARKER = ''
- v.SONAME_ST = ''
-
-@conf
-def fortran_modifier_win32(conf):
- """
- Defines Fortran flags for Windows platforms
- """
- v = conf.env
- v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
-
- v.fcshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
-
- v.FCFLAGS_fcshlib = []
-
- # Auto-import is enabled by default even without this option,
- # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
- # that the linker emits otherwise.
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
-
-@conf
-def fortran_modifier_cygwin(conf):
- """
- Defines Fortran flags for use on cygwin
- """
- fortran_modifier_win32(conf)
- v = conf.env
- v.fcshlib_PATTERN = 'cyg%s.dll'
- v.append_value('LINKFLAGS_fcshlib', ['-Wl,--enable-auto-image-base'])
- v.FCFLAGS_fcshlib = []
-
-# ------------------------------------------------------------------------
-
-@conf
-def check_fortran_dummy_main(self, *k, **kw):
- """
- Determines if a main function is needed by compiling a code snippet with
- the C compiler and linking it with the Fortran compiler (useful on unix-like systems)
- """
- if not self.env.CC:
- self.fatal('A c compiler is required for check_fortran_dummy_main')
-
- lst = ['MAIN__', '__MAIN', '_MAIN', 'MAIN_', 'MAIN']
- lst.extend([m.lower() for m in lst])
- lst.append('')
-
- self.start_msg('Detecting whether we need a dummy main')
- for main in lst:
- kw['fortran_main'] = main
- try:
- self.check_cc(
- fragment = 'int %s() { return 0; }\n' % (main or 'test'),
- features = 'c fcprogram',
- mandatory = True
- )
- if not main:
- self.env.FC_MAIN = -1
- self.end_msg('no')
- else:
- self.env.FC_MAIN = main
- self.end_msg('yes %s' % main)
- break
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg('not found')
- self.fatal('could not detect whether fortran requires a dummy main, see the config.log')
-
-# ------------------------------------------------------------------------
-
-GCC_DRIVER_LINE = re.compile('^Driving:')
-POSIX_STATIC_EXT = re.compile('\S+\.a')
-POSIX_LIB_FLAGS = re.compile('-l\S+')
-
-@conf
-def is_link_verbose(self, txt):
- """Returns True if 'useful' link options can be found in txt"""
- assert isinstance(txt, str)
- for line in txt.splitlines():
- if not GCC_DRIVER_LINE.search(line):
- if POSIX_STATIC_EXT.search(line) or POSIX_LIB_FLAGS.search(line):
- return True
- return False
-
-@conf
-def check_fortran_verbose_flag(self, *k, **kw):
- """
- Checks what kind of verbose (-v) flag works, then sets it to env.FC_VERBOSE_FLAG
- """
- self.start_msg('fortran link verbose flag')
- for x in ('-v', '--verbose', '-verbose', '-V'):
- try:
- self.check_cc(
- features = 'fc fcprogram_test',
- fragment = FC_FRAGMENT2,
- compile_filename = 'test.f',
- linkflags = [x],
- mandatory=True)
- except self.errors.ConfigurationError:
- pass
- else:
- # output is on stderr or stdout (for xlf)
- if self.is_link_verbose(self.test_bld.err) or self.is_link_verbose(self.test_bld.out):
- self.end_msg(x)
- break
- else:
- self.end_msg('failure')
- self.fatal('Could not obtain the fortran link verbose flag (see config.log)')
-
- self.env.FC_VERBOSE_FLAG = x
- return x
-
-# ------------------------------------------------------------------------
-
-# linkflags which match those are ignored
-LINKFLAGS_IGNORED = [r'-lang*', r'-lcrt[a-zA-Z0-9\.]*\.o', r'-lc$', r'-lSystem', r'-libmil', r'-LIST:*', r'-LNO:*']
-if os.name == 'nt':
- LINKFLAGS_IGNORED.extend([r'-lfrt*', r'-luser32', r'-lkernel32', r'-ladvapi32', r'-lmsvcrt', r'-lshell32', r'-lmingw', r'-lmoldname'])
-else:
- LINKFLAGS_IGNORED.append(r'-lgcc*')
-RLINKFLAGS_IGNORED = [re.compile(f) for f in LINKFLAGS_IGNORED]
-
-def _match_ignore(line):
- """Returns True if the line should be ignored (Fortran verbose flag test)"""
- for i in RLINKFLAGS_IGNORED:
- if i.match(line):
- return True
- return False
-
-def parse_fortran_link(lines):
- """Given the output of verbose link of Fortran compiler, this returns a
- list of flags necessary for linking using the standard linker."""
- final_flags = []
- for line in lines:
- if not GCC_DRIVER_LINE.match(line):
- _parse_flink_line(line, final_flags)
- return final_flags
-
-SPACE_OPTS = re.compile('^-[LRuYz]$')
-NOSPACE_OPTS = re.compile('^-[RL]')
-
-def _parse_flink_token(lexer, token, tmp_flags):
- # Here we go (convention for wildcard is shell, not regex !)
- # 1 TODO: we first get some root .a libraries
- # 2 TODO: take everything starting by -bI:*
- # 3 Ignore the following flags: -lang* | -lcrt*.o | -lc |
- # -lgcc* | -lSystem | -libmil | -LANG:=* | -LIST:* | -LNO:*)
- # 4 take into account -lkernel32
- # 5 For options of the kind -[[LRuYz]], as they take one argument
- # after, the actual option is the next token
- # 6 For -YP,*: take and replace by -Larg where arg is the old
- # argument
- # 7 For -[lLR]*: take
-
- # step 3
- if _match_ignore(token):
- pass
- # step 4
- elif token.startswith('-lkernel32') and sys.platform == 'cygwin':
- tmp_flags.append(token)
- # step 5
- elif SPACE_OPTS.match(token):
- t = lexer.get_token()
- if t.startswith('P,'):
- t = t[2:]
- for opt in t.split(os.pathsep):
- tmp_flags.append('-L%s' % opt)
- # step 6
- elif NOSPACE_OPTS.match(token):
- tmp_flags.append(token)
- # step 7
- elif POSIX_LIB_FLAGS.match(token):
- tmp_flags.append(token)
- else:
- # ignore anything not explicitly taken into account
- pass
-
- t = lexer.get_token()
- return t
-
-def _parse_flink_line(line, final_flags):
- """private"""
- lexer = shlex.shlex(line, posix = True)
- lexer.whitespace_split = True
-
- t = lexer.get_token()
- tmp_flags = []
- while t:
- t = _parse_flink_token(lexer, t, tmp_flags)
-
- final_flags.extend(tmp_flags)
- return final_flags
-
-@conf
-def check_fortran_clib(self, autoadd=True, *k, **kw):
- """
- Obtains the flags for linking with the C library
- if this check works, add uselib='CLIB' to your task generators
- """
- if not self.env.FC_VERBOSE_FLAG:
- self.fatal('env.FC_VERBOSE_FLAG is not set: execute check_fortran_verbose_flag?')
-
- self.start_msg('Getting fortran runtime link flags')
- try:
- self.check_cc(
- fragment = FC_FRAGMENT2,
- compile_filename = 'test.f',
- features = 'fc fcprogram_test',
- linkflags = [self.env.FC_VERBOSE_FLAG]
- )
- except Exception:
- self.end_msg(False)
- if kw.get('mandatory', True):
- conf.fatal('Could not find the c library flags')
- else:
- out = self.test_bld.err
- flags = parse_fortran_link(out.splitlines())
- self.end_msg('ok (%s)' % ' '.join(flags))
- self.env.LINKFLAGS_CLIB = flags
- return flags
- return []
-
-def getoutput(conf, cmd, stdin=False):
- """
- Obtains Fortran command outputs
- """
- from waflib import Errors
- if conf.env.env:
- env = conf.env.env
- else:
- env = dict(os.environ)
- env['LANG'] = 'C'
- input = stdin and '\n'.encode() or None
- try:
- out, err = conf.cmd_and_log(cmd, env=env, output=0, input=input)
- except Errors.WafError as e:
- # An WafError might indicate an error code during the command
- # execution, in this case we still obtain the stderr and stdout,
- # which we can use to find the version string.
- if not (hasattr(e, 'stderr') and hasattr(e, 'stdout')):
- raise e
- else:
- # Ignore the return code and return the original
- # stdout and stderr.
- out = e.stdout
- err = e.stderr
- except Exception:
- conf.fatal('could not determine the compiler version %r' % cmd)
- return (out, err)
-
-# ------------------------------------------------------------------------
-
-ROUTINES_CODE = """\
- subroutine foobar()
- return
- end
- subroutine foo_bar()
- return
- end
-"""
-
-MAIN_CODE = """
-void %(dummy_func_nounder)s(void);
-void %(dummy_func_under)s(void);
-int %(main_func_name)s() {
- %(dummy_func_nounder)s();
- %(dummy_func_under)s();
- return 0;
-}
-"""
-
-@feature('link_main_routines_func')
-@before_method('process_source')
-def link_main_routines_tg_method(self):
- """
- The configuration test declares a unique task generator,
- so we create other task generators from there for fortran link tests
- """
- def write_test_file(task):
- task.outputs[0].write(task.generator.code)
- bld = self.bld
- bld(rule=write_test_file, target='main.c', code=MAIN_CODE % self.__dict__)
- bld(rule=write_test_file, target='test.f', code=ROUTINES_CODE)
- bld(features='fc fcstlib', source='test.f', target='test')
- bld(features='c fcprogram', source='main.c', target='app', use='test')
-
-def mangling_schemes():
- """
- Generate triplets for use with mangle_name
- (used in check_fortran_mangling)
- the order is tuned for gfortan
- """
- for u in ('_', ''):
- for du in ('', '_'):
- for c in ("lower", "upper"):
- yield (u, du, c)
-
-def mangle_name(u, du, c, name):
- """Mangle a name from a triplet (used in check_fortran_mangling)"""
- return getattr(name, c)() + u + (name.find('_') != -1 and du or '')
-
-@conf
-def check_fortran_mangling(self, *k, **kw):
- """
- Detect the mangling scheme, sets FORTRAN_MANGLING to the triplet found
-
- This test will compile a fortran static library, then link a c app against it
- """
- if not self.env.CC:
- self.fatal('A c compiler is required for link_main_routines')
- if not self.env.FC:
- self.fatal('A fortran compiler is required for link_main_routines')
- if not self.env.FC_MAIN:
- self.fatal('Checking for mangling requires self.env.FC_MAIN (execute "check_fortran_dummy_main" first?)')
-
- self.start_msg('Getting fortran mangling scheme')
- for (u, du, c) in mangling_schemes():
- try:
- self.check_cc(
- compile_filename = [],
- features = 'link_main_routines_func',
- msg = 'nomsg',
- errmsg = 'nomsg',
- dummy_func_nounder = mangle_name(u, du, c, 'foobar'),
- dummy_func_under = mangle_name(u, du, c, 'foo_bar'),
- main_func_name = self.env.FC_MAIN
- )
- except self.errors.ConfigurationError:
- pass
- else:
- self.end_msg("ok ('%s', '%s', '%s-case')" % (u, du, c))
- self.env.FORTRAN_MANGLING = (u, du, c)
- break
- else:
- self.end_msg(False)
- self.fatal('mangler not found')
- return (u, du, c)
-
-@feature('pyext')
-@before_method('propagate_uselib_vars', 'apply_link')
-def set_lib_pat(self):
- """Sets the Fortran flags for linking with Python"""
- self.env.fcshlib_PATTERN = self.env.pyext_PATTERN
-
-@conf
-def detect_openmp(self):
- """
- Detects openmp flags and sets the OPENMP ``FCFLAGS``/``LINKFLAGS``
- """
- for x in ('-fopenmp','-openmp','-mp','-xopenmp','-omp','-qsmp=omp'):
- try:
- self.check_fc(
- msg = 'Checking for OpenMP flag %s' % x,
- fragment = 'program main\n call omp_get_num_threads()\nend program main',
- fcflags = x,
- linkflags = x,
- uselib_store = 'OPENMP'
- )
- except self.errors.ConfigurationError:
- pass
- else:
- break
- else:
- self.fatal('Could not find OpenMP')
-
-@conf
-def check_gfortran_o_space(self):
- if self.env.FC_NAME != 'GFORTRAN' or int(self.env.FC_VERSION[0]) > 4:
- # This is for old compilers and only for gfortran.
- # No idea how other implementations handle this. Be safe and bail out.
- return
- self.env.stash()
- self.env.FCLNK_TGT_F = ['-o', '']
- try:
- self.check_fc(msg='Checking if the -o link must be split from arguments', fragment=FC_FRAGMENT, features='fc fcshlib')
- except self.errors.ConfigurationError:
- self.env.revert()
- else:
- self.env.commit()
diff --git a/waflib/Tools/fc_scan.py b/waflib/Tools/fc_scan.py
deleted file mode 100644
index 12cb0fc..0000000
--- a/waflib/Tools/fc_scan.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-
-INC_REGEX = """(?:^|['">]\s*;)\s*(?:|#\s*)INCLUDE\s+(?:\w+_)?[<"'](.+?)(?=["'>])"""
-USE_REGEX = """(?:^|;)\s*USE(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-MOD_REGEX = """(?:^|;)\s*MODULE(?!\s*PROCEDURE)(?:\s+|(?:(?:\s*,\s*(?:NON_)?INTRINSIC)?\s*::))\s*(\w+)"""
-
-re_inc = re.compile(INC_REGEX, re.I)
-re_use = re.compile(USE_REGEX, re.I)
-re_mod = re.compile(MOD_REGEX, re.I)
-
-class fortran_parser(object):
- """
- This parser returns:
-
- * the nodes corresponding to the module names to produce
- * the nodes corresponding to the include files used
- * the module names used by the fortran files
- """
- def __init__(self, incpaths):
- self.seen = []
- """Files already parsed"""
-
- self.nodes = []
- """List of :py:class:`waflib.Node.Node` representing the dependencies to return"""
-
- self.names = []
- """List of module names to return"""
-
- self.incpaths = incpaths
- """List of :py:class:`waflib.Node.Node` representing the include paths"""
-
- def find_deps(self, node):
- """
- Parses a Fortran file to obtain the dependencies used/provided
-
- :param node: fortran file to read
- :type node: :py:class:`waflib.Node.Node`
- :return: lists representing the includes, the modules used, and the modules created by a fortran file
- :rtype: tuple of list of strings
- """
- txt = node.read()
- incs = []
- uses = []
- mods = []
- for line in txt.splitlines():
- # line by line regexp search? optimize?
- m = re_inc.search(line)
- if m:
- incs.append(m.group(1))
- m = re_use.search(line)
- if m:
- uses.append(m.group(1))
- m = re_mod.search(line)
- if m:
- mods.append(m.group(1))
- return (incs, uses, mods)
-
- def start(self, node):
- """
- Start parsing. Use the stack ``self.waiting`` to hold nodes to iterate on
-
- :param node: fortran file
- :type node: :py:class:`waflib.Node.Node`
- """
- self.waiting = [node]
- while self.waiting:
- nd = self.waiting.pop(0)
- self.iter(nd)
-
- def iter(self, node):
- """
- Processes a single file during dependency parsing. Extracts files used
- modules used and modules provided.
- """
- incs, uses, mods = self.find_deps(node)
- for x in incs:
- if x in self.seen:
- continue
- self.seen.append(x)
- self.tryfind_header(x)
-
- for x in uses:
- name = "USE@%s" % x
- if not name in self.names:
- self.names.append(name)
-
- for x in mods:
- name = "MOD@%s" % x
- if not name in self.names:
- self.names.append(name)
-
- def tryfind_header(self, filename):
- """
- Adds an include file to the list of nodes to process
-
- :param filename: file name
- :type filename: string
- """
- found = None
- for n in self.incpaths:
- found = n.find_resource(filename)
- if found:
- self.nodes.append(found)
- self.waiting.append(found)
- break
- if not found:
- if not filename in self.names:
- self.names.append(filename)
-
diff --git a/waflib/Tools/flex.py b/waflib/Tools/flex.py
deleted file mode 100644
index 2256657..0000000
--- a/waflib/Tools/flex.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# John O'Meara, 2006
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-The **flex** program is a code generator which creates C or C++ files.
-The generated files are compiled into object files.
-"""
-
-import os, re
-from waflib import Task, TaskGen
-from waflib.Tools import ccroot
-
-def decide_ext(self, node):
- if 'cxx' in self.features:
- return ['.lex.cc']
- return ['.lex.c']
-
-def flexfun(tsk):
- env = tsk.env
- bld = tsk.generator.bld
- wd = bld.variant_dir
- def to_list(xx):
- if isinstance(xx, str):
- return [xx]
- return xx
- tsk.last_cmd = lst = []
- lst.extend(to_list(env.FLEX))
- lst.extend(to_list(env.FLEXFLAGS))
- inputs = [a.path_from(tsk.get_cwd()) for a in tsk.inputs]
- if env.FLEX_MSYS:
- inputs = [x.replace(os.sep, '/') for x in inputs]
- lst.extend(inputs)
- lst = [x for x in lst if x]
- txt = bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
- tsk.outputs[0].write(txt.replace('\r\n', '\n').replace('\r', '\n')) # issue #1207
-
-TaskGen.declare_chain(
- name = 'flex',
- rule = flexfun, # issue #854
- ext_in = '.l',
- decider = decide_ext,
-)
-
-# To support the following:
-# bld(features='c', flexflags='-P/foo')
-Task.classes['flex'].vars = ['FLEXFLAGS', 'FLEX']
-ccroot.USELIB_VARS['c'].add('FLEXFLAGS')
-ccroot.USELIB_VARS['cxx'].add('FLEXFLAGS')
-
-def configure(conf):
- """
- Detect the *flex* program
- """
- conf.find_program('flex', var='FLEX')
- conf.env.FLEXFLAGS = ['-t']
-
- if re.search (r"\\msys\\[0-9.]+\\bin\\flex.exe$", conf.env.FLEX[0]):
- # this is the flex shipped with MSYS
- conf.env.FLEX_MSYS = True
-
diff --git a/waflib/Tools/g95.py b/waflib/Tools/g95.py
deleted file mode 100644
index f69ba4f..0000000
--- a/waflib/Tools/g95.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# KWS 2010
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-from waflib import Utils
-from waflib.Tools import fc, fc_config, fc_scan, ar
-from waflib.Configure import conf
-
-@conf
-def find_g95(conf):
- fc = conf.find_program('g95', var='FC')
- conf.get_g95_version(fc)
- conf.env.FC_NAME = 'G95'
-
-@conf
-def g95_flags(conf):
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.FORTRANMODFLAG = ['-fmod=', ''] # template for module path
- v.FCFLAGS_DEBUG = ['-Werror'] # why not
-
-@conf
-def g95_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-
-@conf
-def g95_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-
-@conf
-def g95_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def g95_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- g95_modifier_func = getattr(conf, 'g95_modifier_' + dest_os, None)
- if g95_modifier_func:
- g95_modifier_func()
-
-@conf
-def get_g95_version(conf, fc):
- """get the compiler version"""
-
- version_re = re.compile(r"g95\s*(?P<major>\d*)\.(?P<minor>\d*)").search
- cmd = fc + ['--version']
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('cannot determine g95 version')
- k = match.groupdict()
- conf.env.FC_VERSION = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_g95()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.g95_flags()
- conf.g95_modifier_platform()
-
diff --git a/waflib/Tools/gas.py b/waflib/Tools/gas.py
deleted file mode 100644
index 77afed7..0000000
--- a/waflib/Tools/gas.py
+++ /dev/null
@@ -1,18 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"Detect as/gas/gcc for compiling assembly files"
-
-import waflib.Tools.asm # - leave this
-from waflib.Tools import ar
-
-def configure(conf):
- """
- Find the programs gas/as/gcc and set the variable *AS*
- """
- conf.find_program(['gas', 'gcc'], var='AS')
- conf.env.AS_TGT_F = ['-c', '-o']
- conf.env.ASLNK_TGT_F = ['-o']
- conf.find_ar()
- conf.load('asm')
diff --git a/waflib/Tools/gcc.py b/waflib/Tools/gcc.py
deleted file mode 100644
index acdd473..0000000
--- a/waflib/Tools/gcc.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-
-"""
-gcc/llvm detection.
-"""
-
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_gcc(conf):
- """
- Find the program gcc, and if present, try to detect its version number
- """
- cc = conf.find_program(['gcc', 'cc'], var='CC')
- conf.get_cc_version(cc, gcc=True)
- conf.env.CC_NAME = 'gcc'
-
-@conf
-def gcc_common_flags(conf):
- """
- Common flags for gcc on nearly all platforms
- """
- v = conf.env
-
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o']
-
- if not v.LINK_CC:
- v.LINK_CC = v.CC
-
- v.CCLNK_SRC_F = []
- v.CCLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
-
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Wl,-Bdynamic'
- v.STLIB_MARKER = '-Wl,-Bstatic'
-
- v.cprogram_PATTERN = '%s'
-
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-shared']
- v.cshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cstlib = ['-Wl,-Bstatic']
- v.cstlib_PATTERN = 'lib%s.a'
-
- v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
- v.CFLAGS_MACBUNDLE = ['-fPIC']
- v.macbundle_PATTERN = '%s.bundle'
-
-@conf
-def gcc_modifier_win32(conf):
- """Configuration flags for executing gcc on Windows"""
- v = conf.env
- v.cprogram_PATTERN = '%s.exe'
-
- v.cshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
-
- v.CFLAGS_cshlib = []
-
- # Auto-import is enabled by default even without this option,
- # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
- # that the linker emits otherwise.
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
-
-@conf
-def gcc_modifier_cygwin(conf):
- """Configuration flags for executing gcc on Cygwin"""
- gcc_modifier_win32(conf)
- v = conf.env
- v.cshlib_PATTERN = 'cyg%s.dll'
- v.append_value('LINKFLAGS_cshlib', ['-Wl,--enable-auto-image-base'])
- v.CFLAGS_cshlib = []
-
-@conf
-def gcc_modifier_darwin(conf):
- """Configuration flags for executing gcc on MacOS"""
- v = conf.env
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-dynamiclib']
- v.cshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
- v.ARCH_ST = ['-arch']
-
- v.LINKFLAGS_cstlib = []
-
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
-
-@conf
-def gcc_modifier_aix(conf):
- """Configuration flags for executing gcc on AIX"""
- v = conf.env
- v.LINKFLAGS_cprogram = ['-Wl,-brtl']
- v.LINKFLAGS_cshlib = ['-shared','-Wl,-brtl,-bexpfull']
- v.SHLIB_MARKER = []
-
-@conf
-def gcc_modifier_hpux(conf):
- v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.CFLAGS_cshlib = ['-fPIC','-DPIC']
- v.cshlib_PATTERN = 'lib%s.sl'
-
-@conf
-def gcc_modifier_openbsd(conf):
- conf.env.SONAME_ST = []
-
-@conf
-def gcc_modifier_osf1V(conf):
- v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
-
-@conf
-def gcc_modifier_platform(conf):
- """Execute platform-specific functions based on *gcc_modifier_+NAME*"""
- # * set configurations specific for a platform.
- # * the destination platform is detected automatically by looking at the macros the compiler predefines,
- # and if it's not recognised, it fallbacks to sys.platform.
- gcc_modifier_func = getattr(conf, 'gcc_modifier_' + conf.env.DEST_OS, None)
- if gcc_modifier_func:
- gcc_modifier_func()
-
-def configure(conf):
- """
- Configuration for gcc
- """
- conf.find_gcc()
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
- conf.check_gcc_o_space()
-
diff --git a/waflib/Tools/gdc.py b/waflib/Tools/gdc.py
deleted file mode 100644
index d89a66d..0000000
--- a/waflib/Tools/gdc.py
+++ /dev/null
@@ -1,55 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2007 (dv)
-
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_gdc(conf):
- """
- Finds the program gdc and set the variable *D*
- """
- conf.find_program('gdc', var='D')
-
- out = conf.cmd_and_log(conf.env.D + ['--version'])
- if out.find("gdc") == -1:
- conf.fatal("detected compiler is not gdc")
-
-@conf
-def common_flags_gdc(conf):
- """
- Sets the flags required by *gdc*
- """
- v = conf.env
-
- v.DFLAGS = []
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-o%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-o%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L%s'
-
- v.LINKFLAGS_dshlib = ['-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = '-fintfc'
- v.D_HDR_F = '-fintfc-file=%s'
-
-def configure(conf):
- """
- Configuration for gdc
- """
- conf.find_gdc()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_gdc()
- conf.d_platform_flags()
-
diff --git a/waflib/Tools/gfortran.py b/waflib/Tools/gfortran.py
deleted file mode 100644
index 1050667..0000000
--- a/waflib/Tools/gfortran.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import re
-from waflib import Utils
-from waflib.Tools import fc, fc_config, fc_scan, ar
-from waflib.Configure import conf
-
-@conf
-def find_gfortran(conf):
- """Find the gfortran program (will look in the environment variable 'FC')"""
- fc = conf.find_program(['gfortran','g77'], var='FC')
- # (fallback to g77 for systems, where no gfortran is available)
- conf.get_gfortran_version(fc)
- conf.env.FC_NAME = 'GFORTRAN'
-
-@conf
-def gfortran_flags(conf):
- v = conf.env
- v.FCFLAGS_fcshlib = ['-fPIC']
- v.FORTRANMODFLAG = ['-J', ''] # template for module path
- v.FCFLAGS_DEBUG = ['-Werror'] # why not
-
-@conf
-def gfortran_modifier_win32(conf):
- fc_config.fortran_modifier_win32(conf)
-
-@conf
-def gfortran_modifier_cygwin(conf):
- fc_config.fortran_modifier_cygwin(conf)
-
-@conf
-def gfortran_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def gfortran_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- gfortran_modifier_func = getattr(conf, 'gfortran_modifier_' + dest_os, None)
- if gfortran_modifier_func:
- gfortran_modifier_func()
-
-@conf
-def get_gfortran_version(conf, fc):
- """Get the compiler version"""
-
- # ensure this is actually gfortran, not an imposter.
- version_re = re.compile(r"GNU\s*Fortran", re.I).search
- cmd = fc + ['--version']
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the compiler type')
-
- # --- now get more detailed info -- see c_config.get_cc_version
- cmd = fc + ['-dM', '-E', '-']
- out, err = fc_config.getoutput(conf, cmd, stdin=True)
-
- if out.find('__GNUC__') < 0:
- conf.fatal('Could not determine the compiler type')
-
- k = {}
- out = out.splitlines()
- import shlex
-
- for line in out:
- lst = shlex.split(line)
- if len(lst)>2:
- key = lst[1]
- val = lst[2]
- k[key] = val
-
- def isD(var):
- return var in k
-
- def isT(var):
- return var in k and k[var] != '0'
-
- conf.env.FC_VERSION = (k['__GNUC__'], k['__GNUC_MINOR__'], k['__GNUC_PATCHLEVEL__'])
-
-def configure(conf):
- conf.find_gfortran()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.gfortran_flags()
- conf.gfortran_modifier_platform()
- conf.check_gfortran_o_space()
diff --git a/waflib/Tools/glib2.py b/waflib/Tools/glib2.py
deleted file mode 100644
index 949fe37..0000000
--- a/waflib/Tools/glib2.py
+++ /dev/null
@@ -1,489 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Support for GLib2 tools:
-
-* marshal
-* enums
-* gsettings
-* gresource
-"""
-
-import os
-import functools
-from waflib import Context, Task, Utils, Options, Errors, Logs
-from waflib.TaskGen import taskgen_method, before_method, feature, extension
-from waflib.Configure import conf
-
-################## marshal files
-
-@taskgen_method
-def add_marshal_file(self, filename, prefix):
- """
- Adds a file to the list of marshal files to process. Store them in the attribute *marshal_list*.
-
- :param filename: xml file to compile
- :type filename: string
- :param prefix: marshal prefix (--prefix=prefix)
- :type prefix: string
- """
- if not hasattr(self, 'marshal_list'):
- self.marshal_list = []
- self.meths.append('process_marshal')
- self.marshal_list.append((filename, prefix))
-
-@before_method('process_source')
-def process_marshal(self):
- """
- Processes the marshal files stored in the attribute *marshal_list* to create :py:class:`waflib.Tools.glib2.glib_genmarshal` instances.
- Adds the c file created to the list of source to process.
- """
- for f, prefix in getattr(self, 'marshal_list', []):
- node = self.path.find_resource(f)
-
- if not node:
- raise Errors.WafError('file not found %r' % f)
-
- h_node = node.change_ext('.h')
- c_node = node.change_ext('.c')
-
- task = self.create_task('glib_genmarshal', node, [h_node, c_node])
- task.env.GLIB_GENMARSHAL_PREFIX = prefix
- self.source = self.to_nodes(getattr(self, 'source', []))
- self.source.append(c_node)
-
-class glib_genmarshal(Task.Task):
- vars = ['GLIB_GENMARSHAL_PREFIX', 'GLIB_GENMARSHAL']
- color = 'BLUE'
- ext_out = ['.h']
- def run(self):
- bld = self.generator.bld
-
- get = self.env.get_flat
- cmd1 = "%s %s --prefix=%s --header > %s" % (
- get('GLIB_GENMARSHAL'),
- self.inputs[0].srcpath(),
- get('GLIB_GENMARSHAL_PREFIX'),
- self.outputs[0].abspath()
- )
-
- ret = bld.exec_command(cmd1)
- if ret:
- return ret
-
- #print self.outputs[1].abspath()
- c = '''#include "%s"\n''' % self.outputs[0].name
- self.outputs[1].write(c)
-
- cmd2 = "%s %s --prefix=%s --body >> %s" % (
- get('GLIB_GENMARSHAL'),
- self.inputs[0].srcpath(),
- get('GLIB_GENMARSHAL_PREFIX'),
- self.outputs[1].abspath()
- )
- return bld.exec_command(cmd2)
-
-########################## glib-mkenums
-
-@taskgen_method
-def add_enums_from_template(self, source='', target='', template='', comments=''):
- """
- Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
-
- :param source: enum file to process
- :type source: string
- :param target: target file
- :type target: string
- :param template: template file
- :type template: string
- :param comments: comments
- :type comments: string
- """
- if not hasattr(self, 'enums_list'):
- self.enums_list = []
- self.meths.append('process_enums')
- self.enums_list.append({'source': source,
- 'target': target,
- 'template': template,
- 'file-head': '',
- 'file-prod': '',
- 'file-tail': '',
- 'enum-prod': '',
- 'value-head': '',
- 'value-prod': '',
- 'value-tail': '',
- 'comments': comments})
-
-@taskgen_method
-def add_enums(self, source='', target='',
- file_head='', file_prod='', file_tail='', enum_prod='',
- value_head='', value_prod='', value_tail='', comments=''):
- """
- Adds a file to the list of enum files to process. Stores them in the attribute *enums_list*.
-
- :param source: enum file to process
- :type source: string
- :param target: target file
- :type target: string
- :param file_head: unused
- :param file_prod: unused
- :param file_tail: unused
- :param enum_prod: unused
- :param value_head: unused
- :param value_prod: unused
- :param value_tail: unused
- :param comments: comments
- :type comments: string
- """
- if not hasattr(self, 'enums_list'):
- self.enums_list = []
- self.meths.append('process_enums')
- self.enums_list.append({'source': source,
- 'template': '',
- 'target': target,
- 'file-head': file_head,
- 'file-prod': file_prod,
- 'file-tail': file_tail,
- 'enum-prod': enum_prod,
- 'value-head': value_head,
- 'value-prod': value_prod,
- 'value-tail': value_tail,
- 'comments': comments})
-
-@before_method('process_source')
-def process_enums(self):
- """
- Processes the enum files stored in the attribute *enum_list* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances.
- """
- for enum in getattr(self, 'enums_list', []):
- task = self.create_task('glib_mkenums')
- env = task.env
-
- inputs = []
-
- # process the source
- source_list = self.to_list(enum['source'])
- if not source_list:
- raise Errors.WafError('missing source ' + str(enum))
- source_list = [self.path.find_resource(k) for k in source_list]
- inputs += source_list
- env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
-
- # find the target
- if not enum['target']:
- raise Errors.WafError('missing target ' + str(enum))
- tgt_node = self.path.find_or_declare(enum['target'])
- if tgt_node.name.endswith('.c'):
- self.source.append(tgt_node)
- env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
-
-
- options = []
-
- if enum['template']: # template, if provided
- template_node = self.path.find_resource(enum['template'])
- options.append('--template %s' % (template_node.abspath()))
- inputs.append(template_node)
- params = {'file-head' : '--fhead',
- 'file-prod' : '--fprod',
- 'file-tail' : '--ftail',
- 'enum-prod' : '--eprod',
- 'value-head' : '--vhead',
- 'value-prod' : '--vprod',
- 'value-tail' : '--vtail',
- 'comments': '--comments'}
- for param, option in params.items():
- if enum[param]:
- options.append('%s %r' % (option, enum[param]))
-
- env.GLIB_MKENUMS_OPTIONS = ' '.join(options)
-
- # update the task instance
- task.set_inputs(inputs)
- task.set_outputs(tgt_node)
-
-class glib_mkenums(Task.Task):
- """
- Processes enum files
- """
- run_str = '${GLIB_MKENUMS} ${GLIB_MKENUMS_OPTIONS} ${GLIB_MKENUMS_SOURCE} > ${GLIB_MKENUMS_TARGET}'
- color = 'PINK'
- ext_out = ['.h']
-
-######################################### gsettings
-
-@taskgen_method
-def add_settings_schemas(self, filename_list):
- """
- Adds settings files to process to *settings_schema_files*
-
- :param filename_list: files
- :type filename_list: list of string
- """
- if not hasattr(self, 'settings_schema_files'):
- self.settings_schema_files = []
-
- if not isinstance(filename_list, list):
- filename_list = [filename_list]
-
- self.settings_schema_files.extend(filename_list)
-
-@taskgen_method
-def add_settings_enums(self, namespace, filename_list):
- """
- Called only once by task generator to set the enums namespace.
-
- :param namespace: namespace
- :type namespace: string
- :param filename_list: enum files to process
- :type filename_list: file list
- """
- if hasattr(self, 'settings_enum_namespace'):
- raise Errors.WafError("Tried to add gsettings enums to %r more than once" % self.name)
- self.settings_enum_namespace = namespace
-
- if not isinstance(filename_list, list):
- filename_list = [filename_list]
- self.settings_enum_files = filename_list
-
-@feature('glib2')
-def process_settings(self):
- """
- Processes the schema files in *settings_schema_files* to create :py:class:`waflib.Tools.glib2.glib_mkenums` instances. The
- same files are validated through :py:class:`waflib.Tools.glib2.glib_validate_schema` tasks.
-
- """
- enums_tgt_node = []
- install_files = []
-
- settings_schema_files = getattr(self, 'settings_schema_files', [])
- if settings_schema_files and not self.env.GLIB_COMPILE_SCHEMAS:
- raise Errors.WafError ("Unable to process GSettings schemas - glib-compile-schemas was not found during configure")
-
- # 1. process gsettings_enum_files (generate .enums.xml)
- #
- if hasattr(self, 'settings_enum_files'):
- enums_task = self.create_task('glib_mkenums')
-
- source_list = self.settings_enum_files
- source_list = [self.path.find_resource(k) for k in source_list]
- enums_task.set_inputs(source_list)
- enums_task.env.GLIB_MKENUMS_SOURCE = [k.abspath() for k in source_list]
-
- target = self.settings_enum_namespace + '.enums.xml'
- tgt_node = self.path.find_or_declare(target)
- enums_task.set_outputs(tgt_node)
- enums_task.env.GLIB_MKENUMS_TARGET = tgt_node.abspath()
- enums_tgt_node = [tgt_node]
-
- install_files.append(tgt_node)
-
- options = '--comments "<!-- @comment@ -->" --fhead "<schemalist>" --vhead " <@type@ id=\\"%s.@EnumName@\\">" --vprod " <value nick=\\"@valuenick@\\" value=\\"@valuenum@\\"/>" --vtail " </@type@>" --ftail "</schemalist>" ' % (self.settings_enum_namespace)
- enums_task.env.GLIB_MKENUMS_OPTIONS = options
-
- # 2. process gsettings_schema_files (validate .gschema.xml files)
- #
- for schema in settings_schema_files:
- schema_task = self.create_task ('glib_validate_schema')
-
- schema_node = self.path.find_resource(schema)
- if not schema_node:
- raise Errors.WafError("Cannot find the schema file %r" % schema)
- install_files.append(schema_node)
- source_list = enums_tgt_node + [schema_node]
-
- schema_task.set_inputs (source_list)
- schema_task.env.GLIB_COMPILE_SCHEMAS_OPTIONS = [("--schema-file=" + k.abspath()) for k in source_list]
-
- target_node = schema_node.change_ext('.xml.valid')
- schema_task.set_outputs (target_node)
- schema_task.env.GLIB_VALIDATE_SCHEMA_OUTPUT = target_node.abspath()
-
- # 3. schemas install task
- def compile_schemas_callback(bld):
- if not bld.is_install:
- return
- compile_schemas = Utils.to_list(bld.env.GLIB_COMPILE_SCHEMAS)
- destdir = Options.options.destdir
- paths = bld._compile_schemas_registered
- if destdir:
- paths = (os.path.join(destdir, path.lstrip(os.sep)) for path in paths)
- for path in paths:
- Logs.pprint('YELLOW', 'Updating GSettings schema cache %r' % path)
- if self.bld.exec_command(compile_schemas + [path]):
- Logs.warn('Could not update GSettings schema cache %r' % path)
-
- if self.bld.is_install:
- schemadir = self.env.GSETTINGSSCHEMADIR
- if not schemadir:
- raise Errors.WafError ('GSETTINGSSCHEMADIR not defined (should have been set up automatically during configure)')
-
- if install_files:
- self.add_install_files(install_to=schemadir, install_from=install_files)
- registered_schemas = getattr(self.bld, '_compile_schemas_registered', None)
- if not registered_schemas:
- registered_schemas = self.bld._compile_schemas_registered = set()
- self.bld.add_post_fun(compile_schemas_callback)
- registered_schemas.add(schemadir)
-
-class glib_validate_schema(Task.Task):
- """
- Validates schema files
- """
- run_str = 'rm -f ${GLIB_VALIDATE_SCHEMA_OUTPUT} && ${GLIB_COMPILE_SCHEMAS} --dry-run ${GLIB_COMPILE_SCHEMAS_OPTIONS} && touch ${GLIB_VALIDATE_SCHEMA_OUTPUT}'
- color = 'PINK'
-
-################## gresource
-
-@extension('.gresource.xml')
-def process_gresource_source(self, node):
- """
- Creates tasks that turn ``.gresource.xml`` files to C code
- """
- if not self.env.GLIB_COMPILE_RESOURCES:
- raise Errors.WafError ("Unable to process GResource file - glib-compile-resources was not found during configure")
-
- if 'gresource' in self.features:
- return
-
- h_node = node.change_ext('_xml.h')
- c_node = node.change_ext('_xml.c')
- self.create_task('glib_gresource_source', node, [h_node, c_node])
- self.source.append(c_node)
-
-@feature('gresource')
-def process_gresource_bundle(self):
- """
- Creates tasks to turn ``.gresource`` files from ``.gresource.xml`` files::
-
- def build(bld):
- bld(
- features='gresource',
- source=['resources1.gresource.xml', 'resources2.gresource.xml'],
- install_path='${LIBDIR}/${PACKAGE}'
- )
-
- :param source: XML files to process
- :type source: list of string
- :param install_path: installation path
- :type install_path: string
- """
- for i in self.to_list(self.source):
- node = self.path.find_resource(i)
-
- task = self.create_task('glib_gresource_bundle', node, node.change_ext(''))
- inst_to = getattr(self, 'install_path', None)
- if inst_to:
- self.add_install_files(install_to=inst_to, install_from=task.outputs)
-
-class glib_gresource_base(Task.Task):
- """
- Base class for gresource based tasks
- """
- color = 'BLUE'
- base_cmd = '${GLIB_COMPILE_RESOURCES} --sourcedir=${SRC[0].parent.srcpath()} --sourcedir=${SRC[0].bld_dir()}'
-
- def scan(self):
- """
- Scans gresource dependencies through ``glib-compile-resources --generate-dependencies command``
- """
- bld = self.generator.bld
- kw = {}
- kw['cwd'] = self.get_cwd()
- kw['quiet'] = Context.BOTH
-
- cmd = Utils.subst_vars('${GLIB_COMPILE_RESOURCES} --sourcedir=%s --sourcedir=%s --generate-dependencies %s' % (
- self.inputs[0].parent.srcpath(),
- self.inputs[0].bld_dir(),
- self.inputs[0].bldpath()
- ), self.env)
-
- output = bld.cmd_and_log(cmd, **kw)
-
- nodes = []
- names = []
- for dep in output.splitlines():
- if dep:
- node = bld.bldnode.find_node(dep)
- if node:
- nodes.append(node)
- else:
- names.append(dep)
-
- return (nodes, names)
-
-class glib_gresource_source(glib_gresource_base):
- """
- Task to generate C source code (.h and .c files) from a gresource.xml file
- """
- vars = ['GLIB_COMPILE_RESOURCES']
- fun_h = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[0].abspath()} --generate-header ${SRC}')
- fun_c = Task.compile_fun_shell(glib_gresource_base.base_cmd + ' --target=${TGT[1].abspath()} --generate-source ${SRC}')
- ext_out = ['.h']
-
- def run(self):
- return self.fun_h[0](self) or self.fun_c[0](self)
-
-class glib_gresource_bundle(glib_gresource_base):
- """
- Task to generate a .gresource binary file from a gresource.xml file
- """
- run_str = glib_gresource_base.base_cmd + ' --target=${TGT} ${SRC}'
- shell = True # temporary workaround for #795
-
-@conf
-def find_glib_genmarshal(conf):
- conf.find_program('glib-genmarshal', var='GLIB_GENMARSHAL')
-
-@conf
-def find_glib_mkenums(conf):
- if not conf.env.PERL:
- conf.find_program('perl', var='PERL')
- conf.find_program('glib-mkenums', interpreter='PERL', var='GLIB_MKENUMS')
-
-@conf
-def find_glib_compile_schemas(conf):
- # when cross-compiling, gsettings.m4 locates the program with the following:
- # pkg-config --variable glib_compile_schemas gio-2.0
- conf.find_program('glib-compile-schemas', var='GLIB_COMPILE_SCHEMAS')
-
- def getstr(varname):
- return getattr(Options.options, varname, getattr(conf.env,varname, ''))
-
- gsettingsschemadir = getstr('GSETTINGSSCHEMADIR')
- if not gsettingsschemadir:
- datadir = getstr('DATADIR')
- if not datadir:
- prefix = conf.env.PREFIX
- datadir = os.path.join(prefix, 'share')
- gsettingsschemadir = os.path.join(datadir, 'glib-2.0', 'schemas')
-
- conf.env.GSETTINGSSCHEMADIR = gsettingsschemadir
-
-@conf
-def find_glib_compile_resources(conf):
- conf.find_program('glib-compile-resources', var='GLIB_COMPILE_RESOURCES')
-
-def configure(conf):
- """
- Finds the following programs:
-
- * *glib-genmarshal* and set *GLIB_GENMARSHAL*
- * *glib-mkenums* and set *GLIB_MKENUMS*
- * *glib-compile-schemas* and set *GLIB_COMPILE_SCHEMAS* (not mandatory)
- * *glib-compile-resources* and set *GLIB_COMPILE_RESOURCES* (not mandatory)
- """
- conf.find_glib_genmarshal()
- conf.find_glib_mkenums()
- conf.find_glib_compile_schemas(mandatory=False)
- conf.find_glib_compile_resources(mandatory=False)
-
-def options(opt):
- """
- Adds the ``--gsettingsschemadir`` command-line option
- """
- gr = opt.add_option_group('Installation directories')
- gr.add_option('--gsettingsschemadir', help='GSettings schema location [DATADIR/glib-2.0/schemas]', default='', dest='GSETTINGSSCHEMADIR')
-
diff --git a/waflib/Tools/gnu_dirs.py b/waflib/Tools/gnu_dirs.py
deleted file mode 100644
index 2847071..0000000
--- a/waflib/Tools/gnu_dirs.py
+++ /dev/null
@@ -1,131 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-"""
-Sets various standard variables such as INCLUDEDIR. SBINDIR and others. To use this module just call::
-
- opt.load('gnu_dirs')
-
-and::
-
- conf.load('gnu_dirs')
-
-Add options for the standard GNU directories, this tool will add the options
-found in autotools, and will update the environment with the following
-installation variables:
-
-============== ========================================= =======================
-Variable Description Default Value
-============== ========================================= =======================
-PREFIX installation prefix /usr/local
-EXEC_PREFIX installation prefix for binaries PREFIX
-BINDIR user commands EXEC_PREFIX/bin
-SBINDIR system binaries EXEC_PREFIX/sbin
-LIBEXECDIR program-specific binaries EXEC_PREFIX/libexec
-SYSCONFDIR host-specific configuration PREFIX/etc
-SHAREDSTATEDIR architecture-independent variable data PREFIX/com
-LOCALSTATEDIR variable data PREFIX/var
-LIBDIR object code libraries EXEC_PREFIX/lib
-INCLUDEDIR header files PREFIX/include
-OLDINCLUDEDIR header files for non-GCC compilers /usr/include
-DATAROOTDIR architecture-independent data root PREFIX/share
-DATADIR architecture-independent data DATAROOTDIR
-INFODIR GNU "info" documentation DATAROOTDIR/info
-LOCALEDIR locale-dependent data DATAROOTDIR/locale
-MANDIR manual pages DATAROOTDIR/man
-DOCDIR documentation root DATAROOTDIR/doc/APPNAME
-HTMLDIR HTML documentation DOCDIR
-DVIDIR DVI documentation DOCDIR
-PDFDIR PDF documentation DOCDIR
-PSDIR PostScript documentation DOCDIR
-============== ========================================= =======================
-"""
-
-import os, re
-from waflib import Utils, Options, Context
-
-gnuopts = '''
-bindir, user commands, ${EXEC_PREFIX}/bin
-sbindir, system binaries, ${EXEC_PREFIX}/sbin
-libexecdir, program-specific binaries, ${EXEC_PREFIX}/libexec
-sysconfdir, host-specific configuration, ${PREFIX}/etc
-sharedstatedir, architecture-independent variable data, ${PREFIX}/com
-localstatedir, variable data, ${PREFIX}/var
-libdir, object code libraries, ${EXEC_PREFIX}/lib%s
-includedir, header files, ${PREFIX}/include
-oldincludedir, header files for non-GCC compilers, /usr/include
-datarootdir, architecture-independent data root, ${PREFIX}/share
-datadir, architecture-independent data, ${DATAROOTDIR}
-infodir, GNU "info" documentation, ${DATAROOTDIR}/info
-localedir, locale-dependent data, ${DATAROOTDIR}/locale
-mandir, manual pages, ${DATAROOTDIR}/man
-docdir, documentation root, ${DATAROOTDIR}/doc/${PACKAGE}
-htmldir, HTML documentation, ${DOCDIR}
-dvidir, DVI documentation, ${DOCDIR}
-pdfdir, PDF documentation, ${DOCDIR}
-psdir, PostScript documentation, ${DOCDIR}
-''' % Utils.lib64()
-
-_options = [x.split(', ') for x in gnuopts.splitlines() if x]
-
-def configure(conf):
- """
- Reads the command-line options to set lots of variables in *conf.env*. The variables
- BINDIR and LIBDIR will be overwritten.
- """
- def get_param(varname, default):
- return getattr(Options.options, varname, '') or default
-
- env = conf.env
- env.LIBDIR = env.BINDIR = []
- env.EXEC_PREFIX = get_param('EXEC_PREFIX', env.PREFIX)
- env.PACKAGE = getattr(Context.g_module, 'APPNAME', None) or env.PACKAGE
-
- complete = False
- iter = 0
- while not complete and iter < len(_options) + 1:
- iter += 1
- complete = True
- for name, help, default in _options:
- name = name.upper()
- if not env[name]:
- try:
- env[name] = Utils.subst_vars(get_param(name, default).replace('/', os.sep), env)
- except TypeError:
- complete = False
-
- if not complete:
- lst = [x for x, _, _ in _options if not env[x.upper()]]
- raise conf.errors.WafError('Variable substitution failure %r' % lst)
-
-def options(opt):
- """
- Adds lots of command-line options, for example::
-
- --exec-prefix: EXEC_PREFIX
- """
- inst_dir = opt.add_option_group('Installation prefix',
-'By default, "waf install" will put the files in\
- "/usr/local/bin", "/usr/local/lib" etc. An installation prefix other\
- than "/usr/local" can be given using "--prefix", for example "--prefix=$HOME"')
-
- for k in ('--prefix', '--destdir'):
- option = opt.parser.get_option(k)
- if option:
- opt.parser.remove_option(k)
- inst_dir.add_option(option)
-
- inst_dir.add_option('--exec-prefix',
- help = 'installation prefix for binaries [PREFIX]',
- default = '',
- dest = 'EXEC_PREFIX')
-
- dirs_options = opt.add_option_group('Installation directories')
-
- for name, help, default in _options:
- option_name = '--' + name
- str_default = default
- str_help = '%s [%s]' % (help, re.sub(r'\$\{([^}]+)\}', r'\1', str_default))
- dirs_options.add_option(option_name, help=str_help, default='', dest=name.upper())
-
diff --git a/waflib/Tools/gxx.py b/waflib/Tools/gxx.py
deleted file mode 100644
index 22c5d26..0000000
--- a/waflib/Tools/gxx.py
+++ /dev/null
@@ -1,157 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-
-"""
-g++/llvm detection.
-"""
-
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_gxx(conf):
- """
- Finds the program g++, and if present, try to detect its version number
- """
- cxx = conf.find_program(['g++', 'c++'], var='CXX')
- conf.get_cc_version(cxx, gcc=True)
- conf.env.CXX_NAME = 'gcc'
-
-@conf
-def gxx_common_flags(conf):
- """
- Common flags for g++ on nearly all platforms
- """
- v = conf.env
-
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o']
-
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
-
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
-
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Wl,-Bdynamic'
- v.STLIB_MARKER = '-Wl,-Bstatic'
-
- v.cxxprogram_PATTERN = '%s'
-
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-shared']
- v.cxxshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cxxstlib = ['-Wl,-Bstatic']
- v.cxxstlib_PATTERN = 'lib%s.a'
-
- v.LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
- v.CXXFLAGS_MACBUNDLE = ['-fPIC']
- v.macbundle_PATTERN = '%s.bundle'
-
-@conf
-def gxx_modifier_win32(conf):
- """Configuration flags for executing gcc on Windows"""
- v = conf.env
- v.cxxprogram_PATTERN = '%s.exe'
-
- v.cxxshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.dll.a'
- v.IMPLIB_ST = '-Wl,--out-implib,%s'
-
- v.CXXFLAGS_cxxshlib = []
-
- # Auto-import is enabled by default even without this option,
- # but enabling it explicitly has the nice effect of suppressing the rather boring, debug-level messages
- # that the linker emits otherwise.
- v.append_value('LINKFLAGS', ['-Wl,--enable-auto-import'])
-
-@conf
-def gxx_modifier_cygwin(conf):
- """Configuration flags for executing g++ on Cygwin"""
- gxx_modifier_win32(conf)
- v = conf.env
- v.cxxshlib_PATTERN = 'cyg%s.dll'
- v.append_value('LINKFLAGS_cxxshlib', ['-Wl,--enable-auto-image-base'])
- v.CXXFLAGS_cxxshlib = []
-
-@conf
-def gxx_modifier_darwin(conf):
- """Configuration flags for executing g++ on MacOS"""
- v = conf.env
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-dynamiclib']
- v.cxxshlib_PATTERN = 'lib%s.dylib'
- v.FRAMEWORKPATH_ST = '-F%s'
- v.FRAMEWORK_ST = ['-framework']
- v.ARCH_ST = ['-arch']
-
- v.LINKFLAGS_cxxstlib = []
-
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
-
-@conf
-def gxx_modifier_aix(conf):
- """Configuration flags for executing g++ on AIX"""
- v = conf.env
- v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
-
- v.LINKFLAGS_cxxshlib = ['-shared', '-Wl,-brtl,-bexpfull']
- v.SHLIB_MARKER = []
-
-@conf
-def gxx_modifier_hpux(conf):
- v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.CFLAGS_cxxshlib = ['-fPIC','-DPIC']
- v.cxxshlib_PATTERN = 'lib%s.sl'
-
-@conf
-def gxx_modifier_openbsd(conf):
- conf.env.SONAME_ST = []
-
-@conf
-def gcc_modifier_osf1V(conf):
- v = conf.env
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
- v.SONAME_ST = []
-
-@conf
-def gxx_modifier_platform(conf):
- """Execute platform-specific functions based on *gxx_modifier_+NAME*"""
- # * set configurations specific for a platform.
- # * the destination platform is detected automatically by looking at the macros the compiler predefines,
- # and if it's not recognised, it fallbacks to sys.platform.
- gxx_modifier_func = getattr(conf, 'gxx_modifier_' + conf.env.DEST_OS, None)
- if gxx_modifier_func:
- gxx_modifier_func()
-
-def configure(conf):
- """
- Configuration for g++
- """
- conf.find_gxx()
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
- conf.check_gcc_o_space('cxx')
-
diff --git a/waflib/Tools/icc.py b/waflib/Tools/icc.py
deleted file mode 100644
index b6492c8..0000000
--- a/waflib/Tools/icc.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Stian Selnes 2008
-# Thomas Nagy 2009-2018 (ita)
-
-"""
-Detects the Intel C compiler
-"""
-
-import sys
-from waflib.Tools import ccroot, ar, gcc
-from waflib.Configure import conf
-
-@conf
-def find_icc(conf):
- """
- Finds the program icc and execute it to ensure it really is icc
- """
- cc = conf.find_program(['icc', 'ICL'], var='CC')
- conf.get_cc_version(cc, icc=True)
- conf.env.CC_NAME = 'icc'
-
-def configure(conf):
- conf.find_icc()
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/Tools/icpc.py b/waflib/Tools/icpc.py
deleted file mode 100644
index 8a6cc6c..0000000
--- a/waflib/Tools/icpc.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy 2009-2018 (ita)
-
-"""
-Detects the Intel C++ compiler
-"""
-
-import sys
-from waflib.Tools import ccroot, ar, gxx
-from waflib.Configure import conf
-
-@conf
-def find_icpc(conf):
- """
- Finds the program icpc, and execute it to ensure it really is icpc
- """
- cxx = conf.find_program('icpc', var='CXX')
- conf.get_cc_version(cxx, icc=True)
- conf.env.CXX_NAME = 'icc'
-
-def configure(conf):
- conf.find_icpc()
- conf.find_ar()
- conf.gxx_common_flags()
- conf.gxx_modifier_platform()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/ifort.py b/waflib/Tools/ifort.py
deleted file mode 100644
index 74934f3..0000000
--- a/waflib/Tools/ifort.py
+++ /dev/null
@@ -1,413 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# DC 2008
-# Thomas Nagy 2016-2018 (ita)
-
-import os, re, traceback
-from waflib import Utils, Logs, Errors
-from waflib.Tools import fc, fc_config, fc_scan, ar, ccroot
-from waflib.Configure import conf
-from waflib.TaskGen import after_method, feature
-
-@conf
-def find_ifort(conf):
- fc = conf.find_program('ifort', var='FC')
- conf.get_ifort_version(fc)
- conf.env.FC_NAME = 'IFORT'
-
-@conf
-def ifort_modifier_win32(self):
- v = self.env
- v.IFORT_WIN32 = True
- v.FCSTLIB_MARKER = ''
- v.FCSHLIB_MARKER = ''
-
- v.FCLIB_ST = v.FCSTLIB_ST = '%s.lib'
- v.FCLIBPATH_ST = v.STLIBPATH_ST = '/LIBPATH:%s'
- v.FCINCPATH_ST = '/I%s'
- v.FCDEFINES_ST = '/D%s'
-
- v.fcprogram_PATTERN = v.fcprogram_test_PATTERN = '%s.exe'
- v.fcshlib_PATTERN = '%s.dll'
- v.fcstlib_PATTERN = v.implib_PATTERN = '%s.lib'
-
- v.FCLNK_TGT_F = '/out:'
- v.FC_TGT_F = ['/c', '/o', '']
- v.FCFLAGS_fcshlib = ''
- v.LINKFLAGS_fcshlib = '/DLL'
- v.AR_TGT_F = '/out:'
- v.IMPLIB_ST = '/IMPLIB:%s'
-
- v.append_value('LINKFLAGS', '/subsystem:console')
- if v.IFORT_MANIFEST:
- v.append_value('LINKFLAGS', ['/MANIFEST'])
-
-@conf
-def ifort_modifier_darwin(conf):
- fc_config.fortran_modifier_darwin(conf)
-
-@conf
-def ifort_modifier_platform(conf):
- dest_os = conf.env.DEST_OS or Utils.unversioned_sys_platform()
- ifort_modifier_func = getattr(conf, 'ifort_modifier_' + dest_os, None)
- if ifort_modifier_func:
- ifort_modifier_func()
-
-@conf
-def get_ifort_version(conf, fc):
- """
- Detects the compiler version and sets ``conf.env.FC_VERSION``
- """
- version_re = re.compile(r"\bIntel\b.*\bVersion\s*(?P<major>\d*)\.(?P<minor>\d*)",re.I).search
- if Utils.is_win32:
- cmd = fc
- else:
- cmd = fc + ['-logo']
-
- out, err = fc_config.getoutput(conf, cmd, stdin=False)
- match = version_re(out) or version_re(err)
- if not match:
- conf.fatal('cannot determine ifort version.')
- k = match.groupdict()
- conf.env.FC_VERSION = (k['major'], k['minor'])
-
-def configure(conf):
- """
- Detects the Intel Fortran compilers
- """
- if Utils.is_win32:
- compiler, version, path, includes, libdirs, arch = conf.detect_ifort()
- v = conf.env
- v.DEST_CPU = arch
- v.PATH = path
- v.INCLUDES = includes
- v.LIBPATH = libdirs
- v.MSVC_COMPILER = compiler
- try:
- v.MSVC_VERSION = float(version)
- except ValueError:
- v.MSVC_VERSION = float(version[:-3])
-
- conf.find_ifort_win32()
- conf.ifort_modifier_win32()
- else:
- conf.find_ifort()
- conf.find_program('xiar', var='AR')
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.ifort_modifier_platform()
-
-
-all_ifort_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
-"""List of icl platforms"""
-
-@conf
-def gather_ifort_versions(conf, versions):
- """
- List compiler versions by looking up registry keys
- """
- version_pattern = re.compile('^...?.?\....?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\Fortran')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\Fortran')
- except OSError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- targets = {}
- for target,arch in all_ifort_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
- try:
- Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version)
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
- batch_file=os.path.join(path,'bin','ifortvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
-
- for target,arch in all_ifort_platforms:
- try:
- icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
- path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- continue
- else:
- batch_file=os.path.join(path,'bin','ifortvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
- major = version[0:2]
- versions['intel ' + major] = targets
-
-@conf
-def setup_ifort(conf, versiondict):
- """
- Checks installed compilers and targets and returns the first combination from the user's
- options, env, or the global supported lists that checks.
-
- :param versiondict: dict(platform -> dict(architecture -> configuration))
- :type versiondict: dict(string -> dict(string -> target_compiler)
- :return: the compiler, revision, path, include dirs, library paths and target architecture
- :rtype: tuple of strings
- """
- platforms = Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_ifort_platforms]
- desired_versions = conf.env.MSVC_VERSIONS or list(reversed(list(versiondict.keys())))
- for version in desired_versions:
- try:
- targets = versiondict[version]
- except KeyError:
- continue
- for arch in platforms:
- try:
- cfg = targets[arch]
- except KeyError:
- continue
- cfg.evaluate()
- if cfg.is_valid:
- compiler,revision = version.rsplit(' ', 1)
- return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
- conf.fatal('ifort: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
-
-@conf
-def get_ifort_version_win32(conf, compiler, version, target, vcvars):
- # FIXME hack
- try:
- conf.msvc_cnt += 1
- except AttributeError:
- conf.msvc_cnt = 1
- batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
- batfile.write("""@echo off
-set INCLUDE=
-set LIB=
-call "%s" %s
-echo PATH=%%PATH%%
-echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%;%%LIBPATH%%
-""" % (vcvars,target))
- sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
- batfile.delete()
- lines = sout.splitlines()
-
- if not lines[0]:
- lines.pop(0)
-
- MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
- for line in lines:
- if line.startswith('PATH='):
- path = line[5:]
- MSVC_PATH = path.split(';')
- elif line.startswith('INCLUDE='):
- MSVC_INCDIR = [i for i in line[8:].split(';') if i]
- elif line.startswith('LIB='):
- MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
- if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
- conf.fatal('ifort: Could not find a valid architecture for building (get_ifort_version_win32)')
-
- # Check if the compiler is usable at all.
- # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
- env = dict(os.environ)
- env.update(PATH = path)
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- fc = conf.find_program(compiler_name, path_list=MSVC_PATH)
-
- # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
- if 'CL' in env:
- del(env['CL'])
-
- try:
- conf.cmd_and_log(fc + ['/help'], env=env)
- except UnicodeError:
- st = traceback.format_exc()
- if conf.logger:
- conf.logger.error(st)
- conf.fatal('ifort: Unicode error - check the code page?')
- except Exception as e:
- Logs.debug('ifort: get_ifort_version: %r %r %r -> failure %s', compiler, version, target, str(e))
- conf.fatal('ifort: cannot run the compiler in get_ifort_version (run with -v to display errors)')
- else:
- Logs.debug('ifort: get_ifort_version: %r %r %r -> OK', compiler, version, target)
- finally:
- conf.env[compiler_name] = ''
-
- return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-
-class target_compiler(object):
- """
- Wraps a compiler configuration; call evaluate() to determine
- whether the configuration is usable.
- """
- def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
- """
- :param ctx: configuration context to use to eventually get the version environment
- :param compiler: compiler name
- :param cpu: target cpu
- :param version: compiler version number
- :param bat_target: ?
- :param bat: path to the batch file to run
- :param callback: optional function to take the realized environment variables tup and map it (e.g. to combine other constant paths)
- """
- self.conf = ctx
- self.name = None
- self.is_valid = False
- self.is_done = False
-
- self.compiler = compiler
- self.cpu = cpu
- self.version = version
- self.bat_target = bat_target
- self.bat = bat
- self.callback = callback
-
- def evaluate(self):
- if self.is_done:
- return
- self.is_done = True
- try:
- vs = self.conf.get_ifort_version_win32(self.compiler, self.version, self.bat_target, self.bat)
- except Errors.ConfigurationError:
- self.is_valid = False
- return
- if self.callback:
- vs = self.callback(self, vs)
- self.is_valid = True
- (self.bindirs, self.incdirs, self.libdirs) = vs
-
- def __str__(self):
- return str((self.bindirs, self.incdirs, self.libdirs))
-
- def __repr__(self):
- return repr((self.bindirs, self.incdirs, self.libdirs))
-
-@conf
-def detect_ifort(self):
- return self.setup_ifort(self.get_ifort_versions(False))
-
-@conf
-def get_ifort_versions(self, eval_and_save=True):
- """
- :return: platforms to compiler configurations
- :rtype: dict
- """
- dct = {}
- self.gather_ifort_versions(dct)
- return dct
-
-def _get_prog_names(self, compiler):
- if compiler=='intel':
- compiler_name = 'ifort'
- linker_name = 'XILINK'
- lib_name = 'XILIB'
- else:
- # assumes CL.exe
- compiler_name = 'CL'
- linker_name = 'LINK'
- lib_name = 'LIB'
- return compiler_name, linker_name, lib_name
-
-@conf
-def find_ifort_win32(conf):
- # the autodetection is supposed to be performed before entering in this method
- v = conf.env
- path = v.PATH
- compiler = v.MSVC_COMPILER
- version = v.MSVC_VERSION
-
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- v.IFORT_MANIFEST = (compiler == 'intel' and version >= 11)
-
- # compiler
- fc = conf.find_program(compiler_name, var='FC', path_list=path)
-
- # before setting anything, check if the compiler is really intel fortran
- env = dict(conf.environ)
- if path:
- env.update(PATH = ';'.join(path))
- if not conf.cmd_and_log(fc + ['/nologo', '/help'], env=env):
- conf.fatal('not intel fortran compiler could not be identified')
-
- v.FC_NAME = 'IFORT'
-
- if not v.LINK_FC:
- conf.find_program(linker_name, var='LINK_FC', path_list=path, mandatory=True)
-
- if not v.AR:
- conf.find_program(lib_name, path_list=path, var='AR', mandatory=True)
- v.ARFLAGS = ['/nologo']
-
- # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
- if v.IFORT_MANIFEST:
- conf.find_program('MT', path_list=path, var='MT')
- v.MTFLAGS = ['/nologo']
-
- try:
- conf.load('winres')
- except Errors.WafError:
- Logs.warn('Resource compiler not found. Compiling resource file is disabled')
-
-#######################################################################################################
-##### conf above, build below
-
-@after_method('apply_link')
-@feature('fc')
-def apply_flags_ifort(self):
- """
- Adds additional flags implied by msvc, such as subsystems and pdb files::
-
- def build(bld):
- bld.stlib(source='main.c', target='bar', subsystem='gruik')
- """
- if not self.env.IFORT_WIN32 or not getattr(self, 'link_task', None):
- return
-
- is_static = isinstance(self.link_task, ccroot.stlink_task)
-
- subsystem = getattr(self, 'subsystem', '')
- if subsystem:
- subsystem = '/subsystem:%s' % subsystem
- flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
- self.env.append_value(flags, subsystem)
-
- if not is_static:
- for f in self.env.LINKFLAGS:
- d = f.lower()
- if d[1:] == 'debug':
- pdbnode = self.link_task.outputs[0].change_ext('.pdb')
- self.link_task.outputs.append(pdbnode)
-
- if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(install_to=self.install_task.install_to, install_from=pdbnode)
-
- break
-
-@feature('fcprogram', 'fcshlib', 'fcprogram_test')
-@after_method('apply_link')
-def apply_manifest_ifort(self):
- """
- Enables manifest embedding in Fortran DLLs when using ifort on Windows
- See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
- """
- if self.env.IFORT_WIN32 and getattr(self, 'link_task', None):
- # it seems ifort.exe cannot be called for linking
- self.link_task.env.FC = self.env.LINK_FC
-
- if self.env.IFORT_WIN32 and self.env.IFORT_MANIFEST and getattr(self, 'link_task', None):
- out_node = self.link_task.outputs[0]
- man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
- self.link_task.outputs.append(man_node)
- self.env.DO_MANIFEST = True
-
diff --git a/waflib/Tools/intltool.py b/waflib/Tools/intltool.py
deleted file mode 100644
index af95ba8..0000000
--- a/waflib/Tools/intltool.py
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Support for translation tools such as msgfmt and intltool
-
-Usage::
-
- def configure(conf):
- conf.load('gnu_dirs intltool')
-
- def build(bld):
- # process the .po files into .gmo files, and install them in LOCALEDIR
- bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
-
- # process an input file, substituting the translations from the po dir
- bld(
- features = "intltool_in",
- podir = "../po",
- style = "desktop",
- flags = ["-u"],
- source = 'kupfer.desktop.in',
- install_path = "${DATADIR}/applications",
- )
-
-Usage of the :py:mod:`waflib.Tools.gnu_dirs` is recommended, but not obligatory.
-"""
-
-from __future__ import with_statement
-
-import os, re
-from waflib import Context, Task, Utils, Logs
-import waflib.Tools.ccroot
-from waflib.TaskGen import feature, before_method, taskgen_method
-from waflib.Logs import error
-from waflib.Configure import conf
-
-_style_flags = {
- 'ba': '-b',
- 'desktop': '-d',
- 'keys': '-k',
- 'quoted': '--quoted-style',
- 'quotedxml': '--quotedxml-style',
- 'rfc822deb': '-r',
- 'schemas': '-s',
- 'xml': '-x',
-}
-
-@taskgen_method
-def ensure_localedir(self):
- """
- Expands LOCALEDIR from DATAROOTDIR/locale if possible, or falls back to PREFIX/share/locale
- """
- # use the tool gnu_dirs to provide options to define this
- if not self.env.LOCALEDIR:
- if self.env.DATAROOTDIR:
- self.env.LOCALEDIR = os.path.join(self.env.DATAROOTDIR, 'locale')
- else:
- self.env.LOCALEDIR = os.path.join(self.env.PREFIX, 'share', 'locale')
-
-@before_method('process_source')
-@feature('intltool_in')
-def apply_intltool_in_f(self):
- """
- Creates tasks to translate files by intltool-merge::
-
- def build(bld):
- bld(
- features = "intltool_in",
- podir = "../po",
- style = "desktop",
- flags = ["-u"],
- source = 'kupfer.desktop.in',
- install_path = "${DATADIR}/applications",
- )
-
- :param podir: location of the .po files
- :type podir: string
- :param source: source files to process
- :type source: list of string
- :param style: the intltool-merge mode of operation, can be one of the following values:
- ``ba``, ``desktop``, ``keys``, ``quoted``, ``quotedxml``, ``rfc822deb``, ``schemas`` and ``xml``.
- See the ``intltool-merge`` man page for more information about supported modes of operation.
- :type style: string
- :param flags: compilation flags ("-quc" by default)
- :type flags: list of string
- :param install_path: installation path
- :type install_path: string
- """
- try:
- self.meths.remove('process_source')
- except ValueError:
- pass
-
- self.ensure_localedir()
-
- podir = getattr(self, 'podir', '.')
- podirnode = self.path.find_dir(podir)
- if not podirnode:
- error("could not find the podir %r" % podir)
- return
-
- cache = getattr(self, 'intlcache', '.intlcache')
- self.env.INTLCACHE = [os.path.join(str(self.path.get_bld()), podir, cache)]
- self.env.INTLPODIR = podirnode.bldpath()
- self.env.append_value('INTLFLAGS', getattr(self, 'flags', self.env.INTLFLAGS_DEFAULT))
-
- if '-c' in self.env.INTLFLAGS:
- self.bld.fatal('Redundant -c flag in intltool task %r' % self)
-
- style = getattr(self, 'style', None)
- if style:
- try:
- style_flag = _style_flags[style]
- except KeyError:
- self.bld.fatal('intltool_in style "%s" is not valid' % style)
-
- self.env.append_unique('INTLFLAGS', [style_flag])
-
- for i in self.to_list(self.source):
- node = self.path.find_resource(i)
-
- task = self.create_task('intltool', node, node.change_ext(''))
- inst = getattr(self, 'install_path', None)
- if inst:
- self.add_install_files(install_to=inst, install_from=task.outputs)
-
-@feature('intltool_po')
-def apply_intltool_po(self):
- """
- Creates tasks to process po files::
-
- def build(bld):
- bld(features='intltool_po', appname='myapp', podir='po', install_path="${LOCALEDIR}")
-
- The relevant task generator arguments are:
-
- :param podir: directory of the .po files
- :type podir: string
- :param appname: name of the application
- :type appname: string
- :param install_path: installation directory
- :type install_path: string
-
- The file LINGUAS must be present in the directory pointed by *podir* and list the translation files to process.
- """
- try:
- self.meths.remove('process_source')
- except ValueError:
- pass
-
- self.ensure_localedir()
-
- appname = getattr(self, 'appname', getattr(Context.g_module, Context.APPNAME, 'set_your_app_name'))
- podir = getattr(self, 'podir', '.')
- inst = getattr(self, 'install_path', '${LOCALEDIR}')
-
- linguas = self.path.find_node(os.path.join(podir, 'LINGUAS'))
- if linguas:
- # scan LINGUAS file for locales to process
- with open(linguas.abspath()) as f:
- langs = []
- for line in f.readlines():
- # ignore lines containing comments
- if not line.startswith('#'):
- langs += line.split()
- re_linguas = re.compile('[-a-zA-Z_@.]+')
- for lang in langs:
- # Make sure that we only process lines which contain locales
- if re_linguas.match(lang):
- node = self.path.find_resource(os.path.join(podir, re_linguas.match(lang).group() + '.po'))
- task = self.create_task('po', node, node.change_ext('.mo'))
-
- if inst:
- filename = task.outputs[0].name
- (langname, ext) = os.path.splitext(filename)
- inst_file = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + appname + '.mo'
- self.add_install_as(install_to=inst_file, install_from=task.outputs[0],
- chmod=getattr(self, 'chmod', Utils.O644))
-
- else:
- Logs.pprint('RED', "Error no LINGUAS file found in po directory")
-
-class po(Task.Task):
- """
- Compiles .po files into .gmo files
- """
- run_str = '${MSGFMT} -o ${TGT} ${SRC}'
- color = 'BLUE'
-
-class intltool(Task.Task):
- """
- Calls intltool-merge to update translation files
- """
- run_str = '${INTLTOOL} ${INTLFLAGS} ${INTLCACHE_ST:INTLCACHE} ${INTLPODIR} ${SRC} ${TGT}'
- color = 'BLUE'
-
-@conf
-def find_msgfmt(conf):
- """
- Detects msgfmt and sets the ``MSGFMT`` variable
- """
- conf.find_program('msgfmt', var='MSGFMT')
-
-@conf
-def find_intltool_merge(conf):
- """
- Detects intltool-merge
- """
- if not conf.env.PERL:
- conf.find_program('perl', var='PERL')
- conf.env.INTLCACHE_ST = '--cache=%s'
- conf.env.INTLFLAGS_DEFAULT = ['-q', '-u']
- conf.find_program('intltool-merge', interpreter='PERL', var='INTLTOOL')
-
-def configure(conf):
- """
- Detects the program *msgfmt* and set *conf.env.MSGFMT*.
- Detects the program *intltool-merge* and set *conf.env.INTLTOOL*.
- It is possible to set INTLTOOL in the environment, but it must not have spaces in it::
-
- $ INTLTOOL="/path/to/the program/intltool" waf configure
-
- If a C/C++ compiler is present, execute a compilation test to find the header *locale.h*.
- """
- conf.find_msgfmt()
- conf.find_intltool_merge()
- if conf.env.CC or conf.env.CXX:
- conf.check(header_name='locale.h')
-
diff --git a/waflib/Tools/irixcc.py b/waflib/Tools/irixcc.py
deleted file mode 100644
index c3ae1ac..0000000
--- a/waflib/Tools/irixcc.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# imported from samba
-
-"""
-Compiler definition for irix/MIPSpro cc compiler
-"""
-
-from waflib import Errors
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_irixcc(conf):
- v = conf.env
- cc = None
- if v.CC:
- cc = v.CC
- elif 'CC' in conf.environ:
- cc = conf.environ['CC']
- if not cc:
- cc = conf.find_program('cc', var='CC')
- if not cc:
- conf.fatal('irixcc was not found')
-
- try:
- conf.cmd_and_log(cc + ['-version'])
- except Errors.WafError:
- conf.fatal('%r -version could not be executed' % cc)
-
- v.CC = cc
- v.CC_NAME = 'irix'
-
-@conf
-def irixcc_common_flags(conf):
- v = conf.env
-
- v.CC_SRC_F = ''
- v.CC_TGT_F = ['-c', '-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- if not v.LINK_CC:
- v.LINK_CC = v.CC
-
- v.CCLNK_SRC_F = ''
- v.CCLNK_TGT_F = ['-o']
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
-
- v.cprogram_PATTERN = '%s'
- v.cshlib_PATTERN = 'lib%s.so'
- v.cstlib_PATTERN = 'lib%s.a'
-
-def configure(conf):
- conf.find_irixcc()
- conf.find_cpp()
- conf.find_ar()
- conf.irixcc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/javaw.py b/waflib/Tools/javaw.py
deleted file mode 100644
index f6fd20c..0000000
--- a/waflib/Tools/javaw.py
+++ /dev/null
@@ -1,464 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-Java support
-
-Javac is one of the few compilers that behaves very badly:
-
-#. it outputs files where it wants to (-d is only for the package root)
-
-#. it recompiles files silently behind your back
-
-#. it outputs an undefined amount of files (inner classes)
-
-Remember that the compilation can be performed using Jython[1] rather than regular Python. Instead of
-running one of the following commands::
-
- ./waf configure
- python waf configure
-
-You would have to run::
-
- java -jar /path/to/jython.jar waf configure
-
-[1] http://www.jython.org/
-"""
-
-import os, shutil
-from waflib import Task, Utils, Errors, Node
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method, after_method
-
-from waflib.Tools import ccroot
-ccroot.USELIB_VARS['javac'] = set(['CLASSPATH', 'JAVACFLAGS'])
-
-SOURCE_RE = '**/*.java'
-JAR_RE = '**/*'
-
-class_check_source = '''
-public class Test {
- public static void main(String[] argv) {
- Class lib;
- if (argv.length < 1) {
- System.err.println("Missing argument");
- System.exit(77);
- }
- try {
- lib = Class.forName(argv[0]);
- } catch (ClassNotFoundException e) {
- System.err.println("ClassNotFoundException");
- System.exit(1);
- }
- lib = null;
- System.exit(0);
- }
-}
-'''
-
-@feature('javac')
-@before_method('process_source')
-def apply_java(self):
- """
- Create a javac task for compiling *.java files*. There can be
- only one javac task by task generator.
- """
- Utils.def_attrs(self, jarname='', classpath='',
- sourcepath='.', srcdir='.',
- jar_mf_attributes={}, jar_mf_classpath=[])
-
- outdir = getattr(self, 'outdir', None)
- if outdir:
- if not isinstance(outdir, Node.Node):
- outdir = self.path.get_bld().make_node(self.outdir)
- else:
- outdir = self.path.get_bld()
- outdir.mkdir()
- self.outdir = outdir
- self.env.OUTDIR = outdir.abspath()
-
- self.javac_task = tsk = self.create_task('javac')
- tmp = []
-
- srcdir = getattr(self, 'srcdir', '')
- if isinstance(srcdir, Node.Node):
- srcdir = [srcdir]
- for x in Utils.to_list(srcdir):
- if isinstance(x, Node.Node):
- y = x
- else:
- y = self.path.find_dir(x)
- if not y:
- self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
- tmp.append(y)
-
- tsk.srcdir = tmp
-
- if getattr(self, 'compat', None):
- tsk.env.append_value('JAVACFLAGS', ['-source', str(self.compat)])
-
- if hasattr(self, 'sourcepath'):
- fold = [isinstance(x, Node.Node) and x or self.path.find_dir(x) for x in self.to_list(self.sourcepath)]
- names = os.pathsep.join([x.srcpath() for x in fold])
- else:
- names = [x.srcpath() for x in tsk.srcdir]
-
- if names:
- tsk.env.append_value('JAVACFLAGS', ['-sourcepath', names])
-
-@feature('javac')
-@before_method('propagate_uselib_vars')
-@after_method('apply_java')
-def use_javac_files(self):
- """
- Processes the *use* attribute referring to other java compilations
- """
- lst = []
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.uselib.append(x)
- else:
- y.post()
- if hasattr(y, 'jar_task'):
- lst.append(y.jar_task.outputs[0].abspath())
- self.javac_task.set_run_after(y.jar_task)
- else:
- for tsk in y.tasks:
- self.javac_task.set_run_after(tsk)
- self.env.append_value('CLASSPATH', lst)
-
-@feature('javac')
-@after_method('apply_java', 'propagate_uselib_vars', 'use_javac_files')
-def set_classpath(self):
- """
- Sets the CLASSPATH value on the *javac* task previously created.
- """
- if getattr(self, 'classpath', None):
- self.env.append_unique('CLASSPATH', getattr(self, 'classpath', []))
- for x in self.tasks:
- x.env.CLASSPATH = os.pathsep.join(self.env.CLASSPATH) + os.pathsep
-
-@feature('jar')
-@after_method('apply_java', 'use_javac_files')
-@before_method('process_source')
-def jar_files(self):
- """
- Creates a jar task (one maximum per task generator)
- """
- destfile = getattr(self, 'destfile', 'test.jar')
- jaropts = getattr(self, 'jaropts', [])
- manifest = getattr(self, 'manifest', None)
-
- basedir = getattr(self, 'basedir', None)
- if basedir:
- if not isinstance(self.basedir, Node.Node):
- basedir = self.path.get_bld().make_node(basedir)
- else:
- basedir = self.path.get_bld()
- if not basedir:
- self.bld.fatal('Could not find the basedir %r for %r' % (self.basedir, self))
-
- self.jar_task = tsk = self.create_task('jar_create')
- if manifest:
- jarcreate = getattr(self, 'jarcreate', 'cfm')
- if not isinstance(manifest,Node.Node):
- node = self.path.find_resource(manifest)
- else:
- node = manifest
- if not node:
- self.bld.fatal('invalid manifest file %r for %r' % (manifest, self))
- tsk.dep_nodes.append(node)
- jaropts.insert(0, node.abspath())
- else:
- jarcreate = getattr(self, 'jarcreate', 'cf')
- if not isinstance(destfile, Node.Node):
- destfile = self.path.find_or_declare(destfile)
- if not destfile:
- self.bld.fatal('invalid destfile %r for %r' % (destfile, self))
- tsk.set_outputs(destfile)
- tsk.basedir = basedir
-
- jaropts.append('-C')
- jaropts.append(basedir.bldpath())
- jaropts.append('.')
-
- tsk.env.JAROPTS = jaropts
- tsk.env.JARCREATE = jarcreate
-
- if getattr(self, 'javac_task', None):
- tsk.set_run_after(self.javac_task)
-
-@feature('jar')
-@after_method('jar_files')
-def use_jar_files(self):
- """
- Processes the *use* attribute to set the build order on the
- tasks created by another task generator.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- names = self.to_list(getattr(self, 'use', []))
- get = self.bld.get_tgen_by_name
- for x in names:
- try:
- y = get(x)
- except Errors.WafError:
- self.uselib.append(x)
- else:
- y.post()
- self.jar_task.run_after.update(y.tasks)
-
-class JTask(Task.Task):
- """
- Base class for java and jar tasks; provides functionality to run long commands
- """
- def split_argfile(self, cmd):
- inline = [cmd[0]]
- infile = []
- for x in cmd[1:]:
- # jar and javac do not want -J flags in @file
- if x.startswith('-J'):
- inline.append(x)
- else:
- infile.append(self.quote_flag(x))
- return (inline, infile)
-
-class jar_create(JTask):
- """
- Creates a jar file
- """
- color = 'GREEN'
- run_str = '${JAR} ${JARCREATE} ${TGT} ${JAROPTS}'
-
- def runnable_status(self):
- """
- Wait for dependent tasks to be executed, then read the
- files to update the list of inputs.
- """
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- if not self.inputs:
- try:
- self.inputs = [x for x in self.basedir.ant_glob(JAR_RE, remove=False) if id(x) != id(self.outputs[0])]
- except Exception:
- raise Errors.WafError('Could not find the basedir %r for %r' % (self.basedir, self))
- return super(jar_create, self).runnable_status()
-
-class javac(JTask):
- """
- Compiles java files
- """
- color = 'BLUE'
- run_str = '${JAVAC} -classpath ${CLASSPATH} -d ${OUTDIR} ${JAVACFLAGS} ${SRC}'
- vars = ['CLASSPATH', 'JAVACFLAGS', 'JAVAC', 'OUTDIR']
- """
- The javac task will be executed again if the variables CLASSPATH, JAVACFLAGS, JAVAC or OUTDIR change.
- """
- def uid(self):
- """Identify java tasks by input&output folder"""
- lst = [self.__class__.__name__, self.generator.outdir.abspath()]
- for x in self.srcdir:
- lst.append(x.abspath())
- return Utils.h_list(lst)
-
- def runnable_status(self):
- """
- Waits for dependent tasks to be complete, then read the file system to find the input nodes.
- """
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- if not self.inputs:
- self.inputs = []
- for x in self.srcdir:
- if x.exists():
- self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
- return super(javac, self).runnable_status()
-
- def post_run(self):
- """
- List class files created
- """
- for node in self.generator.outdir.ant_glob('**/*.class'):
- self.generator.bld.node_sigs[node] = self.uid()
- self.generator.bld.task_sigs[self.uid()] = self.cache_sig
-
-@feature('javadoc')
-@after_method('process_rule')
-def create_javadoc(self):
- """
- Creates a javadoc task (feature 'javadoc')
- """
- tsk = self.create_task('javadoc')
- tsk.classpath = getattr(self, 'classpath', [])
- self.javadoc_package = Utils.to_list(self.javadoc_package)
- if not isinstance(self.javadoc_output, Node.Node):
- self.javadoc_output = self.bld.path.find_or_declare(self.javadoc_output)
-
-class javadoc(Task.Task):
- """
- Builds java documentation
- """
- color = 'BLUE'
-
- def __str__(self):
- return '%s: %s -> %s\n' % (self.__class__.__name__, self.generator.srcdir, self.generator.javadoc_output)
-
- def run(self):
- env = self.env
- bld = self.generator.bld
- wd = bld.bldnode
-
- #add src node + bld node (for generated java code)
- srcpath = self.generator.path.abspath() + os.sep + self.generator.srcdir
- srcpath += os.pathsep
- srcpath += self.generator.path.get_bld().abspath() + os.sep + self.generator.srcdir
-
- classpath = env.CLASSPATH
- classpath += os.pathsep
- classpath += os.pathsep.join(self.classpath)
- classpath = "".join(classpath)
-
- self.last_cmd = lst = []
- lst.extend(Utils.to_list(env.JAVADOC))
- lst.extend(['-d', self.generator.javadoc_output.abspath()])
- lst.extend(['-sourcepath', srcpath])
- lst.extend(['-classpath', classpath])
- lst.extend(['-subpackages'])
- lst.extend(self.generator.javadoc_package)
- lst = [x for x in lst if x]
-
- self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, quiet=0)
-
- def post_run(self):
- nodes = self.generator.javadoc_output.ant_glob('**')
- for node in nodes:
- self.generator.bld.node_sigs[node] = self.uid()
- self.generator.bld.task_sigs[self.uid()] = self.cache_sig
-
-def configure(self):
- """
- Detects the javac, java and jar programs
- """
- # If JAVA_PATH is set, we prepend it to the path list
- java_path = self.environ['PATH'].split(os.pathsep)
- v = self.env
-
- if 'JAVA_HOME' in self.environ:
- java_path = [os.path.join(self.environ['JAVA_HOME'], 'bin')] + java_path
- self.env.JAVA_HOME = [self.environ['JAVA_HOME']]
-
- for x in 'javac java jar javadoc'.split():
- self.find_program(x, var=x.upper(), path_list=java_path)
-
- if 'CLASSPATH' in self.environ:
- v.CLASSPATH = self.environ['CLASSPATH']
-
- if not v.JAR:
- self.fatal('jar is required for making java packages')
- if not v.JAVAC:
- self.fatal('javac is required for compiling java classes')
-
- v.JARCREATE = 'cf' # can use cvf
- v.JAVACFLAGS = []
-
-@conf
-def check_java_class(self, classname, with_classpath=None):
- """
- Checks if the specified java class exists
-
- :param classname: class to check, like java.util.HashMap
- :type classname: string
- :param with_classpath: additional classpath to give
- :type with_classpath: string
- """
- javatestdir = '.waf-javatest'
-
- classpath = javatestdir
- if self.env.CLASSPATH:
- classpath += os.pathsep + self.env.CLASSPATH
- if isinstance(with_classpath, str):
- classpath += os.pathsep + with_classpath
-
- shutil.rmtree(javatestdir, True)
- os.mkdir(javatestdir)
-
- Utils.writef(os.path.join(javatestdir, 'Test.java'), class_check_source)
-
- # Compile the source
- self.exec_command(self.env.JAVAC + [os.path.join(javatestdir, 'Test.java')], shell=False)
-
- # Try to run the app
- cmd = self.env.JAVA + ['-cp', classpath, 'Test', classname]
- self.to_log("%s\n" % str(cmd))
- found = self.exec_command(cmd, shell=False)
-
- self.msg('Checking for java class %s' % classname, not found)
-
- shutil.rmtree(javatestdir, True)
-
- return found
-
-@conf
-def check_jni_headers(conf):
- """
- Checks for jni headers and libraries. On success the conf.env variables xxx_JAVA are added for use in C/C++ targets::
-
- def options(opt):
- opt.load('compiler_c')
-
- def configure(conf):
- conf.load('compiler_c java')
- conf.check_jni_headers()
-
- def build(bld):
- bld.shlib(source='a.c', target='app', use='JAVA')
- """
- if not conf.env.CC_NAME and not conf.env.CXX_NAME:
- conf.fatal('load a compiler first (gcc, g++, ..)')
-
- if not conf.env.JAVA_HOME:
- conf.fatal('set JAVA_HOME in the system environment')
-
- # jni requires the jvm
- javaHome = conf.env.JAVA_HOME[0]
-
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/include')
- if dir is None:
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0] + '/../Headers') # think different?!
- if dir is None:
- conf.fatal('JAVA_HOME does not seem to be set properly')
-
- f = dir.ant_glob('**/(jni|jni_md).h')
- incDirs = [x.parent.abspath() for x in f]
-
- dir = conf.root.find_dir(conf.env.JAVA_HOME[0])
- f = dir.ant_glob('**/*jvm.(so|dll|dylib)')
- libDirs = [x.parent.abspath() for x in f] or [javaHome]
-
- # On windows, we need both the .dll and .lib to link. On my JDK, they are
- # in different directories...
- f = dir.ant_glob('**/*jvm.(lib)')
- if f:
- libDirs = [[x, y.parent.abspath()] for x in libDirs for y in f]
-
- if conf.env.DEST_OS == 'freebsd':
- conf.env.append_unique('LINKFLAGS_JAVA', '-pthread')
- for d in libDirs:
- try:
- conf.check(header_name='jni.h', define_name='HAVE_JNI_H', lib='jvm',
- libpath=d, includes=incDirs, uselib_store='JAVA', uselib='JAVA')
- except Exception:
- pass
- else:
- break
- else:
- conf.fatal('could not find lib jvm in %r (see config.log)' % libDirs)
-
diff --git a/waflib/Tools/ldc2.py b/waflib/Tools/ldc2.py
deleted file mode 100644
index a51c344..0000000
--- a/waflib/Tools/ldc2.py
+++ /dev/null
@@ -1,56 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Alex Rønne Petersen, 2012 (alexrp/Zor)
-
-from waflib.Tools import ar, d
-from waflib.Configure import conf
-
-@conf
-def find_ldc2(conf):
- """
- Finds the program *ldc2* and set the variable *D*
- """
- conf.find_program(['ldc2'], var='D')
-
- out = conf.cmd_and_log(conf.env.D + ['-version'])
- if out.find("based on DMD v2.") == -1:
- conf.fatal("detected compiler is not ldc2")
-
-@conf
-def common_flags_ldc2(conf):
- """
- Sets the D flags required by *ldc2*
- """
- v = conf.env
-
- v.D_SRC_F = ['-c']
- v.D_TGT_F = '-of%s'
-
- v.D_LINKER = v.D
- v.DLNK_SRC_F = ''
- v.DLNK_TGT_F = '-of%s'
- v.DINC_ST = '-I%s'
-
- v.DSHLIB_MARKER = v.DSTLIB_MARKER = ''
- v.DSTLIB_ST = v.DSHLIB_ST = '-L-l%s'
- v.DSTLIBPATH_ST = v.DLIBPATH_ST = '-L-L%s'
-
- v.LINKFLAGS_dshlib = ['-L-shared']
-
- v.DHEADER_ext = '.di'
- v.DFLAGS_d_with_header = ['-H', '-Hf']
- v.D_HDR_F = '%s'
-
- v.LINKFLAGS = []
- v.DFLAGS_dshlib = ['-relocation-model=pic']
-
-def configure(conf):
- """
- Configuration for *ldc2*
- """
- conf.find_ldc2()
- conf.load('ar')
- conf.load('d')
- conf.common_flags_ldc2()
- conf.d_platform_flags()
-
diff --git a/waflib/Tools/lua.py b/waflib/Tools/lua.py
deleted file mode 100644
index 15a333a..0000000
--- a/waflib/Tools/lua.py
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Sebastian Schlingmann, 2008
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Lua support.
-
-Compile *.lua* files into *.luac*::
-
- def configure(conf):
- conf.load('lua')
- conf.env.LUADIR = '/usr/local/share/myapp/scripts/'
- def build(bld):
- bld(source='foo.lua')
-"""
-
-from waflib.TaskGen import extension
-from waflib import Task
-
-@extension('.lua')
-def add_lua(self, node):
- tsk = self.create_task('luac', node, node.change_ext('.luac'))
- inst_to = getattr(self, 'install_path', self.env.LUADIR and '${LUADIR}' or None)
- if inst_to:
- self.add_install_files(install_to=inst_to, install_from=tsk.outputs)
- return tsk
-
-class luac(Task.Task):
- run_str = '${LUAC} -s -o ${TGT} ${SRC}'
- color = 'PINK'
-
-def configure(conf):
- """
- Detect the luac compiler and set *conf.env.LUAC*
- """
- conf.find_program('luac', var='LUAC')
-
diff --git a/waflib/Tools/md5_tstamp.py b/waflib/Tools/md5_tstamp.py
deleted file mode 100644
index 6428e46..0000000
--- a/waflib/Tools/md5_tstamp.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Re-calculate md5 hashes of files only when the file times or the file
-size have changed.
-
-The hashes can also reflect either the file contents (STRONGEST=True) or the
-file time and file size.
-
-The performance benefits of this module are usually insignificant.
-"""
-
-import os, stat
-from waflib import Utils, Build, Node
-
-STRONGEST = True
-
-Build.SAVED_ATTRS.append('hashes_md5_tstamp')
-def h_file(self):
- filename = self.abspath()
- st = os.stat(filename)
-
- cache = self.ctx.hashes_md5_tstamp
- if filename in cache and cache[filename][0] == st.st_mtime:
- return cache[filename][1]
-
- if STRONGEST:
- ret = Utils.h_file(filename)
- else:
- if stat.S_ISDIR(st[stat.ST_MODE]):
- raise IOError('Not a file')
- ret = Utils.md5(str((st.st_mtime, st.st_size)).encode()).digest()
-
- cache[filename] = (st.st_mtime, ret)
- return ret
-h_file.__doc__ = Node.Node.h_file.__doc__
-Node.Node.h_file = h_file
-
diff --git a/waflib/Tools/msvc.py b/waflib/Tools/msvc.py
deleted file mode 100644
index 17b347d..0000000
--- a/waflib/Tools/msvc.py
+++ /dev/null
@@ -1,1020 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2006 (dv)
-# Tamas Pal, 2007 (folti)
-# Nicolas Mercier, 2009
-# Matt Clarkson, 2012
-
-"""
-Microsoft Visual C++/Intel C++ compiler support
-
-If you get detection problems, first try any of the following::
-
- chcp 65001
- set PYTHONIOENCODING=...
- set PYTHONLEGACYWINDOWSSTDIO=1
-
-Usage::
-
- $ waf configure --msvc_version="msvc 10.0,msvc 9.0" --msvc_target="x64"
-
-or::
-
- def configure(conf):
- conf.env.MSVC_VERSIONS = ['msvc 10.0', 'msvc 9.0', 'msvc 8.0', 'msvc 7.1', 'msvc 7.0', 'msvc 6.0', 'wsdk 7.0', 'intel 11', 'PocketPC 9.0', 'Smartphone 8.0']
- conf.env.MSVC_TARGETS = ['x64']
- conf.load('msvc')
-
-or::
-
- def configure(conf):
- conf.load('msvc', funs='no_autodetect')
- conf.check_lib_msvc('gdi32')
- conf.check_libs_msvc('kernel32 user32')
- def build(bld):
- tg = bld.program(source='main.c', target='app', use='KERNEL32 USER32 GDI32')
-
-Platforms and targets will be tested in the order they appear;
-the first good configuration will be used.
-
-To force testing all the configurations that are not used, use the ``--no-msvc-lazy`` option
-or set ``conf.env.MSVC_LAZY_AUTODETECT=False``.
-
-Supported platforms: ia64, x64, x86, x86_amd64, x86_ia64, x86_arm, amd64_x86, amd64_arm
-
-Compilers supported:
-
-* msvc => Visual Studio, versions 6.0 (VC 98, VC .NET 2002) to 15 (Visual Studio 2017)
-* wsdk => Windows SDK, versions 6.0, 6.1, 7.0, 7.1, 8.0
-* icl => Intel compiler, versions 9, 10, 11, 13
-* winphone => Visual Studio to target Windows Phone 8 native (version 8.0 for now)
-* Smartphone => Compiler/SDK for Smartphone devices (armv4/v4i)
-* PocketPC => Compiler/SDK for PocketPC devices (armv4/v4i)
-
-To use WAF in a VS2008 Make file project (see http://code.google.com/p/waf/issues/detail?id=894)
-You may consider to set the environment variable "VS_UNICODE_OUTPUT" to nothing before calling waf.
-So in your project settings use something like 'cmd.exe /C "set VS_UNICODE_OUTPUT=& set PYTHONUNBUFFERED=true & waf build"'.
-cmd.exe /C "chcp 1252 & set PYTHONUNBUFFERED=true && set && waf configure"
-Setting PYTHONUNBUFFERED gives the unbuffered output.
-"""
-
-import os, sys, re, traceback
-from waflib import Utils, Logs, Options, Errors
-from waflib.TaskGen import after_method, feature
-
-from waflib.Configure import conf
-from waflib.Tools import ccroot, c, cxx, ar
-
-g_msvc_systemlibs = '''
-aclui activeds ad1 adptif adsiid advapi32 asycfilt authz bhsupp bits bufferoverflowu cabinet
-cap certadm certidl ciuuid clusapi comctl32 comdlg32 comsupp comsuppd comsuppw comsuppwd comsvcs
-credui crypt32 cryptnet cryptui d3d8thk daouuid dbgeng dbghelp dciman32 ddao35 ddao35d
-ddao35u ddao35ud delayimp dhcpcsvc dhcpsapi dlcapi dnsapi dsprop dsuiext dtchelp
-faultrep fcachdll fci fdi framedyd framedyn gdi32 gdiplus glauxglu32 gpedit gpmuuid
-gtrts32w gtrtst32hlink htmlhelp httpapi icm32 icmui imagehlp imm32 iphlpapi iprop
-kernel32 ksguid ksproxy ksuser libcmt libcmtd libcpmt libcpmtd loadperf lz32 mapi
-mapi32 mgmtapi minidump mmc mobsync mpr mprapi mqoa mqrt msacm32 mscms mscoree
-msdasc msimg32 msrating mstask msvcmrt msvcurt msvcurtd mswsock msxml2 mtx mtxdm
-netapi32 nmapinmsupp npptools ntdsapi ntdsbcli ntmsapi ntquery odbc32 odbcbcp
-odbccp32 oldnames ole32 oleacc oleaut32 oledb oledlgolepro32 opends60 opengl32
-osptk parser pdh penter pgobootrun pgort powrprof psapi ptrustm ptrustmd ptrustu
-ptrustud qosname rasapi32 rasdlg rassapi resutils riched20 rpcndr rpcns4 rpcrt4 rtm
-rtutils runtmchk scarddlg scrnsave scrnsavw secur32 sensapi setupapi sfc shell32
-shfolder shlwapi sisbkup snmpapi sporder srclient sti strsafe svcguid tapi32 thunk32
-traffic unicows url urlmon user32 userenv usp10 uuid uxtheme vcomp vcompd vdmdbg
-version vfw32 wbemuuid webpost wiaguid wininet winmm winscard winspool winstrm
-wintrust wldap32 wmiutils wow32 ws2_32 wsnmp32 wsock32 wst wtsapi32 xaswitch xolehlp
-'''.split()
-"""importlibs provided by MSVC/Platform SDK. Do NOT search them"""
-
-all_msvc_platforms = [ ('x64', 'amd64'), ('x86', 'x86'), ('ia64', 'ia64'),
- ('x86_amd64', 'amd64'), ('x86_ia64', 'ia64'), ('x86_arm', 'arm'), ('x86_arm64', 'arm64'),
- ('amd64_x86', 'x86'), ('amd64_arm', 'arm'), ('amd64_arm64', 'arm64') ]
-"""List of msvc platforms"""
-
-all_wince_platforms = [ ('armv4', 'arm'), ('armv4i', 'arm'), ('mipsii', 'mips'), ('mipsii_fp', 'mips'), ('mipsiv', 'mips'), ('mipsiv_fp', 'mips'), ('sh4', 'sh'), ('x86', 'cex86') ]
-"""List of wince platforms"""
-
-all_icl_platforms = [ ('intel64', 'amd64'), ('em64t', 'amd64'), ('ia32', 'x86'), ('Itanium', 'ia64')]
-"""List of icl platforms"""
-
-def options(opt):
- opt.add_option('--msvc_version', type='string', help = 'msvc version, eg: "msvc 10.0,msvc 9.0"', default='')
- opt.add_option('--msvc_targets', type='string', help = 'msvc targets, eg: "x64,arm"', default='')
- opt.add_option('--no-msvc-lazy', action='store_false', help = 'lazily check msvc target environments', default=True, dest='msvc_lazy')
-
-@conf
-def setup_msvc(conf, versiondict):
- """
- Checks installed compilers and targets and returns the first combination from the user's
- options, env, or the global supported lists that checks.
-
- :param versiondict: dict(platform -> dict(architecture -> configuration))
- :type versiondict: dict(string -> dict(string -> target_compiler)
- :return: the compiler, revision, path, include dirs, library paths and target architecture
- :rtype: tuple of strings
- """
- platforms = getattr(Options.options, 'msvc_targets', '').split(',')
- if platforms == ['']:
- platforms=Utils.to_list(conf.env.MSVC_TARGETS) or [i for i,j in all_msvc_platforms+all_icl_platforms+all_wince_platforms]
- desired_versions = getattr(Options.options, 'msvc_version', '').split(',')
- if desired_versions == ['']:
- desired_versions = conf.env.MSVC_VERSIONS or list(reversed(sorted(versiondict.keys())))
-
- # Override lazy detection by evaluating after the fact.
- lazy_detect = getattr(Options.options, 'msvc_lazy', True)
- if conf.env.MSVC_LAZY_AUTODETECT is False:
- lazy_detect = False
-
- if not lazy_detect:
- for val in versiondict.values():
- for arch in list(val.keys()):
- cfg = val[arch]
- cfg.evaluate()
- if not cfg.is_valid:
- del val[arch]
- conf.env.MSVC_INSTALLED_VERSIONS = versiondict
-
- for version in desired_versions:
- Logs.debug('msvc: detecting %r - %r', version, desired_versions)
- try:
- targets = versiondict[version]
- except KeyError:
- continue
-
- seen = set()
- for arch in platforms:
- if arch in seen:
- continue
- else:
- seen.add(arch)
- try:
- cfg = targets[arch]
- except KeyError:
- continue
-
- cfg.evaluate()
- if cfg.is_valid:
- compiler,revision = version.rsplit(' ', 1)
- return compiler,revision,cfg.bindirs,cfg.incdirs,cfg.libdirs,cfg.cpu
- conf.fatal('msvc: Impossible to find a valid architecture for building %r - %r' % (desired_versions, list(versiondict.keys())))
-
-@conf
-def get_msvc_version(conf, compiler, version, target, vcvars):
- """
- Checks that an installed compiler actually runs and uses vcvars to obtain the
- environment needed by the compiler.
-
- :param compiler: compiler type, for looking up the executable name
- :param version: compiler version, for debugging only
- :param target: target architecture
- :param vcvars: batch file to run to check the environment
- :return: the location of the compiler executable, the location of include dirs, and the library paths
- :rtype: tuple of strings
- """
- Logs.debug('msvc: get_msvc_version: %r %r %r', compiler, version, target)
-
- try:
- conf.msvc_cnt += 1
- except AttributeError:
- conf.msvc_cnt = 1
- batfile = conf.bldnode.make_node('waf-print-msvc-%d.bat' % conf.msvc_cnt)
- batfile.write("""@echo off
-set INCLUDE=
-set LIB=
-call "%s" %s
-echo PATH=%%PATH%%
-echo INCLUDE=%%INCLUDE%%
-echo LIB=%%LIB%%;%%LIBPATH%%
-""" % (vcvars,target))
- sout = conf.cmd_and_log(['cmd.exe', '/E:on', '/V:on', '/C', batfile.abspath()])
- lines = sout.splitlines()
-
- if not lines[0]:
- lines.pop(0)
-
- MSVC_PATH = MSVC_INCDIR = MSVC_LIBDIR = None
- for line in lines:
- if line.startswith('PATH='):
- path = line[5:]
- MSVC_PATH = path.split(';')
- elif line.startswith('INCLUDE='):
- MSVC_INCDIR = [i for i in line[8:].split(';') if i]
- elif line.startswith('LIB='):
- MSVC_LIBDIR = [i for i in line[4:].split(';') if i]
- if None in (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR):
- conf.fatal('msvc: Could not find a valid architecture for building (get_msvc_version_3)')
-
- # Check if the compiler is usable at all.
- # The detection may return 64-bit versions even on 32-bit systems, and these would fail to run.
- env = dict(os.environ)
- env.update(PATH = path)
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- cxx = conf.find_program(compiler_name, path_list=MSVC_PATH)
-
- # delete CL if exists. because it could contain parameters which can change cl's behaviour rather catastrophically.
- if 'CL' in env:
- del(env['CL'])
-
- try:
- conf.cmd_and_log(cxx + ['/help'], env=env)
- except UnicodeError:
- st = traceback.format_exc()
- if conf.logger:
- conf.logger.error(st)
- conf.fatal('msvc: Unicode error - check the code page?')
- except Exception as e:
- Logs.debug('msvc: get_msvc_version: %r %r %r -> failure %s', compiler, version, target, str(e))
- conf.fatal('msvc: cannot run the compiler in get_msvc_version (run with -v to display errors)')
- else:
- Logs.debug('msvc: get_msvc_version: %r %r %r -> OK', compiler, version, target)
- finally:
- conf.env[compiler_name] = ''
-
- return (MSVC_PATH, MSVC_INCDIR, MSVC_LIBDIR)
-
-def gather_wince_supported_platforms():
- """
- Checks SmartPhones SDKs
-
- :param versions: list to modify
- :type versions: list
- """
- supported_wince_platforms = []
- try:
- ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
- try:
- ce_sdk = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Windows CE Tools\\SDKs')
- except OSError:
- ce_sdk = ''
- if not ce_sdk:
- return supported_wince_platforms
-
- index = 0
- while 1:
- try:
- sdk_device = Utils.winreg.EnumKey(ce_sdk, index)
- sdk = Utils.winreg.OpenKey(ce_sdk, sdk_device)
- except OSError:
- break
- index += 1
- try:
- path,type = Utils.winreg.QueryValueEx(sdk, 'SDKRootDir')
- except OSError:
- try:
- path,type = Utils.winreg.QueryValueEx(sdk,'SDKInformation')
- except OSError:
- continue
- path,xml = os.path.split(path)
- path = str(path)
- path,device = os.path.split(path)
- if not device:
- path,device = os.path.split(path)
- platforms = []
- for arch,compiler in all_wince_platforms:
- if os.path.isdir(os.path.join(path, device, 'Lib', arch)):
- platforms.append((arch, compiler, os.path.join(path, device, 'Include', arch), os.path.join(path, device, 'Lib', arch)))
- if platforms:
- supported_wince_platforms.append((device, platforms))
- return supported_wince_platforms
-
-def gather_msvc_detected_versions():
- #Detected MSVC versions!
- version_pattern = re.compile('^(\d\d?\.\d\d?)(Exp)?$')
- detected_versions = []
- for vcver,vcvar in (('VCExpress','Exp'), ('VisualStudio','')):
- prefix = 'SOFTWARE\\Wow6432node\\Microsoft\\' + vcver
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except OSError:
- prefix = 'SOFTWARE\\Microsoft\\' + vcver
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, prefix)
- except OSError:
- continue
-
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- match = version_pattern.match(version)
- if match:
- versionnumber = float(match.group(1))
- else:
- continue
- detected_versions.append((versionnumber, version+vcvar, prefix+'\\'+version))
- def fun(tup):
- return tup[0]
-
- detected_versions.sort(key = fun)
- return detected_versions
-
-class target_compiler(object):
- """
- Wrap a compiler configuration; call evaluate() to determine
- whether the configuration is usable.
- """
- def __init__(self, ctx, compiler, cpu, version, bat_target, bat, callback=None):
- """
- :param ctx: configuration context to use to eventually get the version environment
- :param compiler: compiler name
- :param cpu: target cpu
- :param version: compiler version number
- :param bat_target: ?
- :param bat: path to the batch file to run
- """
- self.conf = ctx
- self.name = None
- self.is_valid = False
- self.is_done = False
-
- self.compiler = compiler
- self.cpu = cpu
- self.version = version
- self.bat_target = bat_target
- self.bat = bat
- self.callback = callback
-
- def evaluate(self):
- if self.is_done:
- return
- self.is_done = True
- try:
- vs = self.conf.get_msvc_version(self.compiler, self.version, self.bat_target, self.bat)
- except Errors.ConfigurationError:
- self.is_valid = False
- return
- if self.callback:
- vs = self.callback(self, vs)
- self.is_valid = True
- (self.bindirs, self.incdirs, self.libdirs) = vs
-
- def __str__(self):
- return str((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
-
- def __repr__(self):
- return repr((self.compiler, self.cpu, self.version, self.bat_target, self.bat))
-
-@conf
-def gather_wsdk_versions(conf, versions):
- """
- Use winreg to add the msvc versions to the input list
-
- :param versions: list to modify
- :type versions: list
- """
- version_pattern = re.compile('^v..?.?\...?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Microsoft\\Microsoft SDKs\\Windows')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Microsoft\\Microsoft SDKs\\Windows')
- except OSError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- try:
- msvc_version = Utils.winreg.OpenKey(all_versions, version)
- path,type = Utils.winreg.QueryValueEx(msvc_version,'InstallationFolder')
- except OSError:
- continue
- if path and os.path.isfile(os.path.join(path, 'bin', 'SetEnv.cmd')):
- targets = {}
- for target,arch in all_msvc_platforms:
- targets[target] = target_compiler(conf, 'wsdk', arch, version, '/'+target, os.path.join(path, 'bin', 'SetEnv.cmd'))
- versions['wsdk ' + version[1:]] = targets
-
-@conf
-def gather_msvc_targets(conf, versions, version, vc_path):
- #Looking for normal MSVC compilers!
- targets = {}
-
- if os.path.isfile(os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat')):
- for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'VC', 'Auxiliary', 'Build', 'vcvarsall.bat'))
- elif os.path.isfile(os.path.join(vc_path, 'vcvarsall.bat')):
- for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'msvc', realtarget, version, target, os.path.join(vc_path, 'vcvarsall.bat'))
- elif os.path.isfile(os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat')):
- targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, 'x86', os.path.join(vc_path, 'Common7', 'Tools', 'vsvars32.bat'))
- elif os.path.isfile(os.path.join(vc_path, 'Bin', 'vcvars32.bat')):
- targets['x86'] = target_compiler(conf, 'msvc', 'x86', version, '', os.path.join(vc_path, 'Bin', 'vcvars32.bat'))
- if targets:
- versions['msvc %s' % version] = targets
-
-@conf
-def gather_wince_targets(conf, versions, version, vc_path, vsvars, supported_platforms):
- #Looking for Win CE compilers!
- for device,platforms in supported_platforms:
- targets = {}
- for platform,compiler,include,lib in platforms:
- winCEpath = os.path.join(vc_path, 'ce')
- if not os.path.isdir(winCEpath):
- continue
-
- if os.path.isdir(os.path.join(winCEpath, 'lib', platform)):
- bindirs = [os.path.join(winCEpath, 'bin', compiler), os.path.join(winCEpath, 'bin', 'x86_'+compiler)]
- incdirs = [os.path.join(winCEpath, 'include'), os.path.join(winCEpath, 'atlmfc', 'include'), include]
- libdirs = [os.path.join(winCEpath, 'lib', platform), os.path.join(winCEpath, 'atlmfc', 'lib', platform), lib]
- def combine_common(obj, compiler_env):
- # TODO this is likely broken, remove in waf 2.1
- (common_bindirs,_1,_2) = compiler_env
- return (bindirs + common_bindirs, incdirs, libdirs)
- targets[platform] = target_compiler(conf, 'msvc', platform, version, 'x86', vsvars, combine_common)
- if targets:
- versions[device + ' ' + version] = targets
-
-@conf
-def gather_winphone_targets(conf, versions, version, vc_path, vsvars):
- #Looking for WinPhone compilers
- targets = {}
- for target,realtarget in all_msvc_platforms[::-1]:
- targets[target] = target_compiler(conf, 'winphone', realtarget, version, target, vsvars)
- if targets:
- versions['winphone ' + version] = targets
-
-@conf
-def gather_vswhere_versions(conf, versions):
- try:
- import json
- except ImportError:
- Logs.error('Visual Studio 2017 detection requires Python 2.6')
- return
-
- prg_path = os.environ.get('ProgramFiles(x86)', os.environ.get('ProgramFiles', 'C:\\Program Files (x86)'))
-
- vswhere = os.path.join(prg_path, 'Microsoft Visual Studio', 'Installer', 'vswhere.exe')
- args = [vswhere, '-products', '*', '-legacy', '-format', 'json']
- try:
- txt = conf.cmd_and_log(args)
- except Errors.WafError as e:
- Logs.debug('msvc: vswhere.exe failed %s', e)
- return
-
- if sys.version_info[0] < 3:
- txt = txt.decode(Utils.console_encoding())
-
- arr = json.loads(txt)
- arr.sort(key=lambda x: x['installationVersion'])
- for entry in arr:
- ver = entry['installationVersion']
- ver = str('.'.join(ver.split('.')[:2]))
- path = str(os.path.abspath(entry['installationPath']))
- if os.path.exists(path) and ('msvc %s' % ver) not in versions:
- conf.gather_msvc_targets(versions, ver, path)
-
-@conf
-def gather_msvc_versions(conf, versions):
- vc_paths = []
- for (v,version,reg) in gather_msvc_detected_versions():
- try:
- try:
- msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\VC")
- except OSError:
- msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, reg + "\\Setup\\Microsoft Visual C++")
- path,type = Utils.winreg.QueryValueEx(msvc_version, 'ProductDir')
- except OSError:
- try:
- msvc_version = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, "SOFTWARE\\Wow6432node\\Microsoft\\VisualStudio\\SxS\\VS7")
- path,type = Utils.winreg.QueryValueEx(msvc_version, version)
- except OSError:
- continue
- else:
- vc_paths.append((version, os.path.abspath(str(path))))
- continue
- else:
- vc_paths.append((version, os.path.abspath(str(path))))
-
- wince_supported_platforms = gather_wince_supported_platforms()
-
- for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
- vsvars = os.path.join(vs_path, 'Common7', 'Tools', 'vsvars32.bat')
- if wince_supported_platforms and os.path.isfile(vsvars):
- conf.gather_wince_targets(versions, version, vc_path, vsvars, wince_supported_platforms)
-
- # WP80 works with 11.0Exp and 11.0, both of which resolve to the same vc_path.
- # Stop after one is found.
- for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
- vsvars = os.path.join(vs_path, 'VC', 'WPSDK', 'WP80', 'vcvarsphoneall.bat')
- if os.path.isfile(vsvars):
- conf.gather_winphone_targets(versions, '8.0', vc_path, vsvars)
- break
-
- for version,vc_path in vc_paths:
- vs_path = os.path.dirname(vc_path)
- conf.gather_msvc_targets(versions, version, vc_path)
-
-@conf
-def gather_icl_versions(conf, versions):
- """
- Checks ICL compilers
-
- :param versions: list to modify
- :type versions: list
- """
- version_pattern = re.compile('^...?.?\....?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Compilers\\C++')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Compilers\\C++')
- except OSError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- targets = {}
- for target,arch in all_icl_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
- try:
- Utils.winreg.OpenKey(all_versions,version+'\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version)
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
- for target,arch in all_icl_platforms:
- try:
- icl_version = Utils.winreg.OpenKey(all_versions, version+'\\'+target)
- path,type = Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- continue
- else:
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
- major = version[0:2]
- versions['intel ' + major] = targets
-
-@conf
-def gather_intel_composer_versions(conf, versions):
- """
- Checks ICL compilers that are part of Intel Composer Suites
-
- :param versions: list to modify
- :type versions: list
- """
- version_pattern = re.compile('^...?.?\...?.?.?')
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Wow6432node\\Intel\\Suites')
- except OSError:
- try:
- all_versions = Utils.winreg.OpenKey(Utils.winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Intel\\Suites')
- except OSError:
- return
- index = 0
- while 1:
- try:
- version = Utils.winreg.EnumKey(all_versions, index)
- except OSError:
- break
- index += 1
- if not version_pattern.match(version):
- continue
- targets = {}
- for target,arch in all_icl_platforms:
- if target=='intel64':
- targetDir='EM64T_NATIVE'
- else:
- targetDir=target
- try:
- try:
- defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\'+targetDir)
- except OSError:
- if targetDir == 'EM64T_NATIVE':
- defaults = Utils.winreg.OpenKey(all_versions,version+'\\Defaults\\C++\\EM64T')
- else:
- raise
- uid,type = Utils.winreg.QueryValueEx(defaults, 'SubKey')
- Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++\\'+targetDir)
- icl_version=Utils.winreg.OpenKey(all_versions,version+'\\'+uid+'\\C++')
- path,type=Utils.winreg.QueryValueEx(icl_version,'ProductDir')
- except OSError:
- pass
- else:
- batch_file=os.path.join(path,'bin','iclvars.bat')
- if os.path.isfile(batch_file):
- targets[target] = target_compiler(conf, 'intel', arch, version, target, batch_file)
- # The intel compilervar_arch.bat is broken when used with Visual Studio Express 2012
- # http://software.intel.com/en-us/forums/topic/328487
- compilervars_warning_attr = '_compilervars_warning_key'
- if version[0:2] == '13' and getattr(conf, compilervars_warning_attr, True):
- setattr(conf, compilervars_warning_attr, False)
- patch_url = 'http://software.intel.com/en-us/forums/topic/328487'
- compilervars_arch = os.path.join(path, 'bin', 'compilervars_arch.bat')
- for vscomntool in ('VS110COMNTOOLS', 'VS100COMNTOOLS'):
- if vscomntool in os.environ:
- vs_express_path = os.environ[vscomntool] + r'..\IDE\VSWinExpress.exe'
- dev_env_path = os.environ[vscomntool] + r'..\IDE\devenv.exe'
- if (r'if exist "%VS110COMNTOOLS%..\IDE\VSWinExpress.exe"' in Utils.readf(compilervars_arch) and
- not os.path.exists(vs_express_path) and not os.path.exists(dev_env_path)):
- Logs.warn(('The Intel compilervar_arch.bat only checks for one Visual Studio SKU '
- '(VSWinExpress.exe) but it does not seem to be installed at %r. '
- 'The intel command line set up will fail to configure unless the file %r'
- 'is patched. See: %s') % (vs_express_path, compilervars_arch, patch_url))
- major = version[0:2]
- versions['intel ' + major] = targets
-
-@conf
-def detect_msvc(self):
- return self.setup_msvc(self.get_msvc_versions())
-
-@conf
-def get_msvc_versions(self):
- """
- :return: platform to compiler configurations
- :rtype: dict
- """
- dct = Utils.ordered_iter_dict()
- self.gather_icl_versions(dct)
- self.gather_intel_composer_versions(dct)
- self.gather_wsdk_versions(dct)
- self.gather_msvc_versions(dct)
- self.gather_vswhere_versions(dct)
- Logs.debug('msvc: detected versions %r', list(dct.keys()))
- return dct
-
-@conf
-def find_lt_names_msvc(self, libname, is_static=False):
- """
- Win32/MSVC specific code to glean out information from libtool la files.
- this function is not attached to the task_gen class. Returns a triplet:
- (library absolute path, library name without extension, whether the library is static)
- """
- lt_names=[
- 'lib%s.la' % libname,
- '%s.la' % libname,
- ]
-
- for path in self.env.LIBPATH:
- for la in lt_names:
- laf=os.path.join(path,la)
- dll=None
- if os.path.exists(laf):
- ltdict = Utils.read_la_file(laf)
- lt_libdir=None
- if ltdict.get('libdir', ''):
- lt_libdir = ltdict['libdir']
- if not is_static and ltdict.get('library_names', ''):
- dllnames=ltdict['library_names'].split()
- dll=dllnames[0].lower()
- dll=re.sub('\.dll$', '', dll)
- return (lt_libdir, dll, False)
- elif ltdict.get('old_library', ''):
- olib=ltdict['old_library']
- if os.path.exists(os.path.join(path,olib)):
- return (path, olib, True)
- elif lt_libdir != '' and os.path.exists(os.path.join(lt_libdir,olib)):
- return (lt_libdir, olib, True)
- else:
- return (None, olib, True)
- else:
- raise self.errors.WafError('invalid libtool object file: %s' % laf)
- return (None, None, None)
-
-@conf
-def libname_msvc(self, libname, is_static=False):
- lib = libname.lower()
- lib = re.sub('\.lib$','',lib)
-
- if lib in g_msvc_systemlibs:
- return lib
-
- lib=re.sub('^lib','',lib)
-
- if lib == 'm':
- return None
-
- (lt_path, lt_libname, lt_static) = self.find_lt_names_msvc(lib, is_static)
-
- if lt_path != None and lt_libname != None:
- if lt_static:
- # file existence check has been made by find_lt_names
- return os.path.join(lt_path,lt_libname)
-
- if lt_path != None:
- _libpaths = [lt_path] + self.env.LIBPATH
- else:
- _libpaths = self.env.LIBPATH
-
- static_libs=[
- 'lib%ss.lib' % lib,
- 'lib%s.lib' % lib,
- '%ss.lib' % lib,
- '%s.lib' %lib,
- ]
-
- dynamic_libs=[
- 'lib%s.dll.lib' % lib,
- 'lib%s.dll.a' % lib,
- '%s.dll.lib' % lib,
- '%s.dll.a' % lib,
- 'lib%s_d.lib' % lib,
- '%s_d.lib' % lib,
- '%s.lib' %lib,
- ]
-
- libnames=static_libs
- if not is_static:
- libnames=dynamic_libs + static_libs
-
- for path in _libpaths:
- for libn in libnames:
- if os.path.exists(os.path.join(path, libn)):
- Logs.debug('msvc: lib found: %s', os.path.join(path,libn))
- return re.sub('\.lib$', '',libn)
-
- #if no lib can be found, just return the libname as msvc expects it
- self.fatal('The library %r could not be found' % libname)
- return re.sub('\.lib$', '', libname)
-
-@conf
-def check_lib_msvc(self, libname, is_static=False, uselib_store=None):
- """
- Ideally we should be able to place the lib in the right env var, either STLIB or LIB,
- but we don't distinguish static libs from shared libs.
- This is ok since msvc doesn't have any special linker flag to select static libs (no env.STLIB_MARKER)
- """
- libn = self.libname_msvc(libname, is_static)
-
- if not uselib_store:
- uselib_store = libname.upper()
-
- if False and is_static: # disabled
- self.env['STLIB_' + uselib_store] = [libn]
- else:
- self.env['LIB_' + uselib_store] = [libn]
-
-@conf
-def check_libs_msvc(self, libnames, is_static=False):
- for libname in Utils.to_list(libnames):
- self.check_lib_msvc(libname, is_static)
-
-def configure(conf):
- """
- Configuration methods to call for detecting msvc
- """
- conf.autodetect(True)
- conf.find_msvc()
- conf.msvc_common_flags()
- conf.cc_load_tools()
- conf.cxx_load_tools()
- conf.cc_add_flags()
- conf.cxx_add_flags()
- conf.link_add_flags()
- conf.visual_studio_add_flags()
-
-@conf
-def no_autodetect(conf):
- conf.env.NO_MSVC_DETECT = 1
- configure(conf)
-
-@conf
-def autodetect(conf, arch=False):
- v = conf.env
- if v.NO_MSVC_DETECT:
- return
-
- compiler, version, path, includes, libdirs, cpu = conf.detect_msvc()
- if arch:
- v.DEST_CPU = cpu
-
- v.PATH = path
- v.INCLUDES = includes
- v.LIBPATH = libdirs
- v.MSVC_COMPILER = compiler
- try:
- v.MSVC_VERSION = float(version)
- except ValueError:
- v.MSVC_VERSION = float(version[:-3])
-
-def _get_prog_names(conf, compiler):
- if compiler == 'intel':
- compiler_name = 'ICL'
- linker_name = 'XILINK'
- lib_name = 'XILIB'
- else:
- # assumes CL.exe
- compiler_name = 'CL'
- linker_name = 'LINK'
- lib_name = 'LIB'
- return compiler_name, linker_name, lib_name
-
-@conf
-def find_msvc(conf):
- """Due to path format limitations, limit operation only to native Win32. Yeah it sucks."""
- if sys.platform == 'cygwin':
- conf.fatal('MSVC module does not work under cygwin Python!')
-
- # the autodetection is supposed to be performed before entering in this method
- v = conf.env
- path = v.PATH
- compiler = v.MSVC_COMPILER
- version = v.MSVC_VERSION
-
- compiler_name, linker_name, lib_name = _get_prog_names(conf, compiler)
- v.MSVC_MANIFEST = (compiler == 'msvc' and version >= 8) or (compiler == 'wsdk' and version >= 6) or (compiler == 'intel' and version >= 11)
-
- # compiler
- cxx = conf.find_program(compiler_name, var='CXX', path_list=path)
-
- # before setting anything, check if the compiler is really msvc
- env = dict(conf.environ)
- if path:
- env.update(PATH = ';'.join(path))
- if not conf.cmd_and_log(cxx + ['/nologo', '/help'], env=env):
- conf.fatal('the msvc compiler could not be identified')
-
- # c/c++ compiler
- v.CC = v.CXX = cxx
- v.CC_NAME = v.CXX_NAME = 'msvc'
-
- # linker
- if not v.LINK_CXX:
- conf.find_program(linker_name, path_list=path, errmsg='%s was not found (linker)' % linker_name, var='LINK_CXX')
-
- if not v.LINK_CC:
- v.LINK_CC = v.LINK_CXX
-
- # staticlib linker
- if not v.AR:
- stliblink = conf.find_program(lib_name, path_list=path, var='AR')
- if not stliblink:
- return
- v.ARFLAGS = ['/nologo']
-
- # manifest tool. Not required for VS 2003 and below. Must have for VS 2005 and later
- if v.MSVC_MANIFEST:
- conf.find_program('MT', path_list=path, var='MT')
- v.MTFLAGS = ['/nologo']
-
- try:
- conf.load('winres')
- except Errors.ConfigurationError:
- Logs.warn('Resource compiler not found. Compiling resource file is disabled')
-
-@conf
-def visual_studio_add_flags(self):
- """visual studio flags found in the system environment"""
- v = self.env
- if self.environ.get('INCLUDE'):
- v.prepend_value('INCLUDES', [x for x in self.environ['INCLUDE'].split(';') if x]) # notice the 'S'
- if self.environ.get('LIB'):
- v.prepend_value('LIBPATH', [x for x in self.environ['LIB'].split(';') if x])
-
-@conf
-def msvc_common_flags(conf):
- """
- Setup the flags required for executing the msvc compiler
- """
- v = conf.env
-
- v.DEST_BINFMT = 'pe'
- v.append_value('CFLAGS', ['/nologo'])
- v.append_value('CXXFLAGS', ['/nologo'])
- v.append_value('LINKFLAGS', ['/nologo'])
- v.DEFINES_ST = '/D%s'
-
- v.CC_SRC_F = ''
- v.CC_TGT_F = ['/c', '/Fo']
- v.CXX_SRC_F = ''
- v.CXX_TGT_F = ['/c', '/Fo']
-
- if (v.MSVC_COMPILER == 'msvc' and v.MSVC_VERSION >= 8) or (v.MSVC_COMPILER == 'wsdk' and v.MSVC_VERSION >= 6):
- v.CC_TGT_F = ['/FC'] + v.CC_TGT_F
- v.CXX_TGT_F = ['/FC'] + v.CXX_TGT_F
-
- v.CPPPATH_ST = '/I%s' # template for adding include paths
-
- v.AR_TGT_F = v.CCLNK_TGT_F = v.CXXLNK_TGT_F = '/OUT:'
-
- # CRT specific flags
- v.CFLAGS_CRT_MULTITHREADED = v.CXXFLAGS_CRT_MULTITHREADED = ['/MT']
- v.CFLAGS_CRT_MULTITHREADED_DLL = v.CXXFLAGS_CRT_MULTITHREADED_DLL = ['/MD']
-
- v.CFLAGS_CRT_MULTITHREADED_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DBG = ['/MTd']
- v.CFLAGS_CRT_MULTITHREADED_DLL_DBG = v.CXXFLAGS_CRT_MULTITHREADED_DLL_DBG = ['/MDd']
-
- v.LIB_ST = '%s.lib'
- v.LIBPATH_ST = '/LIBPATH:%s'
- v.STLIB_ST = '%s.lib'
- v.STLIBPATH_ST = '/LIBPATH:%s'
-
- if v.MSVC_MANIFEST:
- v.append_value('LINKFLAGS', ['/MANIFEST'])
-
- v.CFLAGS_cshlib = []
- v.CXXFLAGS_cxxshlib = []
- v.LINKFLAGS_cshlib = v.LINKFLAGS_cxxshlib = ['/DLL']
- v.cshlib_PATTERN = v.cxxshlib_PATTERN = '%s.dll'
- v.implib_PATTERN = '%s.lib'
- v.IMPLIB_ST = '/IMPLIB:%s'
-
- v.LINKFLAGS_cstlib = []
- v.cstlib_PATTERN = v.cxxstlib_PATTERN = '%s.lib'
-
- v.cprogram_PATTERN = v.cxxprogram_PATTERN = '%s.exe'
-
- v.def_PATTERN = '/def:%s'
-
-
-#######################################################################################################
-##### conf above, build below
-
-@after_method('apply_link')
-@feature('c', 'cxx')
-def apply_flags_msvc(self):
- """
- Add additional flags implied by msvc, such as subsystems and pdb files::
-
- def build(bld):
- bld.stlib(source='main.c', target='bar', subsystem='gruik')
- """
- if self.env.CC_NAME != 'msvc' or not getattr(self, 'link_task', None):
- return
-
- is_static = isinstance(self.link_task, ccroot.stlink_task)
-
- subsystem = getattr(self, 'subsystem', '')
- if subsystem:
- subsystem = '/subsystem:%s' % subsystem
- flags = is_static and 'ARFLAGS' or 'LINKFLAGS'
- self.env.append_value(flags, subsystem)
-
- if not is_static:
- for f in self.env.LINKFLAGS:
- d = f.lower()
- if d[1:] == 'debug':
- pdbnode = self.link_task.outputs[0].change_ext('.pdb')
- self.link_task.outputs.append(pdbnode)
-
- if getattr(self, 'install_task', None):
- self.pdb_install_task = self.add_install_files(
- install_to=self.install_task.install_to, install_from=pdbnode)
- break
-
-@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib')
-@after_method('apply_link')
-def apply_manifest(self):
- """
- Special linker for MSVC with support for embedding manifests into DLL's
- and executables compiled by Visual Studio 2005 or probably later. Without
- the manifest file, the binaries are unusable.
- See: http://msdn2.microsoft.com/en-us/library/ms235542(VS.80).aspx
- """
- if self.env.CC_NAME == 'msvc' and self.env.MSVC_MANIFEST and getattr(self, 'link_task', None):
- out_node = self.link_task.outputs[0]
- man_node = out_node.parent.find_or_declare(out_node.name + '.manifest')
- self.link_task.outputs.append(man_node)
- self.env.DO_MANIFEST = True
-
-def make_winapp(self, family):
- append = self.env.append_unique
- append('DEFINES', 'WINAPI_FAMILY=%s' % family)
- append('CXXFLAGS', ['/ZW', '/TP'])
- for lib_path in self.env.LIBPATH:
- append('CXXFLAGS','/AI%s'%lib_path)
-
-@feature('winphoneapp')
-@after_method('process_use')
-@after_method('propagate_uselib_vars')
-def make_winphone_app(self):
- """
- Insert configuration flags for windows phone applications (adds /ZW, /TP...)
- """
- make_winapp(self, 'WINAPI_FAMILY_PHONE_APP')
- self.env.append_unique('LINKFLAGS', ['/NODEFAULTLIB:ole32.lib', 'PhoneAppModelHost.lib'])
-
-@feature('winapp')
-@after_method('process_use')
-@after_method('propagate_uselib_vars')
-def make_windows_app(self):
- """
- Insert configuration flags for windows applications (adds /ZW, /TP...)
- """
- make_winapp(self, 'WINAPI_FAMILY_DESKTOP_APP')
diff --git a/waflib/Tools/nasm.py b/waflib/Tools/nasm.py
deleted file mode 100644
index 411d582..0000000
--- a/waflib/Tools/nasm.py
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2018 (ita)
-
-"""
-Nasm tool (asm processing)
-"""
-
-import os
-import waflib.Tools.asm # leave this
-from waflib.TaskGen import feature
-
-@feature('asm')
-def apply_nasm_vars(self):
- """provided for compatibility"""
- self.env.append_value('ASFLAGS', self.to_list(getattr(self, 'nasm_flags', [])))
-
-def configure(conf):
- """
- Detect nasm/yasm and set the variable *AS*
- """
- conf.find_program(['nasm', 'yasm'], var='AS')
- conf.env.AS_TGT_F = ['-o']
- conf.env.ASLNK_TGT_F = ['-o']
- conf.load('asm')
- conf.env.ASMPATH_ST = '-I%s' + os.sep
diff --git a/waflib/Tools/nobuild.py b/waflib/Tools/nobuild.py
deleted file mode 100644
index 2e4b055..0000000
--- a/waflib/Tools/nobuild.py
+++ /dev/null
@@ -1,24 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Override the build commands to write empty files.
-This is useful for profiling and evaluating the Python overhead.
-
-To use::
-
- def build(bld):
- ...
- bld.load('nobuild')
-
-"""
-
-from waflib import Task
-def build(bld):
- def run(self):
- for x in self.outputs:
- x.write('')
- for (name, cls) in Task.classes.items():
- cls.run = run
-
diff --git a/waflib/Tools/perl.py b/waflib/Tools/perl.py
deleted file mode 100644
index 32b03fb..0000000
--- a/waflib/Tools/perl.py
+++ /dev/null
@@ -1,156 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# andersg at 0x63.nu 2007
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Support for Perl extensions. A C/C++ compiler is required::
-
- def options(opt):
- opt.load('compiler_c perl')
- def configure(conf):
- conf.load('compiler_c perl')
- conf.check_perl_version((5,6,0))
- conf.check_perl_ext_devel()
- conf.check_perl_module('Cairo')
- conf.check_perl_module('Devel::PPPort 4.89')
- def build(bld):
- bld(
- features = 'c cshlib perlext',
- source = 'Mytest.xs',
- target = 'Mytest',
- install_path = '${ARCHDIR_PERL}/auto')
- bld.install_files('${ARCHDIR_PERL}', 'Mytest.pm')
-"""
-
-import os
-from waflib import Task, Options, Utils, Errors
-from waflib.Configure import conf
-from waflib.TaskGen import extension, feature, before_method
-
-@before_method('apply_incpaths', 'apply_link', 'propagate_uselib_vars')
-@feature('perlext')
-def init_perlext(self):
- """
- Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
- *lib* prefix from library names.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PERLEXT' in self.uselib:
- self.uselib.append('PERLEXT')
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.perlext_PATTERN
-
-@extension('.xs')
-def xsubpp_file(self, node):
- """
- Create :py:class:`waflib.Tools.perl.xsubpp` tasks to process *.xs* files
- """
- outnode = node.change_ext('.c')
- self.create_task('xsubpp', node, outnode)
- self.source.append(outnode)
-
-class xsubpp(Task.Task):
- """
- Process *.xs* files
- """
- run_str = '${PERL} ${XSUBPP} -noprototypes -typemap ${EXTUTILS_TYPEMAP} ${SRC} > ${TGT}'
- color = 'BLUE'
- ext_out = ['.h']
-
-@conf
-def check_perl_version(self, minver=None):
- """
- Check if Perl is installed, and set the variable PERL.
- minver is supposed to be a tuple
- """
- res = True
- if minver:
- cver = '.'.join(map(str,minver))
- else:
- cver = ''
-
- self.start_msg('Checking for minimum perl version %s' % cver)
-
- perl = self.find_program('perl', var='PERL', value=getattr(Options.options, 'perlbinary', None))
- version = self.cmd_and_log(perl + ["-e", 'printf \"%vd\", $^V'])
- if not version:
- res = False
- version = "Unknown"
- elif not minver is None:
- ver = tuple(map(int, version.split(".")))
- if ver < minver:
- res = False
-
- self.end_msg(version, color=res and 'GREEN' or 'YELLOW')
- return res
-
-@conf
-def check_perl_module(self, module):
- """
- Check if specified perlmodule is installed.
-
- The minimum version can be specified by specifying it after modulename
- like this::
-
- def configure(conf):
- conf.check_perl_module("Some::Module 2.92")
- """
- cmd = self.env.PERL + ['-e', 'use %s' % module]
- self.start_msg('perl module %s' % module)
- try:
- r = self.cmd_and_log(cmd)
- except Errors.WafError:
- self.end_msg(False)
- return None
- self.end_msg(r or True)
- return r
-
-@conf
-def check_perl_ext_devel(self):
- """
- Check for configuration needed to build perl extensions.
-
- Sets different xxx_PERLEXT variables in the environment.
-
- Also sets the ARCHDIR_PERL variable useful as installation path,
- which can be overridden by ``--with-perl-archdir`` option.
- """
-
- env = self.env
- perl = env.PERL
- if not perl:
- self.fatal('find perl first')
-
- def cmd_perl_config(s):
- return perl + ['-MConfig', '-e', 'print \"%s\"' % s]
- def cfg_str(cfg):
- return self.cmd_and_log(cmd_perl_config(cfg))
- def cfg_lst(cfg):
- return Utils.to_list(cfg_str(cfg))
- def find_xsubpp():
- for var in ('privlib', 'vendorlib'):
- xsubpp = cfg_lst('$Config{%s}/ExtUtils/xsubpp$Config{exe_ext}' % var)
- if xsubpp and os.path.isfile(xsubpp[0]):
- return xsubpp
- return self.find_program('xsubpp')
-
- env.LINKFLAGS_PERLEXT = cfg_lst('$Config{lddlflags}')
- env.INCLUDES_PERLEXT = cfg_lst('$Config{archlib}/CORE')
- env.CFLAGS_PERLEXT = cfg_lst('$Config{ccflags} $Config{cccdlflags}')
- env.EXTUTILS_TYPEMAP = cfg_lst('$Config{privlib}/ExtUtils/typemap')
- env.XSUBPP = find_xsubpp()
-
- if not getattr(Options.options, 'perlarchdir', None):
- env.ARCHDIR_PERL = cfg_str('$Config{sitearch}')
- else:
- env.ARCHDIR_PERL = getattr(Options.options, 'perlarchdir')
-
- env.perlext_PATTERN = '%s.' + cfg_str('$Config{dlext}')
-
-def options(opt):
- """
- Add the ``--with-perl-archdir`` and ``--with-perl-binary`` command-line options.
- """
- opt.add_option('--with-perl-binary', type='string', dest='perlbinary', help = 'Specify alternate perl binary', default=None)
- opt.add_option('--with-perl-archdir', type='string', dest='perlarchdir', help = 'Specify directory where to install arch specific files', default=None)
-
diff --git a/waflib/Tools/python.py b/waflib/Tools/python.py
deleted file mode 100644
index 25841d0..0000000
--- a/waflib/Tools/python.py
+++ /dev/null
@@ -1,627 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007-2015 (ita)
-# Gustavo Carneiro (gjc), 2007
-
-"""
-Support for Python, detect the headers and libraries and provide
-*use* variables to link C/C++ programs against them::
-
- def options(opt):
- opt.load('compiler_c python')
- def configure(conf):
- conf.load('compiler_c python')
- conf.check_python_version((2,4,2))
- conf.check_python_headers()
- def build(bld):
- bld.program(features='pyembed', source='a.c', target='myprog')
- bld.shlib(features='pyext', source='b.c', target='mylib')
-"""
-
-import os, sys
-from waflib import Errors, Logs, Node, Options, Task, Utils
-from waflib.TaskGen import extension, before_method, after_method, feature
-from waflib.Configure import conf
-
-FRAG = '''
-#include <Python.h>
-#ifdef __cplusplus
-extern "C" {
-#endif
- void Py_Initialize(void);
- void Py_Finalize(void);
-#ifdef __cplusplus
-}
-#endif
-int main(int argc, char **argv)
-{
- (void)argc; (void)argv;
- Py_Initialize();
- Py_Finalize();
- return 0;
-}
-'''
-"""
-Piece of C/C++ code used in :py:func:`waflib.Tools.python.check_python_headers`
-"""
-
-INST = '''
-import sys, py_compile
-py_compile.compile(sys.argv[1], sys.argv[2], sys.argv[3], True)
-'''
-"""
-Piece of Python code used in :py:class:`waflib.Tools.python.pyo` and :py:class:`waflib.Tools.python.pyc` for byte-compiling python files
-"""
-
-DISTUTILS_IMP = ['from distutils.sysconfig import get_config_var, get_python_lib']
-
-@before_method('process_source')
-@feature('py')
-def feature_py(self):
- """
- Create tasks to byte-compile .py files and install them, if requested
- """
- self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
- install_from = getattr(self, 'install_from', None)
- if install_from and not isinstance(install_from, Node.Node):
- install_from = self.path.find_dir(install_from)
- self.install_from = install_from
-
- ver = self.env.PYTHON_VERSION
- if not ver:
- self.bld.fatal('Installing python files requires PYTHON_VERSION, try conf.check_python_version')
-
- if int(ver.replace('.', '')) > 31:
- self.install_32 = True
-
-@extension('.py')
-def process_py(self, node):
- """
- Add signature of .py file, so it will be byte-compiled when necessary
- """
- assert(hasattr(self, 'install_path')), 'add features="py"'
-
- # where to install the python file
- if self.install_path:
- if self.install_from:
- self.add_install_files(install_to=self.install_path, install_from=node, cwd=self.install_from, relative_trick=True)
- else:
- self.add_install_files(install_to=self.install_path, install_from=node, relative_trick=True)
-
- lst = []
- if self.env.PYC:
- lst.append('pyc')
- if self.env.PYO:
- lst.append('pyo')
-
- if self.install_path:
- if self.install_from:
- pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.install_from)), self.env)
- else:
- pyd = Utils.subst_vars("%s/%s" % (self.install_path, node.path_from(self.path)), self.env)
- else:
- pyd = node.abspath()
-
- for ext in lst:
- if self.env.PYTAG and not self.env.NOPYCACHE:
- # __pycache__ installation for python 3.2 - PEP 3147
- name = node.name[:-3]
- pyobj = node.parent.get_bld().make_node('__pycache__').make_node("%s.%s.%s" % (name, self.env.PYTAG, ext))
- pyobj.parent.mkdir()
- else:
- pyobj = node.change_ext(".%s" % ext)
-
- tsk = self.create_task(ext, node, pyobj)
- tsk.pyd = pyd
-
- if self.install_path:
- self.add_install_files(install_to=os.path.dirname(pyd), install_from=pyobj, cwd=node.parent.get_bld(), relative_trick=True)
-
-class pyc(Task.Task):
- """
- Byte-compiling python files
- """
- color = 'PINK'
- def __str__(self):
- node = self.outputs[0]
- return node.path_from(node.ctx.launch_node())
- def run(self):
- cmd = [Utils.subst_vars('${PYTHON}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
- ret = self.generator.bld.exec_command(cmd)
- return ret
-
-class pyo(Task.Task):
- """
- Byte-compiling python files
- """
- color = 'PINK'
- def __str__(self):
- node = self.outputs[0]
- return node.path_from(node.ctx.launch_node())
- def run(self):
- cmd = [Utils.subst_vars('${PYTHON}', self.env), Utils.subst_vars('${PYFLAGS_OPT}', self.env), '-c', INST, self.inputs[0].abspath(), self.outputs[0].abspath(), self.pyd]
- ret = self.generator.bld.exec_command(cmd)
- return ret
-
-@feature('pyext')
-@before_method('propagate_uselib_vars', 'apply_link')
-@after_method('apply_bundle')
-def init_pyext(self):
- """
- Change the values of *cshlib_PATTERN* and *cxxshlib_PATTERN* to remove the
- *lib* prefix from library names.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PYEXT' in self.uselib:
- self.uselib.append('PYEXT')
- # override shlib_PATTERN set by the osx module
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.macbundle_PATTERN = self.env.pyext_PATTERN
- self.env.fcshlib_PATTERN = self.env.dshlib_PATTERN = self.env.pyext_PATTERN
-
- try:
- if not self.install_path:
- return
- except AttributeError:
- self.install_path = '${PYTHONARCHDIR}'
-
-@feature('pyext')
-@before_method('apply_link', 'apply_bundle')
-def set_bundle(self):
- """Mac-specific pyext extension that enables bundles from c_osx.py"""
- if Utils.unversioned_sys_platform() == 'darwin':
- self.mac_bundle = True
-
-@before_method('propagate_uselib_vars')
-@feature('pyembed')
-def init_pyembed(self):
- """
- Add the PYEMBED variable.
- """
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- if not 'PYEMBED' in self.uselib:
- self.uselib.append('PYEMBED')
-
-@conf
-def get_python_variables(self, variables, imports=None):
- """
- Spawn a new python process to dump configuration variables
-
- :param variables: variables to print
- :type variables: list of string
- :param imports: one import by element
- :type imports: list of string
- :return: the variable values
- :rtype: list of string
- """
- if not imports:
- try:
- imports = self.python_imports
- except AttributeError:
- imports = DISTUTILS_IMP
-
- program = list(imports) # copy
- program.append('')
- for v in variables:
- program.append("print(repr(%s))" % v)
- os_env = dict(os.environ)
- try:
- del os_env['MACOSX_DEPLOYMENT_TARGET'] # see comments in the OSX tool
- except KeyError:
- pass
-
- try:
- out = self.cmd_and_log(self.env.PYTHON + ['-c', '\n'.join(program)], env=os_env)
- except Errors.WafError:
- self.fatal('The distutils module is unusable: install "python-devel"?')
- self.to_log(out)
- return_values = []
- for s in out.splitlines():
- s = s.strip()
- if not s:
- continue
- if s == 'None':
- return_values.append(None)
- elif (s[0] == "'" and s[-1] == "'") or (s[0] == '"' and s[-1] == '"'):
- return_values.append(eval(s))
- elif s[0].isdigit():
- return_values.append(int(s))
- else: break
- return return_values
-
-@conf
-def test_pyembed(self, mode, msg='Testing pyembed configuration'):
- self.check(header_name='Python.h', define_name='HAVE_PYEMBED', msg=msg,
- fragment=FRAG, errmsg='Could not build a python embedded interpreter',
- features='%s %sprogram pyembed' % (mode, mode))
-
-@conf
-def test_pyext(self, mode, msg='Testing pyext configuration'):
- self.check(header_name='Python.h', define_name='HAVE_PYEXT', msg=msg,
- fragment=FRAG, errmsg='Could not build python extensions',
- features='%s %sshlib pyext' % (mode, mode))
-
-@conf
-def python_cross_compile(self, features='pyembed pyext'):
- """
- For cross-compilation purposes, it is possible to bypass the normal detection and set the flags that you want:
- PYTHON_VERSION='3.4' PYTAG='cpython34' pyext_PATTERN="%s.so" PYTHON_LDFLAGS='-lpthread -ldl' waf configure
-
- The following variables are used:
- PYTHON_VERSION required
- PYTAG required
- PYTHON_LDFLAGS required
- pyext_PATTERN required
- PYTHON_PYEXT_LDFLAGS
- PYTHON_PYEMBED_LDFLAGS
- """
- features = Utils.to_list(features)
- if not ('PYTHON_LDFLAGS' in self.environ or 'PYTHON_PYEXT_LDFLAGS' in self.environ or 'PYTHON_PYEMBED_LDFLAGS' in self.environ):
- return False
-
- for x in 'PYTHON_VERSION PYTAG pyext_PATTERN'.split():
- if not x in self.environ:
- self.fatal('Please set %s in the os environment' % x)
- else:
- self.env[x] = self.environ[x]
-
- xx = self.env.CXX_NAME and 'cxx' or 'c'
- if 'pyext' in features:
- flags = self.environ.get('PYTHON_PYEXT_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
- if flags is None:
- self.fatal('No flags provided through PYTHON_PYEXT_LDFLAGS as required')
- else:
- self.parse_flags(flags, 'PYEXT')
- self.test_pyext(xx)
- if 'pyembed' in features:
- flags = self.environ.get('PYTHON_PYEMBED_LDFLAGS', self.environ.get('PYTHON_LDFLAGS'))
- if flags is None:
- self.fatal('No flags provided through PYTHON_PYEMBED_LDFLAGS as required')
- else:
- self.parse_flags(flags, 'PYEMBED')
- self.test_pyembed(xx)
- return True
-
-@conf
-def check_python_headers(conf, features='pyembed pyext'):
- """
- Check for headers and libraries necessary to extend or embed python by using the module *distutils*.
- On success the environment variables xxx_PYEXT and xxx_PYEMBED are added:
-
- * PYEXT: for compiling python extensions
- * PYEMBED: for embedding a python interpreter
- """
- features = Utils.to_list(features)
- assert ('pyembed' in features) or ('pyext' in features), "check_python_headers features must include 'pyembed' and/or 'pyext'"
- env = conf.env
- if not env.CC_NAME and not env.CXX_NAME:
- conf.fatal('load a compiler first (gcc, g++, ..)')
-
- # bypass all the code below for cross-compilation
- if conf.python_cross_compile(features):
- return
-
- if not env.PYTHON_VERSION:
- conf.check_python_version()
-
- pybin = env.PYTHON
- if not pybin:
- conf.fatal('Could not find the python executable')
-
- # so we actually do all this for compatibility reasons and for obtaining pyext_PATTERN below
- v = 'prefix SO LDFLAGS LIBDIR LIBPL INCLUDEPY Py_ENABLE_SHARED MACOSX_DEPLOYMENT_TARGET LDSHARED CFLAGS LDVERSION'.split()
- try:
- lst = conf.get_python_variables(["get_config_var('%s') or ''" % x for x in v])
- except RuntimeError:
- conf.fatal("Python development headers not found (-v for details).")
-
- vals = ['%s = %r' % (x, y) for (x, y) in zip(v, lst)]
- conf.to_log("Configuration returned from %r:\n%s\n" % (pybin, '\n'.join(vals)))
-
- dct = dict(zip(v, lst))
- x = 'MACOSX_DEPLOYMENT_TARGET'
- if dct[x]:
- env[x] = conf.environ[x] = dct[x]
- env.pyext_PATTERN = '%s' + dct['SO'] # not a mistake
-
-
- # Try to get pythonX.Y-config
- num = '.'.join(env.PYTHON_VERSION.split('.')[:2])
- conf.find_program([''.join(pybin) + '-config', 'python%s-config' % num, 'python-config-%s' % num, 'python%sm-config' % num], var='PYTHON_CONFIG', msg="python-config", mandatory=False)
-
- if env.PYTHON_CONFIG:
- # python2.6-config requires 3 runs
- all_flags = [['--cflags', '--libs', '--ldflags']]
- if sys.hexversion < 0x2070000:
- all_flags = [[k] for k in all_flags[0]]
-
- xx = env.CXX_NAME and 'cxx' or 'c'
-
- if 'pyembed' in features:
- for flags in all_flags:
- conf.check_cfg(msg='Asking python-config for pyembed %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEMBED', args=flags)
-
- try:
- conf.test_pyembed(xx)
- except conf.errors.ConfigurationError:
- # python bug 7352
- if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
- env.append_unique('LIBPATH_PYEMBED', [dct['LIBDIR']])
- conf.test_pyembed(xx)
- else:
- raise
-
- if 'pyext' in features:
- for flags in all_flags:
- conf.check_cfg(msg='Asking python-config for pyext %r flags' % ' '.join(flags), path=env.PYTHON_CONFIG, package='', uselib_store='PYEXT', args=flags)
-
- try:
- conf.test_pyext(xx)
- except conf.errors.ConfigurationError:
- # python bug 7352
- if dct['Py_ENABLE_SHARED'] and dct['LIBDIR']:
- env.append_unique('LIBPATH_PYEXT', [dct['LIBDIR']])
- conf.test_pyext(xx)
- else:
- raise
-
- conf.define('HAVE_PYTHON_H', 1)
- return
-
- # No python-config, do something else on windows systems
- all_flags = dct['LDFLAGS'] + ' ' + dct['CFLAGS']
- conf.parse_flags(all_flags, 'PYEMBED')
-
- all_flags = dct['LDFLAGS'] + ' ' + dct['LDSHARED'] + ' ' + dct['CFLAGS']
- conf.parse_flags(all_flags, 'PYEXT')
-
- result = None
- if not dct["LDVERSION"]:
- dct["LDVERSION"] = env.PYTHON_VERSION
-
- # further simplification will be complicated
- for name in ('python' + dct['LDVERSION'], 'python' + env.PYTHON_VERSION + 'm', 'python' + env.PYTHON_VERSION.replace('.', '')):
-
- # LIBPATH_PYEMBED is already set; see if it works.
- if not result and env.LIBPATH_PYEMBED:
- path = env.LIBPATH_PYEMBED
- conf.to_log("\n\n# Trying default LIBPATH_PYEMBED: %r\n" % path)
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBPATH_PYEMBED' % name)
-
- if not result and dct['LIBDIR']:
- path = [dct['LIBDIR']]
- conf.to_log("\n\n# try again with -L$python_LIBDIR: %r\n" % path)
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in LIBDIR' % name)
-
- if not result and dct['LIBPL']:
- path = [dct['LIBPL']]
- conf.to_log("\n\n# try again with -L$python_LIBPL (some systems don't install the python library in $prefix/lib)\n")
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in python_LIBPL' % name)
-
- if not result:
- path = [os.path.join(dct['prefix'], "libs")]
- conf.to_log("\n\n# try again with -L$prefix/libs, and pythonXY name rather than pythonX.Y (win32)\n")
- result = conf.check(lib=name, uselib='PYEMBED', libpath=path, mandatory=False, msg='Checking for library %s in $prefix/libs' % name)
-
- if result:
- break # do not forget to set LIBPATH_PYEMBED
-
- if result:
- env.LIBPATH_PYEMBED = path
- env.append_value('LIB_PYEMBED', [name])
- else:
- conf.to_log("\n\n### LIB NOT FOUND\n")
-
- # under certain conditions, python extensions must link to
- # python libraries, not just python embedding programs.
- if Utils.is_win32 or dct['Py_ENABLE_SHARED']:
- env.LIBPATH_PYEXT = env.LIBPATH_PYEMBED
- env.LIB_PYEXT = env.LIB_PYEMBED
-
- conf.to_log("Include path for Python extensions (found via distutils module): %r\n" % (dct['INCLUDEPY'],))
- env.INCLUDES_PYEXT = [dct['INCLUDEPY']]
- env.INCLUDES_PYEMBED = [dct['INCLUDEPY']]
-
- # Code using the Python API needs to be compiled with -fno-strict-aliasing
- if env.CC_NAME == 'gcc':
- env.append_value('CFLAGS_PYEMBED', ['-fno-strict-aliasing'])
- env.append_value('CFLAGS_PYEXT', ['-fno-strict-aliasing'])
- if env.CXX_NAME == 'gcc':
- env.append_value('CXXFLAGS_PYEMBED', ['-fno-strict-aliasing'])
- env.append_value('CXXFLAGS_PYEXT', ['-fno-strict-aliasing'])
-
- if env.CC_NAME == "msvc":
- from distutils.msvccompiler import MSVCCompiler
- dist_compiler = MSVCCompiler()
- dist_compiler.initialize()
- env.append_value('CFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('CXXFLAGS_PYEXT', dist_compiler.compile_options)
- env.append_value('LINKFLAGS_PYEXT', dist_compiler.ldflags_shared)
-
- # See if it compiles
- conf.check(header_name='Python.h', define_name='HAVE_PYTHON_H', uselib='PYEMBED', fragment=FRAG, errmsg='Distutils not installed? Broken python installation? Get python-config now!')
-
-@conf
-def check_python_version(conf, minver=None):
- """
- Check if the python interpreter is found matching a given minimum version.
- minver should be a tuple, eg. to check for python >= 2.4.2 pass (2,4,2) as minver.
-
- If successful, PYTHON_VERSION is defined as 'MAJOR.MINOR' (eg. '2.4')
- of the actual python version found, and PYTHONDIR and PYTHONARCHDIR
- are defined, pointing to the site-packages directories appropriate for
- this python version, where modules/packages/extensions should be
- installed.
-
- :param minver: minimum version
- :type minver: tuple of int
- """
- assert minver is None or isinstance(minver, tuple)
- pybin = conf.env.PYTHON
- if not pybin:
- conf.fatal('could not find the python executable')
-
- # Get python version string
- cmd = pybin + ['-c', 'import sys\nfor x in sys.version_info: print(str(x))']
- Logs.debug('python: Running python command %r', cmd)
- lines = conf.cmd_and_log(cmd).split()
- assert len(lines) == 5, "found %r lines, expected 5: %r" % (len(lines), lines)
- pyver_tuple = (int(lines[0]), int(lines[1]), int(lines[2]), lines[3], int(lines[4]))
-
- # Compare python version with the minimum required
- result = (minver is None) or (pyver_tuple >= minver)
-
- if result:
- # define useful environment variables
- pyver = '.'.join([str(x) for x in pyver_tuple[:2]])
- conf.env.PYTHON_VERSION = pyver
-
- if 'PYTHONDIR' in conf.env:
- # Check if --pythondir was specified
- pydir = conf.env.PYTHONDIR
- elif 'PYTHONDIR' in conf.environ:
- # Check environment for PYTHONDIR
- pydir = conf.environ['PYTHONDIR']
- else:
- # Finally, try to guess
- if Utils.is_win32:
- (python_LIBDEST, pydir) = conf.get_python_variables(
- ["get_config_var('LIBDEST') or ''",
- "get_python_lib(standard_lib=0) or ''"])
- else:
- python_LIBDEST = None
- (pydir,) = conf.get_python_variables( ["get_python_lib(standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
- if python_LIBDEST is None:
- if conf.env.LIBDIR:
- python_LIBDEST = os.path.join(conf.env.LIBDIR, 'python' + pyver)
- else:
- python_LIBDEST = os.path.join(conf.env.PREFIX, 'lib', 'python' + pyver)
-
- if 'PYTHONARCHDIR' in conf.env:
- # Check if --pythonarchdir was specified
- pyarchdir = conf.env.PYTHONARCHDIR
- elif 'PYTHONARCHDIR' in conf.environ:
- # Check environment for PYTHONDIR
- pyarchdir = conf.environ['PYTHONARCHDIR']
- else:
- # Finally, try to guess
- (pyarchdir, ) = conf.get_python_variables( ["get_python_lib(plat_specific=1, standard_lib=0, prefix=%r) or ''" % conf.env.PREFIX])
- if not pyarchdir:
- pyarchdir = pydir
-
- if hasattr(conf, 'define'): # conf.define is added by the C tool, so may not exist
- conf.define('PYTHONDIR', pydir)
- conf.define('PYTHONARCHDIR', pyarchdir)
-
- conf.env.PYTHONDIR = pydir
- conf.env.PYTHONARCHDIR = pyarchdir
-
- # Feedback
- pyver_full = '.'.join(map(str, pyver_tuple[:3]))
- if minver is None:
- conf.msg('Checking for python version', pyver_full)
- else:
- minver_str = '.'.join(map(str, minver))
- conf.msg('Checking for python version >= %s' % (minver_str,), pyver_full, color=result and 'GREEN' or 'YELLOW')
-
- if not result:
- conf.fatal('The python version is too old, expecting %r' % (minver,))
-
-PYTHON_MODULE_TEMPLATE = '''
-import %s as current_module
-version = getattr(current_module, '__version__', None)
-if version is not None:
- print(str(version))
-else:
- print('unknown version')
-'''
-
-@conf
-def check_python_module(conf, module_name, condition=''):
- """
- Check if the selected python interpreter can import the given python module::
-
- def configure(conf):
- conf.check_python_module('pygccxml')
- conf.check_python_module('re', condition="ver > num(2, 0, 4) and ver <= num(3, 0, 0)")
-
- :param module_name: module
- :type module_name: string
- """
- msg = "Checking for python module %r" % module_name
- if condition:
- msg = '%s (%s)' % (msg, condition)
- conf.start_msg(msg)
- try:
- ret = conf.cmd_and_log(conf.env.PYTHON + ['-c', PYTHON_MODULE_TEMPLATE % module_name])
- except Errors.WafError:
- conf.end_msg(False)
- conf.fatal('Could not find the python module %r' % module_name)
-
- ret = ret.strip()
- if condition:
- conf.end_msg(ret)
- if ret == 'unknown version':
- conf.fatal('Could not check the %s version' % module_name)
-
- from distutils.version import LooseVersion
- def num(*k):
- if isinstance(k[0], int):
- return LooseVersion('.'.join([str(x) for x in k]))
- else:
- return LooseVersion(k[0])
- d = {'num': num, 'ver': LooseVersion(ret)}
- ev = eval(condition, {}, d)
- if not ev:
- conf.fatal('The %s version does not satisfy the requirements' % module_name)
- else:
- if ret == 'unknown version':
- conf.end_msg(True)
- else:
- conf.end_msg(ret)
-
-def configure(conf):
- """
- Detect the python interpreter
- """
- v = conf.env
- if getattr(Options.options, 'pythondir', None):
- v.PYTHONDIR = Options.options.pythondir
- if getattr(Options.options, 'pythonarchdir', None):
- v.PYTHONARCHDIR = Options.options.pythonarchdir
- if getattr(Options.options, 'nopycache', None):
- v.NOPYCACHE=Options.options.nopycache
-
- if not v.PYTHON:
- v.PYTHON = [getattr(Options.options, 'python', None) or sys.executable]
- v.PYTHON = Utils.to_list(v.PYTHON)
- conf.find_program('python', var='PYTHON')
-
- v.PYFLAGS = ''
- v.PYFLAGS_OPT = '-O'
-
- v.PYC = getattr(Options.options, 'pyc', 1)
- v.PYO = getattr(Options.options, 'pyo', 1)
-
- try:
- v.PYTAG = conf.cmd_and_log(conf.env.PYTHON + ['-c', "import imp;print(imp.get_tag())"]).strip()
- except Errors.WafError:
- pass
-
-def options(opt):
- """
- Add python-specific options
- """
- pyopt=opt.add_option_group("Python Options")
- pyopt.add_option('--nopyc', dest = 'pyc', action='store_false', default=1,
- help = 'Do not install bytecode compiled .pyc files (configuration) [Default:install]')
- pyopt.add_option('--nopyo', dest='pyo', action='store_false', default=1,
- help='Do not install optimised compiled .pyo files (configuration) [Default:install]')
- pyopt.add_option('--nopycache',dest='nopycache', action='store_true',
- help='Do not use __pycache__ directory to install objects [Default:auto]')
- pyopt.add_option('--python', dest="python",
- help='python binary to be used [Default: %s]' % sys.executable)
- pyopt.add_option('--pythondir', dest='pythondir',
- help='Installation path for python modules (py, platform-independent .py and .pyc files)')
- pyopt.add_option('--pythonarchdir', dest='pythonarchdir',
- help='Installation path for python extension (pyext, platform-dependent .so or .dylib files)')
-
diff --git a/waflib/Tools/qt5.py b/waflib/Tools/qt5.py
deleted file mode 100644
index 4f9c690..0000000
--- a/waflib/Tools/qt5.py
+++ /dev/null
@@ -1,796 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-This tool helps with finding Qt5 tools and libraries,
-and also provides syntactic sugar for using Qt5 tools.
-
-The following snippet illustrates the tool usage::
-
- def options(opt):
- opt.load('compiler_cxx qt5')
-
- def configure(conf):
- conf.load('compiler_cxx qt5')
-
- def build(bld):
- bld(
- features = 'qt5 cxx cxxprogram',
- uselib = 'QT5CORE QT5GUI QT5OPENGL QT5SVG',
- source = 'main.cpp textures.qrc aboutDialog.ui',
- target = 'window',
- )
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "qt5" tool.
-
-You also need to edit your sources accordingly:
-
-- the normal way of doing things is to have your C++ files
- include the .moc file.
- This is regarded as the best practice (and provides much faster
- compilations).
- It also implies that the include paths have beenset properly.
-
-- to have the include paths added automatically, use the following::
-
- from waflib.TaskGen import feature, before_method, after_method
- @feature('cxx')
- @after_method('process_source')
- @before_method('apply_incpaths')
- def add_includes_paths(self):
- incs = set(self.to_list(getattr(self, 'includes', '')))
- for x in self.compiled_tasks:
- incs.add(x.inputs[0].parent.path_from(self.path))
- self.includes = sorted(incs)
-
-Note: another tool provides Qt processing that does not require
-.moc includes, see 'playground/slow_qt/'.
-
-A few options (--qt{dir,bin,...}) and environment variables
-(QT5_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
-tool path selection, etc; please read the source for more info.
-
-The detection uses pkg-config on Linux by default. To force static library detection use:
-QT5_XCOMPILE=1 QT5_FORCE_STATIC=1 waf configure
-"""
-
-from __future__ import with_statement
-
-try:
- from xml.sax import make_parser
- from xml.sax.handler import ContentHandler
-except ImportError:
- has_xml = False
- ContentHandler = object
-else:
- has_xml = True
-
-import os, sys, re
-from waflib.Tools import cxx
-from waflib import Task, Utils, Options, Errors, Context
-from waflib.TaskGen import feature, after_method, extension, before_method
-from waflib.Configure import conf
-from waflib import Logs
-
-MOC_H = ['.h', '.hpp', '.hxx', '.hh']
-"""
-File extensions associated to .moc files
-"""
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-EXT_QT5 = ['.cpp', '.cc', '.cxx', '.C']
-"""
-File extensions of C++ files that may require a .moc processing
-"""
-
-class qxx(Task.classes['cxx']):
- """
- Each C++ file can have zero or several .moc files to create.
- They are known only when the files are scanned (preprocessor)
- To avoid scanning the c++ files each time (parsing C/C++), the results
- are retrieved from the task cache (bld.node_deps/bld.raw_deps).
- The moc tasks are also created *dynamically* during the build.
- """
-
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.moc_done = 0
-
- def runnable_status(self):
- """
- Compute the task signature to make sure the scanner was executed. Create the
- moc tasks by using :py:meth:`waflib.Tools.qt5.qxx.add_moc_tasks` (if necessary),
- then postpone the task execution (there is no need to recompute the task signature).
- """
- if self.moc_done:
- return Task.Task.runnable_status(self)
- else:
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- self.add_moc_tasks()
- return Task.Task.runnable_status(self)
-
- def create_moc_task(self, h_node, m_node):
- """
- If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
- It is not possible to change the file names, but we can assume that the moc transformation will be identical,
- and the moc tasks can be shared in a global cache.
- """
- try:
- moc_cache = self.generator.bld.moc_cache
- except AttributeError:
- moc_cache = self.generator.bld.moc_cache = {}
-
- try:
- return moc_cache[h_node]
- except KeyError:
- tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
- tsk.set_inputs(h_node)
- tsk.set_outputs(m_node)
- tsk.env.append_unique('MOC_FLAGS', '-i')
-
- if self.generator:
- self.generator.tasks.append(tsk)
-
- # direct injection in the build phase (safe because called from the main thread)
- gen = self.generator.bld.producer
- gen.outstanding.append(tsk)
- gen.total += 1
-
- return tsk
-
- else:
- # remove the signature, it must be recomputed with the moc task
- delattr(self, 'cache_sig')
-
- def add_moc_tasks(self):
- """
- Creates moc tasks by looking in the list of file dependencies ``bld.raw_deps[self.uid()]``
- """
- node = self.inputs[0]
- bld = self.generator.bld
-
- try:
- # compute the signature once to know if there is a moc file to create
- self.signature()
- except KeyError:
- # the moc file may be referenced somewhere else
- pass
- else:
- # remove the signature, it must be recomputed with the moc task
- delattr(self, 'cache_sig')
-
- include_nodes = [node.parent] + self.generator.includes_nodes
-
- moctasks = []
- mocfiles = set()
- for d in bld.raw_deps.get(self.uid(), []):
- if not d.endswith('.moc'):
- continue
-
- # process that base.moc only once
- if d in mocfiles:
- continue
- mocfiles.add(d)
-
- # find the source associated with the moc file
- h_node = None
- base2 = d[:-4]
-
- # foo.moc from foo.cpp
- prefix = node.name[:node.name.rfind('.')]
- if base2 == prefix:
- h_node = node
- else:
- # this deviates from the standard
- # if bar.cpp includes foo.moc, then assume it is from foo.h
- for x in include_nodes:
- for e in MOC_H:
- h_node = x.find_node(base2 + e)
- if h_node:
- break
- else:
- continue
- break
- if h_node:
- m_node = h_node.change_ext('.moc')
- else:
- raise Errors.WafError('No source found for %r which is a moc file' % d)
-
- # create the moc task
- task = self.create_moc_task(h_node, m_node)
- moctasks.append(task)
-
- # simple scheduler dependency: run the moc task before others
- self.run_after.update(set(moctasks))
- self.moc_done = 1
-
-class trans_update(Task.Task):
- """Updates a .ts files from a list of C++ files"""
- run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
- color = 'BLUE'
-
-class XMLHandler(ContentHandler):
- """
- Parses ``.qrc`` files
- """
- def __init__(self):
- ContentHandler.__init__(self)
- self.buf = []
- self.files = []
- def startElement(self, name, attrs):
- if name == 'file':
- self.buf = []
- def endElement(self, name):
- if name == 'file':
- self.files.append(str(''.join(self.buf)))
- def characters(self, cars):
- self.buf.append(cars)
-
-@extension(*EXT_RCC)
-def create_rcc_task(self, node):
- "Creates rcc and cxx tasks for ``.qrc`` files"
- rcnode = node.change_ext('_rc.%d.cpp' % self.idx)
- self.create_task('rcc', node, rcnode)
- cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
- try:
- self.compiled_tasks.append(cpptask)
- except AttributeError:
- self.compiled_tasks = [cpptask]
- return cpptask
-
-@extension(*EXT_UI)
-def create_uic_task(self, node):
- "Create uic tasks for user interface ``.ui`` definition files"
-
- """
- If UIC file is used in more than one bld, we would have a conflict in parallel execution
- It is not possible to change the file names (like .self.idx. as for objects) as they have
- to be referenced by the source file, but we can assume that the transformation will be identical
- and the tasks can be shared in a global cache.
- """
- try:
- uic_cache = self.bld.uic_cache
- except AttributeError:
- uic_cache = self.bld.uic_cache = {}
-
- if node not in uic_cache:
- uictask = uic_cache[node] = self.create_task('ui5', node)
- uictask.outputs = [node.parent.find_or_declare(self.env.ui_PATTERN % node.name[:-3])]
-
-@extension('.ts')
-def add_lang(self, node):
- """Adds all the .ts file into ``self.lang``"""
- self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('qt5')
-@before_method('process_source')
-def process_mocs(self):
- """
- Processes MOC files included in headers::
-
- def build(bld):
- bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE', moc='foo.h')
-
- The build will run moc on foo.h to create moc_foo.n.cpp. The number in the file name
- is provided to avoid name clashes when the same headers are used by several targets.
- """
- lst = self.to_nodes(getattr(self, 'moc', []))
- self.source = self.to_list(getattr(self, 'source', []))
- for x in lst:
- prefix = x.name[:x.name.rfind('.')] # foo.h -> foo
- moc_target = 'moc_%s.%d.cpp' % (prefix, self.idx)
- moc_node = x.parent.find_or_declare(moc_target)
- self.source.append(moc_node)
-
- self.create_task('moc', x, moc_node)
-
-@feature('qt5')
-@after_method('apply_link')
-def apply_qt5(self):
- """
- Adds MOC_FLAGS which may be necessary for moc::
-
- def build(bld):
- bld.program(features='qt5', source='main.cpp', target='app', use='QT5CORE')
-
- The additional parameters are:
-
- :param lang: list of translation files (\*.ts) to process
- :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
- :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
- :type update: bool
- :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
- :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
- """
- if getattr(self, 'lang', None):
- qmtasks = []
- for x in self.to_list(self.lang):
- if isinstance(x, str):
- x = self.path.find_resource(x + '.ts')
- qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.%d.qm' % self.idx)))
-
- if getattr(self, 'update', None) and Options.options.trans_qt5:
- cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
- a.inputs[0] for a in self.tasks if a.inputs and a.inputs[0].name.endswith('.ui')]
- for x in qmtasks:
- self.create_task('trans_update', cxxnodes, x.inputs)
-
- if getattr(self, 'langname', None):
- qmnodes = [x.outputs[0] for x in qmtasks]
- rcnode = self.langname
- if isinstance(rcnode, str):
- rcnode = self.path.find_or_declare(rcnode + ('.%d.qrc' % self.idx))
- t = self.create_task('qm2rcc', qmnodes, rcnode)
- k = create_rcc_task(self, t.outputs[0])
- self.link_task.inputs.append(k.outputs[0])
-
- lst = []
- for flag in self.to_list(self.env.CXXFLAGS):
- if len(flag) < 2:
- continue
- f = flag[0:2]
- if f in ('-D', '-I', '/D', '/I'):
- if (f[0] == '/'):
- lst.append('-' + flag[1:])
- else:
- lst.append(flag)
- self.env.append_value('MOC_FLAGS', lst)
-
-@extension(*EXT_QT5)
-def cxx_hook(self, node):
- """
- Re-maps C++ file extensions to the :py:class:`waflib.Tools.qt5.qxx` task.
- """
- return self.create_compiled_task('qxx', node)
-
-class rcc(Task.Task):
- """
- Processes ``.qrc`` files
- """
- color = 'BLUE'
- run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
- ext_out = ['.h']
-
- def rcname(self):
- return os.path.splitext(self.inputs[0].name)[0]
-
- def scan(self):
- """Parse the *.qrc* files"""
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
- return ([], [])
-
- parser = make_parser()
- curHandler = XMLHandler()
- parser.setContentHandler(curHandler)
- with open(self.inputs[0].abspath(), 'r') as f:
- parser.parse(f)
-
- nodes = []
- names = []
- root = self.inputs[0].parent
- for x in curHandler.files:
- nd = root.find_resource(x)
- if nd:
- nodes.append(nd)
- else:
- names.append(x)
- return (nodes, names)
-
- def quote_flag(self, x):
- """
- Override Task.quote_flag. QT parses the argument files
- differently than cl.exe and link.exe
-
- :param x: flag
- :type x: string
- :return: quoted flag
- :rtype: string
- """
- return x
-
-
-class moc(Task.Task):
- """
- Creates ``.moc`` files
- """
- color = 'BLUE'
- run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
-
- def quote_flag(self, x):
- """
- Override Task.quote_flag. QT parses the argument files
- differently than cl.exe and link.exe
-
- :param x: flag
- :type x: string
- :return: quoted flag
- :rtype: string
- """
- return x
-
-
-class ui5(Task.Task):
- """
- Processes ``.ui`` files
- """
- color = 'BLUE'
- run_str = '${QT_UIC} ${SRC} -o ${TGT}'
- ext_out = ['.h']
-
-class ts2qm(Task.Task):
- """
- Generates ``.qm`` files from ``.ts`` files
- """
- color = 'BLUE'
- run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
- """
- Generates ``.qrc`` files from ``.qm`` files
- """
- color = 'BLUE'
- after = 'ts2qm'
- def run(self):
- """Create a qrc file including the inputs"""
- txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
- code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
- self.outputs[0].write(code)
-
-def configure(self):
- """
- Besides the configuration options, the environment variable QT5_ROOT may be used
- to give the location of the qt5 libraries (absolute path).
-
- The detection uses the program ``pkg-config`` through :py:func:`waflib.Tools.config_c.check_cfg`
- """
- self.find_qt5_binaries()
- self.set_qt5_libs_dir()
- self.set_qt5_libs_to_check()
- self.set_qt5_defines()
- self.find_qt5_libraries()
- self.add_qt5_rpath()
- self.simplify_qt5_libs()
-
- # warn about this during the configuration too
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-
- if 'COMPILER_CXX' not in self.env:
- self.fatal('No CXX compiler defined: did you forget to configure compiler_cxx first?')
-
- # Qt5 may be compiled with '-reduce-relocations' which requires dependent programs to have -fPIE or -fPIC?
- frag = '#include <QApplication>\nint main(int argc, char **argv) {return 0;}\n'
- uses = 'QT5CORE QT5WIDGETS QT5GUI'
- for flag in [[], '-fPIE', '-fPIC', '-std=c++11' , ['-std=c++11', '-fPIE'], ['-std=c++11', '-fPIC']]:
- msg = 'See if Qt files compile '
- if flag:
- msg += 'with %s' % flag
- try:
- self.check(features='qt5 cxx', use=uses, uselib_store='qt5', cxxflags=flag, fragment=frag, msg=msg)
- except self.errors.ConfigurationError:
- pass
- else:
- break
- else:
- self.fatal('Could not build a simple Qt application')
-
- # FreeBSD does not add /usr/local/lib and the pkg-config files do not provide it either :-/
- if Utils.unversioned_sys_platform() == 'freebsd':
- frag = '#include <QApplication>\nint main(int argc, char **argv) { QApplication app(argc, argv); return NULL != (void*) (&app);}\n'
- try:
- self.check(features='qt5 cxx cxxprogram', use=uses, fragment=frag, msg='Can we link Qt programs on FreeBSD directly?')
- except self.errors.ConfigurationError:
- self.check(features='qt5 cxx cxxprogram', use=uses, uselib_store='qt5', libpath='/usr/local/lib', fragment=frag, msg='Is /usr/local/lib required?')
-
-@conf
-def find_qt5_binaries(self):
- """
- Detects Qt programs such as qmake, moc, uic, lrelease
- """
- env = self.env
- opt = Options.options
-
- qtdir = getattr(opt, 'qtdir', '')
- qtbin = getattr(opt, 'qtbin', '')
-
- paths = []
-
- if qtdir:
- qtbin = os.path.join(qtdir, 'bin')
-
- # the qt directory has been given from QT5_ROOT - deduce the qt binary path
- if not qtdir:
- qtdir = self.environ.get('QT5_ROOT', '')
- qtbin = self.environ.get('QT5_BIN') or os.path.join(qtdir, 'bin')
-
- if qtbin:
- paths = [qtbin]
-
- # no qtdir, look in the path and in /usr/local/Trolltech
- if not qtdir:
- paths = self.environ.get('PATH', '').split(os.pathsep)
- paths.extend(['/usr/share/qt5/bin', '/usr/local/lib/qt5/bin'])
- try:
- lst = Utils.listdir('/usr/local/Trolltech/')
- except OSError:
- pass
- else:
- if lst:
- lst.sort()
- lst.reverse()
-
- # keep the highest version
- qtdir = '/usr/local/Trolltech/%s/' % lst[0]
- qtbin = os.path.join(qtdir, 'bin')
- paths.append(qtbin)
-
- # at the end, try to find qmake in the paths given
- # keep the one with the highest version
- cand = None
- prev_ver = ['5', '0', '0']
- for qmk in ('qmake-qt5', 'qmake5', 'qmake'):
- try:
- qmake = self.find_program(qmk, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- try:
- version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
- except self.errors.WafError:
- pass
- else:
- if version:
- new_ver = version.split('.')
- if new_ver > prev_ver:
- cand = qmake
- prev_ver = new_ver
-
- # qmake could not be found easily, rely on qtchooser
- if not cand:
- try:
- self.find_program('qtchooser')
- except self.errors.ConfigurationError:
- pass
- else:
- cmd = self.env.QTCHOOSER + ['-qt=5', '-run-tool=qmake']
- try:
- version = self.cmd_and_log(cmd + ['-query', 'QT_VERSION'])
- except self.errors.WafError:
- pass
- else:
- cand = cmd
-
- if cand:
- self.env.QMAKE = cand
- else:
- self.fatal('Could not find qmake for qt5')
-
- self.env.QT_HOST_BINS = qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_HOST_BINS']).strip()
- paths.insert(0, qtbin)
-
- def find_bin(lst, var):
- if var in env:
- return
- for f in lst:
- try:
- ret = self.find_program(f, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- env[var]=ret
- break
-
- find_bin(['uic-qt5', 'uic'], 'QT_UIC')
- if not env.QT_UIC:
- self.fatal('cannot find the uic compiler for qt5')
-
- self.start_msg('Checking for uic version')
- uicver = self.cmd_and_log(env.QT_UIC + ['-version'], output=Context.BOTH)
- uicver = ''.join(uicver).strip()
- uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
- self.end_msg(uicver)
- if uicver.find(' 3.') != -1 or uicver.find(' 4.') != -1:
- self.fatal('this uic compiler is for qt3 or qt4, add uic for qt5 to your path')
-
- find_bin(['moc-qt5', 'moc'], 'QT_MOC')
- find_bin(['rcc-qt5', 'rcc'], 'QT_RCC')
- find_bin(['lrelease-qt5', 'lrelease'], 'QT_LRELEASE')
- find_bin(['lupdate-qt5', 'lupdate'], 'QT_LUPDATE')
-
- env.UIC_ST = '%s -o %s'
- env.MOC_ST = '-o'
- env.ui_PATTERN = 'ui_%s.h'
- env.QT_LRELEASE_FLAGS = ['-silent']
- env.MOCCPPPATH_ST = '-I%s'
- env.MOCDEFINES_ST = '-D%s'
-
-@conf
-def set_qt5_libs_dir(self):
- env = self.env
- qtlibs = getattr(Options.options, 'qtlibs', None) or self.environ.get('QT5_LIBDIR')
- if not qtlibs:
- try:
- qtlibs = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
- except Errors.WafError:
- qtdir = self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip()
- qtlibs = os.path.join(qtdir, 'lib')
- self.msg('Found the Qt5 libraries in', qtlibs)
- env.QTLIBS = qtlibs
-
-@conf
-def find_single_qt5_lib(self, name, uselib, qtlibs, qtincludes, force_static):
- env = self.env
- if force_static:
- exts = ('.a', '.lib')
- prefix = 'STLIB'
- else:
- exts = ('.so', '.lib')
- prefix = 'LIB'
-
- def lib_names():
- for x in exts:
- for k in ('', '5') if Utils.is_win32 else ['']:
- for p in ('lib', ''):
- yield (p, name, k, x)
-
- for tup in lib_names():
- k = ''.join(tup)
- path = os.path.join(qtlibs, k)
- if os.path.exists(path):
- if env.DEST_OS == 'win32':
- libval = ''.join(tup[:-1])
- else:
- libval = name
- env.append_unique(prefix + '_' + uselib, libval)
- env.append_unique('%sPATH_%s' % (prefix, uselib), qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, name.replace('Qt5', 'Qt')))
- return k
- return False
-
-@conf
-def find_qt5_libraries(self):
- env = self.env
-
- qtincludes = self.environ.get('QT5_INCLUDES') or self.cmd_and_log(env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
- force_static = self.environ.get('QT5_FORCE_STATIC')
- try:
- if self.environ.get('QT5_XCOMPILE'):
- self.fatal('QT5_XCOMPILE Disables pkg-config detection')
- self.check_cfg(atleast_pkgconfig_version='0.1')
- except self.errors.ConfigurationError:
- for i in self.qt5_vars:
- uselib = i.upper()
- if Utils.unversioned_sys_platform() == 'darwin':
- # Since at least qt 4.7.3 each library locates in separate directory
- fwk = i.replace('Qt5', 'Qt')
- frameworkName = fwk + '.framework'
-
- qtDynamicLib = os.path.join(env.QTLIBS, frameworkName, fwk)
- if os.path.exists(qtDynamicLib):
- env.append_unique('FRAMEWORK_' + uselib, fwk)
- env.append_unique('FRAMEWORKPATH_' + uselib, env.QTLIBS)
- self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
- env.append_unique('INCLUDES_' + uselib, os.path.join(env.QTLIBS, frameworkName, 'Headers'))
- else:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, force_static)
- if not force_static and not ret:
- ret = self.find_single_qt5_lib(i, uselib, env.QTLIBS, qtincludes, True)
- self.msg('Checking for %s' % i, ret, 'GREEN' if ret else 'YELLOW')
- else:
- path = '%s:%s:%s/pkgconfig:/usr/lib/qt5/lib/pkgconfig:/opt/qt5/lib/pkgconfig:/usr/lib/qt5/lib:/opt/qt5/lib' % (
- self.environ.get('PKG_CONFIG_PATH', ''), env.QTLIBS, env.QTLIBS)
- for i in self.qt5_vars:
- self.check_cfg(package=i, args='--cflags --libs', mandatory=False, force_static=force_static, pkg_config_path=path)
-
-@conf
-def simplify_qt5_libs(self):
- """
- Since library paths make really long command-lines,
- and since everything depends on qtcore, remove the qtcore ones from qtgui, etc
- """
- env = self.env
- def process_lib(vars_, coreval):
- for d in vars_:
- var = d.upper()
- if var == 'QTCORE':
- continue
-
- value = env['LIBPATH_'+var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if lib in core:
- continue
- accu.append(lib)
- env['LIBPATH_'+var] = accu
- process_lib(self.qt5_vars, 'LIBPATH_QTCORE')
-
-@conf
-def add_qt5_rpath(self):
- """
- Defines rpath entries for Qt libraries
- """
- env = self.env
- if getattr(Options.options, 'want_rpath', False):
- def process_rpath(vars_, coreval):
- for d in vars_:
- var = d.upper()
- value = env['LIBPATH_' + var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if var != 'QTCORE':
- if lib in core:
- continue
- accu.append('-Wl,--rpath='+lib)
- env['RPATH_' + var] = accu
- process_rpath(self.qt5_vars, 'LIBPATH_QTCORE')
-
-@conf
-def set_qt5_libs_to_check(self):
- self.qt5_vars = Utils.to_list(getattr(self, 'qt5_vars', []))
- if not self.qt5_vars:
- dirlst = Utils.listdir(self.env.QTLIBS)
-
- pat = self.env.cxxshlib_PATTERN
- if Utils.is_win32:
- pat = pat.replace('.dll', '.lib')
- if self.environ.get('QT5_FORCE_STATIC'):
- pat = self.env.cxxstlib_PATTERN
- if Utils.unversioned_sys_platform() == 'darwin':
- pat = "%s\.framework"
- re_qt = re.compile(pat%'Qt5?(?P<name>.*)'+'$')
- for x in dirlst:
- m = re_qt.match(x)
- if m:
- self.qt5_vars.append("Qt5%s" % m.group('name'))
- if not self.qt5_vars:
- self.fatal('cannot find any Qt5 library (%r)' % self.env.QTLIBS)
-
- qtextralibs = getattr(Options.options, 'qtextralibs', None)
- if qtextralibs:
- self.qt5_vars.extend(qtextralibs.split(','))
-
-@conf
-def set_qt5_defines(self):
- if sys.platform != 'win32':
- return
- for x in self.qt5_vars:
- y=x.replace('Qt5', 'Qt')[2:].upper()
- self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
-
-def options(opt):
- """
- Command-line options
- """
- opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
- for i in 'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i, type='string', default='', dest=i)
-
- opt.add_option('--translate', action='store_true', help='collect translation strings', dest='trans_qt5', default=False)
- opt.add_option('--qtextralibs', type='string', default='', dest='qtextralibs', help='additional qt libraries on the system to add to default ones, comma separated')
-
diff --git a/waflib/Tools/ruby.py b/waflib/Tools/ruby.py
deleted file mode 100644
index 8d92a79..0000000
--- a/waflib/Tools/ruby.py
+++ /dev/null
@@ -1,186 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# daniel.svensson at purplescout.se 2008
-# Thomas Nagy 2016-2018 (ita)
-
-"""
-Support for Ruby extensions. A C/C++ compiler is required::
-
- def options(opt):
- opt.load('compiler_c ruby')
- def configure(conf):
- conf.load('compiler_c ruby')
- conf.check_ruby_version((1,8,0))
- conf.check_ruby_ext_devel()
- conf.check_ruby_module('libxml')
- def build(bld):
- bld(
- features = 'c cshlib rubyext',
- source = 'rb_mytest.c',
- target = 'mytest_ext',
- install_path = '${ARCHDIR_RUBY}')
- bld.install_files('${LIBDIR_RUBY}', 'Mytest.rb')
-"""
-
-import os
-from waflib import Errors, Options, Task, Utils
-from waflib.TaskGen import before_method, feature, extension
-from waflib.Configure import conf
-
-@feature('rubyext')
-@before_method('apply_incpaths', 'process_source', 'apply_bundle', 'apply_link')
-def init_rubyext(self):
- """
- Add required variables for ruby extensions
- """
- self.install_path = '${ARCHDIR_RUBY}'
- self.uselib = self.to_list(getattr(self, 'uselib', ''))
- if not 'RUBY' in self.uselib:
- self.uselib.append('RUBY')
- if not 'RUBYEXT' in self.uselib:
- self.uselib.append('RUBYEXT')
-
-@feature('rubyext')
-@before_method('apply_link', 'propagate_uselib_vars')
-def apply_ruby_so_name(self):
- """
- Strip the *lib* prefix from ruby extensions
- """
- self.env.cshlib_PATTERN = self.env.cxxshlib_PATTERN = self.env.rubyext_PATTERN
-
-@conf
-def check_ruby_version(self, minver=()):
- """
- Checks if ruby is installed.
- If installed the variable RUBY will be set in environment.
- The ruby binary can be overridden by ``--with-ruby-binary`` command-line option.
- """
-
- ruby = self.find_program('ruby', var='RUBY', value=Options.options.rubybinary)
-
- try:
- version = self.cmd_and_log(ruby + ['-e', 'puts defined?(VERSION) ? VERSION : RUBY_VERSION']).strip()
- except Errors.WafError:
- self.fatal('could not determine ruby version')
- self.env.RUBY_VERSION = version
-
- try:
- ver = tuple(map(int, version.split('.')))
- except Errors.WafError:
- self.fatal('unsupported ruby version %r' % version)
-
- cver = ''
- if minver:
- cver = '> ' + '.'.join(str(x) for x in minver)
- if ver < minver:
- self.fatal('ruby is too old %r' % ver)
-
- self.msg('Checking for ruby version %s' % cver, version)
-
-@conf
-def check_ruby_ext_devel(self):
- """
- Check if a ruby extension can be created
- """
- if not self.env.RUBY:
- self.fatal('ruby detection is required first')
-
- if not self.env.CC_NAME and not self.env.CXX_NAME:
- self.fatal('load a c/c++ compiler first')
-
- version = tuple(map(int, self.env.RUBY_VERSION.split(".")))
-
- def read_out(cmd):
- return Utils.to_list(self.cmd_and_log(self.env.RUBY + ['-rrbconfig', '-e', cmd]))
-
- def read_config(key):
- return read_out('puts RbConfig::CONFIG[%r]' % key)
-
- cpppath = archdir = read_config('archdir')
-
- if version >= (1, 9, 0):
- ruby_hdrdir = read_config('rubyhdrdir')
- cpppath += ruby_hdrdir
- if version >= (2, 0, 0):
- cpppath += read_config('rubyarchhdrdir')
- cpppath += [os.path.join(ruby_hdrdir[0], read_config('arch')[0])]
-
- self.check(header_name='ruby.h', includes=cpppath, errmsg='could not find ruby header file', link_header_test=False)
-
- self.env.LIBPATH_RUBYEXT = read_config('libdir')
- self.env.LIBPATH_RUBYEXT += archdir
- self.env.INCLUDES_RUBYEXT = cpppath
- self.env.CFLAGS_RUBYEXT = read_config('CCDLFLAGS')
- self.env.rubyext_PATTERN = '%s.' + read_config('DLEXT')[0]
-
- # ok this is really stupid, but the command and flags are combined.
- # so we try to find the first argument...
- flags = read_config('LDSHARED')
- while flags and flags[0][0] != '-':
- flags = flags[1:]
-
- # we also want to strip out the deprecated ppc flags
- if len(flags) > 1 and flags[1] == "ppc":
- flags = flags[2:]
-
- self.env.LINKFLAGS_RUBYEXT = flags
- self.env.LINKFLAGS_RUBYEXT += read_config('LIBS')
- self.env.LINKFLAGS_RUBYEXT += read_config('LIBRUBYARG_SHARED')
-
- if Options.options.rubyarchdir:
- self.env.ARCHDIR_RUBY = Options.options.rubyarchdir
- else:
- self.env.ARCHDIR_RUBY = read_config('sitearchdir')[0]
-
- if Options.options.rubylibdir:
- self.env.LIBDIR_RUBY = Options.options.rubylibdir
- else:
- self.env.LIBDIR_RUBY = read_config('sitelibdir')[0]
-
-@conf
-def check_ruby_module(self, module_name):
- """
- Check if the selected ruby interpreter can require the given ruby module::
-
- def configure(conf):
- conf.check_ruby_module('libxml')
-
- :param module_name: module
- :type module_name: string
- """
- self.start_msg('Ruby module %s' % module_name)
- try:
- self.cmd_and_log(self.env.RUBY + ['-e', 'require \'%s\';puts 1' % module_name])
- except Errors.WafError:
- self.end_msg(False)
- self.fatal('Could not find the ruby module %r' % module_name)
- self.end_msg(True)
-
-@extension('.rb')
-def process(self, node):
- return self.create_task('run_ruby', node)
-
-class run_ruby(Task.Task):
- """
- Task to run ruby files detected by file extension .rb::
-
- def options(opt):
- opt.load('ruby')
-
- def configure(ctx):
- ctx.check_ruby_version()
-
- def build(bld):
- bld.env.RBFLAGS = '-e puts "hello world"'
- bld(source='a_ruby_file.rb')
- """
- run_str = '${RUBY} ${RBFLAGS} -I ${SRC[0].parent.abspath()} ${SRC}'
-
-def options(opt):
- """
- Add the ``--with-ruby-archdir``, ``--with-ruby-libdir`` and ``--with-ruby-binary`` options
- """
- opt.add_option('--with-ruby-archdir', type='string', dest='rubyarchdir', help='Specify directory where to install arch specific files')
- opt.add_option('--with-ruby-libdir', type='string', dest='rubylibdir', help='Specify alternate ruby library path')
- opt.add_option('--with-ruby-binary', type='string', dest='rubybinary', help='Specify alternate ruby binary')
-
diff --git a/waflib/Tools/suncc.py b/waflib/Tools/suncc.py
deleted file mode 100644
index 33d34fc..0000000
--- a/waflib/Tools/suncc.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-
-from waflib import Errors
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_scc(conf):
- """
- Detects the Sun C compiler
- """
- v = conf.env
- cc = conf.find_program('cc', var='CC')
- try:
- conf.cmd_and_log(cc + ['-flags'])
- except Errors.WafError:
- conf.fatal('%r is not a Sun compiler' % cc)
- v.CC_NAME = 'sun'
- conf.get_suncc_version(cc)
-
-@conf
-def scc_common_flags(conf):
- """
- Flags required for executing the sun C compiler
- """
- v = conf.env
-
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o', '']
-
- if not v.LINK_CC:
- v.LINK_CC = v.CC
-
- v.CCLNK_SRC_F = ''
- v.CCLNK_TGT_F = ['-o', '']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
-
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Bdynamic'
- v.STLIB_MARKER = '-Bstatic'
-
- v.cprogram_PATTERN = '%s'
-
- v.CFLAGS_cshlib = ['-xcode=pic32', '-DPIC']
- v.LINKFLAGS_cshlib = ['-G']
- v.cshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cstlib = ['-Bstatic']
- v.cstlib_PATTERN = 'lib%s.a'
-
-def configure(conf):
- conf.find_scc()
- conf.find_ar()
- conf.scc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/suncxx.py b/waflib/Tools/suncxx.py
deleted file mode 100644
index 3b384f6..0000000
--- a/waflib/Tools/suncxx.py
+++ /dev/null
@@ -1,67 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-
-from waflib import Errors
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_sxx(conf):
- """
- Detects the sun C++ compiler
- """
- v = conf.env
- cc = conf.find_program(['CC', 'c++'], var='CXX')
- try:
- conf.cmd_and_log(cc + ['-flags'])
- except Errors.WafError:
- conf.fatal('%r is not a Sun compiler' % cc)
- v.CXX_NAME = 'sun'
- conf.get_suncc_version(cc)
-
-@conf
-def sxx_common_flags(conf):
- """
- Flags required for executing the sun C++ compiler
- """
- v = conf.env
-
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o', '']
-
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
-
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o', '']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
-
- v.SONAME_ST = '-Wl,-h,%s'
- v.SHLIB_MARKER = '-Bdynamic'
- v.STLIB_MARKER = '-Bstatic'
-
- v.cxxprogram_PATTERN = '%s'
-
- v.CXXFLAGS_cxxshlib = ['-xcode=pic32', '-DPIC']
- v.LINKFLAGS_cxxshlib = ['-G']
- v.cxxshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cxxstlib = ['-Bstatic']
- v.cxxstlib_PATTERN = 'lib%s.a'
-
-def configure(conf):
- conf.find_sxx()
- conf.find_ar()
- conf.sxx_common_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/tex.py b/waflib/Tools/tex.py
deleted file mode 100644
index eaf9fdb..0000000
--- a/waflib/Tools/tex.py
+++ /dev/null
@@ -1,543 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-
-"""
-TeX/LaTeX/PDFLaTeX/XeLaTeX support
-
-Example::
-
- def configure(conf):
- conf.load('tex')
- if not conf.env.LATEX:
- conf.fatal('The program LaTex is required')
-
- def build(bld):
- bld(
- features = 'tex',
- type = 'latex', # pdflatex or xelatex
- source = 'document.ltx', # mandatory, the source
- outs = 'ps', # 'pdf' or 'ps pdf'
- deps = 'crossreferencing.lst', # to give dependencies directly
- prompt = 1, # 0 for the batch mode
- )
-
-Notes:
-
-- To configure with a special program, use::
-
- $ PDFLATEX=luatex waf configure
-
-- This tool does not use the target attribute of the task generator
- (``bld(target=...)``); the target file name is built from the source
- base name and the output type(s)
-"""
-
-import os, re
-from waflib import Utils, Task, Errors, Logs, Node
-from waflib.TaskGen import feature, before_method
-
-re_bibunit = re.compile(r'\\(?P<type>putbib)\[(?P<file>[^\[\]]*)\]',re.M)
-def bibunitscan(self):
- """
- Parses TeX inputs and try to find the *bibunit* file dependencies
-
- :return: list of bibunit files
- :rtype: list of :py:class:`waflib.Node.Node`
- """
- node = self.inputs[0]
-
- nodes = []
- if not node:
- return nodes
-
- code = node.read()
- for match in re_bibunit.finditer(code):
- path = match.group('file')
- if path:
- found = None
- for k in ('', '.bib'):
- # add another loop for the tex include paths?
- Logs.debug('tex: trying %s%s', path, k)
- fi = node.parent.find_resource(path + k)
- if fi:
- found = True
- nodes.append(fi)
- # no break
- if not found:
- Logs.debug('tex: could not find %s', path)
-
- Logs.debug('tex: found the following bibunit files: %s', nodes)
- return nodes
-
-exts_deps_tex = ['', '.ltx', '.tex', '.bib', '.pdf', '.png', '.eps', '.ps', '.sty']
-"""List of typical file extensions included in latex files"""
-
-exts_tex = ['.ltx', '.tex']
-"""List of typical file extensions that contain latex"""
-
-re_tex = re.compile(r'\\(?P<type>usepackage|RequirePackage|include|bibliography([^\[\]{}]*)|putbib|includegraphics|input|import|bringin|lstinputlisting)(\[[^\[\]]*\])?{(?P<file>[^{}]*)}',re.M)
-"""Regexp for expressions that may include latex files"""
-
-g_bibtex_re = re.compile('bibdata', re.M)
-"""Regexp for bibtex files"""
-
-g_glossaries_re = re.compile('\\@newglossary', re.M)
-"""Regexp for expressions that create glossaries"""
-
-class tex(Task.Task):
- """
- Compiles a tex/latex file.
-
- .. inheritance-diagram:: waflib.Tools.tex.latex waflib.Tools.tex.xelatex waflib.Tools.tex.pdflatex
- """
-
- bibtex_fun, _ = Task.compile_fun('${BIBTEX} ${BIBTEXFLAGS} ${SRCFILE}', shell=False)
- bibtex_fun.__doc__ = """
- Execute the program **bibtex**
- """
-
- makeindex_fun, _ = Task.compile_fun('${MAKEINDEX} ${MAKEINDEXFLAGS} ${SRCFILE}', shell=False)
- makeindex_fun.__doc__ = """
- Execute the program **makeindex**
- """
-
- makeglossaries_fun, _ = Task.compile_fun('${MAKEGLOSSARIES} ${SRCFILE}', shell=False)
- makeglossaries_fun.__doc__ = """
- Execute the program **makeglossaries**
- """
-
- def exec_command(self, cmd, **kw):
- """
- Executes TeX commands without buffering (latex may prompt for inputs)
-
- :return: the return code
- :rtype: int
- """
- if self.env.PROMPT_LATEX:
- # capture the outputs in configuration tests
- kw['stdout'] = kw['stderr'] = None
- return super(tex, self).exec_command(cmd, **kw)
-
- def scan_aux(self, node):
- """
- Recursive regex-based scanner that finds included auxiliary files.
- """
- nodes = [node]
- re_aux = re.compile(r'\\@input{(?P<file>[^{}]*)}', re.M)
-
- def parse_node(node):
- code = node.read()
- for match in re_aux.finditer(code):
- path = match.group('file')
- found = node.parent.find_or_declare(path)
- if found and found not in nodes:
- Logs.debug('tex: found aux node %r', found)
- nodes.append(found)
- parse_node(found)
- parse_node(node)
- return nodes
-
- def scan(self):
- """
- Recursive regex-based scanner that finds latex dependencies. It uses :py:attr:`waflib.Tools.tex.re_tex`
-
- Depending on your needs you might want:
-
- * to change re_tex::
-
- from waflib.Tools import tex
- tex.re_tex = myregex
-
- * or to change the method scan from the latex tasks::
-
- from waflib.Task import classes
- classes['latex'].scan = myscanfunction
- """
- node = self.inputs[0]
-
- nodes = []
- names = []
- seen = []
- if not node:
- return (nodes, names)
-
- def parse_node(node):
- if node in seen:
- return
- seen.append(node)
- code = node.read()
- for match in re_tex.finditer(code):
-
- multibib = match.group('type')
- if multibib and multibib.startswith('bibliography'):
- multibib = multibib[len('bibliography'):]
- if multibib.startswith('style'):
- continue
- else:
- multibib = None
-
- for path in match.group('file').split(','):
- if path:
- add_name = True
- found = None
- for k in exts_deps_tex:
-
- # issue 1067, scan in all texinputs folders
- for up in self.texinputs_nodes:
- Logs.debug('tex: trying %s%s', path, k)
- found = up.find_resource(path + k)
- if found:
- break
-
-
- for tsk in self.generator.tasks:
- if not found or found in tsk.outputs:
- break
- else:
- nodes.append(found)
- add_name = False
- for ext in exts_tex:
- if found.name.endswith(ext):
- parse_node(found)
- break
-
- # multibib stuff
- if found and multibib and found.name.endswith('.bib'):
- try:
- self.multibibs.append(found)
- except AttributeError:
- self.multibibs = [found]
-
- # no break, people are crazy
- if add_name:
- names.append(path)
- parse_node(node)
-
- for x in nodes:
- x.parent.get_bld().mkdir()
-
- Logs.debug("tex: found the following : %s and names %s", nodes, names)
- return (nodes, names)
-
- def check_status(self, msg, retcode):
- """
- Checks an exit status and raise an error with a particular message
-
- :param msg: message to display if the code is non-zero
- :type msg: string
- :param retcode: condition
- :type retcode: boolean
- """
- if retcode != 0:
- raise Errors.WafError('%r command exit status %r' % (msg, retcode))
-
- def info(self, *k, **kw):
- try:
- info = self.generator.bld.conf.logger.info
- except AttributeError:
- info = Logs.info
- info(*k, **kw)
-
- def bibfile(self):
- """
- Parses *.aux* files to find bibfiles to process.
- If present, execute :py:meth:`waflib.Tools.tex.tex.bibtex_fun`
- """
- for aux_node in self.aux_nodes:
- try:
- ct = aux_node.read()
- except EnvironmentError:
- Logs.error('Error reading %s: %r', aux_node.abspath())
- continue
-
- if g_bibtex_re.findall(ct):
- self.info('calling bibtex')
-
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
- self.env.SRCFILE = aux_node.name[:-4]
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- for node in getattr(self, 'multibibs', []):
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
- self.env.SRCFILE = node.name[:-4]
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- def bibunits(self):
- """
- Parses *.aux* file to find bibunit files. If there are bibunit files,
- runs :py:meth:`waflib.Tools.tex.tex.bibtex_fun`.
- """
- try:
- bibunits = bibunitscan(self)
- except OSError:
- Logs.error('error bibunitscan')
- else:
- if bibunits:
- fn = ['bu' + str(i) for i in range(1, len(bibunits) + 1)]
- if fn:
- self.info('calling bibtex on bibunits')
-
- for f in fn:
- self.env.env = {'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()}
- self.env.SRCFILE = f
- self.check_status('error when calling bibtex', self.bibtex_fun())
-
- def makeindex(self):
- """
- Searches the filesystem for *.idx* files to process. If present,
- runs :py:meth:`waflib.Tools.tex.tex.makeindex_fun`
- """
- self.idx_node = self.inputs[0].change_ext('.idx')
- try:
- idx_path = self.idx_node.abspath()
- os.stat(idx_path)
- except OSError:
- self.info('index file %s absent, not calling makeindex', idx_path)
- else:
- self.info('calling makeindex')
-
- self.env.SRCFILE = self.idx_node.name
- self.env.env = {}
- self.check_status('error when calling makeindex %s' % idx_path, self.makeindex_fun())
-
- def bibtopic(self):
- """
- Lists additional .aux files from the bibtopic package
- """
- p = self.inputs[0].parent.get_bld()
- if os.path.exists(os.path.join(p.abspath(), 'btaux.aux')):
- self.aux_nodes += p.ant_glob('*[0-9].aux')
-
- def makeglossaries(self):
- """
- Lists additional glossaries from .aux files. If present, runs the makeglossaries program.
- """
- src_file = self.inputs[0].abspath()
- base_file = os.path.basename(src_file)
- base, _ = os.path.splitext(base_file)
- for aux_node in self.aux_nodes:
- try:
- ct = aux_node.read()
- except EnvironmentError:
- Logs.error('Error reading %s: %r', aux_node.abspath())
- continue
-
- if g_glossaries_re.findall(ct):
- if not self.env.MAKEGLOSSARIES:
- raise Errors.WafError("The program 'makeglossaries' is missing!")
- Logs.warn('calling makeglossaries')
- self.env.SRCFILE = base
- self.check_status('error when calling makeglossaries %s' % base, self.makeglossaries_fun())
- return
-
- def texinputs(self):
- """
- Returns the list of texinput nodes as a string suitable for the TEXINPUTS environment variables
-
- :rtype: string
- """
- return os.pathsep.join([k.abspath() for k in self.texinputs_nodes]) + os.pathsep
-
- def run(self):
- """
- Runs the whole TeX build process
-
- Multiple passes are required depending on the usage of cross-references,
- bibliographies, glossaries, indexes and additional contents
- The appropriate TeX compiler is called until the *.aux* files stop changing.
- """
- env = self.env
-
- if not env.PROMPT_LATEX:
- env.append_value('LATEXFLAGS', '-interaction=batchmode')
- env.append_value('PDFLATEXFLAGS', '-interaction=batchmode')
- env.append_value('XELATEXFLAGS', '-interaction=batchmode')
-
- # important, set the cwd for everybody
- self.cwd = self.inputs[0].parent.get_bld()
-
- self.info('first pass on %s', self.__class__.__name__)
-
- # Hash .aux files before even calling the LaTeX compiler
- cur_hash = self.hash_aux_nodes()
-
- self.call_latex()
-
- # Find the .aux files again since bibtex processing can require it
- self.hash_aux_nodes()
-
- self.bibtopic()
- self.bibfile()
- self.bibunits()
- self.makeindex()
- self.makeglossaries()
-
- for i in range(10):
- # There is no need to call latex again if the .aux hash value has not changed
- prev_hash = cur_hash
- cur_hash = self.hash_aux_nodes()
- if not cur_hash:
- Logs.error('No aux.h to process')
- if cur_hash and cur_hash == prev_hash:
- break
-
- # run the command
- self.info('calling %s', self.__class__.__name__)
- self.call_latex()
-
- def hash_aux_nodes(self):
- """
- Returns a hash of the .aux file contents
-
- :rtype: string or bytes
- """
- try:
- self.aux_nodes
- except AttributeError:
- try:
- self.aux_nodes = self.scan_aux(self.inputs[0].change_ext('.aux'))
- except IOError:
- return None
- return Utils.h_list([Utils.h_file(x.abspath()) for x in self.aux_nodes])
-
- def call_latex(self):
- """
- Runs the TeX compiler once
- """
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'TEXINPUTS': self.texinputs()})
- self.env.SRCFILE = self.inputs[0].abspath()
- self.check_status('error when calling latex', self.texfun())
-
-class latex(tex):
- "Compiles LaTeX files"
- texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
-
-class pdflatex(tex):
- "Compiles PdfLaTeX files"
- texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
-
-class xelatex(tex):
- "XeLaTeX files"
- texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
-
-class dvips(Task.Task):
- "Converts dvi files to postscript"
- run_str = '${DVIPS} ${DVIPSFLAGS} ${SRC} -o ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-class dvipdf(Task.Task):
- "Converts dvi files to pdf"
- run_str = '${DVIPDF} ${DVIPDFFLAGS} ${SRC} ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-class pdf2ps(Task.Task):
- "Converts pdf files to postscript"
- run_str = '${PDF2PS} ${PDF2PSFLAGS} ${SRC} ${TGT}'
- color = 'BLUE'
- after = ['latex', 'pdflatex', 'xelatex']
-
-@feature('tex')
-@before_method('process_source')
-def apply_tex(self):
- """
- Creates :py:class:`waflib.Tools.tex.tex` objects, and
- dvips/dvipdf/pdf2ps tasks if necessary (outs='ps', etc).
- """
- if not getattr(self, 'type', None) in ('latex', 'pdflatex', 'xelatex'):
- self.type = 'pdflatex'
-
- outs = Utils.to_list(getattr(self, 'outs', []))
-
- # prompt for incomplete files (else the batchmode is used)
- try:
- self.generator.bld.conf
- except AttributeError:
- default_prompt = False
- else:
- default_prompt = True
- self.env.PROMPT_LATEX = getattr(self, 'prompt', default_prompt)
-
- deps_lst = []
-
- if getattr(self, 'deps', None):
- deps = self.to_list(self.deps)
- for dep in deps:
- if isinstance(dep, str):
- n = self.path.find_resource(dep)
- if not n:
- self.bld.fatal('Could not find %r for %r' % (dep, self))
- if not n in deps_lst:
- deps_lst.append(n)
- elif isinstance(dep, Node.Node):
- deps_lst.append(dep)
-
- for node in self.to_nodes(self.source):
- if self.type == 'latex':
- task = self.create_task('latex', node, node.change_ext('.dvi'))
- elif self.type == 'pdflatex':
- task = self.create_task('pdflatex', node, node.change_ext('.pdf'))
- elif self.type == 'xelatex':
- task = self.create_task('xelatex', node, node.change_ext('.pdf'))
-
- task.env = self.env
-
- # add the manual dependencies
- if deps_lst:
- for n in deps_lst:
- if not n in task.dep_nodes:
- task.dep_nodes.append(n)
-
- # texinputs is a nasty beast
- if hasattr(self, 'texinputs_nodes'):
- task.texinputs_nodes = self.texinputs_nodes
- else:
- task.texinputs_nodes = [node.parent, node.parent.get_bld(), self.path, self.path.get_bld()]
- lst = os.environ.get('TEXINPUTS', '')
- if self.env.TEXINPUTS:
- lst += os.pathsep + self.env.TEXINPUTS
- if lst:
- lst = lst.split(os.pathsep)
- for x in lst:
- if x:
- if os.path.isabs(x):
- p = self.bld.root.find_node(x)
- if p:
- task.texinputs_nodes.append(p)
- else:
- Logs.error('Invalid TEXINPUTS folder %s', x)
- else:
- Logs.error('Cannot resolve relative paths in TEXINPUTS %s', x)
-
- if self.type == 'latex':
- if 'ps' in outs:
- tsk = self.create_task('dvips', task.outputs, node.change_ext('.ps'))
- tsk.env.env = dict(os.environ)
- if 'pdf' in outs:
- tsk = self.create_task('dvipdf', task.outputs, node.change_ext('.pdf'))
- tsk.env.env = dict(os.environ)
- elif self.type == 'pdflatex':
- if 'ps' in outs:
- self.create_task('pdf2ps', task.outputs, node.change_ext('.ps'))
- self.source = []
-
-def configure(self):
- """
- Find the programs tex, latex and others without raising errors.
- """
- v = self.env
- for p in 'tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps makeglossaries'.split():
- try:
- self.find_program(p, var=p.upper())
- except self.errors.ConfigurationError:
- pass
- v.DVIPSFLAGS = '-Ppdf'
-
diff --git a/waflib/Tools/vala.py b/waflib/Tools/vala.py
deleted file mode 100644
index 822ec50..0000000
--- a/waflib/Tools/vala.py
+++ /dev/null
@@ -1,355 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-# Radosław Szkodziński, 2010
-
-"""
-At this point, vala is still unstable, so do not expect
-this tool to be too stable either (apis, etc)
-"""
-
-import re
-from waflib import Build, Context, Errors, Logs, Node, Options, Task, Utils
-from waflib.TaskGen import extension, taskgen_method
-from waflib.Configure import conf
-
-class valac(Task.Task):
- """
- Compiles vala files
- """
- #run_str = "${VALAC} ${VALAFLAGS}" # ideally
- #vars = ['VALAC_VERSION']
- vars = ["VALAC", "VALAC_VERSION", "VALAFLAGS"]
- ext_out = ['.h']
-
- def run(self):
- cmd = self.env.VALAC + self.env.VALAFLAGS
- resources = getattr(self, 'vala_exclude', [])
- cmd.extend([a.abspath() for a in self.inputs if a not in resources])
- ret = self.exec_command(cmd, cwd=self.vala_dir_node.abspath())
-
- if ret:
- return ret
-
- if self.generator.dump_deps_node:
- self.generator.dump_deps_node.write('\n'.join(self.generator.packages))
-
- return ret
-
-@taskgen_method
-def init_vala_task(self):
- """
- Initializes the vala task with the relevant data (acts as a constructor)
- """
- self.profile = getattr(self, 'profile', 'gobject')
-
- self.packages = packages = Utils.to_list(getattr(self, 'packages', []))
- self.use = Utils.to_list(getattr(self, 'use', []))
- if packages and not self.use:
- self.use = packages[:] # copy
-
- if self.profile == 'gobject':
- if not 'GOBJECT' in self.use:
- self.use.append('GOBJECT')
-
- def addflags(flags):
- self.env.append_value('VALAFLAGS', flags)
-
- if self.profile:
- addflags('--profile=%s' % self.profile)
-
- valatask = self.valatask
-
- # output directory
- if hasattr(self, 'vala_dir'):
- if isinstance(self.vala_dir, str):
- valatask.vala_dir_node = self.path.get_bld().make_node(self.vala_dir)
- try:
- valatask.vala_dir_node.mkdir()
- except OSError:
- raise self.bld.fatal('Cannot create the vala dir %r' % valatask.vala_dir_node)
- else:
- valatask.vala_dir_node = self.vala_dir
- else:
- valatask.vala_dir_node = self.path.get_bld()
- addflags('--directory=%s' % valatask.vala_dir_node.abspath())
-
- if hasattr(self, 'thread'):
- if self.profile == 'gobject':
- if not 'GTHREAD' in self.use:
- self.use.append('GTHREAD')
- else:
- #Vala doesn't have threading support for dova nor posix
- Logs.warn('Profile %s means no threading support', self.profile)
- self.thread = False
-
- if self.thread:
- addflags('--thread')
-
- self.is_lib = 'cprogram' not in self.features
- if self.is_lib:
- addflags('--library=%s' % self.target)
-
- h_node = valatask.vala_dir_node.find_or_declare('%s.h' % self.target)
- valatask.outputs.append(h_node)
- addflags('--header=%s' % h_node.name)
-
- valatask.outputs.append(valatask.vala_dir_node.find_or_declare('%s.vapi' % self.target))
-
- if getattr(self, 'gir', None):
- gir_node = valatask.vala_dir_node.find_or_declare('%s.gir' % self.gir)
- addflags('--gir=%s' % gir_node.name)
- valatask.outputs.append(gir_node)
-
- self.vala_target_glib = getattr(self, 'vala_target_glib', getattr(Options.options, 'vala_target_glib', None))
- if self.vala_target_glib:
- addflags('--target-glib=%s' % self.vala_target_glib)
-
- addflags(['--define=%s' % x for x in Utils.to_list(getattr(self, 'vala_defines', []))])
-
- packages_private = Utils.to_list(getattr(self, 'packages_private', []))
- addflags(['--pkg=%s' % x for x in packages_private])
-
- def _get_api_version():
- api_version = '1.0'
- if hasattr(Context.g_module, 'API_VERSION'):
- version = Context.g_module.API_VERSION.split(".")
- if version[0] == "0":
- api_version = "0." + version[1]
- else:
- api_version = version[0] + ".0"
- return api_version
-
- self.includes = Utils.to_list(getattr(self, 'includes', []))
- valatask.install_path = getattr(self, 'install_path', '')
-
- valatask.vapi_path = getattr(self, 'vapi_path', '${DATAROOTDIR}/vala/vapi')
- valatask.pkg_name = getattr(self, 'pkg_name', self.env.PACKAGE)
- valatask.header_path = getattr(self, 'header_path', '${INCLUDEDIR}/%s-%s' % (valatask.pkg_name, _get_api_version()))
- valatask.install_binding = getattr(self, 'install_binding', True)
-
- self.vapi_dirs = vapi_dirs = Utils.to_list(getattr(self, 'vapi_dirs', []))
- #includes = []
-
- if hasattr(self, 'use'):
- local_packages = Utils.to_list(self.use)[:] # make sure to have a copy
- seen = []
- while len(local_packages) > 0:
- package = local_packages.pop()
- if package in seen:
- continue
- seen.append(package)
-
- # check if the package exists
- try:
- package_obj = self.bld.get_tgen_by_name(package)
- except Errors.WafError:
- continue
-
- # in practice the other task is already processed
- # but this makes it explicit
- package_obj.post()
- package_name = package_obj.target
- task = getattr(package_obj, 'valatask', None)
- if task:
- for output in task.outputs:
- if output.name == package_name + ".vapi":
- valatask.set_run_after(task)
- if package_name not in packages:
- packages.append(package_name)
- if output.parent not in vapi_dirs:
- vapi_dirs.append(output.parent)
- if output.parent not in self.includes:
- self.includes.append(output.parent)
-
- if hasattr(package_obj, 'use'):
- lst = self.to_list(package_obj.use)
- lst.reverse()
- local_packages = [pkg for pkg in lst if pkg not in seen] + local_packages
-
- addflags(['--pkg=%s' % p for p in packages])
-
- for vapi_dir in vapi_dirs:
- if isinstance(vapi_dir, Node.Node):
- v_node = vapi_dir
- else:
- v_node = self.path.find_dir(vapi_dir)
- if not v_node:
- Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
- else:
- addflags('--vapidir=%s' % v_node.abspath())
-
- self.dump_deps_node = None
- if self.is_lib and self.packages:
- self.dump_deps_node = valatask.vala_dir_node.find_or_declare('%s.deps' % self.target)
- valatask.outputs.append(self.dump_deps_node)
-
- if self.is_lib and valatask.install_binding:
- headers_list = [o for o in valatask.outputs if o.suffix() == ".h"]
- if headers_list:
- self.install_vheader = self.add_install_files(install_to=valatask.header_path, install_from=headers_list)
-
- vapi_list = [o for o in valatask.outputs if (o.suffix() in (".vapi", ".deps"))]
- if vapi_list:
- self.install_vapi = self.add_install_files(install_to=valatask.vapi_path, install_from=vapi_list)
-
- gir_list = [o for o in valatask.outputs if o.suffix() == '.gir']
- if gir_list:
- self.install_gir = self.add_install_files(
- install_to=getattr(self, 'gir_path', '${DATAROOTDIR}/gir-1.0'), install_from=gir_list)
-
- if hasattr(self, 'vala_resources'):
- nodes = self.to_nodes(self.vala_resources)
- valatask.vala_exclude = getattr(valatask, 'vala_exclude', []) + nodes
- valatask.inputs.extend(nodes)
- for x in nodes:
- addflags(['--gresources', x.abspath()])
-
-@extension('.vala', '.gs')
-def vala_file(self, node):
- """
- Compile a vala file and bind the task to *self.valatask*. If an existing vala task is already set, add the node
- to its inputs. The typical example is::
-
- def build(bld):
- bld.program(
- packages = 'gtk+-2.0',
- target = 'vala-gtk-example',
- use = 'GTK GLIB',
- source = 'vala-gtk-example.vala foo.vala',
- vala_defines = ['DEBUG'] # adds --define=<xyz> values to the command-line
-
- # the following arguments are for libraries
- #gir = 'hello-1.0',
- #gir_path = '/tmp',
- #vapi_path = '/tmp',
- #pkg_name = 'hello'
- # disable installing of gir, vapi and header
- #install_binding = False
-
- # profile = 'xyz' # adds --profile=<xyz> to enable profiling
- # thread = True, # adds --thread, except if profile is on or not on 'gobject'
- # vala_target_glib = 'xyz' # adds --target-glib=<xyz>, can be given through the command-line option --vala-target-glib=<xyz>
- )
-
-
- :param node: vala file
- :type node: :py:class:`waflib.Node.Node`
- """
-
- try:
- valatask = self.valatask
- except AttributeError:
- valatask = self.valatask = self.create_task('valac')
- self.init_vala_task()
-
- valatask.inputs.append(node)
- name = node.name[:node.name.rfind('.')] + '.c'
- c_node = valatask.vala_dir_node.find_or_declare(name)
- valatask.outputs.append(c_node)
- self.source.append(c_node)
-
-@extension('.vapi')
-def vapi_file(self, node):
- try:
- valatask = self.valatask
- except AttributeError:
- valatask = self.valatask = self.create_task('valac')
- self.init_vala_task()
- valatask.inputs.append(node)
-
-@conf
-def find_valac(self, valac_name, min_version):
- """
- Find the valac program, and execute it to store the version
- number in *conf.env.VALAC_VERSION*
-
- :param valac_name: program name
- :type valac_name: string or list of string
- :param min_version: minimum version acceptable
- :type min_version: tuple of int
- """
- valac = self.find_program(valac_name, var='VALAC')
- try:
- output = self.cmd_and_log(valac + ['--version'])
- except Errors.WafError:
- valac_version = None
- else:
- ver = re.search(r'\d+.\d+.\d+', output).group().split('.')
- valac_version = tuple([int(x) for x in ver])
-
- self.msg('Checking for %s version >= %r' % (valac_name, min_version),
- valac_version, valac_version and valac_version >= min_version)
- if valac and valac_version < min_version:
- self.fatal("%s version %r is too old, need >= %r" % (valac_name, valac_version, min_version))
-
- self.env.VALAC_VERSION = valac_version
- return valac
-
-@conf
-def check_vala(self, min_version=(0,8,0), branch=None):
- """
- Check if vala compiler from a given branch exists of at least a given
- version.
-
- :param min_version: minimum version acceptable (0.8.0)
- :type min_version: tuple
- :param branch: first part of the version number, in case a snapshot is used (0, 8)
- :type branch: tuple of int
- """
- if self.env.VALA_MINVER:
- min_version = self.env.VALA_MINVER
- if self.env.VALA_MINVER_BRANCH:
- branch = self.env.VALA_MINVER_BRANCH
- if not branch:
- branch = min_version[:2]
- try:
- find_valac(self, 'valac-%d.%d' % (branch[0], branch[1]), min_version)
- except self.errors.ConfigurationError:
- find_valac(self, 'valac', min_version)
-
-@conf
-def check_vala_deps(self):
- """
- Load the gobject and gthread packages if they are missing.
- """
- if not self.env.HAVE_GOBJECT:
- pkg_args = {'package': 'gobject-2.0',
- 'uselib_store': 'GOBJECT',
- 'args': '--cflags --libs'}
- if getattr(Options.options, 'vala_target_glib', None):
- pkg_args['atleast_version'] = Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
-
- if not self.env.HAVE_GTHREAD:
- pkg_args = {'package': 'gthread-2.0',
- 'uselib_store': 'GTHREAD',
- 'args': '--cflags --libs'}
- if getattr(Options.options, 'vala_target_glib', None):
- pkg_args['atleast_version'] = Options.options.vala_target_glib
- self.check_cfg(**pkg_args)
-
-def configure(self):
- """
- Use the following to enforce minimum vala version::
-
- def configure(conf):
- conf.env.VALA_MINVER = (0, 10, 0)
- conf.load('vala')
- """
- self.load('gnu_dirs')
- self.check_vala_deps()
- self.check_vala()
- self.add_os_flags('VALAFLAGS')
- self.env.append_unique('VALAFLAGS', ['-C'])
-
-def options(opt):
- """
- Load the :py:mod:`waflib.Tools.gnu_dirs` tool and add the ``--vala-target-glib`` command-line option
- """
- opt.load('gnu_dirs')
- valaopts = opt.add_option_group('Vala Compiler Options')
- valaopts.add_option('--vala-target-glib', default=None,
- dest='vala_target_glib', metavar='MAJOR.MINOR',
- help='Target version of glib for Vala GObject code generation')
-
diff --git a/waflib/Tools/waf_unit_test.py b/waflib/Tools/waf_unit_test.py
deleted file mode 100644
index a71ed1c..0000000
--- a/waflib/Tools/waf_unit_test.py
+++ /dev/null
@@ -1,296 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Carlos Rafael Giani, 2006
-# Thomas Nagy, 2010-2018 (ita)
-
-"""
-Unit testing system for C/C++/D and interpreted languages providing test execution:
-
-* in parallel, by using ``waf -j``
-* partial (only the tests that have changed) or full (by using ``waf --alltests``)
-
-The tests are declared by adding the **test** feature to programs::
-
- def options(opt):
- opt.load('compiler_cxx waf_unit_test')
- def configure(conf):
- conf.load('compiler_cxx waf_unit_test')
- def build(bld):
- bld(features='cxx cxxprogram test', source='main.cpp', target='app')
- # or
- bld.program(features='test', source='main2.cpp', target='app2')
-
-When the build is executed, the program 'test' will be built and executed without arguments.
-The success/failure is detected by looking at the return code. The status and the standard output/error
-are stored on the build context.
-
-The results can be displayed by registering a callback function. Here is how to call
-the predefined callback::
-
- def build(bld):
- bld(features='cxx cxxprogram test', source='main.c', target='app')
- from waflib.Tools import waf_unit_test
- bld.add_post_fun(waf_unit_test.summary)
-
-By passing --dump-test-scripts the build outputs corresponding python files
-(with extension _run.py) that are useful for debugging purposes.
-"""
-
-import os, shlex, sys
-from waflib.TaskGen import feature, after_method, taskgen_method
-from waflib import Utils, Task, Logs, Options
-from waflib.Tools import ccroot
-testlock = Utils.threading.Lock()
-
-SCRIPT_TEMPLATE = """#! %(python)s
-import subprocess, sys
-cmd = %(cmd)r
-# if you want to debug with gdb:
-#cmd = ['gdb', '-args'] + cmd
-env = %(env)r
-status = subprocess.call(cmd, env=env, cwd=%(cwd)r, shell=isinstance(cmd, str))
-sys.exit(status)
-"""
-
-@taskgen_method
-def handle_ut_cwd(self, key):
- """
- Task generator method, used internally to limit code duplication.
- This method may disappear anytime.
- """
- cwd = getattr(self, key, None)
- if cwd:
- if isinstance(cwd, str):
- # we want a Node instance
- if os.path.isabs(cwd):
- self.ut_cwd = self.bld.root.make_node(cwd)
- else:
- self.ut_cwd = self.path.make_node(cwd)
-
-@feature('test_scripts')
-def make_interpreted_test(self):
- """Create interpreted unit tests."""
- for x in ['test_scripts_source', 'test_scripts_template']:
- if not hasattr(self, x):
- Logs.warn('a test_scripts taskgen i missing %s' % x)
- return
-
- self.ut_run, lst = Task.compile_fun(self.test_scripts_template, shell=getattr(self, 'test_scripts_shell', False))
-
- script_nodes = self.to_nodes(self.test_scripts_source)
- for script_node in script_nodes:
- tsk = self.create_task('utest', [script_node])
- tsk.vars = lst + tsk.vars
- tsk.env['SCRIPT'] = script_node.path_from(tsk.get_cwd())
-
- self.handle_ut_cwd('test_scripts_cwd')
-
- env = getattr(self, 'test_scripts_env', None)
- if env:
- self.ut_env = env
- else:
- self.ut_env = dict(os.environ)
-
- paths = getattr(self, 'test_scripts_paths', {})
- for (k,v) in paths.items():
- p = self.ut_env.get(k, '').split(os.pathsep)
- if isinstance(v, str):
- v = v.split(os.pathsep)
- self.ut_env[k] = os.pathsep.join(p + v)
-
-@feature('test')
-@after_method('apply_link', 'process_use')
-def make_test(self):
- """Create the unit test task. There can be only one unit test task by task generator."""
- if not getattr(self, 'link_task', None):
- return
-
- tsk = self.create_task('utest', self.link_task.outputs)
- if getattr(self, 'ut_str', None):
- self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
- tsk.vars = lst + tsk.vars
-
- self.handle_ut_cwd('ut_cwd')
-
- if not hasattr(self, 'ut_paths'):
- paths = []
- for x in self.tmp_use_sorted:
- try:
- y = self.bld.get_tgen_by_name(x).link_task
- except AttributeError:
- pass
- else:
- if not isinstance(y, ccroot.stlink_task):
- paths.append(y.outputs[0].parent.abspath())
- self.ut_paths = os.pathsep.join(paths) + os.pathsep
-
- if not hasattr(self, 'ut_env'):
- self.ut_env = dct = dict(os.environ)
- def add_path(var):
- dct[var] = self.ut_paths + dct.get(var,'')
- if Utils.is_win32:
- add_path('PATH')
- elif Utils.unversioned_sys_platform() == 'darwin':
- add_path('DYLD_LIBRARY_PATH')
- add_path('LD_LIBRARY_PATH')
- else:
- add_path('LD_LIBRARY_PATH')
-
- if not hasattr(self, 'ut_cmd'):
- self.ut_cmd = getattr(Options.options, 'testcmd', False)
-
-@taskgen_method
-def add_test_results(self, tup):
- """Override and return tup[1] to interrupt the build immediately if a test does not run"""
- Logs.debug("ut: %r", tup)
- try:
- self.utest_results.append(tup)
- except AttributeError:
- self.utest_results = [tup]
- try:
- self.bld.utest_results.append(tup)
- except AttributeError:
- self.bld.utest_results = [tup]
-
-@Task.deep_inputs
-class utest(Task.Task):
- """
- Execute a unit test
- """
- color = 'PINK'
- after = ['vnum', 'inst']
- vars = []
-
- def runnable_status(self):
- """
- Always execute the task if `waf --alltests` was used or no
- tests if ``waf --notests`` was used
- """
- if getattr(Options.options, 'no_tests', False):
- return Task.SKIP_ME
-
- ret = super(utest, self).runnable_status()
- if ret == Task.SKIP_ME:
- if getattr(Options.options, 'all_tests', False):
- return Task.RUN_ME
- return ret
-
- def get_test_env(self):
- """
- In general, tests may require any library built anywhere in the project.
- Override this method if fewer paths are needed
- """
- return self.generator.ut_env
-
- def post_run(self):
- super(utest, self).post_run()
- if getattr(Options.options, 'clear_failed_tests', False) and self.waf_unit_test_results[1]:
- self.generator.bld.task_sigs[self.uid()] = None
-
- def run(self):
- """
- Execute the test. The execution is always successful, and the results
- are stored on ``self.generator.bld.utest_results`` for postprocessing.
-
- Override ``add_test_results`` to interrupt the build
- """
- if hasattr(self.generator, 'ut_run'):
- return self.generator.ut_run(self)
-
- self.ut_exec = getattr(self.generator, 'ut_exec', [self.inputs[0].abspath()])
- ut_cmd = getattr(self.generator, 'ut_cmd', False)
- if ut_cmd:
- self.ut_exec = shlex.split(ut_cmd % ' '.join(self.ut_exec))
-
- return self.exec_command(self.ut_exec)
-
- def exec_command(self, cmd, **kw):
- Logs.debug('runner: %r', cmd)
- if getattr(Options.options, 'dump_test_scripts', False):
- script_code = SCRIPT_TEMPLATE % {
- 'python': sys.executable,
- 'env': self.get_test_env(),
- 'cwd': self.get_cwd().abspath(),
- 'cmd': cmd
- }
- script_file = self.inputs[0].abspath() + '_run.py'
- Utils.writef(script_file, script_code)
- os.chmod(script_file, Utils.O755)
- if Logs.verbose > 1:
- Logs.info('Test debug file written as %r' % script_file)
-
- proc = Utils.subprocess.Popen(cmd, cwd=self.get_cwd().abspath(), env=self.get_test_env(),
- stderr=Utils.subprocess.PIPE, stdout=Utils.subprocess.PIPE, shell=isinstance(cmd,str))
- (stdout, stderr) = proc.communicate()
- self.waf_unit_test_results = tup = (self.inputs[0].abspath(), proc.returncode, stdout, stderr)
- testlock.acquire()
- try:
- return self.generator.add_test_results(tup)
- finally:
- testlock.release()
-
- def get_cwd(self):
- return getattr(self.generator, 'ut_cwd', self.inputs[0].parent)
-
-def summary(bld):
- """
- Display an execution summary::
-
- def build(bld):
- bld(features='cxx cxxprogram test', source='main.c', target='app')
- from waflib.Tools import waf_unit_test
- bld.add_post_fun(waf_unit_test.summary)
- """
- lst = getattr(bld, 'utest_results', [])
- if lst:
- Logs.pprint('CYAN', 'execution summary')
-
- total = len(lst)
- tfail = len([x for x in lst if x[1]])
-
- Logs.pprint('GREEN', ' tests that pass %d/%d' % (total-tfail, total))
- for (f, code, out, err) in lst:
- if not code:
- Logs.pprint('GREEN', ' %s' % f)
-
- Logs.pprint('GREEN' if tfail == 0 else 'RED', ' tests that fail %d/%d' % (tfail, total))
- for (f, code, out, err) in lst:
- if code:
- Logs.pprint('RED', ' %s' % f)
-
-def set_exit_code(bld):
- """
- If any of the tests fail waf will exit with that exit code.
- This is useful if you have an automated build system which need
- to report on errors from the tests.
- You may use it like this:
-
- def build(bld):
- bld(features='cxx cxxprogram test', source='main.c', target='app')
- from waflib.Tools import waf_unit_test
- bld.add_post_fun(waf_unit_test.set_exit_code)
- """
- lst = getattr(bld, 'utest_results', [])
- for (f, code, out, err) in lst:
- if code:
- msg = []
- if out:
- msg.append('stdout:%s%s' % (os.linesep, out.decode('utf-8')))
- if err:
- msg.append('stderr:%s%s' % (os.linesep, err.decode('utf-8')))
- bld.fatal(os.linesep.join(msg))
-
-
-def options(opt):
- """
- Provide the ``--alltests``, ``--notests`` and ``--testcmd`` command-line options.
- """
- opt.add_option('--notests', action='store_true', default=False, help='Exec no unit tests', dest='no_tests')
- opt.add_option('--alltests', action='store_true', default=False, help='Exec all unit tests', dest='all_tests')
- opt.add_option('--clear-failed', action='store_true', default=False,
- help='Force failed unit tests to run again next time', dest='clear_failed_tests')
- opt.add_option('--testcmd', action='store', default=False, dest='testcmd',
- help='Run the unit tests using the test-cmd string example "--testcmd="valgrind --error-exitcode=1 %s" to run under valgrind')
- opt.add_option('--dump-test-scripts', action='store_true', default=False,
- help='Create python scripts to help debug tests', dest='dump_test_scripts')
-
diff --git a/waflib/Tools/winres.py b/waflib/Tools/winres.py
deleted file mode 100644
index 586c596..0000000
--- a/waflib/Tools/winres.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Brant Young, 2007
-
-"Process *.rc* files for C/C++: X{.rc -> [.res|.rc.o]}"
-
-import re
-from waflib import Task
-from waflib.TaskGen import extension
-from waflib.Tools import c_preproc
-
-@extension('.rc')
-def rc_file(self, node):
- """
- Binds the .rc extension to a winrc task
- """
- obj_ext = '.rc.o'
- if self.env.WINRC_TGT_F == '/fo':
- obj_ext = '.res'
- rctask = self.create_task('winrc', node, node.change_ext(obj_ext))
- try:
- self.compiled_tasks.append(rctask)
- except AttributeError:
- self.compiled_tasks = [rctask]
-
-re_lines = re.compile(
- '(?:^[ \t]*(#|%:)[ \t]*(ifdef|ifndef|if|else|elif|endif|include|import|define|undef|pragma)[ \t]*(.*?)\s*$)|'\
- '(?:^\w+[ \t]*(ICON|BITMAP|CURSOR|HTML|FONT|MESSAGETABLE|TYPELIB|REGISTRY|D3DFX)[ \t]*(.*?)\s*$)',
- re.IGNORECASE | re.MULTILINE)
-
-class rc_parser(c_preproc.c_parser):
- """
- Calculates dependencies in .rc files
- """
- def filter_comments(self, node):
- """
- Overrides :py:meth:`waflib.Tools.c_preproc.c_parser.filter_comments`
- """
- code = node.read()
- if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def:
- code = code.split(a).join(b)
- code = c_preproc.re_nl.sub('', code)
- code = c_preproc.re_cpp.sub(c_preproc.repl, code)
- ret = []
- for m in re.finditer(re_lines, code):
- if m.group(2):
- ret.append((m.group(2), m.group(3)))
- else:
- ret.append(('include', m.group(5)))
- return ret
-
-class winrc(Task.Task):
- """
- Compiles resource files
- """
- run_str = '${WINRC} ${WINRCFLAGS} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${WINRC_TGT_F} ${TGT} ${WINRC_SRC_F} ${SRC}'
- color = 'BLUE'
- def scan(self):
- tmp = rc_parser(self.generator.includes_nodes)
- tmp.start(self.inputs[0], self.env)
- return (tmp.nodes, tmp.names)
-
-def configure(conf):
- """
- Detects the programs RC or windres, depending on the C/C++ compiler in use
- """
- v = conf.env
- if not v.WINRC:
- if v.CC_NAME == 'msvc':
- conf.find_program('RC', var='WINRC', path_list=v.PATH)
- v.WINRC_TGT_F = '/fo'
- v.WINRC_SRC_F = ''
- else:
- conf.find_program('windres', var='WINRC', path_list=v.PATH)
- v.WINRC_TGT_F = '-o'
- v.WINRC_SRC_F = '-i'
-
diff --git a/waflib/Tools/xlc.py b/waflib/Tools/xlc.py
deleted file mode 100644
index 134dd41..0000000
--- a/waflib/Tools/xlc.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-# Michael Kuhn, 2009
-
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_xlc(conf):
- """
- Detects the Aix C compiler
- """
- cc = conf.find_program(['xlc_r', 'xlc'], var='CC')
- conf.get_xlc_version(cc)
- conf.env.CC_NAME = 'xlc'
-
-@conf
-def xlc_common_flags(conf):
- """
- Flags required for executing the Aix C compiler
- """
- v = conf.env
-
- v.CC_SRC_F = []
- v.CC_TGT_F = ['-c', '-o']
-
- if not v.LINK_CC:
- v.LINK_CC = v.CC
-
- v.CCLNK_SRC_F = []
- v.CCLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
-
- v.SONAME_ST = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
-
- v.LINKFLAGS_cprogram = ['-Wl,-brtl']
- v.cprogram_PATTERN = '%s'
-
- v.CFLAGS_cshlib = ['-fPIC']
- v.LINKFLAGS_cshlib = ['-G', '-Wl,-brtl,-bexpfull']
- v.cshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cstlib = []
- v.cstlib_PATTERN = 'lib%s.a'
-
-def configure(conf):
- conf.find_xlc()
- conf.find_ar()
- conf.xlc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Tools/xlcxx.py b/waflib/Tools/xlcxx.py
deleted file mode 100644
index 76aa59b..0000000
--- a/waflib/Tools/xlcxx.py
+++ /dev/null
@@ -1,65 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2018 (ita)
-# Ralf Habacker, 2006 (rh)
-# Yinon Ehrlich, 2009
-# Michael Kuhn, 2009
-
-from waflib.Tools import ccroot, ar
-from waflib.Configure import conf
-
-@conf
-def find_xlcxx(conf):
- """
- Detects the Aix C++ compiler
- """
- cxx = conf.find_program(['xlc++_r', 'xlc++'], var='CXX')
- conf.get_xlc_version(cxx)
- conf.env.CXX_NAME = 'xlc++'
-
-@conf
-def xlcxx_common_flags(conf):
- """
- Flags required for executing the Aix C++ compiler
- """
- v = conf.env
-
- v.CXX_SRC_F = []
- v.CXX_TGT_F = ['-c', '-o']
-
- if not v.LINK_CXX:
- v.LINK_CXX = v.CXX
-
- v.CXXLNK_SRC_F = []
- v.CXXLNK_TGT_F = ['-o']
- v.CPPPATH_ST = '-I%s'
- v.DEFINES_ST = '-D%s'
-
- v.LIB_ST = '-l%s' # template for adding libs
- v.LIBPATH_ST = '-L%s' # template for adding libpaths
- v.STLIB_ST = '-l%s'
- v.STLIBPATH_ST = '-L%s'
- v.RPATH_ST = '-Wl,-rpath,%s'
-
- v.SONAME_ST = []
- v.SHLIB_MARKER = []
- v.STLIB_MARKER = []
-
- v.LINKFLAGS_cxxprogram= ['-Wl,-brtl']
- v.cxxprogram_PATTERN = '%s'
-
- v.CXXFLAGS_cxxshlib = ['-fPIC']
- v.LINKFLAGS_cxxshlib = ['-G', '-Wl,-brtl,-bexpfull']
- v.cxxshlib_PATTERN = 'lib%s.so'
-
- v.LINKFLAGS_cxxstlib = []
- v.cxxstlib_PATTERN = 'lib%s.a'
-
-def configure(conf):
- conf.find_xlcxx()
- conf.find_ar()
- conf.xlcxx_common_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/Utils.py b/waflib/Utils.py
deleted file mode 100644
index b4665c4..0000000
--- a/waflib/Utils.py
+++ /dev/null
@@ -1,1021 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
-
-"""
-Utilities and platform-specific fixes
-
-The portability fixes try to provide a consistent behavior of the Waf API
-through Python versions 2.5 to 3.X and across different platforms (win32, linux, etc)
-"""
-
-from __future__ import with_statement
-
-import atexit, os, sys, errno, inspect, re, datetime, platform, base64, signal, functools, time
-
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-# leave this
-if os.name == 'posix' and sys.version_info[0] < 3:
- try:
- import subprocess32 as subprocess
- except ImportError:
- import subprocess
-else:
- import subprocess
-
-try:
- TimeoutExpired = subprocess.TimeoutExpired
-except AttributeError:
- class TimeoutExpired(Exception):
- pass
-
-from collections import deque, defaultdict
-
-try:
- import _winreg as winreg
-except ImportError:
- try:
- import winreg
- except ImportError:
- winreg = None
-
-from waflib import Errors
-
-try:
- from hashlib import md5
-except ImportError:
- try:
- from md5 import md5
- except ImportError:
- # never fail to enable fixes from another module
- pass
-
-try:
- import threading
-except ImportError:
- if not 'JOBS' in os.environ:
- # no threading :-(
- os.environ['JOBS'] = '1'
-
- class threading(object):
- """
- A fake threading class for platforms lacking the threading module.
- Use ``waf -j1`` on those platforms
- """
- pass
- class Lock(object):
- """Fake Lock class"""
- def acquire(self):
- pass
- def release(self):
- pass
- threading.Lock = threading.Thread = Lock
-
-SIG_NIL = 'SIG_NIL_SIG_NIL_'.encode()
-"""Arbitrary null value for hashes. Modify this value according to the hash function in use"""
-
-O644 = 420
-"""Constant representing the permissions for regular files (0644 raises a syntax error on python 3)"""
-
-O755 = 493
-"""Constant representing the permissions for executable files (0755 raises a syntax error on python 3)"""
-
-rot_chr = ['\\', '|', '/', '-']
-"List of characters to use when displaying the throbber (progress bar)"
-
-rot_idx = 0
-"Index of the current throbber character (progress bar)"
-
-class ordered_iter_dict(dict):
- """Ordered dictionary that provides iteration from the most recently inserted keys first"""
- def __init__(self, *k, **kw):
- self.lst = deque()
- dict.__init__(self, *k, **kw)
- def clear(self):
- dict.clear(self)
- self.lst = deque()
- def __setitem__(self, key, value):
- if key in dict.keys(self):
- self.lst.remove(key)
- dict.__setitem__(self, key, value)
- self.lst.append(key)
- def __delitem__(self, key):
- dict.__delitem__(self, key)
- try:
- self.lst.remove(key)
- except ValueError:
- pass
- def __iter__(self):
- return reversed(self.lst)
- def keys(self):
- return reversed(self.lst)
-
-class lru_node(object):
- """
- Used by :py:class:`waflib.Utils.lru_cache`
- """
- __slots__ = ('next', 'prev', 'key', 'val')
- def __init__(self):
- self.next = self
- self.prev = self
- self.key = None
- self.val = None
-
-class lru_cache(object):
- """
- A simple least-recently used cache with lazy allocation
- """
- __slots__ = ('maxlen', 'table', 'head')
- def __init__(self, maxlen=100):
- self.maxlen = maxlen
- """
- Maximum amount of elements in the cache
- """
- self.table = {}
- """
- Mapping key-value
- """
- self.head = lru_node()
- self.head.next = self.head
- self.head.prev = self.head
-
- def __getitem__(self, key):
- node = self.table[key]
- # assert(key==node.key)
- if node is self.head:
- return node.val
-
- # detach the node found
- node.prev.next = node.next
- node.next.prev = node.prev
-
- # replace the head
- node.next = self.head.next
- node.prev = self.head
- self.head = node.next.prev = node.prev.next = node
-
- return node.val
-
- def __setitem__(self, key, val):
- if key in self.table:
- # update the value for an existing key
- node = self.table[key]
- node.val = val
- self.__getitem__(key)
- else:
- if len(self.table) < self.maxlen:
- # the very first item is unused until the maximum is reached
- node = lru_node()
- node.prev = self.head
- node.next = self.head.next
- node.prev.next = node.next.prev = node
- else:
- node = self.head = self.head.next
- try:
- # that's another key
- del self.table[node.key]
- except KeyError:
- pass
-
- node.key = key
- node.val = val
- self.table[key] = node
-
-class lazy_generator(object):
- def __init__(self, fun, params):
- self.fun = fun
- self.params = params
-
- def __iter__(self):
- return self
-
- def __next__(self):
- try:
- it = self.it
- except AttributeError:
- it = self.it = self.fun(*self.params)
- return next(it)
-
- next = __next__
-
-is_win32 = os.sep == '\\' or sys.platform == 'win32' # msys2
-"""
-Whether this system is a Windows series
-"""
-
-def readf(fname, m='r', encoding='latin-1'):
- """
- Reads an entire file into a string. See also :py:meth:`waflib.Node.Node.readf`::
-
- def build(ctx):
- from waflib import Utils
- txt = Utils.readf(self.path.find_node('wscript').abspath())
- txt = ctx.path.find_node('wscript').read()
-
- :type fname: string
- :param fname: Path to file
- :type m: string
- :param m: Open mode
- :type encoding: string
- :param encoding: encoding value, only used for python 3
- :rtype: string
- :return: Content of the file
- """
-
- if sys.hexversion > 0x3000000 and not 'b' in m:
- m += 'b'
- with open(fname, m) as f:
- txt = f.read()
- if encoding:
- txt = txt.decode(encoding)
- else:
- txt = txt.decode()
- else:
- with open(fname, m) as f:
- txt = f.read()
- return txt
-
-def writef(fname, data, m='w', encoding='latin-1'):
- """
- Writes an entire file from a string.
- See also :py:meth:`waflib.Node.Node.writef`::
-
- def build(ctx):
- from waflib import Utils
- txt = Utils.writef(self.path.make_node('i_like_kittens').abspath(), 'some data')
- self.path.make_node('i_like_kittens').write('some data')
-
- :type fname: string
- :param fname: Path to file
- :type data: string
- :param data: The contents to write to the file
- :type m: string
- :param m: Open mode
- :type encoding: string
- :param encoding: encoding value, only used for python 3
- """
- if sys.hexversion > 0x3000000 and not 'b' in m:
- data = data.encode(encoding)
- m += 'b'
- with open(fname, m) as f:
- f.write(data)
-
-def h_file(fname):
- """
- Computes a hash value for a file by using md5. Use the md5_tstamp
- extension to get faster build hashes if necessary.
-
- :type fname: string
- :param fname: path to the file to hash
- :return: hash of the file contents
- :rtype: string or bytes
- """
- m = md5()
- with open(fname, 'rb') as f:
- while fname:
- fname = f.read(200000)
- m.update(fname)
- return m.digest()
-
-def readf_win32(f, m='r', encoding='latin-1'):
- flags = os.O_NOINHERIT | os.O_RDONLY
- if 'b' in m:
- flags |= os.O_BINARY
- if '+' in m:
- flags |= os.O_RDWR
- try:
- fd = os.open(f, flags)
- except OSError:
- raise IOError('Cannot read from %r' % f)
-
- if sys.hexversion > 0x3000000 and not 'b' in m:
- m += 'b'
- with os.fdopen(fd, m) as f:
- txt = f.read()
- if encoding:
- txt = txt.decode(encoding)
- else:
- txt = txt.decode()
- else:
- with os.fdopen(fd, m) as f:
- txt = f.read()
- return txt
-
-def writef_win32(f, data, m='w', encoding='latin-1'):
- if sys.hexversion > 0x3000000 and not 'b' in m:
- data = data.encode(encoding)
- m += 'b'
- flags = os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT
- if 'b' in m:
- flags |= os.O_BINARY
- if '+' in m:
- flags |= os.O_RDWR
- try:
- fd = os.open(f, flags)
- except OSError:
- raise OSError('Cannot write to %r' % f)
- with os.fdopen(fd, m) as f:
- f.write(data)
-
-def h_file_win32(fname):
- try:
- fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
- except OSError:
- raise OSError('Cannot read from %r' % fname)
- m = md5()
- with os.fdopen(fd, 'rb') as f:
- while fname:
- fname = f.read(200000)
- m.update(fname)
- return m.digest()
-
-# always save these
-readf_unix = readf
-writef_unix = writef
-h_file_unix = h_file
-if hasattr(os, 'O_NOINHERIT') and sys.hexversion < 0x3040000:
- # replace the default functions
- readf = readf_win32
- writef = writef_win32
- h_file = h_file_win32
-
-try:
- x = ''.encode('hex')
-except LookupError:
- import binascii
- def to_hex(s):
- ret = binascii.hexlify(s)
- if not isinstance(ret, str):
- ret = ret.decode('utf-8')
- return ret
-else:
- def to_hex(s):
- return s.encode('hex')
-
-to_hex.__doc__ = """
-Return the hexadecimal representation of a string
-
-:param s: string to convert
-:type s: string
-"""
-
-def listdir_win32(s):
- """
- Lists the contents of a folder in a portable manner.
- On Win32, returns the list of drive letters: ['C:', 'X:', 'Z:'] when an empty string is given.
-
- :type s: string
- :param s: a string, which can be empty on Windows
- """
- if not s:
- try:
- import ctypes
- except ImportError:
- # there is nothing much we can do
- return [x + ':\\' for x in 'ABCDEFGHIJKLMNOPQRSTUVWXYZ']
- else:
- dlen = 4 # length of "?:\\x00"
- maxdrives = 26
- buf = ctypes.create_string_buffer(maxdrives * dlen)
- ndrives = ctypes.windll.kernel32.GetLogicalDriveStringsA(maxdrives*dlen, ctypes.byref(buf))
- return [ str(buf.raw[4*i:4*i+2].decode('ascii')) for i in range(int(ndrives/dlen)) ]
-
- if len(s) == 2 and s[1] == ":":
- s += os.sep
-
- if not os.path.isdir(s):
- e = OSError('%s is not a directory' % s)
- e.errno = errno.ENOENT
- raise e
- return os.listdir(s)
-
-listdir = os.listdir
-if is_win32:
- listdir = listdir_win32
-
-def num2ver(ver):
- """
- Converts a string, tuple or version number into an integer. The number is supposed to have at most 4 digits::
-
- from waflib.Utils import num2ver
- num2ver('1.3.2') == num2ver((1,3,2)) == num2ver((1,3,2,0))
-
- :type ver: string or tuple of numbers
- :param ver: a version number
- """
- if isinstance(ver, str):
- ver = tuple(ver.split('.'))
- if isinstance(ver, tuple):
- ret = 0
- for i in range(4):
- if i < len(ver):
- ret += 256**(3 - i) * int(ver[i])
- return ret
- return ver
-
-def to_list(val):
- """
- Converts a string argument to a list by splitting it by spaces.
- Returns the object if not a string::
-
- from waflib.Utils import to_list
- lst = to_list('a b c d')
-
- :param val: list of string or space-separated string
- :rtype: list
- :return: Argument converted to list
- """
- if isinstance(val, str):
- return val.split()
- else:
- return val
-
-def console_encoding():
- try:
- import ctypes
- except ImportError:
- pass
- else:
- try:
- codepage = ctypes.windll.kernel32.GetConsoleCP()
- except AttributeError:
- pass
- else:
- if codepage:
- return 'cp%d' % codepage
- return sys.stdout.encoding or ('cp1252' if is_win32 else 'latin-1')
-
-def split_path_unix(path):
- return path.split('/')
-
-def split_path_cygwin(path):
- if path.startswith('//'):
- ret = path.split('/')[2:]
- ret[0] = '/' + ret[0]
- return ret
- return path.split('/')
-
-re_sp = re.compile('[/\\\\]+')
-def split_path_win32(path):
- if path.startswith('\\\\'):
- ret = re_sp.split(path)[1:]
- ret[0] = '\\\\' + ret[0]
- if ret[0] == '\\\\?':
- return ret[1:]
- return ret
- return re_sp.split(path)
-
-msysroot = None
-def split_path_msys(path):
- if path.startswith(('/', '\\')) and not path.startswith(('//', '\\\\')):
- # msys paths can be in the form /usr/bin
- global msysroot
- if not msysroot:
- # msys has python 2.7 or 3, so we can use this
- msysroot = subprocess.check_output(['cygpath', '-w', '/']).decode(sys.stdout.encoding or 'latin-1')
- msysroot = msysroot.strip()
- path = os.path.normpath(msysroot + os.sep + path)
- return split_path_win32(path)
-
-if sys.platform == 'cygwin':
- split_path = split_path_cygwin
-elif is_win32:
- if os.environ.get('MSYSTEM'):
- split_path = split_path_msys
- else:
- split_path = split_path_win32
-else:
- split_path = split_path_unix
-
-split_path.__doc__ = """
-Splits a path by / or \\; do not confuse this function with with ``os.path.split``
-
-:type path: string
-:param path: path to split
-:return: list of string
-"""
-
-def check_dir(path):
- """
- Ensures that a directory exists (similar to ``mkdir -p``).
-
- :type path: string
- :param path: Path to directory
- :raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added.
- """
- if not os.path.isdir(path):
- try:
- os.makedirs(path)
- except OSError as e:
- if not os.path.isdir(path):
- raise Errors.WafError('Cannot create the folder %r' % path, ex=e)
-
-def check_exe(name, env=None):
- """
- Ensures that a program exists
-
- :type name: string
- :param name: path to the program
- :param env: configuration object
- :type env: :py:class:`waflib.ConfigSet.ConfigSet`
- :return: path of the program or None
- :raises: :py:class:`waflib.Errors.WafError` if the folder cannot be added.
- """
- if not name:
- raise ValueError('Cannot execute an empty string!')
- def is_exe(fpath):
- return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
-
- fpath, fname = os.path.split(name)
- if fpath and is_exe(name):
- return os.path.abspath(name)
- else:
- env = env or os.environ
- for path in env['PATH'].split(os.pathsep):
- path = path.strip('"')
- exe_file = os.path.join(path, name)
- if is_exe(exe_file):
- return os.path.abspath(exe_file)
- return None
-
-def def_attrs(cls, **kw):
- """
- Sets default attributes on a class instance
-
- :type cls: class
- :param cls: the class to update the given attributes in.
- :type kw: dict
- :param kw: dictionary of attributes names and values.
- """
- for k, v in kw.items():
- if not hasattr(cls, k):
- setattr(cls, k, v)
-
-def quote_define_name(s):
- """
- Converts a string into an identifier suitable for C defines.
-
- :type s: string
- :param s: String to convert
- :rtype: string
- :return: Identifier suitable for C defines
- """
- fu = re.sub('[^a-zA-Z0-9]', '_', s)
- fu = re.sub('_+', '_', fu)
- fu = fu.upper()
- return fu
-
-re_sh = re.compile('\\s|\'|"')
-"""
-Regexp used for shell_escape below
-"""
-
-def shell_escape(cmd):
- """
- Escapes a command:
- ['ls', '-l', 'arg space'] -> ls -l 'arg space'
- """
- if isinstance(cmd, str):
- return cmd
- return ' '.join(repr(x) if re_sh.search(x) else x for x in cmd)
-
-def h_list(lst):
- """
- Hashes lists of ordered data.
-
- Using hash(tup) for tuples would be much more efficient,
- but Python now enforces hash randomization
-
- :param lst: list to hash
- :type lst: list of strings
- :return: hash of the list
- """
- return md5(repr(lst).encode()).digest()
-
-def h_fun(fun):
- """
- Hash functions
-
- :param fun: function to hash
- :type fun: function
- :return: hash of the function
- :rtype: string or bytes
- """
- try:
- return fun.code
- except AttributeError:
- if isinstance(fun, functools.partial):
- code = list(fun.args)
- # The method items() provides a sequence of tuples where the first element
- # represents an optional argument of the partial function application
- #
- # The sorting result outcome will be consistent because:
- # 1. tuples are compared in order of their elements
- # 2. optional argument namess are unique
- code.extend(sorted(fun.keywords.items()))
- code.append(h_fun(fun.func))
- fun.code = h_list(code)
- return fun.code
- try:
- h = inspect.getsource(fun)
- except EnvironmentError:
- h = 'nocode'
- try:
- fun.code = h
- except AttributeError:
- pass
- return h
-
-def h_cmd(ins):
- """
- Hashes objects recursively
-
- :param ins: input object
- :type ins: string or list or tuple or function
- :rtype: string or bytes
- """
- # this function is not meant to be particularly fast
- if isinstance(ins, str):
- # a command is either a string
- ret = ins
- elif isinstance(ins, list) or isinstance(ins, tuple):
- # or a list of functions/strings
- ret = str([h_cmd(x) for x in ins])
- else:
- # or just a python function
- ret = str(h_fun(ins))
- if sys.hexversion > 0x3000000:
- ret = ret.encode('latin-1', 'xmlcharrefreplace')
- return ret
-
-reg_subst = re.compile(r"(\\\\)|(\$\$)|\$\{([^}]+)\}")
-def subst_vars(expr, params):
- """
- Replaces ${VAR} with the value of VAR taken from a dict or a config set::
-
- from waflib import Utils
- s = Utils.subst_vars('${PREFIX}/bin', env)
-
- :type expr: string
- :param expr: String to perform substitution on
- :param params: Dictionary or config set to look up variable values.
- """
- def repl_var(m):
- if m.group(1):
- return '\\'
- if m.group(2):
- return '$'
- try:
- # ConfigSet instances may contain lists
- return params.get_flat(m.group(3))
- except AttributeError:
- return params[m.group(3)]
- # if you get a TypeError, it means that 'expr' is not a string...
- # Utils.subst_vars(None, env) will not work
- return reg_subst.sub(repl_var, expr)
-
-def destos_to_binfmt(key):
- """
- Returns the binary format based on the unversioned platform name,
- and defaults to ``elf`` if nothing is found.
-
- :param key: platform name
- :type key: string
- :return: string representing the binary format
- """
- if key == 'darwin':
- return 'mac-o'
- elif key in ('win32', 'cygwin', 'uwin', 'msys'):
- return 'pe'
- return 'elf'
-
-def unversioned_sys_platform():
- """
- Returns the unversioned platform name.
- Some Python platform names contain versions, that depend on
- the build environment, e.g. linux2, freebsd6, etc.
- This returns the name without the version number. Exceptions are
- os2 and win32, which are returned verbatim.
-
- :rtype: string
- :return: Unversioned platform name
- """
- s = sys.platform
- if s.startswith('java'):
- # The real OS is hidden under the JVM.
- from java.lang import System
- s = System.getProperty('os.name')
- # see http://lopica.sourceforge.net/os.html for a list of possible values
- if s == 'Mac OS X':
- return 'darwin'
- elif s.startswith('Windows '):
- return 'win32'
- elif s == 'OS/2':
- return 'os2'
- elif s == 'HP-UX':
- return 'hp-ux'
- elif s in ('SunOS', 'Solaris'):
- return 'sunos'
- else: s = s.lower()
-
- # powerpc == darwin for our purposes
- if s == 'powerpc':
- return 'darwin'
- if s == 'win32' or s == 'os2':
- return s
- if s == 'cli' and os.name == 'nt':
- # ironpython is only on windows as far as we know
- return 'win32'
- return re.split('\d+$', s)[0]
-
-def nada(*k, **kw):
- """
- Does nothing
-
- :return: None
- """
- pass
-
-class Timer(object):
- """
- Simple object for timing the execution of commands.
- Its string representation is the duration::
-
- from waflib.Utils import Timer
- timer = Timer()
- a_few_operations()
- s = str(timer)
- """
- def __init__(self):
- self.start_time = self.now()
-
- def __str__(self):
- delta = self.now() - self.start_time
- if not isinstance(delta, datetime.timedelta):
- delta = datetime.timedelta(seconds=delta)
- days = delta.days
- hours, rem = divmod(delta.seconds, 3600)
- minutes, seconds = divmod(rem, 60)
- seconds += delta.microseconds * 1e-6
- result = ''
- if days:
- result += '%dd' % days
- if days or hours:
- result += '%dh' % hours
- if days or hours or minutes:
- result += '%dm' % minutes
- return '%s%.3fs' % (result, seconds)
-
- def now(self):
- return datetime.datetime.utcnow()
-
- if hasattr(time, 'perf_counter'):
- def now(self):
- return time.perf_counter()
-
-def read_la_file(path):
- """
- Reads property files, used by msvc.py
-
- :param path: file to read
- :type path: string
- """
- sp = re.compile(r'^([^=]+)=\'(.*)\'$')
- dc = {}
- for line in readf(path).splitlines():
- try:
- _, left, right, _ = sp.split(line.strip())
- dc[left] = right
- except ValueError:
- pass
- return dc
-
-def run_once(fun):
- """
- Decorator: let a function cache its results, use like this::
-
- @run_once
- def foo(k):
- return 345*2343
-
- .. note:: in practice this can cause memory leaks, prefer a :py:class:`waflib.Utils.lru_cache`
-
- :param fun: function to execute
- :type fun: function
- :return: the return value of the function executed
- """
- cache = {}
- def wrap(*k):
- try:
- return cache[k]
- except KeyError:
- ret = fun(*k)
- cache[k] = ret
- return ret
- wrap.__cache__ = cache
- wrap.__name__ = fun.__name__
- return wrap
-
-def get_registry_app_path(key, filename):
- """
- Returns the value of a registry key for an executable
-
- :type key: string
- :type filename: list of string
- """
- if not winreg:
- return None
- try:
- result = winreg.QueryValue(key, "Software\\Microsoft\\Windows\\CurrentVersion\\App Paths\\%s.exe" % filename[0])
- except OSError:
- pass
- else:
- if os.path.isfile(result):
- return result
-
-def lib64():
- """
- Guess the default ``/usr/lib`` extension for 64-bit applications
-
- :return: '64' or ''
- :rtype: string
- """
- # default settings for /usr/lib
- if os.sep == '/':
- if platform.architecture()[0] == '64bit':
- if os.path.exists('/usr/lib64') and not os.path.exists('/usr/lib32'):
- return '64'
- return ''
-
-def sane_path(p):
- # private function for the time being!
- return os.path.abspath(os.path.expanduser(p))
-
-process_pool = []
-"""
-List of processes started to execute sub-process commands
-"""
-
-def get_process():
- """
- Returns a process object that can execute commands as sub-processes
-
- :rtype: subprocess.Popen
- """
- try:
- return process_pool.pop()
- except IndexError:
- filepath = os.path.dirname(os.path.abspath(__file__)) + os.sep + 'processor.py'
- cmd = [sys.executable, '-c', readf(filepath)]
- return subprocess.Popen(cmd, stdout=subprocess.PIPE, stdin=subprocess.PIPE, bufsize=0)
-
-def run_prefork_process(cmd, kwargs, cargs):
- """
- Delegates process execution to a pre-forked process instance.
- """
- if not 'env' in kwargs:
- kwargs['env'] = dict(os.environ)
- try:
- obj = base64.b64encode(cPickle.dumps([cmd, kwargs, cargs]))
- except (TypeError, AttributeError):
- return run_regular_process(cmd, kwargs, cargs)
-
- proc = get_process()
- if not proc:
- return run_regular_process(cmd, kwargs, cargs)
-
- proc.stdin.write(obj)
- proc.stdin.write('\n'.encode())
- proc.stdin.flush()
- obj = proc.stdout.readline()
- if not obj:
- raise OSError('Preforked sub-process %r died' % proc.pid)
-
- process_pool.append(proc)
- lst = cPickle.loads(base64.b64decode(obj))
- # Jython wrapper failures (bash/execvp)
- assert len(lst) == 5
- ret, out, err, ex, trace = lst
- if ex:
- if ex == 'OSError':
- raise OSError(trace)
- elif ex == 'ValueError':
- raise ValueError(trace)
- elif ex == 'TimeoutExpired':
- exc = TimeoutExpired(cmd, timeout=cargs['timeout'], output=out)
- exc.stderr = err
- raise exc
- else:
- raise Exception(trace)
- return ret, out, err
-
-def lchown(path, user=-1, group=-1):
- """
- Change the owner/group of a path, raises an OSError if the
- ownership change fails.
-
- :param user: user to change
- :type user: int or str
- :param group: group to change
- :type group: int or str
- """
- if isinstance(user, str):
- import pwd
- entry = pwd.getpwnam(user)
- if not entry:
- raise OSError('Unknown user %r' % user)
- user = entry[2]
- if isinstance(group, str):
- import grp
- entry = grp.getgrnam(group)
- if not entry:
- raise OSError('Unknown group %r' % group)
- group = entry[2]
- return os.lchown(path, user, group)
-
-def run_regular_process(cmd, kwargs, cargs={}):
- """
- Executes a subprocess command by using subprocess.Popen
- """
- proc = subprocess.Popen(cmd, **kwargs)
- if kwargs.get('stdout') or kwargs.get('stderr'):
- try:
- out, err = proc.communicate(**cargs)
- except TimeoutExpired:
- if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
- os.killpg(proc.pid, signal.SIGKILL)
- else:
- proc.kill()
- out, err = proc.communicate()
- exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
- exc.stderr = err
- raise exc
- status = proc.returncode
- else:
- out, err = (None, None)
- try:
- status = proc.wait(**cargs)
- except TimeoutExpired as e:
- if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
- os.killpg(proc.pid, signal.SIGKILL)
- else:
- proc.kill()
- proc.wait()
- raise e
- return status, out, err
-
-def run_process(cmd, kwargs, cargs={}):
- """
- Executes a subprocess by using a pre-forked process when possible
- or falling back to subprocess.Popen. See :py:func:`waflib.Utils.run_prefork_process`
- and :py:func:`waflib.Utils.run_regular_process`
- """
- if kwargs.get('stdout') and kwargs.get('stderr'):
- return run_prefork_process(cmd, kwargs, cargs)
- else:
- return run_regular_process(cmd, kwargs, cargs)
-
-def alloc_process_pool(n, force=False):
- """
- Allocates an amount of processes to the default pool so its size is at least *n*.
- It is useful to call this function early so that the pre-forked
- processes use as little memory as possible.
-
- :param n: pool size
- :type n: integer
- :param force: if True then *n* more processes are added to the existing pool
- :type force: bool
- """
- # mandatory on python2, unnecessary on python >= 3.2
- global run_process, get_process, alloc_process_pool
- if not force:
- n = max(n - len(process_pool), 0)
- try:
- lst = [get_process() for x in range(n)]
- except OSError:
- run_process = run_regular_process
- get_process = alloc_process_pool = nada
- else:
- for x in lst:
- process_pool.append(x)
-
-def atexit_pool():
- for k in process_pool:
- try:
- os.kill(k.pid, 9)
- except OSError:
- pass
- else:
- k.wait()
-# see #1889
-if (sys.hexversion<0x207000f and not is_win32) or sys.hexversion>=0x306000f:
- atexit.register(atexit_pool)
-
-if os.environ.get('WAF_NO_PREFORK') or sys.platform == 'cli' or not sys.executable:
- run_process = run_regular_process
- get_process = alloc_process_pool = nada
-
diff --git a/waflib/__init__.py b/waflib/__init__.py
deleted file mode 100644
index 079df35..0000000
--- a/waflib/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2018 (ita)
diff --git a/waflib/ansiterm.py b/waflib/ansiterm.py
deleted file mode 100644
index 0d20c63..0000000
--- a/waflib/ansiterm.py
+++ /dev/null
@@ -1,342 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-"""
-Emulate a vt100 terminal in cmd.exe
-
-By wrapping sys.stdout / sys.stderr with Ansiterm,
-the vt100 escape characters will be interpreted and
-the equivalent actions will be performed with Win32
-console commands.
-
-"""
-
-import os, re, sys
-from waflib import Utils
-
-wlock = Utils.threading.Lock()
-
-try:
- from ctypes import Structure, windll, c_short, c_ushort, c_ulong, c_int, byref, c_wchar, POINTER, c_long
-except ImportError:
-
- class AnsiTerm(object):
- def __init__(self, stream):
- self.stream = stream
- try:
- self.errors = self.stream.errors
- except AttributeError:
- pass # python 2.5
- self.encoding = self.stream.encoding
-
- def write(self, txt):
- try:
- wlock.acquire()
- self.stream.write(txt)
- self.stream.flush()
- finally:
- wlock.release()
-
- def fileno(self):
- return self.stream.fileno()
-
- def flush(self):
- self.stream.flush()
-
- def isatty(self):
- return self.stream.isatty()
-else:
-
- class COORD(Structure):
- _fields_ = [("X", c_short), ("Y", c_short)]
-
- class SMALL_RECT(Structure):
- _fields_ = [("Left", c_short), ("Top", c_short), ("Right", c_short), ("Bottom", c_short)]
-
- class CONSOLE_SCREEN_BUFFER_INFO(Structure):
- _fields_ = [("Size", COORD), ("CursorPosition", COORD), ("Attributes", c_ushort), ("Window", SMALL_RECT), ("MaximumWindowSize", COORD)]
-
- class CONSOLE_CURSOR_INFO(Structure):
- _fields_ = [('dwSize', c_ulong), ('bVisible', c_int)]
-
- try:
- _type = unicode
- except NameError:
- _type = str
-
- to_int = lambda number, default: number and int(number) or default
-
- STD_OUTPUT_HANDLE = -11
- STD_ERROR_HANDLE = -12
-
- windll.kernel32.GetStdHandle.argtypes = [c_ulong]
- windll.kernel32.GetStdHandle.restype = c_ulong
- windll.kernel32.GetConsoleScreenBufferInfo.argtypes = [c_ulong, POINTER(CONSOLE_SCREEN_BUFFER_INFO)]
- windll.kernel32.GetConsoleScreenBufferInfo.restype = c_long
- windll.kernel32.SetConsoleTextAttribute.argtypes = [c_ulong, c_ushort]
- windll.kernel32.SetConsoleTextAttribute.restype = c_long
- windll.kernel32.FillConsoleOutputCharacterW.argtypes = [c_ulong, c_wchar, c_ulong, POINTER(COORD), POINTER(c_ulong)]
- windll.kernel32.FillConsoleOutputCharacterW.restype = c_long
- windll.kernel32.FillConsoleOutputAttribute.argtypes = [c_ulong, c_ushort, c_ulong, POINTER(COORD), POINTER(c_ulong) ]
- windll.kernel32.FillConsoleOutputAttribute.restype = c_long
- windll.kernel32.SetConsoleCursorPosition.argtypes = [c_ulong, POINTER(COORD) ]
- windll.kernel32.SetConsoleCursorPosition.restype = c_long
- windll.kernel32.SetConsoleCursorInfo.argtypes = [c_ulong, POINTER(CONSOLE_CURSOR_INFO)]
- windll.kernel32.SetConsoleCursorInfo.restype = c_long
-
- class AnsiTerm(object):
- """
- emulate a vt100 terminal in cmd.exe
- """
- def __init__(self, s):
- self.stream = s
- try:
- self.errors = s.errors
- except AttributeError:
- pass # python2.5
- self.encoding = s.encoding
- self.cursor_history = []
-
- handle = (s.fileno() == 2) and STD_ERROR_HANDLE or STD_OUTPUT_HANDLE
- self.hconsole = windll.kernel32.GetStdHandle(handle)
-
- self._sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
-
- self._csinfo = CONSOLE_CURSOR_INFO()
- windll.kernel32.GetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
-
- # just to double check that the console is usable
- self._orig_sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
- r = windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._orig_sbinfo))
- self._isatty = r == 1
-
- def screen_buffer_info(self):
- """
- Updates self._sbinfo and returns it
- """
- windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(self._sbinfo))
- return self._sbinfo
-
- def clear_line(self, param):
- mode = param and int(param) or 0
- sbinfo = self.screen_buffer_info()
- if mode == 1: # Clear from beginning of line to cursor position
- line_start = COORD(0, sbinfo.CursorPosition.Y)
- line_length = sbinfo.Size.X
- elif mode == 2: # Clear entire line
- line_start = COORD(sbinfo.CursorPosition.X, sbinfo.CursorPosition.Y)
- line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
- else: # Clear from cursor position to end of line
- line_start = sbinfo.CursorPosition
- line_length = sbinfo.Size.X - sbinfo.CursorPosition.X
- chars_written = c_ulong()
- windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), line_length, line_start, byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, line_length, line_start, byref(chars_written))
-
- def clear_screen(self, param):
- mode = to_int(param, 0)
- sbinfo = self.screen_buffer_info()
- if mode == 1: # Clear from beginning of screen to cursor position
- clear_start = COORD(0, 0)
- clear_length = sbinfo.CursorPosition.X * sbinfo.CursorPosition.Y
- elif mode == 2: # Clear entire screen and return cursor to home
- clear_start = COORD(0, 0)
- clear_length = sbinfo.Size.X * sbinfo.Size.Y
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, clear_start)
- else: # Clear from cursor position to end of screen
- clear_start = sbinfo.CursorPosition
- clear_length = ((sbinfo.Size.X - sbinfo.CursorPosition.X) + sbinfo.Size.X * (sbinfo.Size.Y - sbinfo.CursorPosition.Y))
- chars_written = c_ulong()
- windll.kernel32.FillConsoleOutputCharacterW(self.hconsole, c_wchar(' '), clear_length, clear_start, byref(chars_written))
- windll.kernel32.FillConsoleOutputAttribute(self.hconsole, sbinfo.Attributes, clear_length, clear_start, byref(chars_written))
-
- def push_cursor(self, param):
- sbinfo = self.screen_buffer_info()
- self.cursor_history.append(sbinfo.CursorPosition)
-
- def pop_cursor(self, param):
- if self.cursor_history:
- old_pos = self.cursor_history.pop()
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, old_pos)
-
- def set_cursor(self, param):
- y, sep, x = param.partition(';')
- x = to_int(x, 1) - 1
- y = to_int(y, 1) - 1
- sbinfo = self.screen_buffer_info()
- new_pos = COORD(
- min(max(0, x), sbinfo.Size.X),
- min(max(0, y), sbinfo.Size.Y)
- )
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
- def set_column(self, param):
- x = to_int(param, 1) - 1
- sbinfo = self.screen_buffer_info()
- new_pos = COORD(
- min(max(0, x), sbinfo.Size.X),
- sbinfo.CursorPosition.Y
- )
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
- def move_cursor(self, x_offset=0, y_offset=0):
- sbinfo = self.screen_buffer_info()
- new_pos = COORD(
- min(max(0, sbinfo.CursorPosition.X + x_offset), sbinfo.Size.X),
- min(max(0, sbinfo.CursorPosition.Y + y_offset), sbinfo.Size.Y)
- )
- windll.kernel32.SetConsoleCursorPosition(self.hconsole, new_pos)
-
- def move_up(self, param):
- self.move_cursor(y_offset = -to_int(param, 1))
-
- def move_down(self, param):
- self.move_cursor(y_offset = to_int(param, 1))
-
- def move_left(self, param):
- self.move_cursor(x_offset = -to_int(param, 1))
-
- def move_right(self, param):
- self.move_cursor(x_offset = to_int(param, 1))
-
- def next_line(self, param):
- sbinfo = self.screen_buffer_info()
- self.move_cursor(
- x_offset = -sbinfo.CursorPosition.X,
- y_offset = to_int(param, 1)
- )
-
- def prev_line(self, param):
- sbinfo = self.screen_buffer_info()
- self.move_cursor(
- x_offset = -sbinfo.CursorPosition.X,
- y_offset = -to_int(param, 1)
- )
-
- def rgb2bgr(self, c):
- return ((c&1) << 2) | (c&2) | ((c&4)>>2)
-
- def set_color(self, param):
- cols = param.split(';')
- sbinfo = self.screen_buffer_info()
- attr = sbinfo.Attributes
- for c in cols:
- c = to_int(c, 0)
- if 29 < c < 38: # fgcolor
- attr = (attr & 0xfff0) | self.rgb2bgr(c - 30)
- elif 39 < c < 48: # bgcolor
- attr = (attr & 0xff0f) | (self.rgb2bgr(c - 40) << 4)
- elif c == 0: # reset
- attr = self._orig_sbinfo.Attributes
- elif c == 1: # strong
- attr |= 0x08
- elif c == 4: # blink not available -> bg intensity
- attr |= 0x80
- elif c == 7: # negative
- attr = (attr & 0xff88) | ((attr & 0x70) >> 4) | ((attr & 0x07) << 4)
-
- windll.kernel32.SetConsoleTextAttribute(self.hconsole, attr)
-
- def show_cursor(self,param):
- self._csinfo.bVisible = 1
- windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
-
- def hide_cursor(self,param):
- self._csinfo.bVisible = 0
- windll.kernel32.SetConsoleCursorInfo(self.hconsole, byref(self._csinfo))
-
- ansi_command_table = {
- 'A': move_up,
- 'B': move_down,
- 'C': move_right,
- 'D': move_left,
- 'E': next_line,
- 'F': prev_line,
- 'G': set_column,
- 'H': set_cursor,
- 'f': set_cursor,
- 'J': clear_screen,
- 'K': clear_line,
- 'h': show_cursor,
- 'l': hide_cursor,
- 'm': set_color,
- 's': push_cursor,
- 'u': pop_cursor,
- }
- # Match either the escape sequence or text not containing escape sequence
- ansi_tokens = re.compile('(?:\x1b\[([0-9?;]*)([a-zA-Z])|([^\x1b]+))')
- def write(self, text):
- try:
- wlock.acquire()
- if self._isatty:
- for param, cmd, txt in self.ansi_tokens.findall(text):
- if cmd:
- cmd_func = self.ansi_command_table.get(cmd)
- if cmd_func:
- cmd_func(self, param)
- else:
- self.writeconsole(txt)
- else:
- # no support for colors in the console, just output the text:
- # eclipse or msys may be able to interpret the escape sequences
- self.stream.write(text)
- finally:
- wlock.release()
-
- def writeconsole(self, txt):
- chars_written = c_ulong()
- writeconsole = windll.kernel32.WriteConsoleA
- if isinstance(txt, _type):
- writeconsole = windll.kernel32.WriteConsoleW
-
- # MSDN says that there is a shared buffer of 64 KB for the console
- # writes. Attempt to not get ERROR_NOT_ENOUGH_MEMORY, see waf issue #746
- done = 0
- todo = len(txt)
- chunk = 32<<10
- while todo != 0:
- doing = min(chunk, todo)
- buf = txt[done:done+doing]
- r = writeconsole(self.hconsole, buf, doing, byref(chars_written), None)
- if r == 0:
- chunk >>= 1
- continue
- done += doing
- todo -= doing
-
-
- def fileno(self):
- return self.stream.fileno()
-
- def flush(self):
- pass
-
- def isatty(self):
- return self._isatty
-
- if sys.stdout.isatty() or sys.stderr.isatty():
- handle = sys.stdout.isatty() and STD_OUTPUT_HANDLE or STD_ERROR_HANDLE
- console = windll.kernel32.GetStdHandle(handle)
- sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
- def get_term_cols():
- windll.kernel32.GetConsoleScreenBufferInfo(console, byref(sbinfo))
- # Issue 1401 - the progress bar cannot reach the last character
- return sbinfo.Size.X - 1
-
-# just try and see
-try:
- import struct, fcntl, termios
-except ImportError:
- pass
-else:
- if (sys.stdout.isatty() or sys.stderr.isatty()) and os.environ.get('TERM', '') not in ('dumb', 'emacs'):
- FD = sys.stdout.isatty() and sys.stdout.fileno() or sys.stderr.fileno()
- def fun():
- return struct.unpack("HHHH", fcntl.ioctl(FD, termios.TIOCGWINSZ, struct.pack("HHHH", 0, 0, 0, 0)))[1]
- try:
- fun()
- except Exception as e:
- pass
- else:
- get_term_cols = fun
-
diff --git a/waflib/extras/__init__.py b/waflib/extras/__init__.py
deleted file mode 100644
index c8a3c34..0000000
--- a/waflib/extras/__init__.py
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2005-2010 (ita)
diff --git a/waflib/extras/autowaf.py b/waflib/extras/autowaf.py
deleted file mode 100644
index 92d0e57..0000000
--- a/waflib/extras/autowaf.py
+++ /dev/null
@@ -1,1244 +0,0 @@
-import glob
-import os
-import subprocess
-import sys
-import time
-
-from waflib import Build, Context, Logs, Options, Utils
-from waflib.TaskGen import feature, before, after
-
-global g_is_child
-g_is_child = False
-
-# Only run autowaf hooks once (even if sub projects call several times)
-global g_step
-g_step = 0
-
-global line_just
-line_just = 40
-
-# Compute dependencies globally
-# import preproc
-# preproc.go_absolute = True
-
-# Test context that inherits build context to make configuration available
-class TestContext(Build.BuildContext):
- "Run tests"
- cmd = 'test'
- fun = 'test'
-
-@feature('c', 'cxx')
-@after('apply_incpaths')
-def include_config_h(self):
- self.env.append_value('INCPATHS', self.bld.bldnode.abspath())
-
-def set_options(opt, debug_by_default=False, test=False):
- "Add standard autowaf options if they havn't been added yet"
- global g_step
- if g_step > 0:
- return
-
- opts = opt.get_option_group('Configuration options')
-
- # Standard directory options
- opts.add_option('--bindir', type='string',
- help="executable programs [default: PREFIX/bin]")
- opts.add_option('--configdir', type='string',
- help="configuration data [default: PREFIX/etc]")
- opts.add_option('--datadir', type='string',
- help="shared data [default: PREFIX/share]")
- opts.add_option('--includedir', type='string',
- help="header files [default: PREFIX/include]")
- opts.add_option('--libdir', type='string',
- help="libraries [default: PREFIX/lib]")
- opts.add_option('--mandir', type='string',
- help="manual pages [default: DATADIR/man]")
- opts.add_option('--docdir', type='string',
- help="HTML documentation [default: DATADIR/doc]")
-
- # Build options
- if debug_by_default:
- opts.add_option('--optimize', action='store_false', default=True,
- dest='debug', help="build optimized binaries")
- else:
- opts.add_option('-d', '--debug', action='store_true', default=False,
- dest='debug', help="build debuggable binaries")
- opts.add_option('--pardebug', action='store_true', default=False,
- dest='pardebug',
- help="build debug libraries with D suffix")
-
- opts.add_option('-s', '--strict', action='store_true', default=False,
- dest='strict',
- help="use strict compiler flags and show all warnings")
- opts.add_option('-S', '--ultra-strict', action='store_true', default=False,
- dest='ultra_strict',
- help="use extremely strict compiler flags (likely noisy)")
- opts.add_option('--docs', action='store_true', default=False, dest='docs',
- help="build documentation (requires doxygen)")
-
- # Test options
- if test:
- test_opts = opt.add_option_group('Test options', '')
- opts.add_option('-T', '--test', action='store_true', dest='build_tests',
- help='build unit tests')
- opts.add_option('--no-coverage', action='store_true',
- dest='no_coverage',
- help='do not instrument code for test coverage')
- test_opts.add_option('--wrapper', type='string',
- dest='test_wrapper',
- help='command prefix for tests (e.g. valgrind)')
-
- g_step = 1
-
-def add_flags(opt, flags):
- for name, desc in flags.items():
- opt.add_option('--' + name, action='store_true',
- dest=name.replace('-', '_'), help=desc)
-
-def get_check_func(conf, lang):
- if lang == 'c':
- return conf.check_cc
- elif lang == 'cxx':
- return conf.check_cxx
- else:
- Logs.error("Unknown header language `%s'" % lang)
-
-def check_header(conf, lang, name, define='', mandatory=True):
- "Check for a header"
- check_func = get_check_func(conf, lang)
- if define != '':
- check_func(header_name=name,
- define_name=define,
- mandatory=mandatory)
- else:
- check_func(header_name=name, mandatory=mandatory)
-
-def check_function(conf, lang, name, **args):
- "Check for a function"
- header_names = Utils.to_list(args['header_name'])
- includes = ''.join(['#include <%s>\n' % x for x in header_names])
- fragment = '''
-%s
-int main() { return !(void(*)())(%s); }
-''' % (includes, name)
-
- check_func = get_check_func(conf, lang)
- args['msg'] = 'Checking for %s' % name
- check_func(fragment=fragment, **args)
-
-def nameify(name):
- return (name.replace('/', '_').replace('++', 'PP')
- .replace('-', '_').replace('.', '_'))
-
-def define(conf, var_name, value):
- conf.define(var_name, value)
- conf.env[var_name] = value
-
-def check_pkg(conf, name, **args):
- "Check for a package iff it hasn't been checked for yet"
- if args['uselib_store'].lower() in conf.env['AUTOWAF_LOCAL_LIBS']:
- return
-
- class CheckType:
- OPTIONAL = 1
- MANDATORY = 2
-
- var_name = 'CHECKED_' + nameify(args['uselib_store'])
- check = var_name not in conf.env
- mandatory = 'mandatory' not in args or args['mandatory']
- if not check and 'atleast_version' in args:
- # Re-check if version is newer than previous check
- checked_version = conf.env['VERSION_' + name]
- if checked_version and checked_version < args['atleast_version']:
- check = True
- if not check and mandatory and conf.env[var_name] == CheckType.OPTIONAL:
- # Re-check if previous check was optional but this one is mandatory
- check = True
- if check:
- found = None
- pkg_var_name = 'PKG_' + name.replace('-', '_')
- pkg_name = name
- if conf.env.PARDEBUG:
- args['mandatory'] = False # Smash mandatory arg
- found = conf.check_cfg(package=pkg_name + 'D',
- args="--cflags --libs", **args)
- if found:
- pkg_name += 'D'
- if mandatory:
- args['mandatory'] = True # Unsmash mandatory arg
- if not found:
- found = conf.check_cfg(package=pkg_name, args="--cflags --libs",
- **args)
- if found:
- conf.env[pkg_var_name] = pkg_name
- if 'atleast_version' in args:
- conf.env['VERSION_' + name] = args['atleast_version']
- if mandatory:
- conf.env[var_name] = CheckType.MANDATORY
- else:
- conf.env[var_name] = CheckType.OPTIONAL
-
- if not conf.env.MSVC_COMPILER and 'system' in args and args['system']:
- includes = conf.env['INCLUDES_' + nameify(args['uselib_store'])]
- for path in includes:
- if 'COMPILER_CC' in conf.env:
- conf.env.append_value('CFLAGS', ['-isystem', path])
- if 'COMPILER_CXX' in conf.env:
- conf.env.append_value('CXXFLAGS', ['-isystem', path])
-
- conf.env.append_value('CXXFLAGS', ['-isystem', '/usr/local/include'])
-
-def normpath(path):
- if sys.platform == 'win32':
- return os.path.normpath(path).replace('\\', '/')
- else:
- return os.path.normpath(path)
-
-def configure(conf):
- global g_step
- if g_step > 1:
- return
-
- def append_cxx_flags(flags):
- conf.env.append_value('CFLAGS', flags)
- conf.env.append_value('CXXFLAGS', flags)
-
- if Options.options.docs:
- conf.load('doxygen')
-
- try:
- conf.load('clang_compilation_database')
- except Exception:
- pass
-
- prefix = normpath(os.path.abspath(os.path.expanduser(conf.env['PREFIX'])))
-
- conf.env['DOCS'] = Options.options.docs and conf.env.DOXYGEN
- conf.env['DEBUG'] = Options.options.debug or Options.options.pardebug
- conf.env['PARDEBUG'] = Options.options.pardebug
- conf.env['PREFIX'] = prefix
-
- def config_dir(var, opt, default):
- if opt:
- conf.env[var] = normpath(opt)
- else:
- conf.env[var] = normpath(default)
-
- opts = Options.options
-
- config_dir('BINDIR', opts.bindir, os.path.join(prefix, 'bin'))
- config_dir('SYSCONFDIR', opts.configdir, os.path.join(prefix, 'etc'))
- config_dir('DATADIR', opts.datadir, os.path.join(prefix, 'share'))
- config_dir('INCLUDEDIR', opts.includedir, os.path.join(prefix, 'include'))
- config_dir('LIBDIR', opts.libdir, os.path.join(prefix, 'lib'))
-
- datadir = conf.env['DATADIR']
- config_dir('MANDIR', opts.mandir, os.path.join(datadir, 'man'))
- config_dir('DOCDIR', opts.docdir, os.path.join(datadir, 'doc'))
-
- if Options.options.debug:
- if conf.env['MSVC_COMPILER']:
- conf.env['CFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS']
- conf.env['CXXFLAGS'] = ['/Od', '/Z7', '/MTd', '/FS']
- conf.env['LINKFLAGS'] = ['/DEBUG', '/MANIFEST']
- else:
- conf.env['CFLAGS'] = ['-O0', '-g']
- conf.env['CXXFLAGS'] = ['-O0', '-g']
- else:
- if conf.env['MSVC_COMPILER']:
- append_cxx_flags(['/MD', '/FS', '/DNDEBUG'])
- else:
- append_cxx_flags(['-DNDEBUG'])
-
- if conf.env.MSVC_COMPILER:
- Options.options.no_coverage = True
- append_cxx_flags(['/nologo',
- '/FS',
- '/DNDEBUG',
- '/D_CRT_SECURE_NO_WARNINGS',
- '/experimental:external',
- '/external:W0',
- '/external:anglebrackets'])
- conf.env.append_value('LINKFLAGS', '/nologo')
- if Options.options.strict or Options.options.ultra_strict:
- ms_strict_flags = ['/Wall',
- '/wd4061',
- '/wd4200',
- '/wd4514',
- '/wd4571',
- '/wd4625',
- '/wd4626',
- '/wd4706',
- '/wd4710',
- '/wd4820',
- '/wd5026',
- '/wd5027',
- '/wd5045']
- conf.env.append_value('CFLAGS', ms_strict_flags)
- conf.env.append_value('CXXFLAGS', ms_strict_flags)
- conf.env.append_value('CXXFLAGS', ['/EHsc'])
- else:
- if Options.options.ultra_strict:
- Options.options.strict = True
- conf.env.append_value('CFLAGS', ['-Wredundant-decls',
- '-Wstrict-prototypes',
- '-Wmissing-prototypes',
- '-Wcast-qual'])
- conf.env.append_value('CXXFLAGS', ['-Wcast-qual'])
-
- if Options.options.strict:
- conf.env.append_value('CFLAGS', ['-pedantic', '-Wshadow'])
- if conf.env.DEST_OS != "darwin":
- conf.env.append_value('LINKFLAGS', ['-Wl,--no-undefined'])
- conf.env.append_value('CXXFLAGS', ['-Wnon-virtual-dtor',
- '-Woverloaded-virtual'])
- append_cxx_flags(['-Wall',
- '-Wcast-align',
- '-Wextra',
- '-Wmissing-declarations',
- '-Wno-unused-parameter',
- '-Wstrict-overflow',
- '-Wundef',
- '-Wwrite-strings',
- '-fstrict-overflow'])
-
- # Add less universal flags after checking they work
- extra_flags = ['-Wlogical-op',
- '-Wsuggest-attribute=noreturn',
- '-Wunsafe-loop-optimizations']
- if conf.check_cc(cflags=['-Werror'] + extra_flags, mandatory=False,
- msg="Checking for extra C warning flags"):
- conf.env.append_value('CFLAGS', extra_flags)
- if 'COMPILER_CXX' in conf.env:
- if conf.check_cxx(cxxflags=['-Werror'] + extra_flags,
- mandatory=False,
- msg="Checking for extra C++ warning flags"):
- conf.env.append_value('CXXFLAGS', extra_flags)
-
- if not conf.env['MSVC_COMPILER']:
- append_cxx_flags(['-fshow-column'])
-
- conf.env.NO_COVERAGE = True
- conf.env.BUILD_TESTS = False
- try:
- conf.env.BUILD_TESTS = Options.options.build_tests
- conf.env.NO_COVERAGE = Options.options.no_coverage
- if not Options.options.no_coverage:
- # Set up unit test code coverage
- if conf.is_defined('CLANG'):
- for cov in [conf.env.CC[0].replace('clang', 'llvm-cov'),
- 'llvm-cov']:
- if conf.find_program(cov, var='LLVM_COV', mandatory=False):
- break
- else:
- conf.check_cc(lib='gcov', define_name='HAVE_GCOV',
- mandatory=False)
- except Exception:
- pass # Test options do not exist
-
- # Define version in configuration
- appname = getattr(Context.g_module, Context.APPNAME, 'noname')
- version = getattr(Context.g_module, Context.VERSION, '0.0.0')
- defname = appname.upper().replace('-', '_').replace('.', '_')
- define(conf, defname + '_VERSION', version)
-
- conf.env.prepend_value('CFLAGS', '-I' + os.path.abspath('.'))
- conf.env.prepend_value('CXXFLAGS', '-I' + os.path.abspath('.'))
- g_step = 2
-
-def display_summary(conf, msgs=None):
- global g_is_child
- if not g_is_child:
- display_msg(conf, "Install prefix", conf.env['PREFIX'])
- if 'COMPILER_CC' in conf.env:
- display_msg(conf, "C Flags", ' '.join(conf.env['CFLAGS']))
- if 'COMPILER_CXX' in conf.env:
- display_msg(conf, "C++ Flags", ' '.join(conf.env['CXXFLAGS']))
- display_msg(conf, "Debuggable", bool(conf.env['DEBUG']))
- display_msg(conf, "Build documentation", bool(conf.env['DOCS']))
-
- if msgs is not None:
- display_msgs(conf, msgs)
-
-def set_c_lang(conf, lang):
- "Set a specific C language standard, like 'c99' or 'c11'"
- if conf.env.MSVC_COMPILER:
- # MSVC has no hope or desire to compile C99, just compile as C++
- conf.env.append_unique('CFLAGS', ['/TP'])
- else:
- flag = '-std=%s' % lang
- conf.check(cflags=['-Werror', flag],
- msg="Checking for flag '%s'" % flag)
- conf.env.append_unique('CFLAGS', [flag])
-
-def set_cxx_lang(conf, lang):
- "Set a specific C++ language standard, like 'c++11', 'c++14', or 'c++17'"
- if conf.env.MSVC_COMPILER:
- if lang != 'c++14':
- lang = 'c++latest'
- conf.env.append_unique('CXXFLAGS', ['/std:%s' % lang])
- else:
- flag = '-std=%s' % lang
- conf.check(cxxflags=['-Werror', flag],
- msg="Checking for flag '%s'" % flag)
- conf.env.append_unique('CXXFLAGS', [flag])
-
-def set_modern_c_flags(conf):
- "Use the most modern C language available"
- if 'COMPILER_CC' in conf.env:
- if conf.env.MSVC_COMPILER:
- # MSVC has no hope or desire to compile C99, just compile as C++
- conf.env.append_unique('CFLAGS', ['/TP'])
- else:
- for flag in ['-std=c11', '-std=c99']:
- if conf.check(cflags=['-Werror', flag], mandatory=False,
- msg="Checking for flag '%s'" % flag):
- conf.env.append_unique('CFLAGS', [flag])
- break
-
-def set_modern_cxx_flags(conf, mandatory=False):
- "Use the most modern C++ language available"
- if 'COMPILER_CXX' in conf.env:
- if conf.env.MSVC_COMPILER:
- conf.env.append_unique('CXXFLAGS', ['/std:c++latest'])
- else:
- for lang in ['c++14', 'c++1y', 'c++11', 'c++0x']:
- flag = '-std=%s' % lang
- if conf.check(cxxflags=['-Werror', flag], mandatory=False,
- msg="Checking for flag '%s'" % flag):
- conf.env.append_unique('CXXFLAGS', [flag])
- break
-
-def set_local_lib(conf, name, has_objects):
- var_name = 'HAVE_' + nameify(name.upper())
- define(conf, var_name, 1)
- if has_objects:
- if type(conf.env['AUTOWAF_LOCAL_LIBS']) != dict:
- conf.env['AUTOWAF_LOCAL_LIBS'] = {}
- conf.env['AUTOWAF_LOCAL_LIBS'][name.lower()] = True
- else:
- if type(conf.env['AUTOWAF_LOCAL_HEADERS']) != dict:
- conf.env['AUTOWAF_LOCAL_HEADERS'] = {}
- conf.env['AUTOWAF_LOCAL_HEADERS'][name.lower()] = True
-
-def append_property(obj, key, val):
- if hasattr(obj, key):
- setattr(obj, key, getattr(obj, key) + val)
- else:
- setattr(obj, key, val)
-
-def use_lib(bld, obj, libs):
- abssrcdir = os.path.abspath('.')
- libs_list = libs.split()
- for l in libs_list:
- in_headers = l.lower() in bld.env['AUTOWAF_LOCAL_HEADERS']
- in_libs = l.lower() in bld.env['AUTOWAF_LOCAL_LIBS']
- if in_libs:
- append_property(obj, 'use', ' lib%s ' % l.lower())
- append_property(obj, 'framework', bld.env['FRAMEWORK_' + l])
- if in_headers or in_libs:
- if bld.env.MSVC_COMPILER:
- inc_flag = '/I' + os.path.join(abssrcdir, l.lower())
- else:
- inc_flag = '-iquote ' + os.path.join(abssrcdir, l.lower())
- for f in ['CFLAGS', 'CXXFLAGS']:
- if inc_flag not in bld.env[f]:
- bld.env.prepend_value(f, inc_flag)
- else:
- append_property(obj, 'uselib', ' ' + l)
-
-@feature('c', 'cxx')
-@before('apply_link')
-def version_lib(self):
- if self.env.DEST_OS == 'win32':
- self.vnum = None # Prevent waf from automatically appending -0
- if self.env['PARDEBUG']:
- applicable = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib']
- if [x for x in applicable if x in self.features]:
- self.target = self.target + 'D'
-
-def set_lib_env(conf, name, version):
- "Set up environment for local library as if found via pkg-config."
- NAME = name.upper()
- major_ver = version.split('.')[0]
- pkg_var_name = 'PKG_' + name.replace('-', '_') + '_' + major_ver
- lib_name = '%s-%s' % (name, major_ver)
- if conf.env.PARDEBUG:
- lib_name += 'D'
- conf.env[pkg_var_name] = lib_name
- conf.env['INCLUDES_' + NAME] = ['${INCLUDEDIR}/%s-%s' % (name, major_ver)]
- conf.env['LIBPATH_' + NAME] = [conf.env.LIBDIR]
- conf.env['LIB_' + NAME] = [lib_name]
-
- conf.define(NAME + '_VERSION', version)
-
-def set_line_just(conf, width):
- global line_just
- line_just = max(line_just, width)
- conf.line_just = line_just
-
-def display_header(title):
- global g_is_child
- if g_is_child:
- Logs.pprint('BOLD', title)
-
-def display_msg(conf, msg, status=None, color=None):
- color = 'CYAN'
- if type(status) == bool and status:
- color = 'GREEN'
- status = 'yes'
- elif type(status) == bool and not status or status == "False":
- color = 'YELLOW'
- status = 'no'
- Logs.pprint('BOLD', '%s' % msg.ljust(conf.line_just), sep='')
- Logs.pprint('BOLD', ":", sep='')
- Logs.pprint(color, status)
-
-def display_msgs(conf, msgs):
- for k, v in msgs.items():
- display_msg(conf, k, v)
-
-def link_flags(env, lib):
- return ' '.join(map(lambda x: env['LIB_ST'] % x,
- env['LIB_' + lib]))
-
-def compile_flags(env, lib):
- return ' '.join(map(lambda x: env['CPPPATH_ST'] % x,
- env['INCLUDES_' + lib]))
-
-def set_recursive():
- global g_is_child
- g_is_child = True
-
-def is_child():
- global g_is_child
- return g_is_child
-
-def build_pc(bld, name, version, version_suffix, libs, subst_dict={}):
- """Build a pkg-config file for a library.
-
- name -- uppercase variable name (e.g. 'SOMENAME')
- version -- version string (e.g. '1.2.3')
- version_suffix -- name version suffix (e.g. '2')
- libs -- string/list of dependencies (e.g. 'LIBFOO GLIB')
- """
-
- pkg_prefix = bld.env['PREFIX']
- if pkg_prefix[-1] == '/':
- pkg_prefix = pkg_prefix[:-1]
-
- target = name.lower()
- if version_suffix != '':
- target += '-' + version_suffix
-
- if bld.env['PARDEBUG']:
- target += 'D'
-
- target += '.pc'
-
- libdir = bld.env['LIBDIR']
- if libdir.startswith(pkg_prefix):
- libdir = libdir.replace(pkg_prefix, '${exec_prefix}')
-
- includedir = bld.env['INCLUDEDIR']
- if includedir.startswith(pkg_prefix):
- includedir = includedir.replace(pkg_prefix, '${prefix}')
-
- obj = bld(features='subst',
- source='%s.pc.in' % name.lower(),
- target=target,
- install_path=os.path.join(bld.env['LIBDIR'], 'pkgconfig'),
- exec_prefix='${prefix}',
- PREFIX=pkg_prefix,
- EXEC_PREFIX='${prefix}',
- LIBDIR=libdir,
- INCLUDEDIR=includedir)
-
- if type(libs) != list:
- libs = libs.split()
-
- subst_dict[name + '_VERSION'] = version
- subst_dict[name + '_MAJOR_VERSION'] = version[0:version.find('.')]
- for i in libs:
- subst_dict[i + '_LIBS'] = link_flags(bld.env, i)
- lib_cflags = compile_flags(bld.env, i)
- if lib_cflags == '':
- lib_cflags = ' '
- subst_dict[i + '_CFLAGS'] = lib_cflags
-
- obj.__dict__.update(subst_dict)
-
-def build_dir(name, subdir):
- if is_child():
- return os.path.join('build', name, subdir)
- else:
- return os.path.join('build', subdir)
-
-
-def make_simple_dox(name):
- "Clean up messy Doxygen documentation after it is built"
- name = name.lower()
- NAME = name.upper()
- try:
- top = os.getcwd()
- os.chdir(build_dir(name, 'doc/html'))
- page = 'group__%s.html' % name
- if not os.path.exists(page):
- return
- for i in [
- ['%s_API ' % NAME, ''],
- ['%s_DEPRECATED ' % NAME, ''],
- ['group__%s.html' % name, ''],
- ['&#160;', ''],
- [r'<script.*><\/script>', ''],
- [r'<hr\/><a name="details" id="details"><\/a><h2>.*<\/h2>', ''],
- [r'<link href=\"tabs.css\" rel=\"stylesheet\" type=\"text\/css\"\/>',
- ''],
- [r'<img class=\"footer\" src=\"doxygen.png\" alt=\"doxygen\"\/>',
- 'Doxygen']]:
- os.system("sed -i 's/%s/%s/g' %s" % (i[0], i[1], page))
- os.rename('group__%s.html' % name, 'index.html')
- for i in (glob.glob('*.png') +
- glob.glob('*.html') +
- glob.glob('*.js') +
- glob.glob('*.css')):
- if i != 'index.html' and i != 'style.css':
- os.remove(i)
- os.chdir(top)
- os.chdir(build_dir(name, 'doc/man/man3'))
- for i in glob.glob('*.3'):
- os.system("sed -i 's/%s_API //' %s" % (NAME, i))
- for i in glob.glob('_*'):
- os.remove(i)
- os.chdir(top)
- except Exception as e:
- Logs.error("Failed to fix up %s documentation: %s" % (name, e))
-
-
-def build_dox(bld, name, version, srcdir, blddir, outdir='', versioned=True):
- """Build Doxygen API documentation"""
- if not bld.env['DOCS']:
- return
-
- # Doxygen paths in are relative to the doxygen file, not build directory
- if is_child():
- src_dir = os.path.join(srcdir, name.lower())
- else:
- src_dir = srcdir
-
- subst_tg = bld(features='subst',
- source='doc/reference.doxygen.in',
- target='doc/reference.doxygen',
- install_path='',
- name='doxyfile')
-
- subst_dict = {
- name + '_VERSION': version,
- name + '_SRCDIR': os.path.abspath(src_dir),
- name + '_DOC_DIR': ''
- }
-
- subst_tg.__dict__.update(subst_dict)
-
- subst_tg.post()
-
- docs = bld(features='doxygen',
- doxyfile='doc/reference.doxygen')
-
- docs.post()
-
- outname = name.lower()
- if versioned:
- outname += '-%d' % int(version[0:version.find('.')])
- bld.install_files(
- os.path.join('${DOCDIR}', outname, outdir, 'html'),
- bld.path.get_bld().ant_glob('doc/html/*'))
- for i in range(1, 8):
- bld.install_files('${MANDIR}/man%d' % i,
- bld.path.get_bld().ant_glob('doc/man/man%d/*' % i,
- excl='**/_*'))
-
-
-def build_version_files(header_path, source_path, domain, major, minor, micro):
- """Generate version code header"""
- header_path = os.path.abspath(header_path)
- source_path = os.path.abspath(source_path)
- text = "int " + domain + "_major_version = " + str(major) + ";\n"
- text += "int " + domain + "_minor_version = " + str(minor) + ";\n"
- text += "int " + domain + "_micro_version = " + str(micro) + ";\n"
- try:
- o = open(source_path, 'w')
- o.write(text)
- o.close()
- except IOError:
- Logs.error('Failed to open %s for writing\n' % source_path)
- sys.exit(-1)
-
- text = "#ifndef __" + domain + "_version_h__\n"
- text += "#define __" + domain + "_version_h__\n"
- text += "extern const char* " + domain + "_revision;\n"
- text += "extern int " + domain + "_major_version;\n"
- text += "extern int " + domain + "_minor_version;\n"
- text += "extern int " + domain + "_micro_version;\n"
- text += "#endif /* __" + domain + "_version_h__ */\n"
- try:
- o = open(header_path, 'w')
- o.write(text)
- o.close()
- except IOError:
- Logs.warn('Failed to open %s for writing\n' % header_path)
- sys.exit(-1)
-
- return None
-
-def build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder=None):
- Logs.info('Generating pot file from %s' % name)
- pot_file = '%s.pot' % name
-
- cmd = ['xgettext',
- '--keyword=_',
- '--keyword=N_',
- '--keyword=S_',
- '--from-code=UTF-8',
- '-o', pot_file]
-
- if copyright_holder:
- cmd += ['--copyright-holder="%s"' % copyright_holder]
-
- cmd += sources
- Logs.info('Updating ' + pot_file)
- subprocess.call(cmd, cwd=os.path.join(srcdir, dir))
-
-def build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder=None):
- pwd = os.getcwd()
- os.chdir(os.path.join(srcdir, dir))
- pot_file = '%s.pot' % name
- po_files = glob.glob('po/*.po')
- for po_file in po_files:
- cmd = ['msgmerge',
- '--update',
- po_file,
- pot_file]
- Logs.info('Updating ' + po_file)
- subprocess.call(cmd)
- os.chdir(pwd)
-
-def build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder=None):
- pwd = os.getcwd()
- os.chdir(os.path.join(srcdir, dir))
- po_files = glob.glob('po/*.po')
- for po_file in po_files:
- mo_file = po_file.replace('.po', '.mo')
- cmd = ['msgfmt',
- '-c',
- '-f',
- '-o',
- mo_file,
- po_file]
- Logs.info('Generating ' + po_file)
- subprocess.call(cmd)
- os.chdir(pwd)
-
-def build_i18n(bld, srcdir, dir, name, sources, copyright_holder=None):
- build_i18n_pot(bld, srcdir, dir, name, sources, copyright_holder)
- build_i18n_po(bld, srcdir, dir, name, sources, copyright_holder)
- build_i18n_mo(bld, srcdir, dir, name, sources, copyright_holder)
-
-def cd_to_build_dir(ctx, appname):
- top_level = (len(ctx.stack_path) > 1)
- if top_level:
- os.chdir(os.path.join('build', appname))
- else:
- os.chdir('build')
-
-def cd_to_orig_dir(ctx, child):
- if child:
- os.chdir(os.path.join('..', '..'))
- else:
- os.chdir('..')
-
-def bench_time():
- if hasattr(time, 'perf_counter'): # Added in Python 3.3
- return time.perf_counter()
- else:
- return time.time()
-
-def pre_test(ctx, appname, dirs=['src']):
- Logs.pprint('GREEN', '\n[==========] Running %s tests' % appname)
-
- if not hasattr(ctx, 'autowaf_tests_total'):
- ctx.autowaf_tests_start_time = bench_time()
- ctx.autowaf_tests_total = 0
- ctx.autowaf_tests_failed = 0
- ctx.autowaf_local_tests_total = 0
- ctx.autowaf_local_tests_failed = 0
- ctx.autowaf_tests = {}
-
- ctx.autowaf_tests[appname] = {'total': 0, 'failed': 0}
-
- cd_to_build_dir(ctx, appname)
- if not ctx.env.NO_COVERAGE:
- diropts = ''
- for i in dirs:
- diropts += ' -d ' + i
- clear_log = open('lcov-clear.log', 'w')
- try:
- try:
- # Clear coverage data
- subprocess.call(('lcov %s -z' % diropts).split(),
- stdout=clear_log, stderr=clear_log)
- except Exception:
- Logs.warn('Failed to run lcov, no coverage report generated')
- finally:
- clear_log.close()
-
-class TestFailed(Exception):
- pass
-
-def post_test(ctx, appname, dirs=['src'], remove=['*boost*', 'c++*']):
- if not ctx.env.NO_COVERAGE:
- diropts = ''
- for i in dirs:
- diropts += ' -d ' + i
- coverage_log = open('lcov-coverage.log', 'w')
- coverage_lcov = open('coverage.lcov', 'w')
- coverage_stripped_lcov = open('coverage-stripped.lcov', 'w')
- try:
- try:
- base = '.'
- if g_is_child:
- base = '..'
-
- # Generate coverage data
- lcov_cmd = 'lcov -c %s -b %s' % (diropts, base)
- if ctx.env.LLVM_COV:
- lcov_cmd += ' --gcov-tool %s' % ctx.env.LLVM_COV[0]
- subprocess.call(lcov_cmd.split(),
- stdout=coverage_lcov, stderr=coverage_log)
-
- # Strip unwanted stuff
- subprocess.call(
- ['lcov', '--remove', 'coverage.lcov'] + remove,
- stdout=coverage_stripped_lcov, stderr=coverage_log)
-
- # Generate HTML coverage output
- if not os.path.isdir('coverage'):
- os.makedirs('coverage')
- subprocess.call(
- 'genhtml -o coverage coverage-stripped.lcov'.split(),
- stdout=coverage_log, stderr=coverage_log)
-
- except Exception:
- Logs.warn('Failed to run lcov, no coverage report generated')
- finally:
- coverage_stripped_lcov.close()
- coverage_lcov.close()
- coverage_log.close()
-
- duration = (bench_time() - ctx.autowaf_tests_start_time) * 1000.0
- total_tests = ctx.autowaf_tests[appname]['total']
- failed_tests = ctx.autowaf_tests[appname]['failed']
- passed_tests = total_tests - failed_tests
- Logs.pprint('GREEN', '\n[==========] %d tests from %s ran (%d ms total)' % (
- total_tests, appname, duration))
- if not ctx.env.NO_COVERAGE:
- Logs.pprint('GREEN', '[----------] Coverage: <file://%s>'
- % os.path.abspath('coverage/index.html'))
-
- Logs.pprint('GREEN', '[ PASSED ] %d tests' % passed_tests)
- if failed_tests > 0:
- Logs.pprint('RED', '[ FAILED ] %d tests' % failed_tests)
- raise TestFailed('Tests from %s failed' % appname)
- Logs.pprint('', '')
-
- top_level = (len(ctx.stack_path) > 1)
- if top_level:
- cd_to_orig_dir(ctx, top_level)
-
-def run_test(ctx,
- appname,
- test,
- desired_status=0,
- dirs=['src'],
- name='',
- header=False,
- quiet=False):
- """Run an individual test.
-
- `test` is either a shell command string, or a list of [name, return status]
- for displaying tests implemented in the calling Python code.
- """
-
- ctx.autowaf_tests_total += 1
- ctx.autowaf_local_tests_total += 1
- ctx.autowaf_tests[appname]['total'] += 1
-
- out = (None, None)
- if type(test) == list:
- name = test[0]
- returncode = test[1]
- elif callable(test):
- returncode = test()
- else:
- s = test
- if isinstance(test, type([])):
- s = ' '.join(test)
- if header and not quiet:
- Logs.pprint('Green', '\n[ RUN ] %s' % s)
- cmd = test
- if Options.options.test_wrapper:
- cmd = Options.options.test_wrapper + ' ' + test
- if name == '':
- name = test
-
- proc = subprocess.Popen(cmd, shell=True,
- stdout=subprocess.PIPE, stderr=subprocess.PIPE)
- out = proc.communicate()
- returncode = proc.returncode
-
- success = desired_status is None or returncode == desired_status
- if success:
- if not quiet:
- Logs.pprint('GREEN', '[ OK ] %s' % name)
- else:
- Logs.pprint('RED', '[ FAILED ] %s' % name)
- ctx.autowaf_tests_failed += 1
- ctx.autowaf_local_tests_failed += 1
- ctx.autowaf_tests[appname]['failed'] += 1
- if type(test) != list and not callable(test):
- Logs.pprint('RED', test)
-
- if Options.options.verbose and type(test) != list and not callable(test):
- sys.stdout.write(out[0].decode('utf-8'))
- sys.stderr.write(out[1].decode('utf-8'))
-
- return (success, out)
-
-def tests_name(ctx, appname, name='*'):
- if name == '*':
- return appname
- else:
- return '%s.%s' % (appname, name)
-
-def begin_tests(ctx, appname, name='*'):
- ctx.autowaf_local_tests_failed = 0
- ctx.autowaf_local_tests_total = 0
- ctx.autowaf_local_tests_start_time = bench_time()
- Logs.pprint('GREEN', '\n[----------] %s' % (
- tests_name(ctx, appname, name)))
-
- class Handle:
- def __enter__(self):
- pass
-
- def __exit__(self, type, value, traceback):
- end_tests(ctx, appname, name)
-
- return Handle()
-
-def end_tests(ctx, appname, name='*'):
- duration = (bench_time() - ctx.autowaf_local_tests_start_time) * 1000.0
- total = ctx.autowaf_local_tests_total
- failures = ctx.autowaf_local_tests_failed
- if failures == 0:
- Logs.pprint('GREEN', '[----------] %d tests from %s (%d ms total)' % (
- ctx.autowaf_local_tests_total, tests_name(ctx, appname, name), duration))
- else:
- Logs.pprint('RED', '[----------] %d/%d tests from %s (%d ms total)' % (
- total - failures, total, tests_name(ctx, appname, name), duration))
-
-def run_tests(ctx,
- appname,
- tests,
- desired_status=0,
- dirs=['src'],
- name='*',
- headers=False):
- begin_tests(ctx, appname, name)
-
- diropts = ''
- for i in dirs:
- diropts += ' -d ' + i
-
- for i in tests:
- run_test(ctx, appname, i, desired_status, dirs, i, headers)
-
- end_tests(ctx, appname, name)
-
-def run_ldconfig(ctx):
- should_run = (ctx.cmd == 'install' and
- not ctx.env['RAN_LDCONFIG'] and
- ctx.env['LIBDIR'] and
- 'DESTDIR' not in os.environ and
- not Options.options.destdir)
-
- if should_run:
- try:
- Logs.info("Waf: Running `/sbin/ldconfig %s'" % ctx.env['LIBDIR'])
- subprocess.call(['/sbin/ldconfig', ctx.env['LIBDIR']])
- ctx.env['RAN_LDCONFIG'] = True
- except Exception:
- pass
-
-def get_rdf_news(name,
- in_files,
- top_entries=None,
- extra_entries=None,
- dev_dist=None):
- import rdflib
- from time import strptime
-
- doap = rdflib.Namespace('http://usefulinc.com/ns/doap#')
- dcs = rdflib.Namespace('http://ontologi.es/doap-changeset#')
- rdfs = rdflib.Namespace('http://www.w3.org/2000/01/rdf-schema#')
- foaf = rdflib.Namespace('http://xmlns.com/foaf/0.1/')
- rdf = rdflib.Namespace('http://www.w3.org/1999/02/22-rdf-syntax-ns#')
- m = rdflib.ConjunctiveGraph()
-
- try:
- for i in in_files:
- m.parse(i, format='n3')
- except Exception:
- Logs.warn('Error parsing data, unable to generate NEWS')
- return
-
- proj = m.value(None, rdf.type, doap.Project)
- for f in m.triples([proj, rdfs.seeAlso, None]):
- if f[2].endswith('.ttl'):
- m.parse(f[2], format='n3')
-
- entries = {}
- for r in m.triples([proj, doap.release, None]):
- release = r[2]
- revision = m.value(release, doap.revision, None)
- date = m.value(release, doap.created, None)
- blamee = m.value(release, dcs.blame, None)
- changeset = m.value(release, dcs.changeset, None)
- dist = m.value(release, doap['file-release'], None)
-
- if not dist:
- Logs.warn('No file release for %s %s' % (proj, revision))
- dist = dev_dist
-
- if revision and date and blamee and changeset:
- entry = {}
- entry['name'] = str(name)
- entry['revision'] = str(revision)
- entry['date'] = strptime(str(date), '%Y-%m-%d')
- entry['status'] = 'stable' if dist != dev_dist else 'unstable'
- entry['dist'] = str(dist)
- entry['items'] = []
-
- for i in m.triples([changeset, dcs.item, None]):
- item = str(m.value(i[2], rdfs.label, None))
- entry['items'] += [item]
- if dist and top_entries is not None:
- if not str(dist) in top_entries:
- top_entries[str(dist)] = {'items': []}
- top_entries[str(dist)]['items'] += [
- '%s: %s' % (name, item)]
-
- if extra_entries and dist:
- for i in extra_entries[str(dist)]:
- entry['items'] += extra_entries[str(dist)]['items']
-
- entry['blamee_name'] = str(m.value(blamee, foaf.name, None))
- entry['blamee_mbox'] = str(m.value(blamee, foaf.mbox, None))
-
- entries[(str(date), str(revision))] = entry
- else:
- Logs.warn('Ignored incomplete %s release description' % name)
-
- return entries
-
-def write_news(entries, out_file):
- import textwrap
- from time import strftime
-
- if len(entries) == 0:
- return
-
- news = open(out_file, 'w')
- for e in sorted(entries.keys(), reverse=True):
- entry = entries[e]
- news.write('%s (%s) %s;\n' % (entry['name'], entry['revision'], entry['status']))
- for item in entry['items']:
- wrapped = textwrap.wrap(item, width=79)
- news.write('\n * ' + '\n '.join(wrapped))
-
- news.write('\n\n --')
- news.write(' %s <%s>' % (entry['blamee_name'],
- entry['blamee_mbox'].replace('mailto:', '')))
-
- news.write(' %s\n\n' % (
- strftime('%a, %d %b %Y %H:%M:%S +0000', entry['date'])))
-
- news.close()
-
-def write_posts(entries, meta, out_dir, status='stable'):
- "write news posts in Pelican Markdown format"
- from time import strftime
- try:
- os.mkdir(out_dir)
- except Exception:
- pass
-
- for i in entries:
- entry = entries[i]
- revision = i[1]
- if entry['status'] != status:
- continue
-
- date_str = strftime('%Y-%m-%d', entry['date'])
- datetime_str = strftime('%Y-%m-%d %H:%M', entry['date'])
-
- path = os.path.join(out_dir, '%s-%s-%s.md' % (
- date_str, entry['name'], revision.replace('.', '-')))
- post = open(path, 'w')
- title = entry['title'] if 'title' in entry else entry['name']
- post.write('Title: %s %s\n' % (title, revision))
- post.write('Date: %s\n' % datetime_str)
- post.write('Slug: %s-%s\n' % (entry['name'], revision.replace('.', '-')))
- for k in meta:
- post.write('%s: %s\n' % (k, meta[k]))
- post.write('\n')
-
- url = entry['dist']
- if entry['status'] == status:
- post.write('[%s %s](%s) has been released.' % (
- (entry['name'], revision, url)))
-
- if 'description' in entry:
- post.write(' ' + entry['description'])
-
- post.write('\n')
- if (len(entry['items']) > 0 and
- not (len(entry['items']) == 1 and
- entry['items'][0] == 'Initial release')):
- post.write('\nChanges:\n\n')
- for i in entry['items']:
- post.write(' * %s\n' % i)
-
- post.close()
-
-def get_blurb(in_file):
- "Get the first paragram of a Markdown formatted file, skipping the title"
- f = open(in_file, 'r')
- f.readline() # Title
- f.readline() # Title underline
- f.readline() # Blank
- out = ''
- line = f.readline()
- while len(line) > 0 and line != '\n':
- out += line.replace('\n', ' ')
- line = f.readline()
- return out.strip()
-
-def get_news(in_file, entry_props={}):
- """Get NEWS entries in the format expected by write_posts().
-
- Properties that should be set on every entry can be passed in
- `entry_props`. If `entry_props` has a 'dist_pattern' value, it is used to
- set the 'dist' entry of entries by substituting the version number.
- """
-
- import re
- import rfc822
-
- f = open(in_file, 'r')
- entries = {}
- while True:
- # Read header line
- head = f.readline()
- matches = re.compile(r'([^ ]*) \((.*)\) ([a-zA-z]*);').match(head)
- if matches is None:
- break
-
- entry = {}
- entry['name'] = matches.group(1)
- entry['revision'] = matches.group(2)
- entry['status'] = matches.group(3)
- entry['items'] = []
- if 'dist_pattern' in entry_props:
- entry['dist'] = entry_props['dist_pattern'] % entry['revision']
-
- # Read blank line after header
- if f.readline() != '\n':
- raise SyntaxError('expected blank line after NEWS header')
-
- def add_item(item):
- if len(item) > 0:
- entry['items'] += [item.replace('\n', ' ').strip()]
-
- # Read entries for this revision
- item = ''
- line = ''
- while line != '\n':
- line = f.readline()
- if line.startswith(' * '):
- add_item(item)
- item = line[3:].lstrip()
- else:
- item += line.lstrip()
- add_item(item)
-
- # Read footer line
- foot = f.readline()
- matches = re.compile(' -- (.*) <(.*)> (.*)').match(foot)
- entry['date'] = rfc822.parsedate(matches.group(3))
- entry['blamee_name'] = matches.group(1)
- entry['blamee_mbox'] = matches.group(2)
- entry.update(entry_props)
- entries[(entry['date'], entry['revision'])] = entry
-
- # Skip trailing blank line before next entry
- f.readline()
-
- f.close()
-
- return entries
-
-def news_to_posts(news_file, entry_props, post_meta, default_post_dir):
- post_dir = os.getenv('POST_DIR')
- if not post_dir:
- post_dir = default_post_dir
- sys.stderr.write('POST_DIR not set in environment, writing to %s\n' % post_dir)
- else:
- sys.stderr.write('writing posts to %s\n' % post_dir)
-
- entries = get_news(news_file, entry_props)
- write_posts(entries, post_meta, post_dir)
-
-def run_script(cmds):
- for cmd in cmds:
- subprocess.check_call(cmd, shell=True)
-
-def release(name, version, dist_name=None):
- if dist_name is None:
- dist_name = name.lower()
-
- dist = '%s-%s.tar.bz2' % (dist_name or name.lower(), version)
- try:
- os.remove(dist)
- os.remove(dist + '.sig')
- except Exception:
- pass
-
- status = subprocess.check_output('git status --porcelain', shell=True)
- if status:
- Logs.error('error: git working copy is dirty\n' + status)
- raise Exception('git working copy is dirty')
-
- head = subprocess.check_output('git show -s --oneline', shell=True)
- head_summary = head[8:].strip().lower()
- expected_summary = '%s %s' % (name.lower(), version)
- if head_summary != expected_summary:
- raise Exception('latest commit "%s" does not match "%s"' % (
- head_summary, expected_summary))
-
- run_script(['./waf configure --docs',
- './waf',
- './waf distcheck',
- './waf posts',
- 'gpg -b %s' % dist,
- 'git tag -s v%s -m "%s %s"' % (version, name, version)])
diff --git a/waflib/extras/batched_cc.py b/waflib/extras/batched_cc.py
deleted file mode 100644
index aad2872..0000000
--- a/waflib/extras/batched_cc.py
+++ /dev/null
@@ -1,173 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2015 (ita)
-
-"""
-Instead of compiling object files one by one, c/c++ compilers are often able to compile at once:
-cc -c ../file1.c ../file2.c ../file3.c
-
-Files are output on the directory where the compiler is called, and dependencies are more difficult
-to track (do not run the command on all source files if only one file changes)
-As such, we do as if the files were compiled one by one, but no command is actually run:
-replace each cc/cpp Task by a TaskSlave. A new task called TaskMaster collects the
-signatures from each slave and finds out the command-line to run.
-
-Just import this module to start using it:
-def build(bld):
- bld.load('batched_cc')
-
-Note that this is provided as an example, unity builds are recommended
-for best performance results (fewer tasks and fewer jobs to execute).
-See waflib/extras/unity.py.
-"""
-
-from waflib import Task, Utils
-from waflib.TaskGen import extension, feature, after_method
-from waflib.Tools import c, cxx
-
-MAX_BATCH = 50
-
-c_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
-c_fun, _ = Task.compile_fun_noshell(c_str)
-
-cxx_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${tsk.batch_incpaths()} ${DEFINES_ST:DEFINES} -c ${SRCLST} ${CXX_TGT_F_BATCHED} ${CPPFLAGS}'
-cxx_fun, _ = Task.compile_fun_noshell(cxx_str)
-
-count = 70000
-class batch(Task.Task):
- color = 'PINK'
-
- after = ['c', 'cxx']
- before = ['cprogram', 'cshlib', 'cstlib', 'cxxprogram', 'cxxshlib', 'cxxstlib']
-
- def uid(self):
- return Utils.h_list([Task.Task.uid(self), self.generator.idx, self.generator.path.abspath(), self.generator.target])
-
- def __str__(self):
- return 'Batch compilation for %d slaves' % len(self.slaves)
-
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.slaves = []
- self.inputs = []
- self.hasrun = 0
-
- global count
- count += 1
- self.idx = count
-
- def add_slave(self, slave):
- self.slaves.append(slave)
- self.set_run_after(slave)
-
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- for t in self.slaves:
- #if t.executed:
- if t.hasrun != Task.SKIPPED:
- return Task.RUN_ME
-
- return Task.SKIP_ME
-
- def get_cwd(self):
- return self.slaves[0].outputs[0].parent
-
- def batch_incpaths(self):
- st = self.env.CPPPATH_ST
- return [st % node.abspath() for node in self.generator.includes_nodes]
-
- def run(self):
- self.outputs = []
-
- srclst = []
- slaves = []
- for t in self.slaves:
- if t.hasrun != Task.SKIPPED:
- slaves.append(t)
- srclst.append(t.inputs[0].abspath())
-
- self.env.SRCLST = srclst
-
- if self.slaves[0].__class__.__name__ == 'c':
- ret = c_fun(self)
- else:
- ret = cxx_fun(self)
-
- if ret:
- return ret
-
- for t in slaves:
- t.old_post_run()
-
-def hook(cls_type):
- def n_hook(self, node):
-
- ext = '.obj' if self.env.CC_NAME == 'msvc' else '.o'
- name = node.name
- k = name.rfind('.')
- if k >= 0:
- basename = name[:k] + ext
- else:
- basename = name + ext
-
- outdir = node.parent.get_bld().make_node('%d' % self.idx)
- outdir.mkdir()
- out = outdir.find_or_declare(basename)
-
- task = self.create_task(cls_type, node, out)
-
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
-
- if not getattr(self, 'masters', None):
- self.masters = {}
- self.allmasters = []
-
- def fix_path(tsk):
- if self.env.CC_NAME == 'msvc':
- tsk.env.append_unique('CXX_TGT_F_BATCHED', '/Fo%s\\' % outdir.abspath())
-
- if not node.parent in self.masters:
- m = self.masters[node.parent] = self.master = self.create_task('batch')
- fix_path(m)
- self.allmasters.append(m)
- else:
- m = self.masters[node.parent]
- if len(m.slaves) > MAX_BATCH:
- m = self.masters[node.parent] = self.master = self.create_task('batch')
- fix_path(m)
- self.allmasters.append(m)
- m.add_slave(task)
- return task
- return n_hook
-
-extension('.c')(hook('c'))
-extension('.cpp','.cc','.cxx','.C','.c++')(hook('cxx'))
-
-@feature('cprogram', 'cshlib', 'cstaticlib', 'cxxprogram', 'cxxshlib', 'cxxstlib')
-@after_method('apply_link')
-def link_after_masters(self):
- if getattr(self, 'allmasters', None):
- for m in self.allmasters:
- self.link_task.set_run_after(m)
-
-# Modify the c and cxx task classes - in theory it would be best to
-# create subclasses and to re-map the c/c++ extensions
-for x in ('c', 'cxx'):
- t = Task.classes[x]
- def run(self):
- pass
-
- def post_run(self):
- pass
-
- setattr(t, 'oldrun', getattr(t, 'run', None))
- setattr(t, 'run', run)
- setattr(t, 'old_post_run', t.post_run)
- setattr(t, 'post_run', post_run)
-
diff --git a/waflib/extras/biber.py b/waflib/extras/biber.py
deleted file mode 100644
index fd9db4e..0000000
--- a/waflib/extras/biber.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Latex processing using "biber"
-"""
-
-import os
-from waflib import Task, Logs
-
-from waflib.Tools import tex as texmodule
-
-class tex(texmodule.tex):
- biber_fun, _ = Task.compile_fun('${BIBER} ${BIBERFLAGS} ${SRCFILE}',shell=False)
- biber_fun.__doc__ = """
- Execute the program **biber**
- """
-
- def bibfile(self):
- return None
-
- def bibunits(self):
- self.env.env = {}
- self.env.env.update(os.environ)
- self.env.env.update({'BIBINPUTS': self.texinputs(), 'BSTINPUTS': self.texinputs()})
- self.env.SRCFILE = self.aux_nodes[0].name[:-4]
-
- if not self.env['PROMPT_LATEX']:
- self.env.append_unique('BIBERFLAGS', '--quiet')
-
- path = self.aux_nodes[0].abspath()[:-4] + '.bcf'
- if os.path.isfile(path):
- Logs.warn('calling biber')
- self.check_status('error when calling biber, check %s.blg for errors' % (self.env.SRCFILE), self.biber_fun())
- else:
- super(tex, self).bibfile()
- super(tex, self).bibunits()
-
-class latex(tex):
- texfun, vars = Task.compile_fun('${LATEX} ${LATEXFLAGS} ${SRCFILE}', shell=False)
-class pdflatex(tex):
- texfun, vars = Task.compile_fun('${PDFLATEX} ${PDFLATEXFLAGS} ${SRCFILE}', shell=False)
-class xelatex(tex):
- texfun, vars = Task.compile_fun('${XELATEX} ${XELATEXFLAGS} ${SRCFILE}', shell=False)
-
-def configure(self):
- """
- Almost the same as in tex.py, but try to detect 'biber'
- """
- v = self.env
- for p in ' biber tex latex pdflatex xelatex bibtex dvips dvipdf ps2pdf makeindex pdf2ps'.split():
- try:
- self.find_program(p, var=p.upper())
- except self.errors.ConfigurationError:
- pass
- v['DVIPSFLAGS'] = '-Ppdf'
-
diff --git a/waflib/extras/bjam.py b/waflib/extras/bjam.py
deleted file mode 100644
index 8e04d3a..0000000
--- a/waflib/extras/bjam.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from os import sep, readlink
-from waflib import Logs
-from waflib.TaskGen import feature, after_method
-from waflib.Task import Task, always_run
-
-def options(opt):
- grp = opt.add_option_group('Bjam Options')
- grp.add_option('--bjam_src', default=None, help='You can find it in <boost root>/tools/jam/src')
- grp.add_option('--bjam_uname', default='linuxx86_64', help='bjam is built in <src>/bin.<uname>/bjam')
- grp.add_option('--bjam_config', default=None)
- grp.add_option('--bjam_toolset', default=None)
-
-def configure(cnf):
- if not cnf.env.BJAM_SRC:
- cnf.env.BJAM_SRC = cnf.options.bjam_src
- if not cnf.env.BJAM_UNAME:
- cnf.env.BJAM_UNAME = cnf.options.bjam_uname
- try:
- cnf.find_program('bjam', path_list=[
- cnf.env.BJAM_SRC + sep + 'bin.' + cnf.env.BJAM_UNAME
- ])
- except Exception:
- cnf.env.BJAM = None
- if not cnf.env.BJAM_CONFIG:
- cnf.env.BJAM_CONFIG = cnf.options.bjam_config
- if not cnf.env.BJAM_TOOLSET:
- cnf.env.BJAM_TOOLSET = cnf.options.bjam_toolset
-
-@feature('bjam')
-@after_method('process_rule')
-def process_bjam(self):
- if not self.bld.env.BJAM:
- self.create_task('bjam_creator')
- self.create_task('bjam_build')
- self.create_task('bjam_installer')
- if getattr(self, 'always', False):
- always_run(bjam_creator)
- always_run(bjam_build)
- always_run(bjam_installer)
-
-class bjam_creator(Task):
- ext_out = 'bjam_exe'
- vars=['BJAM_SRC', 'BJAM_UNAME']
- def run(self):
- env = self.env
- gen = self.generator
- bjam = gen.bld.root.find_dir(env.BJAM_SRC)
- if not bjam:
- Logs.error('Can not find bjam source')
- return -1
- bjam_exe_relpath = 'bin.' + env.BJAM_UNAME + '/bjam'
- bjam_exe = bjam.find_resource(bjam_exe_relpath)
- if bjam_exe:
- env.BJAM = bjam_exe.srcpath()
- return 0
- bjam_cmd = ['./build.sh']
- Logs.debug('runner: ' + bjam.srcpath() + '> ' + str(bjam_cmd))
- result = self.exec_command(bjam_cmd, cwd=bjam.srcpath())
- if not result == 0:
- Logs.error('bjam failed')
- return -1
- bjam_exe = bjam.find_resource(bjam_exe_relpath)
- if bjam_exe:
- env.BJAM = bjam_exe.srcpath()
- return 0
- Logs.error('bjam failed')
- return -1
-
-class bjam_build(Task):
- ext_in = 'bjam_exe'
- ext_out = 'install'
- vars = ['BJAM_TOOLSET']
- def run(self):
- env = self.env
- gen = self.generator
- path = gen.path
- bld = gen.bld
- if hasattr(gen, 'root'):
- build_root = path.find_node(gen.root)
- else:
- build_root = path
- jam = bld.srcnode.find_resource(env.BJAM_CONFIG)
- if jam:
- Logs.debug('bjam: Using jam configuration from ' + jam.srcpath())
- jam_rel = jam.relpath_gen(build_root)
- else:
- Logs.warn('No build configuration in build_config/user-config.jam. Using default')
- jam_rel = None
- bjam_exe = bld.srcnode.find_node(env.BJAM)
- if not bjam_exe:
- Logs.error('env.BJAM is not set')
- return -1
- bjam_exe_rel = bjam_exe.relpath_gen(build_root)
- cmd = ([bjam_exe_rel] +
- (['--user-config=' + jam_rel] if jam_rel else []) +
- ['--stagedir=' + path.get_bld().path_from(build_root)] +
- ['--debug-configuration'] +
- ['--with-' + lib for lib in self.generator.target] +
- (['toolset=' + env.BJAM_TOOLSET] if env.BJAM_TOOLSET else []) +
- ['link=' + 'shared'] +
- ['variant=' + 'release']
- )
- Logs.debug('runner: ' + build_root.srcpath() + '> ' + str(cmd))
- ret = self.exec_command(cmd, cwd=build_root.srcpath())
- if ret != 0:
- return ret
- self.set_outputs(path.get_bld().ant_glob('lib/*') + path.get_bld().ant_glob('bin/*'))
- return 0
-
-class bjam_installer(Task):
- ext_in = 'install'
- def run(self):
- gen = self.generator
- path = gen.path
- for idir, pat in (('${LIBDIR}', 'lib/*'), ('${BINDIR}', 'bin/*')):
- files = []
- for n in path.get_bld().ant_glob(pat):
- try:
- t = readlink(n.srcpath())
- gen.bld.symlink_as(sep.join([idir, n.name]), t, postpone=False)
- except OSError:
- files.append(n)
- gen.bld.install_files(idir, files, postpone=False)
- return 0
-
diff --git a/waflib/extras/blender.py b/waflib/extras/blender.py
deleted file mode 100644
index e5efc28..0000000
--- a/waflib/extras/blender.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Michal Proszek, 2014 (poxip)
-
-"""
-Detect the version of Blender, path
-and install the extension:
-
- def options(opt):
- opt.load('blender')
- def configure(cnf):
- cnf.load('blender')
- def build(bld):
- bld(name='io_mesh_raw',
- feature='blender',
- files=['file1.py', 'file2.py']
- )
-If name variable is empty, files are installed in scripts/addons, otherwise scripts/addons/name
-Use ./waf configure --system to set the installation directory to system path
-"""
-import os
-import re
-from getpass import getuser
-
-from waflib import Utils
-from waflib.TaskGen import feature
-from waflib.Configure import conf
-
-def options(opt):
- opt.add_option(
- '-s', '--system',
- dest='directory_system',
- default=False,
- action='store_true',
- help='determines installation directory (default: user)'
- )
-
-@conf
-def find_blender(ctx):
- '''Return version number of blender, if not exist return None'''
- blender = ctx.find_program('blender')
- output = ctx.cmd_and_log(blender + ['--version'])
- m = re.search(r'Blender\s*((\d+(\.|))*)', output)
- if not m:
- ctx.fatal('Could not retrieve blender version')
-
- try:
- blender_version = m.group(1)
- except IndexError:
- ctx.fatal('Could not retrieve blender version')
-
- ctx.env['BLENDER_VERSION'] = blender_version
- return blender
-
-@conf
-def configure_paths(ctx):
- """Setup blender paths"""
- # Get the username
- user = getuser()
- _platform = Utils.unversioned_sys_platform()
- config_path = {'user': '', 'system': ''}
- if _platform.startswith('linux'):
- config_path['user'] = '/home/%s/.config/blender/' % user
- config_path['system'] = '/usr/share/blender/'
- elif _platform == 'darwin':
- # MAC OS X
- config_path['user'] = \
- '/Users/%s/Library/Application Support/Blender/' % user
- config_path['system'] = '/Library/Application Support/Blender/'
- elif Utils.is_win32:
- # Windows
- appdata_path = ctx.getenv('APPDATA').replace('\\', '/')
- homedrive = ctx.getenv('HOMEDRIVE').replace('\\', '/')
-
- config_path['user'] = '%s/Blender Foundation/Blender/' % appdata_path
- config_path['system'] = \
- '%sAll Users/AppData/Roaming/Blender Foundation/Blender/' % homedrive
- else:
- ctx.fatal(
- 'Unsupported platform. '
- 'Available platforms: Linux, OSX, MS-Windows.'
- )
-
- blender_version = ctx.env['BLENDER_VERSION']
-
- config_path['user'] += blender_version + '/'
- config_path['system'] += blender_version + '/'
-
- ctx.env['BLENDER_CONFIG_DIR'] = os.path.abspath(config_path['user'])
- if ctx.options.directory_system:
- ctx.env['BLENDER_CONFIG_DIR'] = config_path['system']
-
- ctx.env['BLENDER_ADDONS_DIR'] = os.path.join(
- ctx.env['BLENDER_CONFIG_DIR'], 'scripts/addons'
- )
- Utils.check_dir(ctx.env['BLENDER_ADDONS_DIR'])
-
-def configure(ctx):
- ctx.find_blender()
- ctx.configure_paths()
-
-@feature('blender_list')
-def blender(self):
- # Two ways to install a blender extension: as a module or just .py files
- dest_dir = os.path.join(self.env.BLENDER_ADDONS_DIR, self.get_name())
- Utils.check_dir(dest_dir)
- self.add_install_files(install_to=dest_dir, install_from=getattr(self, 'files', '.'))
-
diff --git a/waflib/extras/boo.py b/waflib/extras/boo.py
deleted file mode 100644
index 06623d4..0000000
--- a/waflib/extras/boo.py
+++ /dev/null
@@ -1,81 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Yannick LM 2011
-
-"""
-Support for the boo programming language, for example::
-
- bld(features = "boo", # necessary feature
- source = "src.boo", # list of boo files
- gen = "world.dll", # target
- type = "library", # library/exe ("-target:xyz" flag)
- name = "world" # necessary if the target is referenced by 'use'
- )
-"""
-
-from waflib import Task
-from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method, before_method, extension
-
-@extension('.boo')
-def boo_hook(self, node):
- # Nothing here yet ...
- # TODO filter the non-boo source files in 'apply_booc' and remove this method
- pass
-
-@feature('boo')
-@before_method('process_source')
-def apply_booc(self):
- """Create a booc task """
- src_nodes = self.to_nodes(self.source)
- out_node = self.path.find_or_declare(self.gen)
-
- self.boo_task = self.create_task('booc', src_nodes, [out_node])
-
- # Set variables used by the 'booc' task
- self.boo_task.env.OUT = '-o:%s' % out_node.abspath()
-
- # type is "exe" by default
- type = getattr(self, "type", "exe")
- self.boo_task.env.BOO_TARGET_TYPE = "-target:%s" % type
-
-@feature('boo')
-@after_method('apply_boo')
-def use_boo(self):
- """"
- boo applications honor the **use** keyword::
- """
- dep_names = self.to_list(getattr(self, 'use', []))
- for dep_name in dep_names:
- dep_task_gen = self.bld.get_tgen_by_name(dep_name)
- if not dep_task_gen:
- continue
- dep_task_gen.post()
- dep_task = getattr(dep_task_gen, 'boo_task', None)
- if not dep_task:
- # Try a cs task:
- dep_task = getattr(dep_task_gen, 'cs_task', None)
- if not dep_task:
- # Try a link task:
- dep_task = getattr(dep_task, 'link_task', None)
- if not dep_task:
- # Abort ...
- continue
- self.boo_task.set_run_after(dep_task) # order
- self.boo_task.dep_nodes.extend(dep_task.outputs) # dependency
- self.boo_task.env.append_value('BOO_FLAGS', '-reference:%s' % dep_task.outputs[0].abspath())
-
-class booc(Task.Task):
- """Compiles .boo files """
- color = 'YELLOW'
- run_str = '${BOOC} ${BOO_FLAGS} ${BOO_TARGET_TYPE} ${OUT} ${SRC}'
-
-@conf
-def check_booc(self):
- self.find_program('booc', 'BOOC')
- self.env.BOO_FLAGS = ['-nologo']
-
-def configure(self):
- """Check that booc is available """
- self.check_booc()
-
diff --git a/waflib/extras/boost.py b/waflib/extras/boost.py
deleted file mode 100644
index c2aaaa9..0000000
--- a/waflib/extras/boost.py
+++ /dev/null
@@ -1,525 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-#
-# partially based on boost.py written by Gernot Vormayr
-# written by Ruediger Sonderfeld <ruediger@c-plusplus.de>, 2008
-# modified by Bjoern Michaelsen, 2008
-# modified by Luca Fossati, 2008
-# rewritten for waf 1.5.1, Thomas Nagy, 2008
-# rewritten for waf 1.6.2, Sylvain Rouquette, 2011
-
-'''
-
-This is an extra tool, not bundled with the default waf binary.
-To add the boost tool to the waf file:
-$ ./waf-light --tools=compat15,boost
- or, if you have waf >= 1.6.2
-$ ./waf update --files=boost
-
-When using this tool, the wscript will look like:
-
- def options(opt):
- opt.load('compiler_cxx boost')
-
- def configure(conf):
- conf.load('compiler_cxx boost')
- conf.check_boost(lib='system filesystem')
-
- def build(bld):
- bld(source='main.cpp', target='app', use='BOOST')
-
-Options are generated, in order to specify the location of boost includes/libraries.
-The `check_boost` configuration function allows to specify the used boost libraries.
-It can also provide default arguments to the --boost-mt command-line arguments.
-Everything will be packaged together in a BOOST component that you can use.
-
-When using MSVC, a lot of compilation flags need to match your BOOST build configuration:
- - you may have to add /EHsc to your CXXFLAGS or define boost::throw_exception if BOOST_NO_EXCEPTIONS is defined.
- Errors: C4530
- - boost libraries will try to be smart and use the (pretty but often not useful) auto-linking feature of MSVC
- So before calling `conf.check_boost` you might want to disabling by adding
- conf.env.DEFINES_BOOST += ['BOOST_ALL_NO_LIB']
- Errors:
- - boost might also be compiled with /MT, which links the runtime statically.
- If you have problems with redefined symbols,
- self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
- self.env['CXXFLAGS_%s' % var] += ['/MD', '/EHsc']
-Passing `--boost-linkage_autodetect` might help ensuring having a correct linkage in some basic cases.
-
-'''
-
-import sys
-import re
-from waflib import Utils, Logs, Errors
-from waflib.Configure import conf
-from waflib.TaskGen import feature, after_method
-
-BOOST_LIBS = ['/usr/lib', '/usr/local/lib', '/opt/local/lib', '/sw/lib', '/lib']
-BOOST_INCLUDES = ['/usr/include', '/usr/local/include', '/opt/local/include', '/sw/include']
-BOOST_VERSION_FILE = 'boost/version.hpp'
-BOOST_VERSION_CODE = '''
-#include <iostream>
-#include <boost/version.hpp>
-int main() { std::cout << BOOST_LIB_VERSION << ":" << BOOST_VERSION << std::endl; }
-'''
-
-BOOST_ERROR_CODE = '''
-#include <boost/system/error_code.hpp>
-int main() { boost::system::error_code c; }
-'''
-
-PTHREAD_CODE = '''
-#include <pthread.h>
-static void* f(void*) { return 0; }
-int main() {
- pthread_t th;
- pthread_attr_t attr;
- pthread_attr_init(&attr);
- pthread_create(&th, &attr, &f, 0);
- pthread_join(th, 0);
- pthread_cleanup_push(0, 0);
- pthread_cleanup_pop(0);
- pthread_attr_destroy(&attr);
-}
-'''
-
-BOOST_THREAD_CODE = '''
-#include <boost/thread.hpp>
-int main() { boost::thread t; }
-'''
-
-BOOST_LOG_CODE = '''
-#include <boost/log/trivial.hpp>
-#include <boost/log/utility/setup/console.hpp>
-#include <boost/log/utility/setup/common_attributes.hpp>
-int main() {
- using namespace boost::log;
- add_common_attributes();
- add_console_log(std::clog, keywords::format = "%Message%");
- BOOST_LOG_TRIVIAL(debug) << "log is working" << std::endl;
-}
-'''
-
-# toolsets from {boost_dir}/tools/build/v2/tools/common.jam
-PLATFORM = Utils.unversioned_sys_platform()
-detect_intel = lambda env: (PLATFORM == 'win32') and 'iw' or 'il'
-detect_clang = lambda env: (PLATFORM == 'darwin') and 'clang-darwin' or 'clang'
-detect_mingw = lambda env: (re.search('MinGW', env.CXX[0])) and 'mgw' or 'gcc'
-BOOST_TOOLSETS = {
- 'borland': 'bcb',
- 'clang': detect_clang,
- 'como': 'como',
- 'cw': 'cw',
- 'darwin': 'xgcc',
- 'edg': 'edg',
- 'g++': detect_mingw,
- 'gcc': detect_mingw,
- 'icpc': detect_intel,
- 'intel': detect_intel,
- 'kcc': 'kcc',
- 'kylix': 'bck',
- 'mipspro': 'mp',
- 'mingw': 'mgw',
- 'msvc': 'vc',
- 'qcc': 'qcc',
- 'sun': 'sw',
- 'sunc++': 'sw',
- 'tru64cxx': 'tru',
- 'vacpp': 'xlc'
-}
-
-
-def options(opt):
- opt = opt.add_option_group('Boost Options')
- opt.add_option('--boost-includes', type='string',
- default='', dest='boost_includes',
- help='''path to the directory where the boost includes are,
- e.g., /path/to/boost_1_55_0/stage/include''')
- opt.add_option('--boost-libs', type='string',
- default='', dest='boost_libs',
- help='''path to the directory where the boost libs are,
- e.g., path/to/boost_1_55_0/stage/lib''')
- opt.add_option('--boost-mt', action='store_true',
- default=False, dest='boost_mt',
- help='select multi-threaded libraries')
- opt.add_option('--boost-abi', type='string', default='', dest='boost_abi',
- help='''select libraries with tags (gd for debug, static is automatically added),
- see doc Boost, Getting Started, chapter 6.1''')
- opt.add_option('--boost-linkage_autodetect', action="store_true", dest='boost_linkage_autodetect',
- help="auto-detect boost linkage options (don't get used to it / might break other stuff)")
- opt.add_option('--boost-toolset', type='string',
- default='', dest='boost_toolset',
- help='force a toolset e.g. msvc, vc90, \
- gcc, mingw, mgw45 (default: auto)')
- py_version = '%d%d' % (sys.version_info[0], sys.version_info[1])
- opt.add_option('--boost-python', type='string',
- default=py_version, dest='boost_python',
- help='select the lib python with this version \
- (default: %s)' % py_version)
-
-
-@conf
-def __boost_get_version_file(self, d):
- if not d:
- return None
- dnode = self.root.find_dir(d)
- if dnode:
- return dnode.find_node(BOOST_VERSION_FILE)
- return None
-
-@conf
-def boost_get_version(self, d):
- """silently retrieve the boost version number"""
- node = self.__boost_get_version_file(d)
- if node:
- try:
- txt = node.read()
- except EnvironmentError:
- Logs.error("Could not read the file %r", node.abspath())
- else:
- re_but1 = re.compile('^#define\\s+BOOST_LIB_VERSION\\s+"(.+)"', re.M)
- m1 = re_but1.search(txt)
- re_but2 = re.compile('^#define\\s+BOOST_VERSION\\s+(\\d+)', re.M)
- m2 = re_but2.search(txt)
- if m1 and m2:
- return (m1.group(1), m2.group(1))
- return self.check_cxx(fragment=BOOST_VERSION_CODE, includes=[d], execute=True, define_ret=True).split(":")
-
-@conf
-def boost_get_includes(self, *k, **kw):
- includes = k and k[0] or kw.get('includes')
- if includes and self.__boost_get_version_file(includes):
- return includes
- for d in self.environ.get('INCLUDE', '').split(';') + BOOST_INCLUDES:
- if self.__boost_get_version_file(d):
- return d
- if includes:
- self.end_msg('headers not found in %s' % includes)
- self.fatal('The configuration failed')
- else:
- self.end_msg('headers not found, please provide a --boost-includes argument (see help)')
- self.fatal('The configuration failed')
-
-
-@conf
-def boost_get_toolset(self, cc):
- toolset = cc
- if not cc:
- build_platform = Utils.unversioned_sys_platform()
- if build_platform in BOOST_TOOLSETS:
- cc = build_platform
- else:
- cc = self.env.CXX_NAME
- if cc in BOOST_TOOLSETS:
- toolset = BOOST_TOOLSETS[cc]
- return isinstance(toolset, str) and toolset or toolset(self.env)
-
-
-@conf
-def __boost_get_libs_path(self, *k, **kw):
- ''' return the lib path and all the files in it '''
- if 'files' in kw:
- return self.root.find_dir('.'), Utils.to_list(kw['files'])
- libs = k and k[0] or kw.get('libs')
- if libs:
- path = self.root.find_dir(libs)
- files = path.ant_glob('*boost_*')
- if not libs or not files:
- for d in self.environ.get('LIB', '').split(';') + BOOST_LIBS:
- if not d:
- continue
- path = self.root.find_dir(d)
- if path:
- files = path.ant_glob('*boost_*')
- if files:
- break
- path = self.root.find_dir(d + '64')
- if path:
- files = path.ant_glob('*boost_*')
- if files:
- break
- if not path:
- if libs:
- self.end_msg('libs not found in %s' % libs)
- self.fatal('The configuration failed')
- else:
- self.end_msg('libs not found, please provide a --boost-libs argument (see help)')
- self.fatal('The configuration failed')
-
- self.to_log('Found the boost path in %r with the libraries:' % path)
- for x in files:
- self.to_log(' %r' % x)
- return path, files
-
-@conf
-def boost_get_libs(self, *k, **kw):
- '''
- return the lib path and the required libs
- according to the parameters
- '''
- path, files = self.__boost_get_libs_path(**kw)
- files = sorted(files, key=lambda f: (len(f.name), f.name), reverse=True)
- toolset = self.boost_get_toolset(kw.get('toolset', ''))
- toolset_pat = '(-%s[0-9]{0,3})' % toolset
- version = '-%s' % self.env.BOOST_VERSION
-
- def find_lib(re_lib, files):
- for file in files:
- if re_lib.search(file.name):
- self.to_log('Found boost lib %s' % file)
- return file
- return None
-
- def format_lib_name(name):
- if name.startswith('lib') and self.env.CC_NAME != 'msvc':
- name = name[3:]
- return name[:name.rfind('.')]
-
- def match_libs(lib_names, is_static):
- libs = []
- lib_names = Utils.to_list(lib_names)
- if not lib_names:
- return libs
- t = []
- if kw.get('mt', False):
- t.append('-mt')
- if kw.get('abi'):
- t.append('%s%s' % (is_static and '-s' or '-', kw['abi']))
- elif is_static:
- t.append('-s')
- tags_pat = t and ''.join(t) or ''
- ext = is_static and self.env.cxxstlib_PATTERN or self.env.cxxshlib_PATTERN
- ext = ext.partition('%s')[2] # remove '%s' or 'lib%s' from PATTERN
-
- for lib in lib_names:
- if lib == 'python':
- # for instance, with python='27',
- # accepts '-py27', '-py2', '27', '-2.7' and '2'
- # but will reject '-py3', '-py26', '26' and '3'
- tags = '({0})?((-py{2})|(-py{1}(?=[^0-9]))|({2})|(-{1}.{3})|({1}(?=[^0-9]))|(?=[^0-9])(?!-py))'.format(tags_pat, kw['python'][0], kw['python'], kw['python'][1])
- else:
- tags = tags_pat
- # Trying libraries, from most strict match to least one
- for pattern in ['boost_%s%s%s%s%s$' % (lib, toolset_pat, tags, version, ext),
- 'boost_%s%s%s%s$' % (lib, tags, version, ext),
- # Give up trying to find the right version
- 'boost_%s%s%s%s$' % (lib, toolset_pat, tags, ext),
- 'boost_%s%s%s$' % (lib, tags, ext),
- 'boost_%s%s$' % (lib, ext),
- 'boost_%s' % lib]:
- self.to_log('Trying pattern %s' % pattern)
- file = find_lib(re.compile(pattern), files)
- if file:
- libs.append(format_lib_name(file.name))
- break
- else:
- self.end_msg('lib %s not found in %s' % (lib, path.abspath()))
- self.fatal('The configuration failed')
- return libs
-
- return path.abspath(), match_libs(kw.get('lib'), False), match_libs(kw.get('stlib'), True)
-
-@conf
-def _check_pthread_flag(self, *k, **kw):
- '''
- Computes which flags should be added to CXXFLAGS and LINKFLAGS to compile in multi-threading mode
-
- Yes, we *need* to put the -pthread thing in CPPFLAGS because with GCC3,
- boost/thread.hpp will trigger a #error if -pthread isn't used:
- boost/config/requires_threads.hpp:47:5: #error "Compiler threading support
- is not turned on. Please set the correct command line options for
- threading: -pthread (Linux), -pthreads (Solaris) or -mthreads (Mingw32)"
-
- Based on _BOOST_PTHREAD_FLAG(): https://github.com/tsuna/boost.m4/blob/master/build-aux/boost.m4
- '''
-
- var = kw.get('uselib_store', 'BOOST')
-
- self.start_msg('Checking the flags needed to use pthreads')
-
- # The ordering *is* (sometimes) important. Some notes on the
- # individual items follow:
- # (none): in case threads are in libc; should be tried before -Kthread and
- # other compiler flags to prevent continual compiler warnings
- # -lpthreads: AIX (must check this before -lpthread)
- # -Kthread: Sequent (threads in libc, but -Kthread needed for pthread.h)
- # -kthread: FreeBSD kernel threads (preferred to -pthread since SMP-able)
- # -llthread: LinuxThreads port on FreeBSD (also preferred to -pthread)
- # -pthread: GNU Linux/GCC (kernel threads), BSD/GCC (userland threads)
- # -pthreads: Solaris/GCC
- # -mthreads: MinGW32/GCC, Lynx/GCC
- # -mt: Sun Workshop C (may only link SunOS threads [-lthread], but it
- # doesn't hurt to check since this sometimes defines pthreads too;
- # also defines -D_REENTRANT)
- # ... -mt is also the pthreads flag for HP/aCC
- # -lpthread: GNU Linux, etc.
- # --thread-safe: KAI C++
- if Utils.unversioned_sys_platform() == "sunos":
- # On Solaris (at least, for some versions), libc contains stubbed
- # (non-functional) versions of the pthreads routines, so link-based
- # tests will erroneously succeed. (We need to link with -pthreads/-mt/
- # -lpthread.) (The stubs are missing pthread_cleanup_push, or rather
- # a function called by this macro, so we could check for that, but
- # who knows whether they'll stub that too in a future libc.) So,
- # we'll just look for -pthreads and -lpthread first:
- boost_pthread_flags = ["-pthreads", "-lpthread", "-mt", "-pthread"]
- else:
- boost_pthread_flags = ["", "-lpthreads", "-Kthread", "-kthread", "-llthread", "-pthread",
- "-pthreads", "-mthreads", "-lpthread", "--thread-safe", "-mt"]
-
- for boost_pthread_flag in boost_pthread_flags:
- try:
- self.env.stash()
- self.env.append_value('CXXFLAGS_%s' % var, boost_pthread_flag)
- self.env.append_value('LINKFLAGS_%s' % var, boost_pthread_flag)
- self.check_cxx(code=PTHREAD_CODE, msg=None, use=var, execute=False)
-
- self.end_msg(boost_pthread_flag)
- return
- except self.errors.ConfigurationError:
- self.env.revert()
- self.end_msg('None')
-
-@conf
-def check_boost(self, *k, **kw):
- """
- Initialize boost libraries to be used.
-
- Keywords: you can pass the same parameters as with the command line (without "--boost-").
- Note that the command line has the priority, and should preferably be used.
- """
- if not self.env['CXX']:
- self.fatal('load a c++ compiler first, conf.load("compiler_cxx")')
-
- params = {
- 'lib': k and k[0] or kw.get('lib'),
- 'stlib': kw.get('stlib')
- }
- for key, value in self.options.__dict__.items():
- if not key.startswith('boost_'):
- continue
- key = key[len('boost_'):]
- params[key] = value and value or kw.get(key, '')
-
- var = kw.get('uselib_store', 'BOOST')
-
- self.find_program('dpkg-architecture', var='DPKG_ARCHITECTURE', mandatory=False)
- if self.env.DPKG_ARCHITECTURE:
- deb_host_multiarch = self.cmd_and_log([self.env.DPKG_ARCHITECTURE[0], '-qDEB_HOST_MULTIARCH'])
- BOOST_LIBS.insert(0, '/usr/lib/%s' % deb_host_multiarch.strip())
-
- self.start_msg('Checking boost includes')
- self.env['INCLUDES_%s' % var] = inc = self.boost_get_includes(**params)
- versions = self.boost_get_version(inc)
- self.env.BOOST_VERSION = versions[0]
- self.env.BOOST_VERSION_NUMBER = int(versions[1])
- self.end_msg("%d.%d.%d" % (int(versions[1]) / 100000,
- int(versions[1]) / 100 % 1000,
- int(versions[1]) % 100))
- if Logs.verbose:
- Logs.pprint('CYAN', ' path : %s' % self.env['INCLUDES_%s' % var])
-
- if not params['lib'] and not params['stlib']:
- return
- if 'static' in kw or 'static' in params:
- Logs.warn('boost: static parameter is deprecated, use stlib instead.')
- self.start_msg('Checking boost libs')
- path, libs, stlibs = self.boost_get_libs(**params)
- self.env['LIBPATH_%s' % var] = [path]
- self.env['STLIBPATH_%s' % var] = [path]
- self.env['LIB_%s' % var] = libs
- self.env['STLIB_%s' % var] = stlibs
- self.end_msg('ok')
- if Logs.verbose:
- Logs.pprint('CYAN', ' path : %s' % path)
- Logs.pprint('CYAN', ' shared libs : %s' % libs)
- Logs.pprint('CYAN', ' static libs : %s' % stlibs)
-
- def has_shlib(lib):
- return params['lib'] and lib in params['lib']
- def has_stlib(lib):
- return params['stlib'] and lib in params['stlib']
- def has_lib(lib):
- return has_shlib(lib) or has_stlib(lib)
- if has_lib('thread'):
- # not inside try_link to make check visible in the output
- self._check_pthread_flag(k, kw)
-
- def try_link():
- if has_lib('system'):
- self.check_cxx(fragment=BOOST_ERROR_CODE, use=var, execute=False)
- if has_lib('thread'):
- self.check_cxx(fragment=BOOST_THREAD_CODE, use=var, execute=False)
- if has_lib('log'):
- if not has_lib('thread'):
- self.env['DEFINES_%s' % var] += ['BOOST_LOG_NO_THREADS']
- if has_shlib('log'):
- self.env['DEFINES_%s' % var] += ['BOOST_LOG_DYN_LINK']
- self.check_cxx(fragment=BOOST_LOG_CODE, use=var, execute=False)
-
- if params.get('linkage_autodetect', False):
- self.start_msg("Attempting to detect boost linkage flags")
- toolset = self.boost_get_toolset(kw.get('toolset', ''))
- if toolset in ('vc',):
- # disable auto-linking feature, causing error LNK1181
- # because the code wants to be linked against
- self.env['DEFINES_%s' % var] += ['BOOST_ALL_NO_LIB']
-
- # if no dlls are present, we guess the .lib files are not stubs
- has_dlls = False
- for x in Utils.listdir(path):
- if x.endswith(self.env.cxxshlib_PATTERN % ''):
- has_dlls = True
- break
- if not has_dlls:
- self.env['STLIBPATH_%s' % var] = [path]
- self.env['STLIB_%s' % var] = libs
- del self.env['LIB_%s' % var]
- del self.env['LIBPATH_%s' % var]
-
- # we attempt to play with some known-to-work CXXFLAGS combinations
- for cxxflags in (['/MD', '/EHsc'], []):
- self.env.stash()
- self.env["CXXFLAGS_%s" % var] += cxxflags
- try:
- try_link()
- except Errors.ConfigurationError as e:
- self.env.revert()
- exc = e
- else:
- self.end_msg("ok: winning cxxflags combination: %s" % (self.env["CXXFLAGS_%s" % var]))
- exc = None
- self.env.commit()
- break
-
- if exc is not None:
- self.end_msg("Could not auto-detect boost linking flags combination, you may report it to boost.py author", ex=exc)
- self.fatal('The configuration failed')
- else:
- self.end_msg("Boost linkage flags auto-detection not implemented (needed ?) for this toolchain")
- self.fatal('The configuration failed')
- else:
- self.start_msg('Checking for boost linkage')
- try:
- try_link()
- except Errors.ConfigurationError as e:
- self.end_msg("Could not link against boost libraries using supplied options")
- self.fatal('The configuration failed')
- self.end_msg('ok')
-
-
-@feature('cxx')
-@after_method('apply_link')
-def install_boost(self):
- if install_boost.done or not Utils.is_win32 or not self.bld.cmd.startswith('install'):
- return
- install_boost.done = True
- inst_to = getattr(self, 'install_path', '${BINDIR}')
- for lib in self.env.LIB_BOOST:
- try:
- file = self.bld.find_file(self.env.cxxshlib_PATTERN % lib, self.env.LIBPATH_BOOST)
- self.add_install_files(install_to=inst_to, install_from=self.bld.root.find_node(file))
- except:
- continue
-install_boost.done = False
-
diff --git a/waflib/extras/build_file_tracker.py b/waflib/extras/build_file_tracker.py
deleted file mode 100644
index c4f26fd..0000000
--- a/waflib/extras/build_file_tracker.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015
-
-"""
-Force files to depend on the timestamps of those located in the build directory. You may
-want to use this to force partial rebuilds, see playground/track_output_files/ for a working example.
-
-Note that there is a variety of ways to implement this, one may want use timestamps on source files too for example,
-or one may want to hash the files in the source directory only under certain conditions (md5_tstamp tool)
-or to hash the file in the build directory with its timestamp
-"""
-
-import os
-from waflib import Node, Utils
-
-def get_bld_sig(self):
- if not self.is_bld() or self.ctx.bldnode is self.ctx.srcnode:
- return Utils.h_file(self.abspath())
-
- try:
- # add the creation time to the signature
- return self.sig + str(os.stat(self.abspath()).st_mtime)
- except AttributeError:
- return None
-
-Node.Node.get_bld_sig = get_bld_sig
-
diff --git a/waflib/extras/build_logs.py b/waflib/extras/build_logs.py
deleted file mode 100644
index cdf8ed0..0000000
--- a/waflib/extras/build_logs.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2013 (ita)
-
-"""
-A system for recording all outputs to a log file. Just add the following to your wscript file::
-
- def init(ctx):
- ctx.load('build_logs')
-"""
-
-import atexit, sys, time, os, shutil, threading
-from waflib import ansiterm, Logs, Context
-
-# adding the logs under the build/ directory will clash with the clean/ command
-try:
- up = os.path.dirname(Context.g_module.__file__)
-except AttributeError:
- up = '.'
-LOGFILE = os.path.join(up, 'logs', time.strftime('%Y_%m_%d_%H_%M.log'))
-
-wlock = threading.Lock()
-class log_to_file(object):
- def __init__(self, stream, fileobj, filename):
- self.stream = stream
- self.encoding = self.stream.encoding
- self.fileobj = fileobj
- self.filename = filename
- self.is_valid = True
- def replace_colors(self, data):
- for x in Logs.colors_lst.values():
- if isinstance(x, str):
- data = data.replace(x, '')
- return data
- def write(self, data):
- try:
- wlock.acquire()
- self.stream.write(data)
- self.stream.flush()
- if self.is_valid:
- self.fileobj.write(self.replace_colors(data))
- finally:
- wlock.release()
- def fileno(self):
- return self.stream.fileno()
- def flush(self):
- self.stream.flush()
- if self.is_valid:
- self.fileobj.flush()
- def isatty(self):
- return self.stream.isatty()
-
-def init(ctx):
- global LOGFILE
- filename = os.path.abspath(LOGFILE)
- try:
- os.makedirs(os.path.dirname(os.path.abspath(filename)))
- except OSError:
- pass
-
- if hasattr(os, 'O_NOINHERIT'):
- fd = os.open(LOGFILE, os.O_CREAT | os.O_TRUNC | os.O_WRONLY | os.O_NOINHERIT)
- fileobj = os.fdopen(fd, 'w')
- else:
- fileobj = open(LOGFILE, 'w')
- old_stderr = sys.stderr
-
- # sys.stdout has already been replaced, so __stdout__ will be faster
- #sys.stdout = log_to_file(sys.stdout, fileobj, filename)
- #sys.stderr = log_to_file(sys.stderr, fileobj, filename)
- def wrap(stream):
- if stream.isatty():
- return ansiterm.AnsiTerm(stream)
- return stream
- sys.stdout = log_to_file(wrap(sys.__stdout__), fileobj, filename)
- sys.stderr = log_to_file(wrap(sys.__stderr__), fileobj, filename)
-
- # now mess with the logging module...
- for x in Logs.log.handlers:
- try:
- stream = x.stream
- except AttributeError:
- pass
- else:
- if id(stream) == id(old_stderr):
- x.stream = sys.stderr
-
-def exit_cleanup():
- try:
- fileobj = sys.stdout.fileobj
- except AttributeError:
- pass
- else:
- sys.stdout.is_valid = False
- sys.stderr.is_valid = False
- fileobj.close()
- filename = sys.stdout.filename
-
- Logs.info('Output logged to %r', filename)
-
- # then copy the log file to "latest.log" if possible
- up = os.path.dirname(os.path.abspath(filename))
- try:
- shutil.copy(filename, os.path.join(up, 'latest.log'))
- except OSError:
- # this may fail on windows due to processes spawned
- pass
-
-atexit.register(exit_cleanup)
-
diff --git a/waflib/extras/buildcopy.py b/waflib/extras/buildcopy.py
deleted file mode 100644
index a6d9ac8..0000000
--- a/waflib/extras/buildcopy.py
+++ /dev/null
@@ -1,82 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Calle Rosenquist, 2017 (xbreak)
-"""
-Create task that copies source files to the associated build node.
-This is useful to e.g. construct a complete Python package so it can be unit tested
-without installation.
-
-Source files to be copied can be specified either in `buildcopy_source` attribute, or
-`source` attribute. If both are specified `buildcopy_source` has priority.
-
-Examples::
-
- def build(bld):
- bld(name = 'bar',
- features = 'py buildcopy',
- source = bld.path.ant_glob('src/bar/*.py'))
-
- bld(name = 'py baz',
- features = 'buildcopy',
- buildcopy_source = bld.path.ant_glob('src/bar/*.py') + ['src/bar/resource.txt'])
-
-"""
-import os, shutil
-from waflib import Errors, Task, TaskGen, Utils, Node
-
-@TaskGen.before_method('process_source')
-@TaskGen.feature('buildcopy')
-def make_buildcopy(self):
- """
- Creates the buildcopy task.
- """
- def to_src_nodes(lst):
- """Find file nodes only in src, TaskGen.to_nodes will not work for this since it gives
- preference to nodes in build.
- """
- if isinstance(lst, Node.Node):
- if not lst.is_src():
- raise Errors.WafError('buildcopy: node %s is not in src'%lst)
- if not os.path.isfile(lst.abspath()):
- raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%lst)
- return lst
-
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- node = self.bld.path.get_src().search_node(lst)
- if node:
- if not os.path.isfile(node.abspath()):
- raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
- return node
-
- node = self.bld.path.get_src().find_node(lst)
- if node:
- if not os.path.isfile(node.abspath()):
- raise Errors.WafError('buildcopy: Cannot copy directory %s (unsupported action)'%node)
- return node
- raise Errors.WafError('buildcopy: File not found in src: %s'%os.path.join(*lst))
-
- nodes = [ to_src_nodes(n) for n in getattr(self, 'buildcopy_source', getattr(self, 'source', [])) ]
- node_pairs = [(n, n.get_bld()) for n in nodes]
- self.create_task('buildcopy', [n[0] for n in node_pairs], [n[1] for n in node_pairs], node_pairs=node_pairs)
-
-
-class buildcopy(Task.Task):
- """
- Copy for each pair `n` in `node_pairs`: n[0] -> n[1].
-
- Attribute `node_pairs` should contain a list of tuples describing source and target:
-
- node_pairs = [(in, out), ...]
-
- """
- color = 'PINK'
-
- def keyword(self):
- return 'Copying'
-
- def run(self):
- for f,t in self.node_pairs:
- t.parent.mkdir()
- shutil.copy2(f.abspath(), t.abspath())
diff --git a/waflib/extras/c_bgxlc.py b/waflib/extras/c_bgxlc.py
deleted file mode 100644
index 6e3eaf7..0000000
--- a/waflib/extras/c_bgxlc.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-"""
-IBM XL Compiler for Blue Gene
-"""
-
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-
-from waflib.Tools import xlc # method xlc_common_flags
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('c_bgxlc')
-
-@conf
-def find_bgxlc(conf):
- cc = conf.find_program(['bgxlc_r','bgxlc'], var='CC')
- conf.get_xlc_version(cc)
- conf.env.CC = cc
- conf.env.CC_NAME = 'bgxlc'
-
-def configure(conf):
- conf.find_bgxlc()
- conf.find_ar()
- conf.xlc_common_flags()
- conf.env.LINKFLAGS_cshlib = ['-G','-Wl,-bexpfull']
- conf.env.LINKFLAGS_cprogram = []
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/extras/c_dumbpreproc.py b/waflib/extras/c_dumbpreproc.py
deleted file mode 100644
index ce9e1a4..0000000
--- a/waflib/extras/c_dumbpreproc.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-Dumb C/C++ preprocessor for finding dependencies
-
-It will look at all include files it can find after removing the comments, so the following
-will always add the dependency on both "a.h" and "b.h"::
-
- #include "a.h"
- #ifdef B
- #include "b.h"
- #endif
- int main() {
- return 0;
- }
-
-To use::
-
- def configure(conf):
- conf.load('compiler_c')
- conf.load('c_dumbpreproc')
-"""
-
-import re
-from waflib.Tools import c_preproc
-
-re_inc = re.compile(
- '^[ \t]*(#|%:)[ \t]*(include)[ \t]*[<"](.*)[>"]\r*$',
- re.IGNORECASE | re.MULTILINE)
-
-def lines_includes(node):
- code = node.read()
- if c_preproc.use_trigraphs:
- for (a, b) in c_preproc.trig_def:
- code = code.split(a).join(b)
- code = c_preproc.re_nl.sub('', code)
- code = c_preproc.re_cpp.sub(c_preproc.repl, code)
- return [(m.group(2), m.group(3)) for m in re.finditer(re_inc, code)]
-
-parser = c_preproc.c_parser
-class dumb_parser(parser):
- def addlines(self, node):
- if node in self.nodes[:-1]:
- return
- self.currentnode_stack.append(node.parent)
-
- # Avoid reading the same files again
- try:
- lines = self.parse_cache[node]
- except KeyError:
- lines = self.parse_cache[node] = lines_includes(node)
-
- self.lines = lines + [(c_preproc.POPFILE, '')] + self.lines
-
- def start(self, node, env):
- try:
- self.parse_cache = node.ctx.parse_cache
- except AttributeError:
- self.parse_cache = node.ctx.parse_cache = {}
-
- self.addlines(node)
- while self.lines:
- (x, y) = self.lines.pop(0)
- if x == c_preproc.POPFILE:
- self.currentnode_stack.pop()
- continue
- self.tryfind(y)
-
-c_preproc.c_parser = dumb_parser
-
diff --git a/waflib/extras/c_emscripten.py b/waflib/extras/c_emscripten.py
deleted file mode 100644
index e1ac494..0000000
--- a/waflib/extras/c_emscripten.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 vi:ts=4:noexpandtab
-
-import subprocess, shlex, sys
-
-from waflib.Tools import ccroot, gcc, gxx
-from waflib.Configure import conf
-from waflib.TaskGen import after_method, feature
-
-from waflib.Tools.compiler_c import c_compiler
-from waflib.Tools.compiler_cxx import cxx_compiler
-
-for supported_os in ('linux', 'darwin', 'gnu', 'aix'):
- c_compiler[supported_os].append('c_emscripten')
- cxx_compiler[supported_os].append('c_emscripten')
-
-
-@conf
-def get_emscripten_version(conf, cc):
- """
- Emscripten doesn't support processing '-' like clang/gcc
- """
-
- dummy = conf.cachedir.parent.make_node("waf-emscripten.c")
- dummy.write("")
- cmd = cc + ['-dM', '-E', '-x', 'c', dummy.abspath()]
- env = conf.env.env or None
- try:
- p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env)
- out = p.communicate()[0]
- except Exception as e:
- conf.fatal('Could not determine emscripten version %r: %s' % (cmd, e))
-
- if not isinstance(out, str):
- out = out.decode(sys.stdout.encoding or 'latin-1')
-
- k = {}
- out = out.splitlines()
- for line in out:
- lst = shlex.split(line)
- if len(lst)>2:
- key = lst[1]
- val = lst[2]
- k[key] = val
-
- if not ('__clang__' in k and 'EMSCRIPTEN' in k):
- conf.fatal('Could not determine the emscripten compiler version.')
-
- conf.env.DEST_OS = 'generic'
- conf.env.DEST_BINFMT = 'elf'
- conf.env.DEST_CPU = 'asm-js'
- conf.env.CC_VERSION = (k['__clang_major__'], k['__clang_minor__'], k['__clang_patchlevel__'])
- return k
-
-@conf
-def find_emscripten(conf):
- cc = conf.find_program(['emcc'], var='CC')
- conf.get_emscripten_version(cc)
- conf.env.CC = cc
- conf.env.CC_NAME = 'emscripten'
- cxx = conf.find_program(['em++'], var='CXX')
- conf.env.CXX = cxx
- conf.env.CXX_NAME = 'emscripten'
- conf.find_program(['emar'], var='AR')
-
-def configure(conf):
- conf.find_emscripten()
- conf.find_ar()
- conf.gcc_common_flags()
- conf.gxx_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
- conf.env.ARFLAGS = ['rcs']
- conf.env.cshlib_PATTERN = '%s.js'
- conf.env.cxxshlib_PATTERN = '%s.js'
- conf.env.cstlib_PATTERN = '%s.a'
- conf.env.cxxstlib_PATTERN = '%s.a'
- conf.env.cprogram_PATTERN = '%s.html'
- conf.env.cxxprogram_PATTERN = '%s.html'
- conf.env.CXX_TGT_F = ['-c', '-o', '']
- conf.env.CC_TGT_F = ['-c', '-o', '']
- conf.env.CXXLNK_TGT_F = ['-o', '']
- conf.env.CCLNK_TGT_F = ['-o', '']
- conf.env.append_value('LINKFLAGS',['-Wl,--enable-auto-import'])
diff --git a/waflib/extras/c_nec.py b/waflib/extras/c_nec.py
deleted file mode 100644
index 96bfae4..0000000
--- a/waflib/extras/c_nec.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-"""
-NEC SX Compiler for SX vector systems
-"""
-
-import re
-from waflib import Utils
-from waflib.Tools import ccroot,ar
-from waflib.Configure import conf
-
-from waflib.Tools import xlc # method xlc_common_flags
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('c_nec')
-
-@conf
-def find_sxc(conf):
- cc = conf.find_program(['sxcc'], var='CC')
- conf.get_sxc_version(cc)
- conf.env.CC = cc
- conf.env.CC_NAME = 'sxcc'
-
-@conf
-def get_sxc_version(conf, fc):
- version_re = re.compile(r"C\+\+/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
- p = Utils.subprocess.Popen(cmd, stdin=False, stdout=Utils.subprocess.PIPE, stderr=Utils.subprocess.PIPE, env=None)
- out, err = p.communicate()
-
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the NEC C compiler version.')
- k = match.groupdict()
- conf.env['C_VERSION'] = (k['major'], k['minor'])
-
-@conf
-def sxc_common_flags(conf):
- v=conf.env
- v['CC_SRC_F']=[]
- v['CC_TGT_F']=['-c','-o']
- if not v['LINK_CC']:
- v['LINK_CC']=v['CC']
- v['CCLNK_SRC_F']=[]
- v['CCLNK_TGT_F']=['-o']
- v['CPPPATH_ST']='-I%s'
- v['DEFINES_ST']='-D%s'
- v['LIB_ST']='-l%s'
- v['LIBPATH_ST']='-L%s'
- v['STLIB_ST']='-l%s'
- v['STLIBPATH_ST']='-L%s'
- v['RPATH_ST']=''
- v['SONAME_ST']=[]
- v['SHLIB_MARKER']=[]
- v['STLIB_MARKER']=[]
- v['LINKFLAGS_cprogram']=['']
- v['cprogram_PATTERN']='%s'
- v['CFLAGS_cshlib']=['-fPIC']
- v['LINKFLAGS_cshlib']=['']
- v['cshlib_PATTERN']='lib%s.so'
- v['LINKFLAGS_cstlib']=[]
- v['cstlib_PATTERN']='lib%s.a'
-
-def configure(conf):
- conf.find_sxc()
- conf.find_program('sxar',VAR='AR')
- conf.sxc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/extras/cabal.py b/waflib/extras/cabal.py
deleted file mode 100644
index e10a0d1..0000000
--- a/waflib/extras/cabal.py
+++ /dev/null
@@ -1,152 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Anton Feldmann, 2012
-# "Base for cabal"
-
-from waflib import Task, Utils
-from waflib.TaskGen import extension
-from waflib.Utils import threading
-from shutil import rmtree
-
-lock = threading.Lock()
-registering = False
-
-def configure(self):
- self.find_program('cabal', var='CABAL')
- self.find_program('ghc-pkg', var='GHCPKG')
- pkgconfd = self.bldnode.abspath() + '/package.conf.d'
- self.env.PREFIX = self.bldnode.abspath() + '/dist'
- self.env.PKGCONFD = pkgconfd
- if self.root.find_node(pkgconfd + '/package.cache'):
- self.msg('Using existing package database', pkgconfd, color='CYAN')
- else:
- pkgdir = self.root.find_dir(pkgconfd)
- if pkgdir:
- self.msg('Deleting corrupt package database', pkgdir.abspath(), color ='RED')
- rmtree(pkgdir.abspath())
- pkgdir = None
-
- self.cmd_and_log(self.env.GHCPKG + ['init', pkgconfd])
- self.msg('Created package database', pkgconfd, color = 'YELLOW' if pkgdir else 'GREEN')
-
-@extension('.cabal')
-def process_cabal(self, node):
- out_dir_node = self.bld.root.find_dir(self.bld.out_dir)
- package_node = node.change_ext('.package')
- package_node = out_dir_node.find_or_declare(package_node.name)
- build_node = node.parent.get_bld()
- build_path = build_node.abspath()
- config_node = build_node.find_or_declare('setup-config')
- inplace_node = build_node.find_or_declare('package.conf.inplace')
-
- config_task = self.create_task('cabal_configure', node)
- config_task.cwd = node.parent.abspath()
- config_task.depends_on = getattr(self, 'depends_on', '')
- config_task.build_path = build_path
- config_task.set_outputs(config_node)
-
- build_task = self.create_task('cabal_build', config_node)
- build_task.cwd = node.parent.abspath()
- build_task.build_path = build_path
- build_task.set_outputs(inplace_node)
-
- copy_task = self.create_task('cabal_copy', inplace_node)
- copy_task.cwd = node.parent.abspath()
- copy_task.depends_on = getattr(self, 'depends_on', '')
- copy_task.build_path = build_path
-
- last_task = copy_task
- task_list = [config_task, build_task, copy_task]
-
- if (getattr(self, 'register', False)):
- register_task = self.create_task('cabal_register', inplace_node)
- register_task.cwd = node.parent.abspath()
- register_task.set_run_after(copy_task)
- register_task.build_path = build_path
-
- pkgreg_task = self.create_task('ghcpkg_register', inplace_node)
- pkgreg_task.cwd = node.parent.abspath()
- pkgreg_task.set_run_after(register_task)
- pkgreg_task.build_path = build_path
-
- last_task = pkgreg_task
- task_list += [register_task, pkgreg_task]
-
- touch_task = self.create_task('cabal_touch', inplace_node)
- touch_task.set_run_after(last_task)
- touch_task.set_outputs(package_node)
- touch_task.build_path = build_path
-
- task_list += [touch_task]
-
- return task_list
-
-def get_all_src_deps(node):
- hs_deps = node.ant_glob('**/*.hs')
- hsc_deps = node.ant_glob('**/*.hsc')
- lhs_deps = node.ant_glob('**/*.lhs')
- c_deps = node.ant_glob('**/*.c')
- cpp_deps = node.ant_glob('**/*.cpp')
- proto_deps = node.ant_glob('**/*.proto')
- return sum([hs_deps, hsc_deps, lhs_deps, c_deps, cpp_deps, proto_deps], [])
-
-class Cabal(Task.Task):
- def scan(self):
- return (get_all_src_deps(self.generator.path), ())
-
-class cabal_configure(Cabal):
- run_str = '${CABAL} configure -v0 --prefix=${PREFIX} --global --user --package-db=${PKGCONFD} --builddir=${tsk.build_path}'
- shell = True
-
- def scan(self):
- out_node = self.generator.bld.root.find_dir(self.generator.bld.out_dir)
- deps = [out_node.find_or_declare(dep).change_ext('.package') for dep in Utils.to_list(self.depends_on)]
- return (deps, ())
-
-class cabal_build(Cabal):
- run_str = '${CABAL} build -v1 --builddir=${tsk.build_path}/'
- shell = True
-
-class cabal_copy(Cabal):
- run_str = '${CABAL} copy -v0 --builddir=${tsk.build_path}'
- shell = True
-
-class cabal_register(Cabal):
- run_str = '${CABAL} register -v0 --gen-pkg-config=${tsk.build_path}/pkg.config --builddir=${tsk.build_path}'
- shell = True
-
-class ghcpkg_register(Cabal):
- run_str = '${GHCPKG} update -v0 --global --user --package-conf=${PKGCONFD} ${tsk.build_path}/pkg.config'
- shell = True
-
- def runnable_status(self):
- global lock, registering
-
- val = False
- lock.acquire()
- val = registering
- lock.release()
-
- if val:
- return Task.ASK_LATER
-
- ret = Task.Task.runnable_status(self)
- if ret == Task.RUN_ME:
- lock.acquire()
- registering = True
- lock.release()
-
- return ret
-
- def post_run(self):
- global lock, registering
-
- lock.acquire()
- registering = False
- lock.release()
-
- return Task.Task.post_run(self)
-
-class cabal_touch(Cabal):
- run_str = 'touch ${TGT}'
-
diff --git a/waflib/extras/cfg_altoptions.py b/waflib/extras/cfg_altoptions.py
deleted file mode 100644
index 47b1189..0000000
--- a/waflib/extras/cfg_altoptions.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Tool to extend c_config.check_cfg()
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows to work around the absence of ``*-config`` programs
-on systems, by keeping the same clean configuration syntax but inferring
-values or permitting their modification via the options interface.
-
-Note that pkg-config can also support setting ``PKG_CONFIG_PATH``,
-so you can put custom files in a folder containing new .pc files.
-This tool could also be implemented by taking advantage of this fact.
-
-Usage::
-
- def options(opt):
- opt.load('c_config_alt')
- opt.add_package_option('package')
-
- def configure(cfg):
- conf.load('c_config_alt')
- conf.check_cfg(...)
-
-Known issues:
-
-- Behavior with different build contexts...
-
-"""
-
-import os
-import functools
-from waflib import Configure, Options, Errors
-
-def name_to_dest(x):
- return x.lower().replace('-', '_')
-
-
-def options(opt):
- def x(opt, param):
- dest = name_to_dest(param)
- gr = opt.get_option_group("configure options")
- gr.add_option('--%s-root' % dest,
- help="path containing include and lib subfolders for %s" \
- % param,
- )
-
- opt.add_package_option = functools.partial(x, opt)
-
-
-check_cfg_old = getattr(Configure.ConfigurationContext, 'check_cfg')
-
-@Configure.conf
-def check_cfg(conf, *k, **kw):
- if k:
- lst = k[0].split()
- kw['package'] = lst[0]
- kw['args'] = ' '.join(lst[1:])
-
- if not 'package' in kw:
- return check_cfg_old(conf, **kw)
-
- package = kw['package']
-
- package_lo = name_to_dest(package)
- package_hi = package.upper().replace('-', '_') # TODO FIXME
- package_hi = kw.get('uselib_store', package_hi)
-
- def check_folder(path, name):
- try:
- assert os.path.isdir(path)
- except AssertionError:
- raise Errors.ConfigurationError(
- "%s_%s (%s) is not a folder!" \
- % (package_lo, name, path))
- return path
-
- root = getattr(Options.options, '%s_root' % package_lo, None)
-
- if root is None:
- return check_cfg_old(conf, **kw)
- else:
- def add_manual_var(k, v):
- conf.start_msg('Adding for %s a manual var' % (package))
- conf.env["%s_%s" % (k, package_hi)] = v
- conf.end_msg("%s = %s" % (k, v))
-
-
- check_folder(root, 'root')
-
- pkg_inc = check_folder(os.path.join(root, "include"), 'inc')
- add_manual_var('INCLUDES', [pkg_inc])
- pkg_lib = check_folder(os.path.join(root, "lib"), 'libpath')
- add_manual_var('LIBPATH', [pkg_lib])
- add_manual_var('LIB', [package])
-
- for x in kw.get('manual_deps', []):
- for k, v in sorted(conf.env.get_merged_dict().items()):
- if k.endswith('_%s' % x):
- k = k.replace('_%s' % x, '')
- conf.start_msg('Adding for %s a manual dep' \
- %(package))
- conf.env["%s_%s" % (k, package_hi)] += v
- conf.end_msg('%s += %s' % (k, v))
-
- return True
-
diff --git a/waflib/extras/clang_compilation_database.py b/waflib/extras/clang_compilation_database.py
deleted file mode 100644
index 4d9b5e2..0000000
--- a/waflib/extras/clang_compilation_database.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Christoph Koke, 2013
-
-"""
-Writes the c and cpp compile commands into build/compile_commands.json
-see http://clang.llvm.org/docs/JSONCompilationDatabase.html
-
-Usage:
-
- def configure(conf):
- conf.load('compiler_cxx')
- ...
- conf.load('clang_compilation_database')
-"""
-
-import sys, os, json, shlex, pipes
-from waflib import Logs, TaskGen, Task
-
-Task.Task.keep_last_cmd = True
-
-@TaskGen.feature('c', 'cxx')
-@TaskGen.after_method('process_use')
-def collect_compilation_db_tasks(self):
- "Add a compilation database entry for compiled tasks"
- try:
- clang_db = self.bld.clang_compilation_database_tasks
- except AttributeError:
- clang_db = self.bld.clang_compilation_database_tasks = []
- self.bld.add_post_fun(write_compilation_database)
-
- tup = tuple(y for y in [Task.classes.get(x) for x in ('c', 'cxx')] if y)
- for task in getattr(self, 'compiled_tasks', []):
- if isinstance(task, tup):
- clang_db.append(task)
-
-def write_compilation_database(ctx):
- "Write the clang compilation database as JSON"
- database_file = ctx.bldnode.make_node('compile_commands.json')
- Logs.info('Build commands will be stored in %s', database_file.path_from(ctx.path))
- try:
- root = json.load(database_file)
- except IOError:
- root = []
- clang_db = dict((x['file'], x) for x in root)
- for task in getattr(ctx, 'clang_compilation_database_tasks', []):
- try:
- cmd = task.last_cmd
- except AttributeError:
- continue
- directory = getattr(task, 'cwd', ctx.variant_dir)
- f_node = task.inputs[0]
- filename = os.path.relpath(f_node.abspath(), directory)
- entry = {
- "directory": directory,
- "arguments": cmd,
- "file": filename,
- }
- clang_db[filename] = entry
- root = list(clang_db.values())
- database_file.write(json.dumps(root, indent=2))
-
-# Override the runnable_status function to do a dummy/dry run when the file doesn't need to be compiled.
-# This will make sure compile_commands.json is always fully up to date.
-# Previously you could end up with a partial compile_commands.json if the build failed.
-for x in ('c', 'cxx'):
- if x not in Task.classes:
- continue
-
- t = Task.classes[x]
-
- def runnable_status(self):
- def exec_command(cmd, **kw):
- pass
-
- run_status = self.old_runnable_status()
- if run_status == Task.SKIP_ME:
- setattr(self, 'old_exec_command', getattr(self, 'exec_command', None))
- setattr(self, 'exec_command', exec_command)
- self.run()
- setattr(self, 'exec_command', getattr(self, 'old_exec_command', None))
- return run_status
-
- setattr(t, 'old_runnable_status', getattr(t, 'runnable_status', None))
- setattr(t, 'runnable_status', runnable_status)
diff --git a/waflib/extras/codelite.py b/waflib/extras/codelite.py
deleted file mode 100644
index 523302c..0000000
--- a/waflib/extras/codelite.py
+++ /dev/null
@@ -1,875 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# CodeLite Project
-# Christian Klein (chrikle@berlios.de)
-# Created: Jan 2012
-# As templete for this file I used the msvs.py
-# I hope this template will work proper
-
-"""
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-3. The name of the author may not be used to endorse or promote products
- derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-"""
-
-"""
-
-
-To add this tool to your project:
-def options(conf):
- opt.load('codelite')
-
-It can be a good idea to add the sync_exec tool too.
-
-To generate solution files:
-$ waf configure codelite
-
-To customize the outputs, provide subclasses in your wscript files:
-
-from waflib.extras import codelite
-class vsnode_target(codelite.vsnode_target):
- def get_build_command(self, props):
- # likely to be required
- return "waf.bat build"
- def collect_source(self):
- # likely to be required
- ...
-class codelite_bar(codelite.codelite_generator):
- def init(self):
- codelite.codelite_generator.init(self)
- self.vsnode_target = vsnode_target
-
-The codelite class re-uses the same build() function for reading the targets (task generators),
-you may therefore specify codelite settings on the context object:
-
-def build(bld):
- bld.codelite_solution_name = 'foo.workspace'
- bld.waf_command = 'waf.bat'
- bld.projects_dir = bld.srcnode.make_node('')
- bld.projects_dir.mkdir()
-
-
-ASSUMPTIONS:
-* a project can be either a directory or a target, project files are written only for targets that have source files
-* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
-"""
-
-import os, re, sys
-import uuid # requires python 2.5
-from waflib.Build import BuildContext
-from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="utf-8"?>
-<CodeLite_Project Name="${project.name}" InternalType="Library">
- <Plugins>
- <Plugin Name="qmake">
- <![CDATA[00010001N0005Release000000000000]]>
- </Plugin>
- </Plugins>
- <Description/>
- <Dependencies/>
- <VirtualDirectory Name="src">
- ${for x in project.source}
- ${if (project.get_key(x)=="sourcefile")}
- <File Name="${x.abspath()}"/>
- ${endif}
- ${endfor}
- </VirtualDirectory>
- <VirtualDirectory Name="include">
- ${for x in project.source}
- ${if (project.get_key(x)=="headerfile")}
- <File Name="${x.abspath()}"/>
- ${endif}
- ${endfor}
- </VirtualDirectory>
- <Settings Type="Dynamic Library">
- <GlobalSettings>
- <Compiler Options="" C_Options="">
- <IncludePath Value="."/>
- </Compiler>
- <Linker Options="">
- <LibraryPath Value="."/>
- </Linker>
- <ResourceCompiler Options=""/>
- </GlobalSettings>
- <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="Dynamic Library" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
- <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
- <IncludePath Value="."/>
- <IncludePath Value="."/>
- </Compiler>
- <Linker Options="" Required="yes">
- <LibraryPath Value=""/>
- </Linker>
- <ResourceCompiler Options="" Required="no"/>
- <General OutputFile="${xml:project.build_properties[0].output_file}" IntermediateDirectory="" Command="" CommandArguments="" PauseExecWhenProcTerminates="yes"/>
- <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
- <![CDATA[]]>
- </Environment>
- <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
- <PostConnectCommands/>
- <StartupCommands/>
- </Releaseger>
- <PreBuild/>
- <PostBuild/>
- <CustomBuild Enabled="yes">
- $b = project.build_properties[0]}
- <RebuildCommand>${xml:project.get_rebuild_command(project.build_properties[0])}</RebuildCommand>
- <CleanCommand>${xml:project.get_clean_command(project.build_properties[0])}</CleanCommand>
- <BuildCommand>${xml:project.get_build_command(project.build_properties[0])}</BuildCommand>
- <Target Name="Install">${xml:project.get_install_command(project.build_properties[0])}</Target>
- <Target Name="Build and Install">${xml:project.get_build_and_install_command(project.build_properties[0])}</Target>
- <Target Name="Build All">${xml:project.get_build_all_command(project.build_properties[0])}</Target>
- <Target Name="Rebuild All">${xml:project.get_rebuild_all_command(project.build_properties[0])}</Target>
- <Target Name="Clean All">${xml:project.get_clean_all_command(project.build_properties[0])}</Target>
- <Target Name="Build and Install All">${xml:project.get_build_and_install_all_command(project.build_properties[0])}</Target>
- <PreprocessFileCommand/>
- <SingleFileCommand/>
- <MakefileGenerationCommand/>
- <ThirdPartyToolName>None</ThirdPartyToolName>
- <WorkingDirectory/>
- </CustomBuild>
- <AdditionalRules>
- <CustomPostBuild/>
- <CustomPreBuild/>
- </AdditionalRules>
- <Completion>
- <ClangCmpFlags/>
- <ClangPP/>
- <SearchPaths/>
- </Completion>
- </Configuration>
- <Configuration Name="Release" CompilerType="gnu gcc" ReleasegerType="GNU gdb Releaseger" Type="" BuildCmpWithGlobalSettings="append" BuildLnkWithGlobalSettings="append" BuildResWithGlobalSettings="append">
- <Compiler Options="" C_Options="" Required="yes" PreCompiledHeader="" PCHInCommandLine="no" UseDifferentPCHFlags="no" PCHFlags="">
- <IncludePath Value="."/>
- </Compiler>
- <Linker Options="" Required="yes"/>
- <ResourceCompiler Options="" Required="no"/>
- <General OutputFile="" IntermediateDirectory="./Release" Command="" CommandArguments="" UseSeparateReleaseArgs="no" ReleaseArguments="" WorkingDirectory="$(IntermediateDirectory)" PauseExecWhenProcTerminates="yes"/>
- <Environment EnvVarSetName="&lt;Use Defaults&gt;" DbgSetName="&lt;Use Defaults&gt;">
- <![CDATA[
-
-
-
- ]]>
- </Environment>
- <Releaseger IsRemote="no" RemoteHostName="" RemoteHostPort="" ReleasegerPath="">
- <PostConnectCommands/>
- <StartupCommands/>
- </Releaseger>
- <PreBuild/>
- <PostBuild/>
- <CustomBuild Enabled="no">
- <RebuildCommand/>
- <CleanCommand/>
- <BuildCommand/>
- <PreprocessFileCommand/>
- <SingleFileCommand/>
- <MakefileGenerationCommand/>
- <ThirdPartyToolName/>
- <WorkingDirectory/>
- </CustomBuild>
- <AdditionalRules>
- <CustomPostBuild/>
- <CustomPreBuild/>
- </AdditionalRules>
- <Completion>
- <ClangCmpFlags/>
- <ClangPP/>
- <SearchPaths/>
- </Completion>
- </Configuration>
- </Settings>
-</CodeLite_Project>'''
-
-
-
-
-SOLUTION_TEMPLATE = '''<?xml version="1.0" encoding="utf-8"?>
-<CodeLite_Workspace Name="${getattr(project, 'codelite_solution_name', None)[:-10]}" Database="./${getattr(project, 'codelite_solution_name', None)[:-10]}.tags">
-${for p in project.all_projects}
- <Project Name = "${p.name}" Path = "${p.title}" Active="No"/>
-${endfor}
- <BuildMatrix>
- <WorkspaceConfiguration Name="Release" Selected="yes">
-${for p in project.all_projects}
- <Project Name="${p.name}" ConfigName="Release"/>
-${endfor}
- </WorkspaceConfiguration>
- </BuildMatrix>
-</CodeLite_Workspace>'''
-
-
-
-COMPILE_TEMPLATE = '''def f(project):
- lst = []
- def xml_escape(value):
- return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
- %s
-
- #f = open('cmd.txt', 'w')
- #f.write(str(lst))
- #f.close()
- return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
- """
- Compile a template expression into a python function (like jsps, but way shorter)
- """
- extr = []
- def repl(match):
- g = match.group
- if g('dollar'):
- return "$"
- elif g('backslash'):
- return "\\"
- elif g('subst'):
- extr.append(g('code'))
- return "<<|@|>>"
- return None
-
- line2 = reg_act.sub(repl, line)
- params = line2.split('<<|@|>>')
- assert(extr)
-
-
- indent = 0
- buf = []
- app = buf.append
-
- def app(txt):
- buf.append(indent * '\t' + txt)
-
- for x in range(len(extr)):
- if params[x]:
- app("lst.append(%r)" % params[x])
-
- f = extr[x]
- if f.startswith(('if', 'for')):
- app(f + ':')
- indent += 1
- elif f.startswith('py:'):
- app(f[3:])
- elif f.startswith(('endif', 'endfor')):
- indent -= 1
- elif f.startswith(('else', 'elif')):
- indent -= 1
- app(f + ':')
- indent += 1
- elif f.startswith('xml:'):
- app('lst.append(xml_escape(%s))' % f[4:])
- else:
- #app('lst.append((%s) or "cannot find %s")' % (f, f))
- app('lst.append(%s)' % f)
-
- if extr:
- if params[-1]:
- app("lst.append(%r)" % params[-1])
-
- fun = COMPILE_TEMPLATE % "\n\t".join(buf)
- #print(fun)
- return Task.funex(fun)
-
-
-re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
-def rm_blank_lines(txt):
- txt = re_blank.sub('\r\n', txt)
- return txt
-
-BOM = '\xef\xbb\xbf'
-try:
- BOM = bytes(BOM, 'latin-1') # python 3
-except (TypeError, NameError):
- pass
-
-def stealth_write(self, data, flags='wb'):
- try:
- unicode
- except NameError:
- data = data.encode('utf-8') # python 3
- else:
- data = data.decode(sys.getfilesystemencoding(), 'replace')
- data = data.encode('utf-8')
-
- if self.name.endswith('.project'):
- data = BOM + data
-
- try:
- txt = self.read(flags='rb')
- if txt != data:
- raise ValueError('must write')
- except (IOError, ValueError):
- self.write(data, flags=flags)
- else:
- Logs.debug('codelite: skipping %r', self)
-Node.Node.stealth_write = stealth_write
-
-re_quote = re.compile("[^a-zA-Z0-9-]")
-def quote(s):
- return re_quote.sub("_", s)
-
-def xml_escape(value):
- return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-def make_uuid(v, prefix = None):
- """
- simple utility function
- """
- if isinstance(v, dict):
- keys = list(v.keys())
- keys.sort()
- tmp = str([(k, v[k]) for k in keys])
- else:
- tmp = str(v)
- d = Utils.md5(tmp.encode()).hexdigest().upper()
- if prefix:
- d = '%s%s' % (prefix, d[8:])
- gid = uuid.UUID(d, version = 4)
- return str(gid).upper()
-
-def diff(node, fromnode):
- # difference between two nodes, but with "(..)" instead of ".."
- c1 = node
- c2 = fromnode
-
- c1h = c1.height()
- c2h = c2.height()
-
- lst = []
- up = 0
-
- while c1h > c2h:
- lst.append(c1.name)
- c1 = c1.parent
- c1h -= 1
-
- while c2h > c1h:
- up += 1
- c2 = c2.parent
- c2h -= 1
-
- while id(c1) != id(c2):
- lst.append(c1.name)
- up += 1
-
- c1 = c1.parent
- c2 = c2.parent
-
- for i in range(up):
- lst.append('(..)')
- lst.reverse()
- return tuple(lst)
-
-class build_property(object):
- pass
-
-class vsnode(object):
- """
- Abstract class representing visual studio elements
- We assume that all visual studio nodes have a uuid and a parent
- """
- def __init__(self, ctx):
- self.ctx = ctx # codelite context
- self.name = '' # string, mandatory
- self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
- self.uuid = '' # string, mandatory
- self.parent = None # parent node for visual studio nesting
-
- def get_waf(self):
- """
- Override in subclasses...
- """
- return '%s/%s' % (self.ctx.srcnode.abspath(), getattr(self.ctx, 'waf_command', 'waf'))
-
- def ptype(self):
- """
- Return a special uuid for projects written in the solution file
- """
- pass
-
- def write(self):
- """
- Write the project file, by default, do nothing
- """
- pass
-
- def make_uuid(self, val):
- """
- Alias for creating uuid values easily (the templates cannot access global variables)
- """
- return make_uuid(val)
-
-class vsnode_vsdir(vsnode):
- """
- Nodes representing visual studio folders (which do not match the filesystem tree!)
- """
- VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
- def __init__(self, ctx, uuid, name, vspath=''):
- vsnode.__init__(self, ctx)
- self.title = self.name = name
- self.uuid = uuid
- self.vspath = vspath or name
-
- def ptype(self):
- return self.VS_GUID_SOLUTIONFOLDER
-
-class vsnode_project(vsnode):
- """
- Abstract class representing visual studio project elements
- A project is assumed to be writable, and has a node representing the file to write to
- """
- VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
- def ptype(self):
- return self.VS_GUID_VCPROJ
-
- def __init__(self, ctx, node):
- vsnode.__init__(self, ctx)
- self.path = node
- self.uuid = make_uuid(node.abspath())
- self.name = node.name
- self.title = self.path.abspath()
- self.source = [] # list of node objects
- self.build_properties = [] # list of properties (nmake commands, output dir, etc)
-
- def dirs(self):
- """
- Get the list of parent folders of the source files (header files included)
- for writing the filters
- """
- lst = []
- def add(x):
- if x.height() > self.tg.path.height() and x not in lst:
- lst.append(x)
- add(x.parent)
- for x in self.source:
- add(x.parent)
- return lst
-
- def write(self):
- Logs.debug('codelite: creating %r', self.path)
- #print "self.name:",self.name
-
- # first write the project file
- template1 = compile_template(PROJECT_TEMPLATE)
- proj_str = template1(self)
- proj_str = rm_blank_lines(proj_str)
- self.path.stealth_write(proj_str)
-
- # then write the filter
- #template2 = compile_template(FILTER_TEMPLATE)
- #filter_str = template2(self)
- #filter_str = rm_blank_lines(filter_str)
- #tmp = self.path.parent.make_node(self.path.name + '.filters')
- #tmp.stealth_write(filter_str)
-
- def get_key(self, node):
- """
- required for writing the source files
- """
- name = node.name
- if name.endswith(('.cpp', '.c')):
- return 'sourcefile'
- return 'headerfile'
-
- def collect_properties(self):
- """
- Returns a list of triplet (configuration, platform, output_directory)
- """
- ret = []
- for c in self.ctx.configurations:
- for p in self.ctx.platforms:
- x = build_property()
- x.outdir = ''
-
- x.configuration = c
- x.platform = p
-
- x.preprocessor_definitions = ''
- x.includes_search_path = ''
-
- # can specify "deploy_dir" too
- ret.append(x)
- self.build_properties = ret
-
- def get_build_params(self, props):
- opt = ''
- return (self.get_waf(), opt)
-
- def get_build_command(self, props):
- return "%s build %s" % self.get_build_params(props)
-
- def get_clean_command(self, props):
- return "%s clean %s" % self.get_build_params(props)
-
- def get_rebuild_command(self, props):
- return "%s clean build %s" % self.get_build_params(props)
-
- def get_install_command(self, props):
- return "%s install %s" % self.get_build_params(props)
- def get_build_and_install_command(self, props):
- return "%s build install %s" % self.get_build_params(props)
-
- def get_build_and_install_all_command(self, props):
- return "%s build install" % self.get_build_params(props)[0]
-
- def get_clean_all_command(self, props):
- return "%s clean" % self.get_build_params(props)[0]
-
- def get_build_all_command(self, props):
- return "%s build" % self.get_build_params(props)[0]
-
- def get_rebuild_all_command(self, props):
- return "%s clean build" % self.get_build_params(props)[0]
-
- def get_filter_name(self, node):
- lst = diff(node, self.tg.path)
- return '\\'.join(lst) or '.'
-
-class vsnode_alias(vsnode_project):
- def __init__(self, ctx, node, name):
- vsnode_project.__init__(self, ctx, node)
- self.name = name
- self.output_file = ''
-
-class vsnode_build_all(vsnode_alias):
- """
- Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
- This is the only alias enabled by default
- """
- def __init__(self, ctx, node, name='build_all_projects'):
- vsnode_alias.__init__(self, ctx, node, name)
- self.is_active = True
-
-class vsnode_install_all(vsnode_alias):
- """
- Fake target used to emulate the behaviour of "make install"
- """
- def __init__(self, ctx, node, name='install_all_projects'):
- vsnode_alias.__init__(self, ctx, node, name)
-
- def get_build_command(self, props):
- return "%s build install %s" % self.get_build_params(props)
-
- def get_clean_command(self, props):
- return "%s clean %s" % self.get_build_params(props)
-
- def get_rebuild_command(self, props):
- return "%s clean build install %s" % self.get_build_params(props)
-
-class vsnode_project_view(vsnode_alias):
- """
- Fake target used to emulate a file system view
- """
- def __init__(self, ctx, node, name='project_view'):
- vsnode_alias.__init__(self, ctx, node, name)
- self.tg = self.ctx() # fake one, cannot remove
- self.exclude_files = Node.exclude_regs + '''
-waf-2*
-waf3-2*/**
-.waf-2*
-.waf3-2*/**
-**/*.sdf
-**/*.suo
-**/*.ncb
-**/%s
- ''' % Options.lockfile
-
- def collect_source(self):
- # this is likely to be slow
- self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
-
- def get_build_command(self, props):
- params = self.get_build_params(props) + (self.ctx.cmd,)
- return "%s %s %s" % params
-
- def get_clean_command(self, props):
- return ""
-
- def get_rebuild_command(self, props):
- return self.get_build_command(props)
-
-class vsnode_target(vsnode_project):
- """
- CodeLite project representing a targets (programs, libraries, etc) and bound
- to a task generator
- """
- def __init__(self, ctx, tg):
- """
- A project is more or less equivalent to a file/folder
- """
- base = getattr(ctx, 'projects_dir', None) or tg.path
- node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
- vsnode_project.__init__(self, ctx, node)
- self.name = quote(tg.name)
- self.tg = tg # task generator
-
- def get_build_params(self, props):
- """
- Override the default to add the target name
- """
- opt = ''
- if getattr(self, 'tg', None):
- opt += " --targets=%s" % self.tg.name
- return (self.get_waf(), opt)
-
- def collect_source(self):
- tg = self.tg
- source_files = tg.to_nodes(getattr(tg, 'source', []))
- include_dirs = Utils.to_list(getattr(tg, 'codelite_includes', []))
- include_files = []
- for x in include_dirs:
- if isinstance(x, str):
- x = tg.path.find_node(x)
- if x:
- lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
- include_files.extend(lst)
-
- # remove duplicates
- self.source.extend(list(set(source_files + include_files)))
- self.source.sort(key=lambda x: x.abspath())
-
- def collect_properties(self):
- """
- CodeLite projects are associated with platforms and configurations (for building especially)
- """
- super(vsnode_target, self).collect_properties()
- for x in self.build_properties:
- x.outdir = self.path.parent.abspath()
- x.preprocessor_definitions = ''
- x.includes_search_path = ''
-
- try:
- tsk = self.tg.link_task
- except AttributeError:
- pass
- else:
- x.output_file = tsk.outputs[0].abspath()
- x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
- x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
-
-class codelite_generator(BuildContext):
- '''generates a CodeLite workspace'''
- cmd = 'codelite'
- fun = 'build'
-
- def init(self):
- """
- Some data that needs to be present
- """
- if not getattr(self, 'configurations', None):
- self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
- if not getattr(self, 'platforms', None):
- self.platforms = ['Win32']
- if not getattr(self, 'all_projects', None):
- self.all_projects = []
- if not getattr(self, 'project_extension', None):
- self.project_extension = '.project'
- if not getattr(self, 'projects_dir', None):
- self.projects_dir = self.srcnode.make_node('')
- self.projects_dir.mkdir()
-
- # bind the classes to the object, so that subclass can provide custom generators
- if not getattr(self, 'vsnode_vsdir', None):
- self.vsnode_vsdir = vsnode_vsdir
- if not getattr(self, 'vsnode_target', None):
- self.vsnode_target = vsnode_target
- if not getattr(self, 'vsnode_build_all', None):
- self.vsnode_build_all = vsnode_build_all
- if not getattr(self, 'vsnode_install_all', None):
- self.vsnode_install_all = vsnode_install_all
- if not getattr(self, 'vsnode_project_view', None):
- self.vsnode_project_view = vsnode_project_view
-
- self.numver = '11.00'
- self.vsver = '2010'
-
- def execute(self):
- """
- Entry point
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
-
- # user initialization
- self.init()
-
- # two phases for creating the solution
- self.collect_projects() # add project objects into "self.all_projects"
- self.write_files() # write the corresponding project and solution files
-
- def collect_projects(self):
- """
- Fill the list self.all_projects with project objects
- Fill the list of build targets
- """
- self.collect_targets()
- #self.add_aliases()
- #self.collect_dirs()
- default_project = getattr(self, 'default_project', None)
- def sortfun(x):
- if x.name == default_project:
- return ''
- return getattr(x, 'path', None) and x.path.abspath() or x.name
- self.all_projects.sort(key=sortfun)
-
- def write_files(self):
- """
- Write the project and solution files from the data collected
- so far. It is unlikely that you will want to change this
- """
- for p in self.all_projects:
- p.write()
-
- # and finally write the solution file
- node = self.get_solution_node()
- node.parent.mkdir()
- Logs.warn('Creating %r', node)
- #a = dir(self.root)
- #for b in a:
- # print b
- #print self.group_names
- #print "Hallo2: ",self.root.listdir()
- #print getattr(self, 'codelite_solution_name', None)
- template1 = compile_template(SOLUTION_TEMPLATE)
- sln_str = template1(self)
- sln_str = rm_blank_lines(sln_str)
- node.stealth_write(sln_str)
-
- def get_solution_node(self):
- """
- The solution filename is required when writing the .vcproj files
- return self.solution_node and if it does not exist, make one
- """
- try:
- return self.solution_node
- except:
- pass
-
- codelite_solution_name = getattr(self, 'codelite_solution_name', None)
- if not codelite_solution_name:
- codelite_solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.workspace'
- setattr(self, 'codelite_solution_name', codelite_solution_name)
- if os.path.isabs(codelite_solution_name):
- self.solution_node = self.root.make_node(codelite_solution_name)
- else:
- self.solution_node = self.srcnode.make_node(codelite_solution_name)
- return self.solution_node
-
- def project_configurations(self):
- """
- Helper that returns all the pairs (config,platform)
- """
- ret = []
- for c in self.configurations:
- for p in self.platforms:
- ret.append((c, p))
- return ret
-
- def collect_targets(self):
- """
- Process the list of task generators
- """
- for g in self.groups:
- for tg in g:
- if not isinstance(tg, TaskGen.task_gen):
- continue
-
- if not hasattr(tg, 'codelite_includes'):
- tg.codelite_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
- tg.post()
- if not getattr(tg, 'link_task', None):
- continue
-
- p = self.vsnode_target(self, tg)
- p.collect_source() # delegate this processing
- p.collect_properties()
- self.all_projects.append(p)
-
- def add_aliases(self):
- """
- Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
- We also add an alias for "make install" (disabled by default)
- """
- base = getattr(self, 'projects_dir', None) or self.tg.path
-
- node_project = base.make_node('build_all_projects' + self.project_extension) # Node
- p_build = self.vsnode_build_all(self, node_project)
- p_build.collect_properties()
- self.all_projects.append(p_build)
-
- node_project = base.make_node('install_all_projects' + self.project_extension) # Node
- p_install = self.vsnode_install_all(self, node_project)
- p_install.collect_properties()
- self.all_projects.append(p_install)
-
- node_project = base.make_node('project_view' + self.project_extension) # Node
- p_view = self.vsnode_project_view(self, node_project)
- p_view.collect_source()
- p_view.collect_properties()
- self.all_projects.append(p_view)
-
- n = self.vsnode_vsdir(self, make_uuid(self.srcnode.abspath() + 'build_aliases'), "build_aliases")
- p_build.parent = p_install.parent = p_view.parent = n
- self.all_projects.append(n)
-
- def collect_dirs(self):
- """
- Create the folder structure in the CodeLite project view
- """
- seen = {}
- def make_parents(proj):
- # look at a project, try to make a parent
- if getattr(proj, 'parent', None):
- # aliases already have parents
- return
- x = proj.iter_path
- if x in seen:
- proj.parent = seen[x]
- return
-
- # There is not vsnode_vsdir for x.
- # So create a project representing the folder "x"
- n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.abspath()), x.name)
- n.iter_path = x.parent
- self.all_projects.append(n)
-
- # recurse up to the project directory
- if x.height() > self.srcnode.height() + 1:
- make_parents(n)
-
- for p in self.all_projects[:]: # iterate over a copy of all projects
- if not getattr(p, 'tg', None):
- # but only projects that have a task generator
- continue
-
- # make a folder for each task generator
- p.iter_path = p.tg.path
- make_parents(p)
-
diff --git a/waflib/extras/color_gcc.py b/waflib/extras/color_gcc.py
deleted file mode 100644
index b68c5eb..0000000
--- a/waflib/extras/color_gcc.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Replaces the default formatter by one which understands GCC output and colorizes it.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-import sys
-from waflib import Logs
-
-class ColorGCCFormatter(Logs.formatter):
- def __init__(self, colors):
- self.colors = colors
- Logs.formatter.__init__(self)
- def format(self, rec):
- frame = sys._getframe()
- while frame:
- func = frame.f_code.co_name
- if func == 'exec_command':
- cmd = frame.f_locals.get('cmd')
- if isinstance(cmd, list) and ('gcc' in cmd[0] or 'g++' in cmd[0]):
- lines = []
- for line in rec.msg.splitlines():
- if 'warning: ' in line:
- lines.append(self.colors.YELLOW + line)
- elif 'error: ' in line:
- lines.append(self.colors.RED + line)
- elif 'note: ' in line:
- lines.append(self.colors.CYAN + line)
- else:
- lines.append(line)
- rec.msg = "\n".join(lines)
- frame = frame.f_back
- return Logs.formatter.format(self, rec)
-
-def options(opt):
- Logs.log.handlers[0].setFormatter(ColorGCCFormatter(Logs.colors))
-
diff --git a/waflib/extras/color_rvct.py b/waflib/extras/color_rvct.py
deleted file mode 100644
index f89ccbd..0000000
--- a/waflib/extras/color_rvct.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Replaces the default formatter by one which understands RVCT output and colorizes it.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-import sys
-import atexit
-from waflib import Logs
-
-errors = []
-
-def show_errors():
- for i, e in enumerate(errors):
- if i > 5:
- break
- print("Error: %s" % e)
-
-atexit.register(show_errors)
-
-class RcvtFormatter(Logs.formatter):
- def __init__(self, colors):
- Logs.formatter.__init__(self)
- self.colors = colors
- def format(self, rec):
- frame = sys._getframe()
- while frame:
- func = frame.f_code.co_name
- if func == 'exec_command':
- cmd = frame.f_locals['cmd']
- if isinstance(cmd, list) and ('armcc' in cmd[0] or 'armld' in cmd[0]):
- lines = []
- for line in rec.msg.splitlines():
- if 'Warning: ' in line:
- lines.append(self.colors.YELLOW + line)
- elif 'Error: ' in line:
- lines.append(self.colors.RED + line)
- errors.append(line)
- elif 'note: ' in line:
- lines.append(self.colors.CYAN + line)
- else:
- lines.append(line)
- rec.msg = "\n".join(lines)
- frame = frame.f_back
- return Logs.formatter.format(self, rec)
-
-def options(opt):
- Logs.log.handlers[0].setFormatter(RcvtFormatter(Logs.colors))
-
diff --git a/waflib/extras/compat15.py b/waflib/extras/compat15.py
deleted file mode 100644
index 0e74df8..0000000
--- a/waflib/extras/compat15.py
+++ /dev/null
@@ -1,406 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-This file is provided to enable compatibility with waf 1.5
-It was enabled by default in waf 1.6, but it is not used in waf 1.7
-"""
-
-import sys
-from waflib import ConfigSet, Logs, Options, Scripting, Task, Build, Configure, Node, Runner, TaskGen, Utils, Errors, Context
-
-# the following is to bring some compatibility with waf 1.5 "import waflib.Configure → import Configure"
-sys.modules['Environment'] = ConfigSet
-ConfigSet.Environment = ConfigSet.ConfigSet
-
-sys.modules['Logs'] = Logs
-sys.modules['Options'] = Options
-sys.modules['Scripting'] = Scripting
-sys.modules['Task'] = Task
-sys.modules['Build'] = Build
-sys.modules['Configure'] = Configure
-sys.modules['Node'] = Node
-sys.modules['Runner'] = Runner
-sys.modules['TaskGen'] = TaskGen
-sys.modules['Utils'] = Utils
-sys.modules['Constants'] = Context
-Context.SRCDIR = ''
-Context.BLDDIR = ''
-
-from waflib.Tools import c_preproc
-sys.modules['preproc'] = c_preproc
-
-from waflib.Tools import c_config
-sys.modules['config_c'] = c_config
-
-ConfigSet.ConfigSet.copy = ConfigSet.ConfigSet.derive
-ConfigSet.ConfigSet.set_variant = Utils.nada
-
-Utils.pproc = Utils.subprocess
-
-Build.BuildContext.add_subdirs = Build.BuildContext.recurse
-Build.BuildContext.new_task_gen = Build.BuildContext.__call__
-Build.BuildContext.is_install = 0
-Node.Node.relpath_gen = Node.Node.path_from
-
-Utils.pproc = Utils.subprocess
-Utils.get_term_cols = Logs.get_term_cols
-
-def cmd_output(cmd, **kw):
-
- silent = False
- if 'silent' in kw:
- silent = kw['silent']
- del(kw['silent'])
-
- if 'e' in kw:
- tmp = kw['e']
- del(kw['e'])
- kw['env'] = tmp
-
- kw['shell'] = isinstance(cmd, str)
- kw['stdout'] = Utils.subprocess.PIPE
- if silent:
- kw['stderr'] = Utils.subprocess.PIPE
-
- try:
- p = Utils.subprocess.Popen(cmd, **kw)
- output = p.communicate()[0]
- except OSError as e:
- raise ValueError(str(e))
-
- if p.returncode:
- if not silent:
- msg = "command execution failed: %s -> %r" % (cmd, str(output))
- raise ValueError(msg)
- output = ''
- return output
-Utils.cmd_output = cmd_output
-
-def name_to_obj(self, s, env=None):
- if Logs.verbose:
- Logs.warn('compat: change "name_to_obj(name, env)" by "get_tgen_by_name(name)"')
- return self.get_tgen_by_name(s)
-Build.BuildContext.name_to_obj = name_to_obj
-
-def env_of_name(self, name):
- try:
- return self.all_envs[name]
- except KeyError:
- Logs.error('no such environment: '+name)
- return None
-Build.BuildContext.env_of_name = env_of_name
-
-
-def set_env_name(self, name, env):
- self.all_envs[name] = env
- return env
-Configure.ConfigurationContext.set_env_name = set_env_name
-
-def retrieve(self, name, fromenv=None):
- try:
- env = self.all_envs[name]
- except KeyError:
- env = ConfigSet.ConfigSet()
- self.prepare_env(env)
- self.all_envs[name] = env
- else:
- if fromenv:
- Logs.warn('The environment %s may have been configured already', name)
- return env
-Configure.ConfigurationContext.retrieve = retrieve
-
-Configure.ConfigurationContext.sub_config = Configure.ConfigurationContext.recurse
-Configure.ConfigurationContext.check_tool = Configure.ConfigurationContext.load
-Configure.conftest = Configure.conf
-Configure.ConfigurationError = Errors.ConfigurationError
-Utils.WafError = Errors.WafError
-
-Options.OptionsContext.sub_options = Options.OptionsContext.recurse
-Options.OptionsContext.tool_options = Context.Context.load
-Options.Handler = Options.OptionsContext
-
-Task.simple_task_type = Task.task_type_from_func = Task.task_factory
-Task.Task.classes = Task.classes
-
-def setitem(self, key, value):
- if key.startswith('CCFLAGS'):
- key = key[1:]
- self.table[key] = value
-ConfigSet.ConfigSet.__setitem__ = setitem
-
-@TaskGen.feature('d')
-@TaskGen.before('apply_incpaths')
-def old_importpaths(self):
- if getattr(self, 'importpaths', []):
- self.includes = self.importpaths
-
-from waflib import Context
-eld = Context.load_tool
-def load_tool(*k, **kw):
- ret = eld(*k, **kw)
- if 'set_options' in ret.__dict__:
- if Logs.verbose:
- Logs.warn('compat: rename "set_options" to options')
- ret.options = ret.set_options
- if 'detect' in ret.__dict__:
- if Logs.verbose:
- Logs.warn('compat: rename "detect" to "configure"')
- ret.configure = ret.detect
- return ret
-Context.load_tool = load_tool
-
-def get_curdir(self):
- return self.path.abspath()
-Context.Context.curdir = property(get_curdir, Utils.nada)
-
-def get_srcdir(self):
- return self.srcnode.abspath()
-Configure.ConfigurationContext.srcdir = property(get_srcdir, Utils.nada)
-
-def get_blddir(self):
- return self.bldnode.abspath()
-Configure.ConfigurationContext.blddir = property(get_blddir, Utils.nada)
-
-Configure.ConfigurationContext.check_message_1 = Configure.ConfigurationContext.start_msg
-Configure.ConfigurationContext.check_message_2 = Configure.ConfigurationContext.end_msg
-
-rev = Context.load_module
-def load_module(path, encoding=None):
- ret = rev(path, encoding)
- if 'set_options' in ret.__dict__:
- if Logs.verbose:
- Logs.warn('compat: rename "set_options" to "options" (%r)', path)
- ret.options = ret.set_options
- if 'srcdir' in ret.__dict__:
- if Logs.verbose:
- Logs.warn('compat: rename "srcdir" to "top" (%r)', path)
- ret.top = ret.srcdir
- if 'blddir' in ret.__dict__:
- if Logs.verbose:
- Logs.warn('compat: rename "blddir" to "out" (%r)', path)
- ret.out = ret.blddir
- Utils.g_module = Context.g_module
- Options.launch_dir = Context.launch_dir
- return ret
-Context.load_module = load_module
-
-old_post = TaskGen.task_gen.post
-def post(self):
- self.features = self.to_list(self.features)
- if 'cc' in self.features:
- if Logs.verbose:
- Logs.warn('compat: the feature cc does not exist anymore (use "c")')
- self.features.remove('cc')
- self.features.append('c')
- if 'cstaticlib' in self.features:
- if Logs.verbose:
- Logs.warn('compat: the feature cstaticlib does not exist anymore (use "cstlib" or "cxxstlib")')
- self.features.remove('cstaticlib')
- self.features.append(('cxx' in self.features) and 'cxxstlib' or 'cstlib')
- if getattr(self, 'ccflags', None):
- if Logs.verbose:
- Logs.warn('compat: "ccflags" was renamed to "cflags"')
- self.cflags = self.ccflags
- return old_post(self)
-TaskGen.task_gen.post = post
-
-def waf_version(*k, **kw):
- Logs.warn('wrong version (waf_version was removed in waf 1.6)')
-Utils.waf_version = waf_version
-
-
-import os
-@TaskGen.feature('c', 'cxx', 'd')
-@TaskGen.before('apply_incpaths', 'propagate_uselib_vars')
-@TaskGen.after('apply_link', 'process_source')
-def apply_uselib_local(self):
- """
- process the uselib_local attribute
- execute after apply_link because of the execution order set on 'link_task'
- """
- env = self.env
- from waflib.Tools.ccroot import stlink_task
-
- # 1. the case of the libs defined in the project (visit ancestors first)
- # the ancestors external libraries (uselib) will be prepended
- self.uselib = self.to_list(getattr(self, 'uselib', []))
- self.includes = self.to_list(getattr(self, 'includes', []))
- names = self.to_list(getattr(self, 'uselib_local', []))
- get = self.bld.get_tgen_by_name
- seen = set()
- seen_uselib = set()
- tmp = Utils.deque(names) # consume a copy of the list of names
- if tmp:
- if Logs.verbose:
- Logs.warn('compat: "uselib_local" is deprecated, replace by "use"')
- while tmp:
- lib_name = tmp.popleft()
- # visit dependencies only once
- if lib_name in seen:
- continue
-
- y = get(lib_name)
- y.post()
- seen.add(lib_name)
-
- # object has ancestors to process (shared libraries): add them to the end of the list
- if getattr(y, 'uselib_local', None):
- for x in self.to_list(getattr(y, 'uselib_local', [])):
- obj = get(x)
- obj.post()
- if getattr(obj, 'link_task', None):
- if not isinstance(obj.link_task, stlink_task):
- tmp.append(x)
-
- # link task and flags
- if getattr(y, 'link_task', None):
-
- link_name = y.target[y.target.rfind(os.sep) + 1:]
- if isinstance(y.link_task, stlink_task):
- env.append_value('STLIB', [link_name])
- else:
- # some linkers can link against programs
- env.append_value('LIB', [link_name])
-
- # the order
- self.link_task.set_run_after(y.link_task)
-
- # for the recompilation
- self.link_task.dep_nodes += y.link_task.outputs
-
- # add the link path too
- tmp_path = y.link_task.outputs[0].parent.bldpath()
- if not tmp_path in env['LIBPATH']:
- env.prepend_value('LIBPATH', [tmp_path])
-
- # add ancestors uselib too - but only propagate those that have no staticlib defined
- for v in self.to_list(getattr(y, 'uselib', [])):
- if v not in seen_uselib:
- seen_uselib.add(v)
- if not env['STLIB_' + v]:
- if not v in self.uselib:
- self.uselib.insert(0, v)
-
- # if the library task generator provides 'export_includes', add to the include path
- # the export_includes must be a list of paths relative to the other library
- if getattr(y, 'export_includes', None):
- self.includes.extend(y.to_incnodes(y.export_includes))
-
-@TaskGen.feature('cprogram', 'cxxprogram', 'cstlib', 'cxxstlib', 'cshlib', 'cxxshlib', 'dprogram', 'dstlib', 'dshlib')
-@TaskGen.after('apply_link')
-def apply_objdeps(self):
- "add the .o files produced by some other object files in the same manner as uselib_local"
- names = getattr(self, 'add_objects', [])
- if not names:
- return
- names = self.to_list(names)
-
- get = self.bld.get_tgen_by_name
- seen = []
- while names:
- x = names[0]
-
- # visit dependencies only once
- if x in seen:
- names = names[1:]
- continue
-
- # object does not exist ?
- y = get(x)
-
- # object has ancestors to process first ? update the list of names
- if getattr(y, 'add_objects', None):
- added = 0
- lst = y.to_list(y.add_objects)
- lst.reverse()
- for u in lst:
- if u in seen:
- continue
- added = 1
- names = [u]+names
- if added:
- continue # list of names modified, loop
-
- # safe to process the current object
- y.post()
- seen.append(x)
-
- for t in getattr(y, 'compiled_tasks', []):
- self.link_task.inputs.extend(t.outputs)
-
-@TaskGen.after('apply_link')
-def process_obj_files(self):
- if not hasattr(self, 'obj_files'):
- return
- for x in self.obj_files:
- node = self.path.find_resource(x)
- self.link_task.inputs.append(node)
-
-@TaskGen.taskgen_method
-def add_obj_file(self, file):
- """Small example on how to link object files as if they were source
- obj = bld.create_obj('cc')
- obj.add_obj_file('foo.o')"""
- if not hasattr(self, 'obj_files'):
- self.obj_files = []
- if not 'process_obj_files' in self.meths:
- self.meths.append('process_obj_files')
- self.obj_files.append(file)
-
-
-old_define = Configure.ConfigurationContext.__dict__['define']
-
-@Configure.conf
-def define(self, key, val, quote=True, comment=''):
- old_define(self, key, val, quote, comment)
- if key.startswith('HAVE_'):
- self.env[key] = 1
-
-old_undefine = Configure.ConfigurationContext.__dict__['undefine']
-
-@Configure.conf
-def undefine(self, key, comment=''):
- old_undefine(self, key, comment)
- if key.startswith('HAVE_'):
- self.env[key] = 0
-
-# some people might want to use export_incdirs, but it was renamed
-def set_incdirs(self, val):
- Logs.warn('compat: change "export_incdirs" by "export_includes"')
- self.export_includes = val
-TaskGen.task_gen.export_incdirs = property(None, set_incdirs)
-
-def install_dir(self, path):
- if not path:
- return []
-
- destpath = Utils.subst_vars(path, self.env)
-
- if self.is_install > 0:
- Logs.info('* creating %s', destpath)
- Utils.check_dir(destpath)
- elif self.is_install < 0:
- Logs.info('* removing %s', destpath)
- try:
- os.remove(destpath)
- except OSError:
- pass
-Build.BuildContext.install_dir = install_dir
-
-# before/after names
-repl = {'apply_core': 'process_source',
- 'apply_lib_vars': 'process_source',
- 'apply_obj_vars': 'propagate_uselib_vars',
- 'exec_rule': 'process_rule'
-}
-def after(*k):
- k = [repl.get(key, key) for key in k]
- return TaskGen.after_method(*k)
-
-def before(*k):
- k = [repl.get(key, key) for key in k]
- return TaskGen.before_method(*k)
-TaskGen.before = before
-
diff --git a/waflib/extras/cppcheck.py b/waflib/extras/cppcheck.py
deleted file mode 100644
index 13ff424..0000000
--- a/waflib/extras/cppcheck.py
+++ /dev/null
@@ -1,591 +0,0 @@
-#! /usr/bin/env python
-# -*- encoding: utf-8 -*-
-# Michel Mooij, michel.mooij7@gmail.com
-
-"""
-Tool Description
-================
-This module provides a waf wrapper (i.e. waftool) around the C/C++ source code
-checking tool 'cppcheck'.
-
-See http://cppcheck.sourceforge.net/ for more information on the cppcheck tool
-itself.
-Note that many linux distributions already provide a ready to install version
-of cppcheck. On fedora, for instance, it can be installed using yum:
-
- 'sudo yum install cppcheck'
-
-
-Usage
-=====
-In order to use this waftool simply add it to the 'options' and 'configure'
-functions of your main waf script as shown in the example below:
-
- def options(opt):
- opt.load('cppcheck', tooldir='./waftools')
-
- def configure(conf):
- conf.load('cppcheck')
-
-Note that example shown above assumes that the cppcheck waftool is located in
-the sub directory named 'waftools'.
-
-When configured as shown in the example above, cppcheck will automatically
-perform a source code analysis on all C/C++ build tasks that have been
-defined in your waf build system.
-
-The example shown below for a C program will be used as input for cppcheck when
-building the task.
-
- def build(bld):
- bld.program(name='foo', src='foobar.c')
-
-The result of the source code analysis will be stored both as xml and html
-files in the build location for the task. Should any error be detected by
-cppcheck the build will be aborted and a link to the html report will be shown.
-By default, one index.html file is created for each task generator. A global
-index.html file can be obtained by setting the following variable
-in the configuration section:
-
- conf.env.CPPCHECK_SINGLE_HTML = False
-
-When needed source code checking by cppcheck can be disabled per task, per
-detected error or warning for a particular task. It can be also be disabled for
-all tasks.
-
-In order to exclude a task from source code checking add the skip option to the
-task as shown below:
-
- def build(bld):
- bld.program(
- name='foo',
- src='foobar.c'
- cppcheck_skip=True
- )
-
-When needed problems detected by cppcheck may be suppressed using a file
-containing a list of suppression rules. The relative or absolute path to this
-file can be added to the build task as shown in the example below:
-
- bld.program(
- name='bar',
- src='foobar.c',
- cppcheck_suppress='bar.suppress'
- )
-
-A cppcheck suppress file should contain one suppress rule per line. Each of
-these rules will be passed as an '--suppress=<rule>' argument to cppcheck.
-
-Dependencies
-================
-This waftool depends on the python pygments module, it is used for source code
-syntax highlighting when creating the html reports. see http://pygments.org/ for
-more information on this package.
-
-Remarks
-================
-The generation of the html report is originally based on the cppcheck-htmlreport.py
-script that comes shipped with the cppcheck tool.
-"""
-
-import sys
-import xml.etree.ElementTree as ElementTree
-from waflib import Task, TaskGen, Logs, Context, Options
-
-PYGMENTS_EXC_MSG= '''
-The required module 'pygments' could not be found. Please install it using your
-platform package manager (e.g. apt-get or yum), using 'pip' or 'easy_install',
-see 'http://pygments.org/download/' for installation instructions.
-'''
-
-try:
- import pygments
- from pygments import formatters, lexers
-except ImportError as e:
- Logs.warn(PYGMENTS_EXC_MSG)
- raise e
-
-
-def options(opt):
- opt.add_option('--cppcheck-skip', dest='cppcheck_skip',
- default=False, action='store_true',
- help='do not check C/C++ sources (default=False)')
-
- opt.add_option('--cppcheck-err-resume', dest='cppcheck_err_resume',
- default=False, action='store_true',
- help='continue in case of errors (default=False)')
-
- opt.add_option('--cppcheck-bin-enable', dest='cppcheck_bin_enable',
- default='warning,performance,portability,style,unusedFunction', action='store',
- help="cppcheck option '--enable=' for binaries (default=warning,performance,portability,style,unusedFunction)")
-
- opt.add_option('--cppcheck-lib-enable', dest='cppcheck_lib_enable',
- default='warning,performance,portability,style', action='store',
- help="cppcheck option '--enable=' for libraries (default=warning,performance,portability,style)")
-
- opt.add_option('--cppcheck-std-c', dest='cppcheck_std_c',
- default='c99', action='store',
- help='cppcheck standard to use when checking C (default=c99)')
-
- opt.add_option('--cppcheck-std-cxx', dest='cppcheck_std_cxx',
- default='c++03', action='store',
- help='cppcheck standard to use when checking C++ (default=c++03)')
-
- opt.add_option('--cppcheck-check-config', dest='cppcheck_check_config',
- default=False, action='store_true',
- help='forced check for missing buildin include files, e.g. stdio.h (default=False)')
-
- opt.add_option('--cppcheck-max-configs', dest='cppcheck_max_configs',
- default='20', action='store',
- help='maximum preprocessor (--max-configs) define iterations (default=20)')
-
- opt.add_option('--cppcheck-jobs', dest='cppcheck_jobs',
- default='1', action='store',
- help='number of jobs (-j) to do the checking work (default=1)')
-
-def configure(conf):
- if conf.options.cppcheck_skip:
- conf.env.CPPCHECK_SKIP = [True]
- conf.env.CPPCHECK_STD_C = conf.options.cppcheck_std_c
- conf.env.CPPCHECK_STD_CXX = conf.options.cppcheck_std_cxx
- conf.env.CPPCHECK_MAX_CONFIGS = conf.options.cppcheck_max_configs
- conf.env.CPPCHECK_BIN_ENABLE = conf.options.cppcheck_bin_enable
- conf.env.CPPCHECK_LIB_ENABLE = conf.options.cppcheck_lib_enable
- conf.env.CPPCHECK_JOBS = conf.options.cppcheck_jobs
- if conf.options.cppcheck_jobs != '1' and ('unusedFunction' in conf.options.cppcheck_bin_enable or 'unusedFunction' in conf.options.cppcheck_lib_enable or 'all' in conf.options.cppcheck_bin_enable or 'all' in conf.options.cppcheck_lib_enable):
- Logs.warn('cppcheck: unusedFunction cannot be used with multiple threads, cppcheck will disable it automatically')
- conf.find_program('cppcheck', var='CPPCHECK')
-
- # set to True to get a single index.html file
- conf.env.CPPCHECK_SINGLE_HTML = False
-
-@TaskGen.feature('c')
-@TaskGen.feature('cxx')
-def cppcheck_execute(self):
- if hasattr(self.bld, 'conf'):
- return
- if len(self.env.CPPCHECK_SKIP) or Options.options.cppcheck_skip:
- return
- if getattr(self, 'cppcheck_skip', False):
- return
- task = self.create_task('cppcheck')
- task.cmd = _tgen_create_cmd(self)
- task.fatal = []
- if not Options.options.cppcheck_err_resume:
- task.fatal.append('error')
-
-
-def _tgen_create_cmd(self):
- features = getattr(self, 'features', [])
- std_c = self.env.CPPCHECK_STD_C
- std_cxx = self.env.CPPCHECK_STD_CXX
- max_configs = self.env.CPPCHECK_MAX_CONFIGS
- bin_enable = self.env.CPPCHECK_BIN_ENABLE
- lib_enable = self.env.CPPCHECK_LIB_ENABLE
- jobs = self.env.CPPCHECK_JOBS
-
- cmd = self.env.CPPCHECK
- args = ['--inconclusive','--report-progress','--verbose','--xml','--xml-version=2']
- args.append('--max-configs=%s' % max_configs)
- args.append('-j %s' % jobs)
-
- if 'cxx' in features:
- args.append('--language=c++')
- args.append('--std=%s' % std_cxx)
- else:
- args.append('--language=c')
- args.append('--std=%s' % std_c)
-
- if Options.options.cppcheck_check_config:
- args.append('--check-config')
-
- if set(['cprogram','cxxprogram']) & set(features):
- args.append('--enable=%s' % bin_enable)
- else:
- args.append('--enable=%s' % lib_enable)
-
- for src in self.to_list(getattr(self, 'source', [])):
- if not isinstance(src, str):
- src = repr(src)
- args.append(src)
- for inc in self.to_incnodes(self.to_list(getattr(self, 'includes', []))):
- if not isinstance(inc, str):
- inc = repr(inc)
- args.append('-I%s' % inc)
- for inc in self.to_incnodes(self.to_list(self.env.INCLUDES)):
- if not isinstance(inc, str):
- inc = repr(inc)
- args.append('-I%s' % inc)
- return cmd + args
-
-
-class cppcheck(Task.Task):
- quiet = True
-
- def run(self):
- stderr = self.generator.bld.cmd_and_log(self.cmd, quiet=Context.STDERR, output=Context.STDERR)
- self._save_xml_report(stderr)
- defects = self._get_defects(stderr)
- index = self._create_html_report(defects)
- self._errors_evaluate(defects, index)
- return 0
-
- def _save_xml_report(self, s):
- '''use cppcheck xml result string, add the command string used to invoke cppcheck
- and save as xml file.
- '''
- header = '%s\n' % s.splitlines()[0]
- root = ElementTree.fromstring(s)
- cmd = ElementTree.SubElement(root.find('cppcheck'), 'cmd')
- cmd.text = str(self.cmd)
- body = ElementTree.tostring(root).decode('us-ascii')
- body_html_name = 'cppcheck-%s.xml' % self.generator.get_name()
- if self.env.CPPCHECK_SINGLE_HTML:
- body_html_name = 'cppcheck.xml'
- node = self.generator.path.get_bld().find_or_declare(body_html_name)
- node.write(header + body)
-
- def _get_defects(self, xml_string):
- '''evaluate the xml string returned by cppcheck (on sdterr) and use it to create
- a list of defects.
- '''
- defects = []
- for error in ElementTree.fromstring(xml_string).iter('error'):
- defect = {}
- defect['id'] = error.get('id')
- defect['severity'] = error.get('severity')
- defect['msg'] = str(error.get('msg')).replace('<','&lt;')
- defect['verbose'] = error.get('verbose')
- for location in error.findall('location'):
- defect['file'] = location.get('file')
- defect['line'] = str(int(location.get('line')) - 1)
- defects.append(defect)
- return defects
-
- def _create_html_report(self, defects):
- files, css_style_defs = self._create_html_files(defects)
- index = self._create_html_index(files)
- self._create_css_file(css_style_defs)
- return index
-
- def _create_html_files(self, defects):
- sources = {}
- defects = [defect for defect in defects if 'file' in defect]
- for defect in defects:
- name = defect['file']
- if not name in sources:
- sources[name] = [defect]
- else:
- sources[name].append(defect)
-
- files = {}
- css_style_defs = None
- bpath = self.generator.path.get_bld().abspath()
- names = list(sources.keys())
- for i in range(0,len(names)):
- name = names[i]
- if self.env.CPPCHECK_SINGLE_HTML:
- htmlfile = 'cppcheck/%i.html' % (i)
- else:
- htmlfile = 'cppcheck/%s%i.html' % (self.generator.get_name(),i)
- errors = sources[name]
- files[name] = { 'htmlfile': '%s/%s' % (bpath, htmlfile), 'errors': errors }
- css_style_defs = self._create_html_file(name, htmlfile, errors)
- return files, css_style_defs
-
- def _create_html_file(self, sourcefile, htmlfile, errors):
- name = self.generator.get_name()
- root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
- title = root.find('head/title')
- title.text = 'cppcheck - report - %s' % name
-
- body = root.find('body')
- for div in body.findall('div'):
- if div.get('id') == 'page':
- page = div
- break
- for div in page.findall('div'):
- if div.get('id') == 'header':
- h1 = div.find('h1')
- h1.text = 'cppcheck report - %s' % name
- if div.get('id') == 'menu':
- indexlink = div.find('a')
- if self.env.CPPCHECK_SINGLE_HTML:
- indexlink.attrib['href'] = 'index.html'
- else:
- indexlink.attrib['href'] = 'index-%s.html' % name
- if div.get('id') == 'content':
- content = div
- srcnode = self.generator.bld.root.find_node(sourcefile)
- hl_lines = [e['line'] for e in errors if 'line' in e]
- formatter = CppcheckHtmlFormatter(linenos=True, style='colorful', hl_lines=hl_lines, lineanchors='line')
- formatter.errors = [e for e in errors if 'line' in e]
- css_style_defs = formatter.get_style_defs('.highlight')
- lexer = pygments.lexers.guess_lexer_for_filename(sourcefile, "")
- s = pygments.highlight(srcnode.read(), lexer, formatter)
- table = ElementTree.fromstring(s)
- content.append(table)
-
- s = ElementTree.tostring(root, method='html').decode('us-ascii')
- s = CCPCHECK_HTML_TYPE + s
- node = self.generator.path.get_bld().find_or_declare(htmlfile)
- node.write(s)
- return css_style_defs
-
- def _create_html_index(self, files):
- name = self.generator.get_name()
- root = ElementTree.fromstring(CPPCHECK_HTML_FILE)
- title = root.find('head/title')
- title.text = 'cppcheck - report - %s' % name
-
- body = root.find('body')
- for div in body.findall('div'):
- if div.get('id') == 'page':
- page = div
- break
- for div in page.findall('div'):
- if div.get('id') == 'header':
- h1 = div.find('h1')
- h1.text = 'cppcheck report - %s' % name
- if div.get('id') == 'content':
- content = div
- self._create_html_table(content, files)
- if div.get('id') == 'menu':
- indexlink = div.find('a')
- if self.env.CPPCHECK_SINGLE_HTML:
- indexlink.attrib['href'] = 'index.html'
- else:
- indexlink.attrib['href'] = 'index-%s.html' % name
-
- s = ElementTree.tostring(root, method='html').decode('us-ascii')
- s = CCPCHECK_HTML_TYPE + s
- index_html_name = 'cppcheck/index-%s.html' % name
- if self.env.CPPCHECK_SINGLE_HTML:
- index_html_name = 'cppcheck/index.html'
- node = self.generator.path.get_bld().find_or_declare(index_html_name)
- node.write(s)
- return node
-
- def _create_html_table(self, content, files):
- table = ElementTree.fromstring(CPPCHECK_HTML_TABLE)
- for name, val in files.items():
- f = val['htmlfile']
- s = '<tr><td colspan="4"><a href="%s">%s</a></td></tr>\n' % (f,name)
- row = ElementTree.fromstring(s)
- table.append(row)
-
- errors = sorted(val['errors'], key=lambda e: int(e['line']) if 'line' in e else sys.maxint)
- for e in errors:
- if not 'line' in e:
- s = '<tr><td></td><td>%s</td><td>%s</td><td>%s</td></tr>\n' % (e['id'], e['severity'], e['msg'])
- else:
- attr = ''
- if e['severity'] == 'error':
- attr = 'class="error"'
- s = '<tr><td><a href="%s#line-%s">%s</a></td>' % (f, e['line'], e['line'])
- s+= '<td>%s</td><td>%s</td><td %s>%s</td></tr>\n' % (e['id'], e['severity'], attr, e['msg'])
- row = ElementTree.fromstring(s)
- table.append(row)
- content.append(table)
-
- def _create_css_file(self, css_style_defs):
- css = str(CPPCHECK_CSS_FILE)
- if css_style_defs:
- css = "%s\n%s\n" % (css, css_style_defs)
- node = self.generator.path.get_bld().find_or_declare('cppcheck/style.css')
- node.write(css)
-
- def _errors_evaluate(self, errors, http_index):
- name = self.generator.get_name()
- fatal = self.fatal
- severity = [err['severity'] for err in errors]
- problems = [err for err in errors if err['severity'] != 'information']
-
- if set(fatal) & set(severity):
- exc = "\n"
- exc += "\nccpcheck detected fatal error(s) in task '%s', see report for details:" % name
- exc += "\n file://%r" % (http_index)
- exc += "\n"
- self.generator.bld.fatal(exc)
-
- elif len(problems):
- msg = "\nccpcheck detected (possible) problem(s) in task '%s', see report for details:" % name
- msg += "\n file://%r" % http_index
- msg += "\n"
- Logs.error(msg)
-
-
-class CppcheckHtmlFormatter(pygments.formatters.HtmlFormatter):
- errors = []
-
- def wrap(self, source, outfile):
- line_no = 1
- for i, t in super(CppcheckHtmlFormatter, self).wrap(source, outfile):
- # If this is a source code line we want to add a span tag at the end.
- if i == 1:
- for error in self.errors:
- if int(error['line']) == line_no:
- t = t.replace('\n', CPPCHECK_HTML_ERROR % error['msg'])
- line_no += 1
- yield i, t
-
-
-CCPCHECK_HTML_TYPE = \
-'<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">\n'
-
-CPPCHECK_HTML_FILE = """
-<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd" [<!ENTITY nbsp "&#160;">]>
-<html>
- <head>
- <title>cppcheck - report - XXX</title>
- <link href="style.css" rel="stylesheet" type="text/css" />
- <style type="text/css">
- </style>
- </head>
- <body class="body">
- <div id="page-header">&nbsp;</div>
- <div id="page">
- <div id="header">
- <h1>cppcheck report - XXX</h1>
- </div>
- <div id="menu">
- <a href="index.html">Defect list</a>
- </div>
- <div id="content">
- </div>
- <div id="footer">
- <div>cppcheck - a tool for static C/C++ code analysis</div>
- <div>
- Internet: <a href="http://cppcheck.sourceforge.net">http://cppcheck.sourceforge.net</a><br/>
- Forum: <a href="http://apps.sourceforge.net/phpbb/cppcheck/">http://apps.sourceforge.net/phpbb/cppcheck/</a><br/>
- IRC: #cppcheck at irc.freenode.net
- </div>
- &nbsp;
- </div>
- &nbsp;
- </div>
- <div id="page-footer">&nbsp;</div>
- </body>
-</html>
-"""
-
-CPPCHECK_HTML_TABLE = """
-<table>
- <tr>
- <th>Line</th>
- <th>Id</th>
- <th>Severity</th>
- <th>Message</th>
- </tr>
-</table>
-"""
-
-CPPCHECK_HTML_ERROR = \
-'<span style="background: #ffaaaa;padding: 3px;">&lt;--- %s</span>\n'
-
-CPPCHECK_CSS_FILE = """
-body.body {
- font-family: Arial;
- font-size: 13px;
- background-color: black;
- padding: 0px;
- margin: 0px;
-}
-
-.error {
- font-family: Arial;
- font-size: 13px;
- background-color: #ffb7b7;
- padding: 0px;
- margin: 0px;
-}
-
-th, td {
- min-width: 100px;
- text-align: left;
-}
-
-#page-header {
- clear: both;
- width: 1200px;
- margin: 20px auto 0px auto;
- height: 10px;
- border-bottom-width: 2px;
- border-bottom-style: solid;
- border-bottom-color: #aaaaaa;
-}
-
-#page {
- width: 1160px;
- margin: auto;
- border-left-width: 2px;
- border-left-style: solid;
- border-left-color: #aaaaaa;
- border-right-width: 2px;
- border-right-style: solid;
- border-right-color: #aaaaaa;
- background-color: White;
- padding: 20px;
-}
-
-#page-footer {
- clear: both;
- width: 1200px;
- margin: auto;
- height: 10px;
- border-top-width: 2px;
- border-top-style: solid;
- border-top-color: #aaaaaa;
-}
-
-#header {
- width: 100%;
- height: 70px;
- background-image: url(logo.png);
- background-repeat: no-repeat;
- background-position: left top;
- border-bottom-style: solid;
- border-bottom-width: thin;
- border-bottom-color: #aaaaaa;
-}
-
-#menu {
- margin-top: 5px;
- text-align: left;
- float: left;
- width: 100px;
- height: 300px;
-}
-
-#menu > a {
- margin-left: 10px;
- display: block;
-}
-
-#content {
- float: left;
- width: 1020px;
- margin: 5px;
- padding: 0px 10px 10px 10px;
- border-left-style: solid;
- border-left-width: thin;
- border-left-color: #aaaaaa;
-}
-
-#footer {
- padding-bottom: 5px;
- padding-top: 5px;
- border-top-style: solid;
- border-top-width: thin;
- border-top-color: #aaaaaa;
- clear: both;
- font-size: 10px;
-}
-
-#footer > div {
- float: left;
- width: 33%;
-}
-
-"""
-
diff --git a/waflib/extras/cpplint.py b/waflib/extras/cpplint.py
deleted file mode 100644
index e3302e5..0000000
--- a/waflib/extras/cpplint.py
+++ /dev/null
@@ -1,209 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2014
-
-'''
-
-This is an extra tool, not bundled with the default waf binary.
-To add the cpplint tool to the waf file:
-$ ./waf-light --tools=compat15,cpplint
-
-this tool also requires cpplint for python.
-If you have PIP, you can install it like this: pip install cpplint
-
-When using this tool, the wscript will look like:
-
- def options(opt):
- opt.load('compiler_cxx cpplint')
-
- def configure(conf):
- conf.load('compiler_cxx cpplint')
- # optional, you can also specify them on the command line
- conf.env.CPPLINT_FILTERS = ','.join((
- '-whitespace/newline', # c++11 lambda
- '-readability/braces', # c++11 constructor
- '-whitespace/braces', # c++11 constructor
- '-build/storage_class', # c++11 for-range
- '-whitespace/blank_line', # user pref
- '-whitespace/labels' # user pref
- ))
-
- def build(bld):
- bld(features='cpplint', source='main.cpp', target='app')
- # add include files, because they aren't usually built
- bld(features='cpplint', source=bld.path.ant_glob('**/*.hpp'))
-'''
-
-from __future__ import absolute_import
-import sys, re
-import logging
-from waflib import Errors, Task, TaskGen, Logs, Options, Node, Utils
-
-
-critical_errors = 0
-CPPLINT_FORMAT = '[CPPLINT] %(filename)s:\nline %(linenum)s, severity %(confidence)s, category: %(category)s\n%(message)s\n'
-RE_EMACS = re.compile('(?P<filename>.*):(?P<linenum>\d+): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]')
-CPPLINT_RE = {
- 'waf': RE_EMACS,
- 'emacs': RE_EMACS,
- 'vs7': re.compile('(?P<filename>.*)\((?P<linenum>\d+)\): (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
- 'eclipse': re.compile('(?P<filename>.*):(?P<linenum>\d+): warning: (?P<message>.*) \[(?P<category>.*)\] \[(?P<confidence>\d+)\]'),
-}
-CPPLINT_STR = ('${CPPLINT} '
- '--verbose=${CPPLINT_LEVEL} '
- '--output=${CPPLINT_OUTPUT} '
- '--filter=${CPPLINT_FILTERS} '
- '--root=${CPPLINT_ROOT} '
- '--linelength=${CPPLINT_LINE_LENGTH} ')
-
-
-def options(opt):
- opt.add_option('--cpplint-filters', type='string',
- default='', dest='CPPLINT_FILTERS',
- help='add filters to cpplint')
- opt.add_option('--cpplint-length', type='int',
- default=80, dest='CPPLINT_LINE_LENGTH',
- help='specify the line length (default: 80)')
- opt.add_option('--cpplint-level', default=1, type='int', dest='CPPLINT_LEVEL',
- help='specify the log level (default: 1)')
- opt.add_option('--cpplint-break', default=5, type='int', dest='CPPLINT_BREAK',
- help='break the build if error >= level (default: 5)')
- opt.add_option('--cpplint-root', type='string',
- default='', dest='CPPLINT_ROOT',
- help='root directory used to derive header guard')
- opt.add_option('--cpplint-skip', action='store_true',
- default=False, dest='CPPLINT_SKIP',
- help='skip cpplint during build')
- opt.add_option('--cpplint-output', type='string',
- default='waf', dest='CPPLINT_OUTPUT',
- help='select output format (waf, emacs, vs7, eclipse)')
-
-
-def configure(conf):
- try:
- conf.find_program('cpplint', var='CPPLINT')
- except Errors.ConfigurationError:
- conf.env.CPPLINT_SKIP = True
-
-
-class cpplint_formatter(Logs.formatter, object):
- def __init__(self, fmt):
- logging.Formatter.__init__(self, CPPLINT_FORMAT)
- self.fmt = fmt
-
- def format(self, rec):
- if self.fmt == 'waf':
- result = CPPLINT_RE[self.fmt].match(rec.msg).groupdict()
- rec.msg = CPPLINT_FORMAT % result
- if rec.levelno <= logging.INFO:
- rec.c1 = Logs.colors.CYAN
- return super(cpplint_formatter, self).format(rec)
-
-
-class cpplint_handler(Logs.log_handler, object):
- def __init__(self, stream=sys.stderr, **kw):
- super(cpplint_handler, self).__init__(stream, **kw)
- self.stream = stream
-
- def emit(self, rec):
- rec.stream = self.stream
- self.emit_override(rec)
- self.flush()
-
-
-class cpplint_wrapper(object):
- def __init__(self, logger, threshold, fmt):
- self.logger = logger
- self.threshold = threshold
- self.fmt = fmt
-
- def __enter__(self):
- return self
-
- def __exit__(self, exc_type, exc_value, traceback):
- if isinstance(exc_value, Utils.subprocess.CalledProcessError):
- messages = [m for m in exc_value.output.splitlines()
- if 'Done processing' not in m
- and 'Total errors found' not in m]
- for message in messages:
- self.write(message)
- return True
-
- def write(self, message):
- global critical_errors
- result = CPPLINT_RE[self.fmt].match(message)
- if not result:
- return
- level = int(result.groupdict()['confidence'])
- if level >= self.threshold:
- critical_errors += 1
- if level <= 2:
- self.logger.info(message)
- elif level <= 4:
- self.logger.warning(message)
- else:
- self.logger.error(message)
-
-
-cpplint_logger = None
-def get_cpplint_logger(fmt):
- global cpplint_logger
- if cpplint_logger:
- return cpplint_logger
- cpplint_logger = logging.getLogger('cpplint')
- hdlr = cpplint_handler()
- hdlr.setFormatter(cpplint_formatter(fmt))
- cpplint_logger.addHandler(hdlr)
- cpplint_logger.setLevel(logging.DEBUG)
- return cpplint_logger
-
-
-class cpplint(Task.Task):
- color = 'PINK'
-
- def __init__(self, *k, **kw):
- super(cpplint, self).__init__(*k, **kw)
-
- def run(self):
- global critical_errors
- with cpplint_wrapper(get_cpplint_logger(self.env.CPPLINT_OUTPUT), self.env.CPPLINT_BREAK, self.env.CPPLINT_OUTPUT):
- params = {key: str(self.env[key]) for key in self.env if 'CPPLINT_' in key}
- if params['CPPLINT_OUTPUT'] is 'waf':
- params['CPPLINT_OUTPUT'] = 'emacs'
- params['CPPLINT'] = self.env.get_flat('CPPLINT')
- cmd = Utils.subst_vars(CPPLINT_STR, params)
- env = self.env.env or None
- Utils.subprocess.check_output(cmd + self.inputs[0].abspath(),
- stderr=Utils.subprocess.STDOUT,
- env=env, shell=True)
- return critical_errors
-
-@TaskGen.extension('.h', '.hh', '.hpp', '.hxx')
-def cpplint_includes(self, node):
- pass
-
-@TaskGen.feature('cpplint')
-@TaskGen.before_method('process_source')
-def post_cpplint(self):
- if not self.env.CPPLINT_INITIALIZED:
- for key, value in Options.options.__dict__.items():
- if not key.startswith('CPPLINT_') or self.env[key]:
- continue
- self.env[key] = value
- self.env.CPPLINT_INITIALIZED = True
-
- if self.env.CPPLINT_SKIP:
- return
-
- if not self.env.CPPLINT_OUTPUT in CPPLINT_RE:
- return
-
- for src in self.to_list(getattr(self, 'source', [])):
- if isinstance(src, Node.Node):
- node = src
- else:
- node = self.path.find_or_declare(src)
- if not node:
- self.bld.fatal('Could not find %r' % src)
- self.create_task('cpplint', node)
diff --git a/waflib/extras/cross_gnu.py b/waflib/extras/cross_gnu.py
deleted file mode 100644
index 309f53b..0000000
--- a/waflib/extras/cross_gnu.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 vi:ts=4:noexpandtab
-# Tool to provide dedicated variables for cross-compilation
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-This tool allows to use environment variables to define cross-compilation
-variables intended for build variants.
-
-The variables are obtained from the environment in 3 ways:
-
-1. By defining CHOST, they can be derived as ${CHOST}-${TOOL}
-2. By defining HOST_x
-3. By defining ${CHOST//-/_}_x
-
-else one can set ``cfg.env.CHOST`` in ``wscript`` before loading ``cross_gnu``.
-
-Usage:
-
-- In your build script::
-
- def configure(cfg):
- ...
- for variant in x_variants:
- setenv(variant)
- conf.load('cross_gnu')
- conf.xcheck_host_var('POUET')
- ...
-
-
-- Then::
-
- CHOST=arm-hardfloat-linux-gnueabi waf configure
- env arm-hardfloat-linux-gnueabi-CC="clang -..." waf configure
- CFLAGS=... CHOST=arm-hardfloat-linux-gnueabi HOST_CFLAGS=-g waf configure
- HOST_CC="clang -..." waf configure
-
-This example ``wscript`` compiles to Microchip PIC (xc16-gcc-xyz must be in PATH):
-
-.. code:: python
-
- from waflib import Configure
-
- #from https://gist.github.com/rpuntaie/2bddfb5d7b77db26415ee14371289971
- import waf_variants
-
- variants='pc fw/variant1 fw/variant2'.split()
-
- top = "."
- out = "../build"
-
- PIC = '33FJ128GP804' #dsPICxxx
-
- @Configure.conf
- def gcc_modifier_xc16(cfg):
- v = cfg.env
- v.cprogram_PATTERN = '%s.elf'
- v.LINKFLAGS_cprogram = ','.join(['-Wl','','','--defsym=__MPLAB_BUILD=0','','--script=p'+PIC+'.gld',
- '--stack=16','--check-sections','--data-init','--pack-data','--handles','--isr','--no-gc-sections',
- '--fill-upper=0','--stackguard=16','--no-force-link','--smart-io']) #,'--report-mem'])
- v.CFLAGS_cprogram=['-mcpu='+PIC,'-omf=elf','-mlarge-code','-msmart-io=1',
- '-msfr-warn=off','-mno-override-inline','-finline','-Winline']
-
- def configure(cfg):
- if 'fw' in cfg.variant: #firmware
- cfg.env.DEST_OS = 'xc16' #cfg.env.CHOST = 'xc16' #works too
- cfg.load('c cross_gnu') #cfg.env.CHOST becomes ['xc16']
- ...
- else: #configure for pc SW
- ...
-
- def build(bld):
- if 'fw' in bld.variant: #firmware
- bld.program(source='maintst.c', target='maintst');
- bld(source='maintst.elf', target='maintst.hex', rule="xc16-bin2hex ${SRC} -a -omf=elf")
- else: #build for pc SW
- ...
-
-"""
-
-import os
-from waflib import Utils, Configure
-from waflib.Tools import ccroot, gcc
-
-try:
- from shlex import quote
-except ImportError:
- from pipes import quote
-
-def get_chost_stuff(conf):
- """
- Get the CHOST environment variable contents
- """
- chost = None
- chost_envar = None
- if conf.env.CHOST:
- chost = conf.env.CHOST[0]
- chost_envar = chost.replace('-', '_')
- return chost, chost_envar
-
-
-@Configure.conf
-def xcheck_var(conf, name, wafname=None, cross=False):
- wafname = wafname or name
-
- if wafname in conf.env:
- value = conf.env[wafname]
- if isinstance(value, str):
- value = [value]
- else:
- envar = os.environ.get(name)
- if not envar:
- return
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- if cross:
- pretty = 'cross-compilation %s' % wafname
- else:
- pretty = wafname
- conf.msg('Will use %s' % pretty, " ".join(quote(x) for x in value))
-
-@Configure.conf
-def xcheck_host_prog(conf, name, tool, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name))
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
- else:
- envar = os.environ.get('HOST_%s' % name)
- if envar is not None:
- value = Utils.to_list(envar)
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
- " ".join(quote(x) for x in value))
- return
-
- if conf.env[wafname]:
- return
-
- value = None
- if chost:
- value = '%s-%s' % (chost, tool)
-
- if value:
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from CHOST' % wafname, value)
-
-@Configure.conf
-def xcheck_host_envar(conf, name, wafname=None):
- wafname = wafname or name
-
- chost, chost_envar = get_chost_stuff(conf)
-
- specific = None
- if chost:
- specific = os.environ.get('%s_%s' % (chost_envar, name))
-
- if specific:
- value = Utils.to_list(specific)
- conf.env[wafname] += value
- conf.msg('Will use cross-compilation %s from %s_%s' \
- % (name, chost_envar, name),
- " ".join(quote(x) for x in value))
- return
-
-
- envar = os.environ.get('HOST_%s' % name)
- if envar is None:
- return
-
- value = Utils.to_list(envar) if envar != '' else [envar]
-
- conf.env[wafname] = value
- conf.msg('Will use cross-compilation %s from HOST_%s' % (name, name),
- " ".join(quote(x) for x in value))
-
-
-@Configure.conf
-def xcheck_host(conf):
- conf.xcheck_var('CHOST', cross=True)
- conf.env.CHOST = conf.env.CHOST or [conf.env.DEST_OS]
- conf.env.DEST_OS = conf.env.CHOST[0].replace('-','_')
- conf.xcheck_host_prog('CC', 'gcc')
- conf.xcheck_host_prog('CXX', 'g++')
- conf.xcheck_host_prog('LINK_CC', 'gcc')
- conf.xcheck_host_prog('LINK_CXX', 'g++')
- conf.xcheck_host_prog('AR', 'ar')
- conf.xcheck_host_prog('AS', 'as')
- conf.xcheck_host_prog('LD', 'ld')
- conf.xcheck_host_envar('CFLAGS')
- conf.xcheck_host_envar('CXXFLAGS')
- conf.xcheck_host_envar('LDFLAGS', 'LINKFLAGS')
- conf.xcheck_host_envar('LIB')
- conf.xcheck_host_envar('PKG_CONFIG_LIBDIR')
- conf.xcheck_host_envar('PKG_CONFIG_PATH')
-
- if not conf.env.env:
- conf.env.env = {}
- conf.env.env.update(os.environ)
- if conf.env.PKG_CONFIG_LIBDIR:
- conf.env.env['PKG_CONFIG_LIBDIR'] = conf.env.PKG_CONFIG_LIBDIR[0]
- if conf.env.PKG_CONFIG_PATH:
- conf.env.env['PKG_CONFIG_PATH'] = conf.env.PKG_CONFIG_PATH[0]
-
-def configure(conf):
- """
- Configuration example for gcc, it will not work for g++/clang/clang++
- """
- conf.xcheck_host()
- conf.gcc_common_flags()
- conf.gcc_modifier_platform()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
diff --git a/waflib/extras/cython.py b/waflib/extras/cython.py
deleted file mode 100644
index 481d6f4..0000000
--- a/waflib/extras/cython.py
+++ /dev/null
@@ -1,146 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010-2015
-
-import re
-from waflib import Task, Logs
-from waflib.TaskGen import extension
-
-cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
-re_cyt = re.compile(r"""
- (?:from\s+(\w+)\s+)? # optionally match "from foo" and capture foo
- c?import\s(\w+|[*]) # require "import bar" and capture bar
- """, re.M | re.VERBOSE)
-
-@extension('.pyx')
-def add_cython_file(self, node):
- """
- Process a *.pyx* file given in the list of source files. No additional
- feature is required::
-
- def build(bld):
- bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
- """
- ext = '.c'
- if 'cxx' in self.features:
- self.env.append_unique('CYTHONFLAGS', '--cplus')
- ext = '.cc'
-
- for x in getattr(self, 'cython_includes', []):
- # TODO re-use these nodes in "scan" below
- d = self.path.find_dir(x)
- if d:
- self.env.append_unique('CYTHONFLAGS', '-I%s' % d.abspath())
-
- tsk = self.create_task('cython', node, node.change_ext(ext))
- self.source += tsk.outputs
-
-class cython(Task.Task):
- run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
- color = 'GREEN'
-
- vars = ['INCLUDES']
- """
- Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS will be appended
- by the metaclass.
- """
-
- ext_out = ['.h']
- """
- The creation of a .h file is known only after the build has begun, so it is not
- possible to compute a build order just by looking at the task inputs/outputs.
- """
-
- def runnable_status(self):
- """
- Perform a double-check to add the headers created by cython
- to the output nodes. The scanner is executed only when the cython task
- must be executed (optimization).
- """
- ret = super(cython, self).runnable_status()
- if ret == Task.ASK_LATER:
- return ret
- for x in self.generator.bld.raw_deps[self.uid()]:
- if x.startswith('header:'):
- self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
- return super(cython, self).runnable_status()
-
- def post_run(self):
- for x in self.outputs:
- if x.name.endswith('.h'):
- if not x.exists():
- if Logs.verbose:
- Logs.warn('Expected %r', x.abspath())
- x.write('')
- return Task.Task.post_run(self)
-
- def scan(self):
- """
- Return the dependent files (.pxd) by looking in the include folders.
- Put the headers to generate in the custom list "bld.raw_deps".
- To inspect the scanne results use::
-
- $ waf clean build --zones=deps
- """
- node = self.inputs[0]
- txt = node.read()
-
- mods = []
- for m in re_cyt.finditer(txt):
- if m.group(1): # matches "from foo import bar"
- mods.append(m.group(1))
- else:
- mods.append(m.group(2))
-
- Logs.debug('cython: mods %r', mods)
- incs = getattr(self.generator, 'cython_includes', [])
- incs = [self.generator.path.find_dir(x) for x in incs]
- incs.append(node.parent)
-
- found = []
- missing = []
- for x in mods:
- for y in incs:
- k = y.find_resource(x + '.pxd')
- if k:
- found.append(k)
- break
- else:
- missing.append(x)
-
- # the cython file implicitly depends on a pxd file that might be present
- implicit = node.parent.find_resource(node.name[:-3] + 'pxd')
- if implicit:
- found.append(implicit)
-
- Logs.debug('cython: found %r', found)
-
- # Now the .h created - store them in bld.raw_deps for later use
- has_api = False
- has_public = False
- for l in txt.splitlines():
- if cy_api_pat.match(l):
- if ' api ' in l:
- has_api = True
- if ' public ' in l:
- has_public = True
- name = node.name.replace('.pyx', '')
- if has_api:
- missing.append('header:%s_api.h' % name)
- if has_public:
- missing.append('header:%s.h' % name)
-
- return (found, missing)
-
-def options(ctx):
- ctx.add_option('--cython-flags', action='store', default='', help='space separated list of flags to pass to cython')
-
-def configure(ctx):
- if not ctx.env.CC and not ctx.env.CXX:
- ctx.fatal('Load a C/C++ compiler first')
- if not ctx.env.PYTHON:
- ctx.fatal('Load the python tool first!')
- ctx.find_program('cython', var='CYTHON')
- if hasattr(ctx.options, 'cython_flags'):
- ctx.env.CYTHONFLAGS = ctx.options.cython_flags
-
diff --git a/waflib/extras/dcc.py b/waflib/extras/dcc.py
deleted file mode 100644
index c1a57c0..0000000
--- a/waflib/extras/dcc.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero, 2011 (zougloub)
-
-from waflib import Options
-from waflib.Tools import ccroot
-from waflib.Configure import conf
-
-@conf
-def find_dcc(conf):
- conf.find_program(['dcc'], var='CC', path_list=getattr(Options.options, 'diabbindir', ""))
- conf.env.CC_NAME = 'dcc'
-
-@conf
-def find_dld(conf):
- conf.find_program(['dld'], var='LINK_CC', path_list=getattr(Options.options, 'diabbindir', ""))
- conf.env.LINK_CC_NAME = 'dld'
-
-@conf
-def find_dar(conf):
- conf.find_program(['dar'], var='AR', path_list=getattr(Options.options, 'diabbindir', ""))
- conf.env.AR_NAME = 'dar'
- conf.env.ARFLAGS = 'rcs'
-
-@conf
-def find_ddump(conf):
- conf.find_program(['ddump'], var='DDUMP', path_list=getattr(Options.options, 'diabbindir', ""))
-
-@conf
-def dcc_common_flags(conf):
- v = conf.env
- v['CC_SRC_F'] = []
- v['CC_TGT_F'] = ['-c', '-o']
-
- # linker
- if not v['LINK_CC']:
- v['LINK_CC'] = v['CC']
- v['CCLNK_SRC_F'] = []
- v['CCLNK_TGT_F'] = ['-o']
- v['CPPPATH_ST'] = '-I%s'
- v['DEFINES_ST'] = '-D%s'
-
- v['LIB_ST'] = '-l:%s' # template for adding libs
- v['LIBPATH_ST'] = '-L%s' # template for adding libpaths
- v['STLIB_ST'] = '-l:%s'
- v['STLIBPATH_ST'] = '-L%s'
- v['RPATH_ST'] = '-Wl,-rpath,%s'
- #v['STLIB_MARKER'] = '-Wl,-Bstatic'
-
- # program
- v['cprogram_PATTERN'] = '%s.elf'
-
- # static lib
- v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
- v['cstlib_PATTERN'] = 'lib%s.a'
-
-def configure(conf):
- conf.find_dcc()
- conf.find_dar()
- conf.find_dld()
- conf.find_ddump()
- conf.dcc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
-def options(opt):
- """
- Add the ``--with-diab-bindir`` command-line options.
- """
- opt.add_option('--with-diab-bindir', type='string', dest='diabbindir', help = 'Specify alternate diab bin folder', default="")
-
diff --git a/waflib/extras/distnet.py b/waflib/extras/distnet.py
deleted file mode 100644
index 09a31a6..0000000
--- a/waflib/extras/distnet.py
+++ /dev/null
@@ -1,430 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-waf-powered distributed network builds, with a network cache.
-
-Caching files from a server has advantages over a NFS/Samba shared folder:
-
-- builds are much faster because they use local files
-- builds just continue to work in case of a network glitch
-- permissions are much simpler to manage
-"""
-
-import os, urllib, tarfile, re, shutil, tempfile, sys
-from collections import OrderedDict
-from waflib import Context, Utils, Logs
-
-try:
- from urllib.parse import urlencode
-except ImportError:
- urlencode = urllib.urlencode
-
-def safe_urlencode(data):
- x = urlencode(data)
- try:
- x = x.encode('utf-8')
- except Exception:
- pass
- return x
-
-try:
- from urllib.error import URLError
-except ImportError:
- from urllib2 import URLError
-
-try:
- from urllib.request import Request, urlopen
-except ImportError:
- from urllib2 import Request, urlopen
-
-DISTNETCACHE = os.environ.get('DISTNETCACHE', '/tmp/distnetcache')
-DISTNETSERVER = os.environ.get('DISTNETSERVER', 'http://localhost:8000/cgi-bin/')
-TARFORMAT = 'w:bz2'
-TIMEOUT = 60
-REQUIRES = 'requires.txt'
-
-re_com = re.compile('\s*#.*', re.M)
-
-def total_version_order(num):
- lst = num.split('.')
- template = '%10s' * len(lst)
- ret = template % tuple(lst)
- return ret
-
-def get_distnet_cache():
- return getattr(Context.g_module, 'DISTNETCACHE', DISTNETCACHE)
-
-def get_server_url():
- return getattr(Context.g_module, 'DISTNETSERVER', DISTNETSERVER)
-
-def get_download_url():
- return '%s/download.py' % get_server_url()
-
-def get_upload_url():
- return '%s/upload.py' % get_server_url()
-
-def get_resolve_url():
- return '%s/resolve.py' % get_server_url()
-
-def send_package_name():
- out = getattr(Context.g_module, 'out', 'build')
- pkgfile = '%s/package_to_upload.tarfile' % out
- return pkgfile
-
-class package(Context.Context):
- fun = 'package'
- cmd = 'package'
-
- def execute(self):
- try:
- files = self.files
- except AttributeError:
- files = self.files = []
-
- Context.Context.execute(self)
- pkgfile = send_package_name()
- if not pkgfile in files:
- if not REQUIRES in files:
- files.append(REQUIRES)
- self.make_tarfile(pkgfile, files, add_to_package=False)
-
- def make_tarfile(self, filename, files, **kw):
- if kw.get('add_to_package', True):
- self.files.append(filename)
-
- with tarfile.open(filename, TARFORMAT) as tar:
- endname = os.path.split(filename)[-1]
- endname = endname.split('.')[0] + '/'
- for x in files:
- tarinfo = tar.gettarinfo(x, x)
- tarinfo.uid = tarinfo.gid = 0
- tarinfo.uname = tarinfo.gname = 'root'
- tarinfo.size = os.stat(x).st_size
-
- # TODO - more archive creation options?
- if kw.get('bare', True):
- tarinfo.name = os.path.split(x)[1]
- else:
- tarinfo.name = endname + x # todo, if tuple, then..
- Logs.debug('distnet: adding %r to %s', tarinfo.name, filename)
- with open(x, 'rb') as f:
- tar.addfile(tarinfo, f)
- Logs.info('Created %s', filename)
-
-class publish(Context.Context):
- fun = 'publish'
- cmd = 'publish'
- def execute(self):
- if hasattr(Context.g_module, 'publish'):
- Context.Context.execute(self)
- mod = Context.g_module
-
- rfile = getattr(self, 'rfile', send_package_name())
- if not os.path.isfile(rfile):
- self.fatal('Create the release file with "waf release" first! %r' % rfile)
-
- fdata = Utils.readf(rfile, m='rb')
- data = safe_urlencode([('pkgdata', fdata), ('pkgname', mod.APPNAME), ('pkgver', mod.VERSION)])
-
- req = Request(get_upload_url(), data)
- response = urlopen(req, timeout=TIMEOUT)
- data = response.read().strip()
-
- if sys.hexversion>0x300000f:
- data = data.decode('utf-8')
-
- if data != 'ok':
- self.fatal('Could not publish the package %r' % data)
-
-class constraint(object):
- def __init__(self, line=''):
- self.required_line = line
- self.info = []
-
- line = line.strip()
- if not line:
- return
-
- lst = line.split(',')
- if lst:
- self.pkgname = lst[0]
- self.required_version = lst[1]
- for k in lst:
- a, b, c = k.partition('=')
- if a and c:
- self.info.append((a, c))
- def __str__(self):
- buf = []
- buf.append(self.pkgname)
- buf.append(self.required_version)
- for k in self.info:
- buf.append('%s=%s' % k)
- return ','.join(buf)
-
- def __repr__(self):
- return "requires %s-%s" % (self.pkgname, self.required_version)
-
- def human_display(self, pkgname, pkgver):
- return '%s-%s requires %s-%s' % (pkgname, pkgver, self.pkgname, self.required_version)
-
- def why(self):
- ret = []
- for x in self.info:
- if x[0] == 'reason':
- ret.append(x[1])
- return ret
-
- def add_reason(self, reason):
- self.info.append(('reason', reason))
-
-def parse_constraints(text):
- assert(text is not None)
- constraints = []
- text = re.sub(re_com, '', text)
- lines = text.splitlines()
- for line in lines:
- line = line.strip()
- if not line:
- continue
- constraints.append(constraint(line))
- return constraints
-
-def list_package_versions(cachedir, pkgname):
- pkgdir = os.path.join(cachedir, pkgname)
- try:
- versions = os.listdir(pkgdir)
- except OSError:
- return []
- versions.sort(key=total_version_order)
- versions.reverse()
- return versions
-
-class package_reader(Context.Context):
- cmd = 'solver'
- fun = 'solver'
-
- def __init__(self, **kw):
- Context.Context.__init__(self, **kw)
-
- self.myproject = getattr(Context.g_module, 'APPNAME', 'project')
- self.myversion = getattr(Context.g_module, 'VERSION', '1.0')
- self.cache_constraints = {}
- self.constraints = []
-
- def compute_dependencies(self, filename=REQUIRES):
- text = Utils.readf(filename)
- data = safe_urlencode([('text', text)])
-
- if '--offline' in sys.argv:
- self.constraints = self.local_resolve(text)
- else:
- req = Request(get_resolve_url(), data)
- try:
- response = urlopen(req, timeout=TIMEOUT)
- except URLError as e:
- Logs.warn('The package server is down! %r', e)
- self.constraints = self.local_resolve(text)
- else:
- ret = response.read()
- try:
- ret = ret.decode('utf-8')
- except Exception:
- pass
- self.trace(ret)
- self.constraints = parse_constraints(ret)
- self.check_errors()
-
- def check_errors(self):
- errors = False
- for c in self.constraints:
- if not c.required_version:
- errors = True
-
- reasons = c.why()
- if len(reasons) == 1:
- Logs.error('%s but no matching package could be found in this repository', reasons[0])
- else:
- Logs.error('Conflicts on package %r:', c.pkgname)
- for r in reasons:
- Logs.error(' %s', r)
- if errors:
- self.fatal('The package requirements cannot be satisfied!')
-
- def load_constraints(self, pkgname, pkgver, requires=REQUIRES):
- try:
- return self.cache_constraints[(pkgname, pkgver)]
- except KeyError:
- text = Utils.readf(os.path.join(get_distnet_cache(), pkgname, pkgver, requires))
- ret = parse_constraints(text)
- self.cache_constraints[(pkgname, pkgver)] = ret
- return ret
-
- def apply_constraint(self, domain, constraint):
- vname = constraint.required_version.replace('*', '.*')
- rev = re.compile(vname, re.M)
- ret = [x for x in domain if rev.match(x)]
- return ret
-
- def trace(self, *k):
- if getattr(self, 'debug', None):
- Logs.error(*k)
-
- def solve(self, packages_to_versions={}, packages_to_constraints={}, pkgname='', pkgver='', todo=[], done=[]):
- # breadth first search
- n_packages_to_versions = dict(packages_to_versions)
- n_packages_to_constraints = dict(packages_to_constraints)
-
- self.trace("calling solve with %r %r %r" % (packages_to_versions, todo, done))
- done = done + [pkgname]
-
- constraints = self.load_constraints(pkgname, pkgver)
- self.trace("constraints %r" % constraints)
-
- for k in constraints:
- try:
- domain = n_packages_to_versions[k.pkgname]
- except KeyError:
- domain = list_package_versions(get_distnet_cache(), k.pkgname)
-
-
- self.trace("constraints?")
- if not k.pkgname in done:
- todo = todo + [k.pkgname]
-
- self.trace("domain before %s -> %s, %r" % (pkgname, k.pkgname, domain))
-
- # apply the constraint
- domain = self.apply_constraint(domain, k)
-
- self.trace("domain after %s -> %s, %r" % (pkgname, k.pkgname, domain))
-
- n_packages_to_versions[k.pkgname] = domain
-
- # then store the constraint applied
- constraints = list(packages_to_constraints.get(k.pkgname, []))
- constraints.append((pkgname, pkgver, k))
- n_packages_to_constraints[k.pkgname] = constraints
-
- if not domain:
- self.trace("no domain while processing constraint %r from %r %r" % (domain, pkgname, pkgver))
- return (n_packages_to_versions, n_packages_to_constraints)
-
- # next package on the todo list
- if not todo:
- return (n_packages_to_versions, n_packages_to_constraints)
-
- n_pkgname = todo[0]
- n_pkgver = n_packages_to_versions[n_pkgname][0]
- tmp = dict(n_packages_to_versions)
- tmp[n_pkgname] = [n_pkgver]
-
- self.trace("fixed point %s" % n_pkgname)
-
- return self.solve(tmp, n_packages_to_constraints, n_pkgname, n_pkgver, todo[1:], done)
-
- def get_results(self):
- return '\n'.join([str(c) for c in self.constraints])
-
- def solution_to_constraints(self, versions, constraints):
- solution = []
- for p in versions:
- c = constraint()
- solution.append(c)
-
- c.pkgname = p
- if versions[p]:
- c.required_version = versions[p][0]
- else:
- c.required_version = ''
- for (from_pkgname, from_pkgver, c2) in constraints.get(p, ''):
- c.add_reason(c2.human_display(from_pkgname, from_pkgver))
- return solution
-
- def local_resolve(self, text):
- self.cache_constraints[(self.myproject, self.myversion)] = parse_constraints(text)
- p2v = OrderedDict({self.myproject: [self.myversion]})
- (versions, constraints) = self.solve(p2v, {}, self.myproject, self.myversion, [])
- return self.solution_to_constraints(versions, constraints)
-
- def download_to_file(self, pkgname, pkgver, subdir, tmp):
- data = safe_urlencode([('pkgname', pkgname), ('pkgver', pkgver), ('pkgfile', subdir)])
- req = urlopen(get_download_url(), data, timeout=TIMEOUT)
- with open(tmp, 'wb') as f:
- while True:
- buf = req.read(8192)
- if not buf:
- break
- f.write(buf)
-
- def extract_tar(self, subdir, pkgdir, tmpfile):
- with tarfile.open(tmpfile) as f:
- temp = tempfile.mkdtemp(dir=pkgdir)
- try:
- f.extractall(temp)
- os.rename(temp, os.path.join(pkgdir, subdir))
- finally:
- try:
- shutil.rmtree(temp)
- except Exception:
- pass
-
- def get_pkg_dir(self, pkgname, pkgver, subdir):
- pkgdir = os.path.join(get_distnet_cache(), pkgname, pkgver)
- if not os.path.isdir(pkgdir):
- os.makedirs(pkgdir)
-
- target = os.path.join(pkgdir, subdir)
-
- if os.path.exists(target):
- return target
-
- (fd, tmp) = tempfile.mkstemp(dir=pkgdir)
- try:
- os.close(fd)
- self.download_to_file(pkgname, pkgver, subdir, tmp)
- if subdir == REQUIRES:
- os.rename(tmp, target)
- else:
- self.extract_tar(subdir, pkgdir, tmp)
- finally:
- try:
- os.remove(tmp)
- except OSError:
- pass
-
- return target
-
- def __iter__(self):
- if not self.constraints:
- self.compute_dependencies()
- for x in self.constraints:
- if x.pkgname == self.myproject:
- continue
- yield x
-
- def execute(self):
- self.compute_dependencies()
-
-packages = package_reader()
-
-def load_tools(ctx, extra):
- global packages
- for c in packages:
- packages.get_pkg_dir(c.pkgname, c.required_version, extra)
- noarchdir = packages.get_pkg_dir(c.pkgname, c.required_version, 'noarch')
- for x in os.listdir(noarchdir):
- if x.startswith('waf_') and x.endswith('.py'):
- ctx.load([x.rstrip('.py')], tooldir=[noarchdir])
-
-def options(opt):
- opt.add_option('--offline', action='store_true')
- packages.execute()
- load_tools(opt, REQUIRES)
-
-def configure(conf):
- load_tools(conf, conf.variant)
-
-def build(bld):
- load_tools(bld, bld.variant)
-
diff --git a/waflib/extras/doxygen.py b/waflib/extras/doxygen.py
deleted file mode 100644
index 28f56e9..0000000
--- a/waflib/extras/doxygen.py
+++ /dev/null
@@ -1,227 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Thomas Nagy 2008-2010 (ita)
-
-"""
-
-Doxygen support
-
-Variables passed to bld():
-* doxyfile -- the Doxyfile to use
-* doxy_tar -- destination archive for generated documentation (if desired)
-* install_path -- where to install the documentation
-* pars -- dictionary overriding doxygen configuration settings
-
-When using this tool, the wscript will look like:
-
- def options(opt):
- opt.load('doxygen')
-
- def configure(conf):
- conf.load('doxygen')
- # check conf.env.DOXYGEN, if it is mandatory
-
- def build(bld):
- if bld.env.DOXYGEN:
- bld(features="doxygen", doxyfile='Doxyfile', ...)
-"""
-
-import os, os.path, re
-from waflib import Task, Utils, Node
-from waflib.TaskGen import feature
-
-DOXY_STR = '"${DOXYGEN}" - '
-DOXY_FMTS = 'html latex man rft xml'.split()
-DOXY_FILE_PATTERNS = '*.' + ' *.'.join('''
-c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx hpp h++ idl odl cs php php3
-inc m mm py f90c cc cxx cpp c++ java ii ixx ipp i++ inl h hh hxx
-'''.split())
-
-re_rl = re.compile('\\\\\r*\n', re.MULTILINE)
-re_nl = re.compile('\r*\n', re.M)
-def parse_doxy(txt):
- tbl = {}
- txt = re_rl.sub('', txt)
- lines = re_nl.split(txt)
- for x in lines:
- x = x.strip()
- if not x or x.startswith('#') or x.find('=') < 0:
- continue
- if x.find('+=') >= 0:
- tmp = x.split('+=')
- key = tmp[0].strip()
- if key in tbl:
- tbl[key] += ' ' + '+='.join(tmp[1:]).strip()
- else:
- tbl[key] = '+='.join(tmp[1:]).strip()
- else:
- tmp = x.split('=')
- tbl[tmp[0].strip()] = '='.join(tmp[1:]).strip()
- return tbl
-
-class doxygen(Task.Task):
- vars = ['DOXYGEN', 'DOXYFLAGS']
- color = 'BLUE'
-
- def runnable_status(self):
- '''
- self.pars are populated in runnable_status - because this function is being
- run *before* both self.pars "consumers" - scan() and run()
-
- set output_dir (node) for the output
- '''
-
- for x in self.run_after:
- if not x.hasrun:
- return Task.ASK_LATER
-
- if not getattr(self, 'pars', None):
- txt = self.inputs[0].read()
- self.pars = parse_doxy(txt)
- if self.pars.get('OUTPUT_DIRECTORY'):
- # Use the path parsed from the Doxyfile as an absolute path
- output_node = self.inputs[0].parent.get_bld().make_node(self.pars['OUTPUT_DIRECTORY'])
- else:
- # If no OUTPUT_PATH was specified in the Doxyfile, build path from the Doxyfile name + '.doxy'
- output_node = self.inputs[0].parent.get_bld().make_node(self.inputs[0].name + '.doxy')
- output_node.mkdir()
- self.pars['OUTPUT_DIRECTORY'] = output_node.abspath()
-
- # Override with any parameters passed to the task generator
- if getattr(self.generator, 'pars', None):
- for k, v in self.generator.pars.items():
- self.pars[k] = v
-
- self.doxy_inputs = getattr(self, 'doxy_inputs', [])
- if not self.pars.get('INPUT'):
- self.doxy_inputs.append(self.inputs[0].parent)
- else:
- for i in self.pars.get('INPUT').split():
- if os.path.isabs(i):
- node = self.generator.bld.root.find_node(i)
- else:
- node = self.inputs[0].parent.find_node(i)
- if not node:
- self.generator.bld.fatal('Could not find the doxygen input %r' % i)
- self.doxy_inputs.append(node)
-
- if not getattr(self, 'output_dir', None):
- bld = self.generator.bld
- # Output path is always an absolute path as it was transformed above.
- self.output_dir = bld.root.find_dir(self.pars['OUTPUT_DIRECTORY'])
-
- self.signature()
- ret = Task.Task.runnable_status(self)
- if ret == Task.SKIP_ME:
- # in case the files were removed
- self.add_install()
- return ret
-
- def scan(self):
- exclude_patterns = self.pars.get('EXCLUDE_PATTERNS','').split()
- exclude_patterns = [pattern.replace('*/', '**/') for pattern in exclude_patterns]
- file_patterns = self.pars.get('FILE_PATTERNS','').split()
- if not file_patterns:
- file_patterns = DOXY_FILE_PATTERNS.split()
- if self.pars.get('RECURSIVE') == 'YES':
- file_patterns = ["**/%s" % pattern for pattern in file_patterns]
- nodes = []
- names = []
- for node in self.doxy_inputs:
- if os.path.isdir(node.abspath()):
- for m in node.ant_glob(incl=file_patterns, excl=exclude_patterns):
- nodes.append(m)
- else:
- nodes.append(node)
- return (nodes, names)
-
- def run(self):
- dct = self.pars.copy()
- code = '\n'.join(['%s = %s' % (x, dct[x]) for x in self.pars])
- code = code.encode() # for python 3
- #fmt = DOXY_STR % (self.inputs[0].parent.abspath())
- cmd = Utils.subst_vars(DOXY_STR, self.env)
- env = self.env.env or None
- proc = Utils.subprocess.Popen(cmd, shell=True, stdin=Utils.subprocess.PIPE, env=env, cwd=self.inputs[0].parent.abspath())
- proc.communicate(code)
- return proc.returncode
-
- def post_run(self):
- nodes = self.output_dir.ant_glob('**/*', quiet=True)
- for x in nodes:
- self.generator.bld.node_sigs[x] = self.uid()
- self.add_install()
- return Task.Task.post_run(self)
-
- def add_install(self):
- nodes = self.output_dir.ant_glob('**/*', quiet=True)
- self.outputs += nodes
- if getattr(self.generator, 'install_path', None):
- if not getattr(self.generator, 'doxy_tar', None):
- self.generator.add_install_files(install_to=self.generator.install_path,
- install_from=self.outputs,
- postpone=False,
- cwd=self.output_dir,
- relative_trick=True)
-
-class tar(Task.Task):
- "quick tar creation"
- run_str = '${TAR} ${TAROPTS} ${TGT} ${SRC}'
- color = 'RED'
- after = ['doxygen']
- def runnable_status(self):
- for x in getattr(self, 'input_tasks', []):
- if not x.hasrun:
- return Task.ASK_LATER
-
- if not getattr(self, 'tar_done_adding', None):
- # execute this only once
- self.tar_done_adding = True
- for x in getattr(self, 'input_tasks', []):
- self.set_inputs(x.outputs)
- if not self.inputs:
- return Task.SKIP_ME
- return Task.Task.runnable_status(self)
-
- def __str__(self):
- tgt_str = ' '.join([a.path_from(a.ctx.launch_node()) for a in self.outputs])
- return '%s: %s\n' % (self.__class__.__name__, tgt_str)
-
-@feature('doxygen')
-def process_doxy(self):
- if not getattr(self, 'doxyfile', None):
- self.bld.fatal('no doxyfile variable specified??')
-
- node = self.doxyfile
- if not isinstance(node, Node.Node):
- node = self.path.find_resource(node)
- if not node:
- self.bld.fatal('doxygen file %s not found' % self.doxyfile)
-
- # the task instance
- dsk = self.create_task('doxygen', node)
-
- if getattr(self, 'doxy_tar', None):
- tsk = self.create_task('tar')
- tsk.input_tasks = [dsk]
- tsk.set_outputs(self.path.find_or_declare(self.doxy_tar))
- if self.doxy_tar.endswith('bz2'):
- tsk.env['TAROPTS'] = ['cjf']
- elif self.doxy_tar.endswith('gz'):
- tsk.env['TAROPTS'] = ['czf']
- else:
- tsk.env['TAROPTS'] = ['cf']
- if getattr(self, 'install_path', None):
- self.add_install_files(install_to=self.install_path, install_from=tsk.outputs)
-
-def configure(conf):
- '''
- Check if doxygen and tar commands are present in the system
-
- If the commands are present, then conf.env.DOXYGEN and conf.env.TAR
- variables will be set. Detection can be controlled by setting DOXYGEN and
- TAR environmental variables.
- '''
-
- conf.find_program('doxygen', var='DOXYGEN', mandatory=False)
- conf.find_program('tar', var='TAR', mandatory=False)
diff --git a/waflib/extras/dpapi.py b/waflib/extras/dpapi.py
deleted file mode 100644
index b94d482..0000000
--- a/waflib/extras/dpapi.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Matt Clarkson, 2012
-
-'''
-DPAPI access library (http://msdn.microsoft.com/en-us/library/ms995355.aspx)
-This file uses code originally created by Crusher Joe:
-http://article.gmane.org/gmane.comp.python.ctypes/420
-And modified by Wayne Koorts:
-http://stackoverflow.com/questions/463832/using-dpapi-with-python
-'''
-
-from ctypes import windll, byref, cdll, Structure, POINTER, c_char, c_buffer
-from ctypes.wintypes import DWORD
-from waflib.Configure import conf
-
-LocalFree = windll.kernel32.LocalFree
-memcpy = cdll.msvcrt.memcpy
-CryptProtectData = windll.crypt32.CryptProtectData
-CryptUnprotectData = windll.crypt32.CryptUnprotectData
-CRYPTPROTECT_UI_FORBIDDEN = 0x01
-try:
- extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'.encode('ascii')
-except AttributeError:
- extra_entropy = 'cl;ad13 \0al;323kjd #(adl;k$#ajsd'
-
-class DATA_BLOB(Structure):
- _fields_ = [
- ('cbData', DWORD),
- ('pbData', POINTER(c_char))
- ]
-
-def get_data(blob_out):
- cbData = int(blob_out.cbData)
- pbData = blob_out.pbData
- buffer = c_buffer(cbData)
- memcpy(buffer, pbData, cbData)
- LocalFree(pbData)
- return buffer.raw
-
-@conf
-def dpapi_encrypt_data(self, input_bytes, entropy = extra_entropy):
- '''
- Encrypts data and returns byte string
-
- :param input_bytes: The data to be encrypted
- :type input_bytes: String or Bytes
- :param entropy: Extra entropy to add to the encryption process (optional)
- :type entropy: String or Bytes
- '''
- if not isinstance(input_bytes, bytes) or not isinstance(entropy, bytes):
- self.fatal('The inputs to dpapi must be bytes')
- buffer_in = c_buffer(input_bytes, len(input_bytes))
- buffer_entropy = c_buffer(entropy, len(entropy))
- blob_in = DATA_BLOB(len(input_bytes), buffer_in)
- blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
- blob_out = DATA_BLOB()
-
- if CryptProtectData(byref(blob_in), 'python_data', byref(blob_entropy),
- None, None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
- return get_data(blob_out)
- else:
- self.fatal('Failed to decrypt data')
-
-@conf
-def dpapi_decrypt_data(self, encrypted_bytes, entropy = extra_entropy):
- '''
- Decrypts data and returns byte string
-
- :param encrypted_bytes: The encrypted data
- :type encrypted_bytes: Bytes
- :param entropy: Extra entropy to add to the encryption process (optional)
- :type entropy: String or Bytes
- '''
- if not isinstance(encrypted_bytes, bytes) or not isinstance(entropy, bytes):
- self.fatal('The inputs to dpapi must be bytes')
- buffer_in = c_buffer(encrypted_bytes, len(encrypted_bytes))
- buffer_entropy = c_buffer(entropy, len(entropy))
- blob_in = DATA_BLOB(len(encrypted_bytes), buffer_in)
- blob_entropy = DATA_BLOB(len(entropy), buffer_entropy)
- blob_out = DATA_BLOB()
- if CryptUnprotectData(byref(blob_in), None, byref(blob_entropy), None,
- None, CRYPTPROTECT_UI_FORBIDDEN, byref(blob_out)):
- return get_data(blob_out)
- else:
- self.fatal('Failed to decrypt data')
-
diff --git a/waflib/extras/eclipse.py b/waflib/extras/eclipse.py
deleted file mode 100644
index bb78741..0000000
--- a/waflib/extras/eclipse.py
+++ /dev/null
@@ -1,431 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Eclipse CDT 5.0 generator for Waf
-# Richard Quirk 2009-1011 (New BSD License)
-# Thomas Nagy 2011 (ported to Waf 1.6)
-
-"""
-Usage:
-
-def options(opt):
- opt.load('eclipse')
-
-$ waf configure eclipse
-"""
-
-import sys, os
-from waflib import Utils, Logs, Context, Build, TaskGen, Scripting, Errors, Node
-from xml.dom.minidom import Document
-
-STANDARD_INCLUDES = [ '/usr/local/include', '/usr/include' ]
-
-oe_cdt = 'org.eclipse.cdt'
-cdt_mk = oe_cdt + '.make.core'
-cdt_core = oe_cdt + '.core'
-cdt_bld = oe_cdt + '.build.core'
-extbuilder_dir = '.externalToolBuilders'
-extbuilder_name = 'Waf_Builder.launch'
-
-class eclipse(Build.BuildContext):
- cmd = 'eclipse'
- fun = Scripting.default_cmd
-
- def execute(self):
- """
- Entry point
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
-
- appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
- self.create_cproject(appname, pythonpath=self.env['ECLIPSE_PYTHON_PATH'])
-
- # Helper to dump the XML document content to XML with UTF-8 encoding
- def write_conf_to_xml(self, filename, document):
- self.srcnode.make_node(filename).write(document.toprettyxml(encoding='UTF-8'), flags='wb')
-
- def create_cproject(self, appname, workspace_includes=[], pythonpath=[]):
- """
- Create the Eclipse CDT .project and .cproject files
- @param appname The name that will appear in the Project Explorer
- @param build The BuildContext object to extract includes from
- @param workspace_includes Optional project includes to prevent
- "Unresolved Inclusion" errors in the Eclipse editor
- @param pythonpath Optional project specific python paths
- """
- hasc = hasjava = haspython = False
- source_dirs = []
- cpppath = self.env['CPPPATH']
- javasrcpath = []
- javalibpath = []
- includes = STANDARD_INCLUDES
- if sys.platform != 'win32':
- cc = self.env.CC or self.env.CXX
- if cc:
- cmd = cc + ['-xc++', '-E', '-Wp,-v', '-']
- try:
- gccout = self.cmd_and_log(cmd, output=Context.STDERR, quiet=Context.BOTH, input='\n'.encode()).splitlines()
- except Errors.WafError:
- pass
- else:
- includes = []
- for ipath in gccout:
- if ipath.startswith(' /'):
- includes.append(ipath[1:])
- cpppath += includes
- Logs.warn('Generating Eclipse CDT project files')
-
- for g in self.groups:
- for tg in g:
- if not isinstance(tg, TaskGen.task_gen):
- continue
-
- tg.post()
-
- # Add local Python modules paths to configuration so object resolving will work in IDE
- # This may also contain generated files (ie. pyqt5 or protoc) that get picked from build
- if 'py' in tg.features:
- pypath = tg.path.relpath()
- py_installfrom = getattr(tg, 'install_from', None)
- if isinstance(py_installfrom, Node.Node):
- pypath = py_installfrom.path_from(self.root.make_node(self.top_dir))
- if pypath not in pythonpath:
- pythonpath.append(pypath)
- haspython = True
-
- # Add Java source directories so object resolving works in IDE
- # This may also contain generated files (ie. protoc) that get picked from build
- if 'javac' in tg.features:
- java_src = tg.path.relpath()
- java_srcdir = getattr(tg.javac_task, 'srcdir', None)
- if java_srcdir:
- if isinstance(java_srcdir, Node.Node):
- java_srcdir = [java_srcdir]
- for x in Utils.to_list(java_srcdir):
- x = x.path_from(self.root.make_node(self.top_dir))
- if x not in javasrcpath:
- javasrcpath.append(x)
- else:
- if java_src not in javasrcpath:
- javasrcpath.append(java_src)
- hasjava = True
-
- # Check if there are external dependencies and add them as external jar so they will be resolved by Eclipse
- usedlibs=getattr(tg, 'use', [])
- for x in Utils.to_list(usedlibs):
- for cl in Utils.to_list(tg.env['CLASSPATH_'+x]):
- if cl not in javalibpath:
- javalibpath.append(cl)
-
- if not getattr(tg, 'link_task', None):
- continue
-
- features = Utils.to_list(getattr(tg, 'features', ''))
-
- is_cc = 'c' in features or 'cxx' in features
-
- incnodes = tg.to_incnodes(tg.to_list(getattr(tg, 'includes', [])) + tg.env['INCLUDES'])
- for p in incnodes:
- path = p.path_from(self.srcnode)
-
- if (path.startswith("/")):
- cpppath.append(path)
- else:
- workspace_includes.append(path)
-
- if is_cc and path not in source_dirs:
- source_dirs.append(path)
-
- hasc = True
-
- waf_executable = os.path.abspath(sys.argv[0])
- project = self.impl_create_project(sys.executable, appname, hasc, hasjava, haspython, waf_executable)
- self.write_conf_to_xml('.project', project)
-
- if hasc:
- project = self.impl_create_cproject(sys.executable, waf_executable, appname, workspace_includes, cpppath, source_dirs)
- self.write_conf_to_xml('.cproject', project)
-
- if haspython:
- project = self.impl_create_pydevproject(sys.path, pythonpath)
- self.write_conf_to_xml('.pydevproject', project)
-
- if hasjava:
- project = self.impl_create_javaproject(javasrcpath, javalibpath)
- self.write_conf_to_xml('.classpath', project)
-
- def impl_create_project(self, executable, appname, hasc, hasjava, haspython, waf_executable):
- doc = Document()
- projectDescription = doc.createElement('projectDescription')
- self.add(doc, projectDescription, 'name', appname)
- self.add(doc, projectDescription, 'comment')
- self.add(doc, projectDescription, 'projects')
- buildSpec = self.add(doc, projectDescription, 'buildSpec')
- buildCommand = self.add(doc, buildSpec, 'buildCommand')
- self.add(doc, buildCommand, 'triggers', 'clean,full,incremental,')
- arguments = self.add(doc, buildCommand, 'arguments')
- dictionaries = {}
-
- # If CDT is present, instruct this one to call waf as it is more flexible (separate build/clean ...)
- if hasc:
- self.add(doc, buildCommand, 'name', oe_cdt + '.managedbuilder.core.genmakebuilder')
- # the default make-style targets are overwritten by the .cproject values
- dictionaries = {
- cdt_mk + '.contents': cdt_mk + '.activeConfigSettings',
- cdt_mk + '.enableAutoBuild': 'false',
- cdt_mk + '.enableCleanBuild': 'true',
- cdt_mk + '.enableFullBuild': 'true',
- }
- else:
- # Otherwise for Java/Python an external builder tool is created that will call waf build
- self.add(doc, buildCommand, 'name', 'org.eclipse.ui.externaltools.ExternalToolBuilder')
- dictionaries = {
- 'LaunchConfigHandle': '<project>/%s/%s'%(extbuilder_dir, extbuilder_name),
- }
- # The definition is in a separate directory XML file
- try:
- os.mkdir(extbuilder_dir)
- except OSError:
- pass # Ignore error if already exists
-
- # Populate here the external builder XML calling waf
- builder = Document()
- launchConfiguration = doc.createElement('launchConfiguration')
- launchConfiguration.setAttribute('type', 'org.eclipse.ui.externaltools.ProgramBuilderLaunchConfigurationType')
- self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.debug.ui.ATTR_LAUNCH_IN_BACKGROUND', 'value': 'false'})
- self.add(doc, launchConfiguration, 'booleanAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TRIGGERS_CONFIGURED', 'value': 'true'})
- self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_LOCATION', 'value': waf_executable})
- self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_RUN_BUILD_KINDS', 'value': 'full,incremental,'})
- self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_TOOL_ARGUMENTS', 'value': 'build'})
- self.add(doc, launchConfiguration, 'stringAttribute', {'key': 'org.eclipse.ui.externaltools.ATTR_WORKING_DIRECTORY', 'value': '${project_loc}'})
- builder.appendChild(launchConfiguration)
- # And write the XML to the file references before
- self.write_conf_to_xml('%s%s%s'%(extbuilder_dir, os.path.sep, extbuilder_name), builder)
-
-
- for k, v in dictionaries.items():
- self.addDictionary(doc, arguments, k, v)
-
- natures = self.add(doc, projectDescription, 'natures')
-
- if hasc:
- nature_list = """
- core.ccnature
- managedbuilder.core.ScannerConfigNature
- managedbuilder.core.managedBuildNature
- core.cnature
- """.split()
- for n in nature_list:
- self.add(doc, natures, 'nature', oe_cdt + '.' + n)
-
- if haspython:
- self.add(doc, natures, 'nature', 'org.python.pydev.pythonNature')
- if hasjava:
- self.add(doc, natures, 'nature', 'org.eclipse.jdt.core.javanature')
-
- doc.appendChild(projectDescription)
- return doc
-
- def impl_create_cproject(self, executable, waf_executable, appname, workspace_includes, cpppath, source_dirs=[]):
- doc = Document()
- doc.appendChild(doc.createProcessingInstruction('fileVersion', '4.0.0'))
- cconf_id = cdt_core + '.default.config.1'
- cproject = doc.createElement('cproject')
- storageModule = self.add(doc, cproject, 'storageModule',
- {'moduleId': cdt_core + '.settings'})
- cconf = self.add(doc, storageModule, 'cconfiguration', {'id':cconf_id})
-
- storageModule = self.add(doc, cconf, 'storageModule',
- {'buildSystemId': oe_cdt + '.managedbuilder.core.configurationDataProvider',
- 'id': cconf_id,
- 'moduleId': cdt_core + '.settings',
- 'name': 'Default'})
-
- self.add(doc, storageModule, 'externalSettings')
-
- extensions = self.add(doc, storageModule, 'extensions')
- extension_list = """
- VCErrorParser
- MakeErrorParser
- GCCErrorParser
- GASErrorParser
- GLDErrorParser
- """.split()
- self.add(doc, extensions, 'extension', {'id': cdt_core + '.ELF', 'point':cdt_core + '.BinaryParser'})
- for e in extension_list:
- self.add(doc, extensions, 'extension', {'id': cdt_core + '.' + e, 'point':cdt_core + '.ErrorParser'})
-
- storageModule = self.add(doc, cconf, 'storageModule',
- {'moduleId': 'cdtBuildSystem', 'version': '4.0.0'})
- config = self.add(doc, storageModule, 'configuration',
- {'artifactName': appname,
- 'id': cconf_id,
- 'name': 'Default',
- 'parent': cdt_bld + '.prefbase.cfg'})
- folderInfo = self.add(doc, config, 'folderInfo',
- {'id': cconf_id+'.', 'name': '/', 'resourcePath': ''})
-
- toolChain = self.add(doc, folderInfo, 'toolChain',
- {'id': cdt_bld + '.prefbase.toolchain.1',
- 'name': 'No ToolChain',
- 'resourceTypeBasedDiscovery': 'false',
- 'superClass': cdt_bld + '.prefbase.toolchain'})
-
- self.add(doc, toolChain, 'targetPlatform', {'binaryParser': 'org.eclipse.cdt.core.ELF', 'id': cdt_bld + '.prefbase.toolchain.1', 'name': ''})
-
- waf_build = '"%s" %s'%(waf_executable, eclipse.fun)
- waf_clean = '"%s" clean'%(waf_executable)
- self.add(doc, toolChain, 'builder',
- {'autoBuildTarget': waf_build,
- 'command': executable,
- 'enableAutoBuild': 'false',
- 'cleanBuildTarget': waf_clean,
- 'enableIncrementalBuild': 'true',
- 'id': cdt_bld + '.settings.default.builder.1',
- 'incrementalBuildTarget': waf_build,
- 'managedBuildOn': 'false',
- 'name': 'Gnu Make Builder',
- 'superClass': cdt_bld + '.settings.default.builder'})
-
- tool_index = 1;
- for tool_name in ("Assembly", "GNU C++", "GNU C"):
- tool = self.add(doc, toolChain, 'tool',
- {'id': cdt_bld + '.settings.holder.' + str(tool_index),
- 'name': tool_name,
- 'superClass': cdt_bld + '.settings.holder'})
- if cpppath or workspace_includes:
- incpaths = cdt_bld + '.settings.holder.incpaths'
- option = self.add(doc, tool, 'option',
- {'id': incpaths + '.' + str(tool_index),
- 'name': 'Include Paths',
- 'superClass': incpaths,
- 'valueType': 'includePath'})
- for i in workspace_includes:
- self.add(doc, option, 'listOptionValue',
- {'builtIn': 'false',
- 'value': '"${workspace_loc:/%s/%s}"'%(appname, i)})
- for i in cpppath:
- self.add(doc, option, 'listOptionValue',
- {'builtIn': 'false',
- 'value': '"%s"'%(i)})
- if tool_name == "GNU C++" or tool_name == "GNU C":
- self.add(doc,tool,'inputType',{ 'id':'org.eclipse.cdt.build.core.settings.holder.inType.' + str(tool_index), \
- 'languageId':'org.eclipse.cdt.core.gcc' if tool_name == "GNU C" else 'org.eclipse.cdt.core.g++','languageName':tool_name, \
- 'sourceContentType':'org.eclipse.cdt.core.cSource,org.eclipse.cdt.core.cHeader', \
- 'superClass':'org.eclipse.cdt.build.core.settings.holder.inType' })
- tool_index += 1
-
- if source_dirs:
- sourceEntries = self.add(doc, config, 'sourceEntries')
- for i in source_dirs:
- self.add(doc, sourceEntries, 'entry',
- {'excluding': i,
- 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
- 'kind': 'sourcePath',
- 'name': ''})
- self.add(doc, sourceEntries, 'entry',
- {
- 'flags': 'VALUE_WORKSPACE_PATH|RESOLVED',
- 'kind': 'sourcePath',
- 'name': i})
-
- storageModule = self.add(doc, cconf, 'storageModule',
- {'moduleId': cdt_mk + '.buildtargets'})
- buildTargets = self.add(doc, storageModule, 'buildTargets')
- def addTargetWrap(name, runAll):
- return self.addTarget(doc, buildTargets, executable, name,
- '"%s" %s'%(waf_executable, name), runAll)
- addTargetWrap('configure', True)
- addTargetWrap('dist', False)
- addTargetWrap('install', False)
- addTargetWrap('check', False)
-
- storageModule = self.add(doc, cproject, 'storageModule',
- {'moduleId': 'cdtBuildSystem',
- 'version': '4.0.0'})
-
- self.add(doc, storageModule, 'project', {'id': '%s.null.1'%appname, 'name': appname})
-
- doc.appendChild(cproject)
- return doc
-
- def impl_create_pydevproject(self, system_path, user_path):
- # create a pydevproject file
- doc = Document()
- doc.appendChild(doc.createProcessingInstruction('eclipse-pydev', 'version="1.0"'))
- pydevproject = doc.createElement('pydev_project')
- prop = self.add(doc, pydevproject,
- 'pydev_property',
- 'python %d.%d'%(sys.version_info[0], sys.version_info[1]))
- prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_VERSION')
- prop = self.add(doc, pydevproject, 'pydev_property', 'Default')
- prop.setAttribute('name', 'org.python.pydev.PYTHON_PROJECT_INTERPRETER')
- # add waf's paths
- wafadmin = [p for p in system_path if p.find('wafadmin') != -1]
- if wafadmin:
- prop = self.add(doc, pydevproject, 'pydev_pathproperty',
- {'name':'org.python.pydev.PROJECT_EXTERNAL_SOURCE_PATH'})
- for i in wafadmin:
- self.add(doc, prop, 'path', i)
- if user_path:
- prop = self.add(doc, pydevproject, 'pydev_pathproperty',
- {'name':'org.python.pydev.PROJECT_SOURCE_PATH'})
- for i in user_path:
- self.add(doc, prop, 'path', '/${PROJECT_DIR_NAME}/'+i)
-
- doc.appendChild(pydevproject)
- return doc
-
- def impl_create_javaproject(self, javasrcpath, javalibpath):
- # create a .classpath file for java usage
- doc = Document()
- javaproject = doc.createElement('classpath')
- if javasrcpath:
- for i in javasrcpath:
- self.add(doc, javaproject, 'classpathentry',
- {'kind': 'src', 'path': i})
-
- if javalibpath:
- for i in javalibpath:
- self.add(doc, javaproject, 'classpathentry',
- {'kind': 'lib', 'path': i})
-
- self.add(doc, javaproject, 'classpathentry', {'kind': 'con', 'path': 'org.eclipse.jdt.launching.JRE_CONTAINER'})
- self.add(doc, javaproject, 'classpathentry', {'kind': 'output', 'path': self.bldnode.name })
- doc.appendChild(javaproject)
- return doc
-
- def addDictionary(self, doc, parent, k, v):
- dictionary = self.add(doc, parent, 'dictionary')
- self.add(doc, dictionary, 'key', k)
- self.add(doc, dictionary, 'value', v)
- return dictionary
-
- def addTarget(self, doc, buildTargets, executable, name, buildTarget, runAllBuilders=True):
- target = self.add(doc, buildTargets, 'target',
- {'name': name,
- 'path': '',
- 'targetID': oe_cdt + '.build.MakeTargetBuilder'})
- self.add(doc, target, 'buildCommand', executable)
- self.add(doc, target, 'buildArguments', None)
- self.add(doc, target, 'buildTarget', buildTarget)
- self.add(doc, target, 'stopOnError', 'true')
- self.add(doc, target, 'useDefaultCommand', 'false')
- self.add(doc, target, 'runAllBuilders', str(runAllBuilders).lower())
-
- def add(self, doc, parent, tag, value = None):
- el = doc.createElement(tag)
- if (value):
- if type(value) == type(str()):
- el.appendChild(doc.createTextNode(value))
- elif type(value) == type(dict()):
- self.setAttributes(el, value)
- parent.appendChild(el)
- return el
-
- def setAttributes(self, node, attrs):
- for k, v in attrs.items():
- node.setAttribute(k, v)
-
diff --git a/waflib/extras/erlang.py b/waflib/extras/erlang.py
deleted file mode 100644
index 49f6d5b..0000000
--- a/waflib/extras/erlang.py
+++ /dev/null
@@ -1,110 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-# Przemyslaw Rzepecki, 2016
-
-"""
-Erlang support
-"""
-
-import re
-from waflib import Task, TaskGen
-from waflib.TaskGen import feature, after_method, before_method
-# to load the method "to_incnodes" below
-from waflib.Tools import ccroot
-
-# Those flags are required by the Erlang VM to execute/evaluate code in
-# non-interactive mode. It is used in this tool to create Erlang modules
-# documentation and run unit tests. The user can pass additional arguments to the
-# 'erl' command with ERL_FLAGS environment variable.
-EXEC_NON_INTERACTIVE = ['-noshell', '-noinput', '-eval']
-
-def configure(conf):
- conf.find_program('erlc', var='ERLC')
- conf.find_program('erl', var='ERL')
- conf.add_os_flags('ERLC_FLAGS')
- conf.add_os_flags('ERL_FLAGS')
- conf.env.ERLC_DEF_PATTERN = '-D%s'
- conf.env.ERLC_INC_PATTERN = '-I%s'
-
-@TaskGen.extension('.erl')
-def process_erl_node(self, node):
- tsk = self.create_task('erl', node, node.change_ext('.beam'))
- tsk.erlc_incnodes = [tsk.outputs[0].parent] + self.to_incnodes(self.includes)
- tsk.env.append_value('ERLC_INCPATHS', [x.abspath() for x in tsk.erlc_incnodes])
- tsk.env.append_value('ERLC_DEFINES', self.to_list(getattr(self, 'defines', [])))
- tsk.env.append_value('ERLC_FLAGS', self.to_list(getattr(self, 'flags', [])))
- tsk.cwd = tsk.outputs[0].parent
-
-class erl(Task.Task):
- color = 'GREEN'
- run_str = '${ERLC} ${ERL_FLAGS} ${ERLC_INC_PATTERN:ERLC_INCPATHS} ${ERLC_DEF_PATTERN:ERLC_DEFINES} ${SRC}'
-
- def scan(task):
- node = task.inputs[0]
-
- deps = []
- scanned = set([])
- nodes_to_scan = [node]
-
- for n in nodes_to_scan:
- if n.abspath() in scanned:
- continue
-
- for i in re.findall('-include\("(.*)"\)\.', n.read()):
- for d in task.erlc_incnodes:
- r = d.find_node(i)
- if r:
- deps.append(r)
- nodes_to_scan.append(r)
- break
- scanned.add(n.abspath())
-
- return (deps, [])
-
-@TaskGen.extension('.beam')
-def process(self, node):
- pass
-
-
-class erl_test(Task.Task):
- color = 'BLUE'
- run_str = '${ERL} ${ERL_FLAGS} ${ERL_TEST_FLAGS}'
-
-@feature('eunit')
-@after_method('process_source')
-def add_erl_test_run(self):
- test_modules = [t.outputs[0] for t in self.tasks]
- test_task = self.create_task('erl_test')
- test_task.set_inputs(self.source + test_modules)
- test_task.cwd = test_modules[0].parent
-
- test_task.env.append_value('ERL_FLAGS', self.to_list(getattr(self, 'flags', [])))
-
- test_list = ", ".join([m.change_ext("").path_from(test_task.cwd)+":test()" for m in test_modules])
- test_flag = 'halt(case lists:all(fun(Elem) -> Elem == ok end, [%s]) of true -> 0; false -> 1 end).' % test_list
- test_task.env.append_value('ERL_TEST_FLAGS', EXEC_NON_INTERACTIVE)
- test_task.env.append_value('ERL_TEST_FLAGS', test_flag)
-
-
-class edoc(Task.Task):
- color = 'BLUE'
- run_str = "${ERL} ${ERL_FLAGS} ${ERL_DOC_FLAGS}"
- def keyword(self):
- return 'Generating edoc'
-
-@feature('edoc')
-@before_method('process_source')
-def add_edoc_task(self):
- # do not process source, it would create double erl->beam task
- self.meths.remove('process_source')
- e = self.path.find_resource(self.source)
- t = e.change_ext('.html')
- png = t.parent.make_node('erlang.png')
- css = t.parent.make_node('stylesheet.css')
- tsk = self.create_task('edoc', e, [t, png, css])
- tsk.cwd = tsk.outputs[0].parent
- tsk.env.append_value('ERL_DOC_FLAGS', EXEC_NON_INTERACTIVE)
- tsk.env.append_value('ERL_DOC_FLAGS', 'edoc:files(["%s"]), halt(0).' % tsk.inputs[0].abspath())
- # TODO the above can break if a file path contains '"'
-
diff --git a/waflib/extras/fast_partial.py b/waflib/extras/fast_partial.py
deleted file mode 100644
index b3af513..0000000
--- a/waflib/extras/fast_partial.py
+++ /dev/null
@@ -1,518 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2017-2018 (ita)
-
-"""
-A system for fast partial rebuilds
-
-Creating a large amount of task objects up front can take some time.
-By making a few assumptions, it is possible to avoid posting creating
-task objects for targets that are already up-to-date.
-
-On a silly benchmark the gain observed for 1M tasks can be 5m->10s
-for a single file change.
-
-Usage::
-
- def options(opt):
- opt.load('fast_partial')
-
-Assuptions:
-* Mostly for C/C++/Fortran targets with link tasks (object-only targets are not handled)
-* For full project builds: no --targets and no pruning from subfolders
-* The installation phase is ignored
-* `use=` dependencies are specified up front even across build groups
-* Task generator source files are not obtained from globs
-
-Implementation details:
-* The first layer obtains file timestamps to recalculate file hashes only
- when necessary (similar to md5_tstamp); the timestamps are then stored
- in a dedicated pickle file
-* A second layer associates each task generator to a file set to help
- detecting changes. Task generators are to create their tasks only when
- the related files have been modified. A specific db file is created
- to store such data (5m -> 1m10)
-* A third layer binds build context proxies onto task generators, replacing
- the default context. While loading data for the full build uses more memory
- (4GB -> 9GB), partial builds are then much faster (1m10 -> 13s)
-* A fourth layer enables a 2-level cache on file signatures to
- reduce the size of the main pickle file (13s -> 10s)
-"""
-
-import os
-from waflib import Build, Context, Errors, Logs, Task, TaskGen, Utils
-from waflib.TaskGen import feature, after_method, taskgen_method
-import waflib.Node
-
-DONE = 0
-DIRTY = 1
-NEEDED = 2
-
-SKIPPABLE = ['cshlib', 'cxxshlib', 'cstlib', 'cxxstlib', 'cprogram', 'cxxprogram']
-
-TSTAMP_DB = '.wafpickle_tstamp_db_file'
-
-SAVED_ATTRS = 'root node_sigs task_sigs imp_sigs raw_deps node_deps'.split()
-
-class bld_proxy(object):
- def __init__(self, bld):
- object.__setattr__(self, 'bld', bld)
-
- object.__setattr__(self, 'node_class', type('Nod3', (waflib.Node.Node,), {}))
- self.node_class.__module__ = 'waflib.Node'
- self.node_class.ctx = self
-
- object.__setattr__(self, 'root', self.node_class('', None))
- for x in SAVED_ATTRS:
- if x != 'root':
- object.__setattr__(self, x, {})
-
- self.fix_nodes()
-
- def __setattr__(self, name, value):
- bld = object.__getattribute__(self, 'bld')
- setattr(bld, name, value)
-
- def __delattr__(self, name):
- bld = object.__getattribute__(self, 'bld')
- delattr(bld, name)
-
- def __getattribute__(self, name):
- try:
- return object.__getattribute__(self, name)
- except AttributeError:
- bld = object.__getattribute__(self, 'bld')
- return getattr(bld, name)
-
- def __call__(self, *k, **kw):
- return self.bld(*k, **kw)
-
- def fix_nodes(self):
- for x in ('srcnode', 'path', 'bldnode'):
- node = self.root.find_dir(getattr(self.bld, x).abspath())
- object.__setattr__(self, x, node)
-
- def set_key(self, store_key):
- object.__setattr__(self, 'store_key', store_key)
-
- def fix_tg_path(self, *tgs):
- # changing Node objects on task generators is possible
- # yet, all Node objects must belong to the same parent
- for tg in tgs:
- tg.path = self.root.make_node(tg.path.abspath())
-
- def restore(self):
- dbfn = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
- Logs.debug('rev_use: reading %s', dbfn)
- try:
- data = Utils.readf(dbfn, 'rb')
- except (EnvironmentError, EOFError):
- # handle missing file/empty file
- Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
- else:
- try:
- waflib.Node.pickle_lock.acquire()
- waflib.Node.Nod3 = self.node_class
- try:
- data = Build.cPickle.loads(data)
- except Exception as e:
- Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
- else:
- for x in SAVED_ATTRS:
- object.__setattr__(self, x, data.get(x, {}))
- finally:
- waflib.Node.pickle_lock.release()
- self.fix_nodes()
-
- def store(self):
- data = {}
- for x in Build.SAVED_ATTRS:
- data[x] = getattr(self, x)
- db = os.path.join(self.variant_dir, Context.DBFILE + self.store_key)
-
- try:
- waflib.Node.pickle_lock.acquire()
- waflib.Node.Nod3 = self.node_class
- x = Build.cPickle.dumps(data, Build.PROTOCOL)
- finally:
- waflib.Node.pickle_lock.release()
-
- Logs.debug('rev_use: storing %s', db)
- Utils.writef(db + '.tmp', x, m='wb')
- try:
- st = os.stat(db)
- os.remove(db)
- if not Utils.is_win32:
- os.chown(db + '.tmp', st.st_uid, st.st_gid)
- except (AttributeError, OSError):
- pass
- os.rename(db + '.tmp', db)
-
-class bld(Build.BuildContext):
- def __init__(self, **kw):
- super(bld, self).__init__(**kw)
- self.hashes_md5_tstamp = {}
-
- def __call__(self, *k, **kw):
- # this is one way of doing it, one could use a task generator method too
- bld = kw['bld'] = bld_proxy(self)
- ret = TaskGen.task_gen(*k, **kw)
- self.task_gen_cache_names = {}
- self.add_to_group(ret, group=kw.get('group'))
- ret.bld = bld
- bld.set_key(ret.path.abspath().replace(os.sep, '') + str(ret.idx))
- return ret
-
- def is_dirty(self):
- return True
-
- def store_tstamps(self):
- # Called after a build is finished
- # For each task generator, record all files involved in task objects
- # optimization: done only if there was something built
- do_store = False
- try:
- f_deps = self.f_deps
- except AttributeError:
- f_deps = self.f_deps = {}
- self.f_tstamps = {}
-
- allfiles = set()
- for g in self.groups:
- for tg in g:
- try:
- staleness = tg.staleness
- except AttributeError:
- staleness = DIRTY
-
- if staleness != DIRTY:
- # DONE case: there was nothing built
- # NEEDED case: the tg was brought in because of 'use' propagation
- # but nothing really changed for them, there may be incomplete
- # tasks (object files) and in this case it is best to let the next build
- # figure out if an input/output file changed
- continue
-
- do_cache = False
- for tsk in tg.tasks:
- if tsk.hasrun == Task.SUCCESS:
- do_cache = True
- pass
- elif tsk.hasrun == Task.SKIPPED:
- pass
- else:
- # one failed task, clear the cache for this tg
- try:
- del f_deps[(tg.path.abspath(), tg.idx)]
- except KeyError:
- pass
- else:
- # just store the new state because there is a change
- do_store = True
-
- # skip the rest because there is no valid cache possible
- break
- else:
- if not do_cache:
- # all skipped, but is there anything in cache?
- try:
- f_deps[(tg.path.abspath(), tg.idx)]
- except KeyError:
- # probably cleared because a wscript file changed
- # store it
- do_cache = True
-
- if do_cache:
-
- # there was a rebuild, store the data structure too
- tg.bld.store()
-
- # all tasks skipped but no cache
- # or a successful task build
- do_store = True
- st = set()
- for tsk in tg.tasks:
- st.update(tsk.inputs)
- st.update(self.node_deps.get(tsk.uid(), []))
-
- # TODO do last/when loading the tgs?
- lst = []
- for k in ('wscript', 'wscript_build'):
- n = tg.path.find_node(k)
- if n:
- n.get_bld_sig()
- lst.append(n.abspath())
-
- lst.extend(sorted(x.abspath() for x in st))
- allfiles.update(lst)
- f_deps[(tg.path.abspath(), tg.idx)] = lst
-
- for x in allfiles:
- # f_tstamps has everything, while md5_tstamp can be relatively empty on partial builds
- self.f_tstamps[x] = self.hashes_md5_tstamp[x][0]
-
- if do_store:
- dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
- Logs.debug('rev_use: storing %s', dbfn)
- dbfn_tmp = dbfn + '.tmp'
- x = Build.cPickle.dumps([self.f_tstamps, f_deps], Build.PROTOCOL)
- Utils.writef(dbfn_tmp, x, m='wb')
- os.rename(dbfn_tmp, dbfn)
- Logs.debug('rev_use: stored %s', dbfn)
-
- def store(self):
- self.store_tstamps()
- if self.producer.dirty:
- Build.BuildContext.store(self)
-
- def compute_needed_tgs(self):
- # assume the 'use' keys are not modified during the build phase
-
- dbfn = os.path.join(self.variant_dir, TSTAMP_DB)
- Logs.debug('rev_use: Loading %s', dbfn)
- try:
- data = Utils.readf(dbfn, 'rb')
- except (EnvironmentError, EOFError):
- Logs.debug('rev_use: Could not load the build cache %s (missing)', dbfn)
- self.f_deps = {}
- self.f_tstamps = {}
- else:
- try:
- self.f_tstamps, self.f_deps = Build.cPickle.loads(data)
- except Exception as e:
- Logs.debug('rev_use: Could not pickle the build cache %s: %r', dbfn, e)
- self.f_deps = {}
- self.f_tstamps = {}
- else:
- Logs.debug('rev_use: Loaded %s', dbfn)
-
-
- # 1. obtain task generators that contain rebuilds
- # 2. obtain the 'use' graph and its dual
- stales = set()
- reverse_use_map = Utils.defaultdict(list)
- use_map = Utils.defaultdict(list)
-
- for g in self.groups:
- for tg in g:
- if tg.is_stale():
- stales.add(tg)
-
- try:
- lst = tg.use = Utils.to_list(tg.use)
- except AttributeError:
- pass
- else:
- for x in lst:
- try:
- xtg = self.get_tgen_by_name(x)
- except Errors.WafError:
- pass
- else:
- use_map[tg].append(xtg)
- reverse_use_map[xtg].append(tg)
-
- Logs.debug('rev_use: found %r stale tgs', len(stales))
-
- # 3. dfs to post downstream tg as stale
- visited = set()
- def mark_down(tg):
- if tg in visited:
- return
- visited.add(tg)
- Logs.debug('rev_use: marking down %r as stale', tg.name)
- tg.staleness = DIRTY
- for x in reverse_use_map[tg]:
- mark_down(x)
- for tg in stales:
- mark_down(tg)
-
- # 4. dfs to find ancestors tg to mark as needed
- self.needed_tgs = needed_tgs = set()
- def mark_needed(tg):
- if tg in needed_tgs:
- return
- needed_tgs.add(tg)
- if tg.staleness == DONE:
- Logs.debug('rev_use: marking up %r as needed', tg.name)
- tg.staleness = NEEDED
- for x in use_map[tg]:
- mark_needed(x)
- for xx in visited:
- mark_needed(xx)
-
- # so we have the whole tg trees to post in the set "needed"
- # load their build trees
- for tg in needed_tgs:
- tg.bld.restore()
- tg.bld.fix_tg_path(tg)
-
- # the stale ones should be fully build, while the needed ones
- # may skip a few tasks, see create_compiled_task and apply_link_after below
- Logs.debug('rev_use: amount of needed task gens: %r', len(needed_tgs))
-
- def post_group(self):
- # assumption: we can ignore the folder/subfolders cuts
- def tgpost(tg):
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
-
- if not self.targets or self.targets == '*':
- for tg in self.groups[self.current_group]:
- # this can cut quite a lot of tg objects
- if tg in self.needed_tgs:
- tgpost(tg)
- else:
- # default implementation
- return Build.BuildContext.post_group()
-
- def get_build_iterator(self):
- if not self.targets or self.targets == '*':
- self.compute_needed_tgs()
- return Build.BuildContext.get_build_iterator(self)
-
-@taskgen_method
-def is_stale(self):
- # assume no globs
- self.staleness = DIRTY
-
- # 1. the case of always stale targets
- if getattr(self, 'always_stale', False):
- return True
-
- # 2. check if the db file exists
- db = os.path.join(self.bld.variant_dir, Context.DBFILE)
- try:
- dbstat = os.stat(db).st_mtime
- except OSError:
- Logs.debug('rev_use: must post %r because this is a clean build')
- return True
-
- # 3. check if the configuration changed
- if os.stat(self.bld.bldnode.find_node('c4che/build.config.py').abspath()).st_mtime > dbstat:
- Logs.debug('rev_use: must post %r because the configuration has changed', self.name)
- return True
-
- # 3.a any tstamp data?
- try:
- f_deps = self.bld.f_deps
- except AttributeError:
- Logs.debug('rev_use: must post %r because there is no f_deps', self.name)
- return True
-
- # 4. check if this is the first build (no cache)
- try:
- lst = f_deps[(self.path.abspath(), self.idx)]
- except KeyError:
- Logs.debug('rev_use: must post %r because there it has no cached data', self.name)
- return True
-
- try:
- cache = self.bld.cache_tstamp_rev_use
- except AttributeError:
- cache = self.bld.cache_tstamp_rev_use = {}
-
- # 5. check the timestamp of each dependency files listed is unchanged
- f_tstamps = self.bld.f_tstamps
- for x in lst:
- try:
- old_ts = f_tstamps[x]
- except KeyError:
- Logs.debug('rev_use: must post %r because %r is not in cache', self.name, x)
- return True
-
- try:
- try:
- ts = cache[x]
- except KeyError:
- ts = cache[x] = os.stat(x).st_mtime
- except OSError:
- del f_deps[(self.path.abspath(), self.idx)]
- Logs.debug('rev_use: must post %r because %r does not exist anymore', self.name, x)
- return True
- else:
- if ts != old_ts:
- Logs.debug('rev_use: must post %r because the timestamp on %r changed %r %r', self.name, x, old_ts, ts)
- return True
-
- self.staleness = DONE
- return False
-
-@taskgen_method
-def create_compiled_task(self, name, node):
- # skip the creation of object files
- # assumption: object-only targets are not skippable
- if self.staleness == NEEDED:
- # only libraries/programs can skip object files
- for x in SKIPPABLE:
- if x in self.features:
- return None
-
- out = '%s.%d.o' % (node.name, self.idx)
- task = self.create_task(name, node, node.parent.find_or_declare(out))
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- return task
-
-@feature(*SKIPPABLE)
-@after_method('apply_link')
-def apply_link_after(self):
- # cprogram/cxxprogram might be unnecessary
- if self.staleness != NEEDED:
- return
- for tsk in self.tasks:
- tsk.hasrun = Task.SKIPPED
-
-def path_from(self, node):
- # handle nodes of distinct types
- if node.ctx is not self.ctx:
- node = self.ctx.root.make_node(node.abspath())
- return self.default_path_from(node)
-waflib.Node.Node.default_path_from = waflib.Node.Node.path_from
-waflib.Node.Node.path_from = path_from
-
-def h_file(self):
- # similar to md5_tstamp.py, but with 2-layer cache
- # global_cache for the build context common for all task generators
- # local_cache for the build context proxy (one by task generator)
- #
- # the global cache is not persistent
- # the local cache is persistent and meant for partial builds
- #
- # assume all calls are made from a single thread
- #
- filename = self.abspath()
- st = os.stat(filename)
-
- global_cache = self.ctx.bld.hashes_md5_tstamp
- local_cache = self.ctx.hashes_md5_tstamp
-
- if filename in global_cache:
- # value already calculated in this build
- cval = global_cache[filename]
-
- # the value in global cache is assumed to be calculated once
- # reverifying it could cause task generators
- # to get distinct tstamp values, thus missing rebuilds
- local_cache[filename] = cval
- return cval[1]
-
- if filename in local_cache:
- cval = local_cache[filename]
- if cval[0] == st.st_mtime:
- # correct value from a previous build
- # put it in the global cache
- global_cache[filename] = cval
- return cval[1]
-
- ret = Utils.h_file(filename)
- local_cache[filename] = global_cache[filename] = (st.st_mtime, ret)
- return ret
-waflib.Node.Node.h_file = h_file
-
diff --git a/waflib/extras/fc_bgxlf.py b/waflib/extras/fc_bgxlf.py
deleted file mode 100644
index cca1810..0000000
--- a/waflib/extras/fc_bgxlf.py
+++ /dev/null
@@ -1,32 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_bgxlf')
-
-@conf
-def find_bgxlf(conf):
- fc = conf.find_program(['bgxlf2003_r','bgxlf2003'], var='FC')
- conf.get_xlf_version(fc)
- conf.env.FC_NAME = 'BGXLF'
-
-@conf
-def bg_flags(self):
- self.env.SONAME_ST = ''
- self.env.FCSHLIB_MARKER = ''
- self.env.FCSTLIB_MARKER = ''
- self.env.FCFLAGS_fcshlib = ['-fPIC']
- self.env.LINKFLAGS_fcshlib = ['-G', '-Wl,-bexpfull']
-
-def configure(conf):
- conf.find_bgxlf()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.xlf_flags()
- conf.bg_flags()
-
diff --git a/waflib/extras/fc_cray.py b/waflib/extras/fc_cray.py
deleted file mode 100644
index da733fa..0000000
--- a/waflib/extras/fc_cray.py
+++ /dev/null
@@ -1,51 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_cray')
-
-@conf
-def find_crayftn(conf):
- """Find the Cray fortran compiler (will look in the environment variable 'FC')"""
- fc = conf.find_program(['crayftn'], var='FC')
- conf.get_crayftn_version(fc)
- conf.env.FC_NAME = 'CRAY'
- conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
-
-@conf
-def crayftn_flags(conf):
- v = conf.env
- v['_FCMODOUTFLAGS'] = ['-em', '-J.'] # enable module files and put them in the current directory
- v['FCFLAGS_DEBUG'] = ['-m1'] # more verbose compiler warnings
- v['FCFLAGS_fcshlib'] = ['-h pic']
- v['LINKFLAGS_fcshlib'] = ['-h shared']
-
- v['FCSTLIB_MARKER'] = '-h static'
- v['FCSHLIB_MARKER'] = '-h dynamic'
-
-@conf
-def get_crayftn_version(conf, fc):
- version_re = re.compile(r"Cray Fortran\s*:\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
- out,err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the Cray Fortran compiler version.')
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_crayftn()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.crayftn_flags()
-
diff --git a/waflib/extras/fc_nag.py b/waflib/extras/fc_nag.py
deleted file mode 100644
index edcb218..0000000
--- a/waflib/extras/fc_nag.py
+++ /dev/null
@@ -1,61 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_nag')
-
-@conf
-def find_nag(conf):
- """Find the NAG Fortran Compiler (will look in the environment variable 'FC')"""
-
- fc = conf.find_program(['nagfor'], var='FC')
- conf.get_nag_version(fc)
- conf.env.FC_NAME = 'NAG'
- conf.env.FC_MOD_CAPITALIZATION = 'lower'
-
-@conf
-def nag_flags(conf):
- v = conf.env
- v.FCFLAGS_DEBUG = ['-C=all']
- v.FCLNK_TGT_F = ['-o', '']
- v.FC_TGT_F = ['-c', '-o', '']
-
-@conf
-def nag_modifier_platform(conf):
- dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
- nag_modifier_func = getattr(conf, 'nag_modifier_' + dest_os, None)
- if nag_modifier_func:
- nag_modifier_func()
-
-@conf
-def get_nag_version(conf, fc):
- """Get the NAG compiler version"""
-
- version_re = re.compile(r"^NAG Fortran Compiler *Release *(?P<major>\d*)\.(?P<minor>\d*)", re.M).search
- cmd = fc + ['-V']
-
- out, err = fc_config.getoutput(conf,cmd,stdin=False)
- if out:
- match = version_re(out)
- if not match:
- match = version_re(err)
- else: match = version_re(err)
- if not match:
- conf.fatal('Could not determine the NAG version.')
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_nag()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.nag_flags()
- conf.nag_modifier_platform()
-
diff --git a/waflib/extras/fc_nec.py b/waflib/extras/fc_nec.py
deleted file mode 100644
index 67c8680..0000000
--- a/waflib/extras/fc_nec.py
+++ /dev/null
@@ -1,60 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_nec')
-
-@conf
-def find_sxfc(conf):
- """Find the NEC fortran compiler (will look in the environment variable 'FC')"""
- fc = conf.find_program(['sxf90','sxf03'], var='FC')
- conf.get_sxfc_version(fc)
- conf.env.FC_NAME = 'NEC'
- conf.env.FC_MOD_CAPITALIZATION = 'lower'
-
-@conf
-def sxfc_flags(conf):
- v = conf.env
- v['_FCMODOUTFLAGS'] = [] # enable module files and put them in the current directory
- v['FCFLAGS_DEBUG'] = [] # more verbose compiler warnings
- v['FCFLAGS_fcshlib'] = []
- v['LINKFLAGS_fcshlib'] = []
-
- v['FCSTLIB_MARKER'] = ''
- v['FCSHLIB_MARKER'] = ''
-
-@conf
-def get_sxfc_version(conf, fc):
- version_re = re.compile(r"FORTRAN90/SX\s*Version\s*(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
- out,err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- version_re=re.compile(r"NEC Fortran 2003 Compiler for\s*(?P<major>\S*)\s*\(c\)\s*(?P<minor>\d*)",re.I).search
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the NEC Fortran compiler version.')
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_sxfc()
- conf.find_program('sxar',var='AR')
- conf.add_os_flags('ARFLAGS')
- if not conf.env.ARFLAGS:
- conf.env.ARFLAGS=['rcs']
-
- conf.fc_flags()
- conf.fc_add_flags()
- conf.sxfc_flags()
diff --git a/waflib/extras/fc_open64.py b/waflib/extras/fc_open64.py
deleted file mode 100644
index 413719f..0000000
--- a/waflib/extras/fc_open64.py
+++ /dev/null
@@ -1,58 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].insert(0, 'fc_open64')
-
-@conf
-def find_openf95(conf):
- """Find the Open64 Fortran Compiler (will look in the environment variable 'FC')"""
-
- fc = conf.find_program(['openf95', 'openf90'], var='FC')
- conf.get_open64_version(fc)
- conf.env.FC_NAME = 'OPEN64'
- conf.env.FC_MOD_CAPITALIZATION = 'UPPER.mod'
-
-@conf
-def openf95_flags(conf):
- v = conf.env
- v['FCFLAGS_DEBUG'] = ['-fullwarn']
-
-@conf
-def openf95_modifier_platform(conf):
- dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
- openf95_modifier_func = getattr(conf, 'openf95_modifier_' + dest_os, None)
- if openf95_modifier_func:
- openf95_modifier_func()
-
-@conf
-def get_open64_version(conf, fc):
- """Get the Open64 compiler version"""
-
- version_re = re.compile(r"Open64 Compiler Suite: *Version *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-version']
-
- out, err = fc_config.getoutput(conf,cmd,stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the Open64 version.')
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_openf95()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.openf95_flags()
- conf.openf95_modifier_platform()
-
diff --git a/waflib/extras/fc_pgfortran.py b/waflib/extras/fc_pgfortran.py
deleted file mode 100644
index afb2817..0000000
--- a/waflib/extras/fc_pgfortran.py
+++ /dev/null
@@ -1,68 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib.Tools import fc, fc_config, fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_pgfortran')
-
-@conf
-def find_pgfortran(conf):
- """Find the PGI fortran compiler (will look in the environment variable 'FC')"""
- fc = conf.find_program(['pgfortran', 'pgf95', 'pgf90'], var='FC')
- conf.get_pgfortran_version(fc)
- conf.env.FC_NAME = 'PGFC'
-
-@conf
-def pgfortran_flags(conf):
- v = conf.env
- v['FCFLAGS_fcshlib'] = ['-shared']
- v['FCFLAGS_DEBUG'] = ['-Minform=inform', '-Mstandard'] # why not
- v['FCSTLIB_MARKER'] = '-Bstatic'
- v['FCSHLIB_MARKER'] = '-Bdynamic'
- v['SONAME_ST'] = '-soname %s'
-
-@conf
-def get_pgfortran_version(conf,fc):
- version_re = re.compile(r"The Portland Group", re.I).search
- cmd = fc + ['-V']
- out,err = fc_config.getoutput(conf, cmd, stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not verify PGI signature')
- cmd = fc + ['-help=variable']
- out,err = fc_config.getoutput(conf, cmd, stdin=False)
- if out.find('COMPVER')<0:
- conf.fatal('Could not determine the compiler type')
- k = {}
- prevk = ''
- out = out.splitlines()
- for line in out:
- lst = line.partition('=')
- if lst[1] == '=':
- key = lst[0].rstrip()
- if key == '':
- key = prevk
- val = lst[2].rstrip()
- k[key] = val
- else:
- prevk = line.partition(' ')[0]
- def isD(var):
- return var in k
- def isT(var):
- return var in k and k[var]!='0'
- conf.env['FC_VERSION'] = (k['COMPVER'].split('.'))
-
-def configure(conf):
- conf.find_pgfortran()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.pgfortran_flags()
-
diff --git a/waflib/extras/fc_solstudio.py b/waflib/extras/fc_solstudio.py
deleted file mode 100644
index 53766df..0000000
--- a/waflib/extras/fc_solstudio.py
+++ /dev/null
@@ -1,62 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['linux'].append('fc_solstudio')
-
-@conf
-def find_solstudio(conf):
- """Find the Solaris Studio compiler (will look in the environment variable 'FC')"""
-
- fc = conf.find_program(['sunf95', 'f95', 'sunf90', 'f90'], var='FC')
- conf.get_solstudio_version(fc)
- conf.env.FC_NAME = 'SOL'
-
-@conf
-def solstudio_flags(conf):
- v = conf.env
- v['FCFLAGS_fcshlib'] = ['-Kpic']
- v['FCFLAGS_DEBUG'] = ['-w3']
- v['LINKFLAGS_fcshlib'] = ['-G']
- v['FCSTLIB_MARKER'] = '-Bstatic'
- v['FCSHLIB_MARKER'] = '-Bdynamic'
- v['SONAME_ST'] = '-h %s'
-
-@conf
-def solstudio_modifier_platform(conf):
- dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
- solstudio_modifier_func = getattr(conf, 'solstudio_modifier_' + dest_os, None)
- if solstudio_modifier_func:
- solstudio_modifier_func()
-
-@conf
-def get_solstudio_version(conf, fc):
- """Get the compiler version"""
-
- version_re = re.compile(r"Sun Fortran 95 *(?P<major>\d*)\.(?P<minor>\d*)", re.I).search
- cmd = fc + ['-V']
-
- out, err = fc_config.getoutput(conf,cmd,stdin=False)
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
- if not match:
- conf.fatal('Could not determine the Sun Studio Fortran version.')
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
-
-def configure(conf):
- conf.find_solstudio()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.solstudio_flags()
- conf.solstudio_modifier_platform()
-
diff --git a/waflib/extras/fc_xlf.py b/waflib/extras/fc_xlf.py
deleted file mode 100644
index 5a3da03..0000000
--- a/waflib/extras/fc_xlf.py
+++ /dev/null
@@ -1,63 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# harald at klimachs.de
-
-import re
-from waflib import Utils,Errors
-from waflib.Tools import fc,fc_config,fc_scan
-from waflib.Configure import conf
-
-from waflib.Tools.compiler_fc import fc_compiler
-fc_compiler['aix'].insert(0, 'fc_xlf')
-
-@conf
-def find_xlf(conf):
- """Find the xlf program (will look in the environment variable 'FC')"""
-
- fc = conf.find_program(['xlf2003_r', 'xlf2003', 'xlf95_r', 'xlf95', 'xlf90_r', 'xlf90', 'xlf_r', 'xlf'], var='FC')
- conf.get_xlf_version(fc)
- conf.env.FC_NAME='XLF'
-
-@conf
-def xlf_flags(conf):
- v = conf.env
- v['FCDEFINES_ST'] = '-WF,-D%s'
- v['FCFLAGS_fcshlib'] = ['-qpic=small']
- v['FCFLAGS_DEBUG'] = ['-qhalt=w']
- v['LINKFLAGS_fcshlib'] = ['-Wl,-shared']
-
-@conf
-def xlf_modifier_platform(conf):
- dest_os = conf.env['DEST_OS'] or Utils.unversioned_sys_platform()
- xlf_modifier_func = getattr(conf, 'xlf_modifier_' + dest_os, None)
- if xlf_modifier_func:
- xlf_modifier_func()
-
-@conf
-def get_xlf_version(conf, fc):
- """Get the compiler version"""
-
- cmd = fc + ['-qversion']
- try:
- out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError:
- conf.fatal('Could not find xlf %r' % cmd)
-
- for v in (r"IBM XL Fortran.* V(?P<major>\d*)\.(?P<minor>\d*)",):
- version_re = re.compile(v, re.I).search
- match = version_re(out or err)
- if match:
- k = match.groupdict()
- conf.env['FC_VERSION'] = (k['major'], k['minor'])
- break
- else:
- conf.fatal('Could not determine the XLF version.')
-
-def configure(conf):
- conf.find_xlf()
- conf.find_ar()
- conf.fc_flags()
- conf.fc_add_flags()
- conf.xlf_flags()
- conf.xlf_modifier_platform()
-
diff --git a/waflib/extras/file_to_object.py b/waflib/extras/file_to_object.py
deleted file mode 100644
index 1393b51..0000000
--- a/waflib/extras/file_to_object.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Tool to embed file into objects
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-This tool allows to embed file contents in object files (.o).
-It is not exactly portable, and the file contents are reachable
-using various non-portable fashions.
-The goal here is to provide a functional interface to the embedding
-of file data in objects.
-See the ``playground/embedded_resources`` example for an example.
-
-Usage::
-
- bld(
- name='pipeline',
- # ^ Reference this in use="..." for things using the generated code
- features='file_to_object',
- source='some.file',
- # ^ Name of the file to embed in binary section.
- )
-
-Known issues:
-
-- Destination is named like source, with extension renamed to .o
- eg. some.file -> some.o
-
-"""
-
-import os
-from waflib import Task, TaskGen, Errors
-
-def filename_c_escape(x):
- return x.replace("\\", "\\\\")
-
-class file_to_object_s(Task.Task):
- color = 'CYAN'
- vars = ['DEST_CPU', 'DEST_BINFMT']
-
- def run(self):
- name = []
- for i, x in enumerate(self.inputs[0].name):
- if x.isalnum():
- name.append(x)
- else:
- name.append('_')
- file = self.inputs[0].abspath()
- size = os.path.getsize(file)
- if self.env.DEST_CPU in ('x86_64', 'ia', 'aarch64'):
- unit = 'quad'
- align = 8
- elif self.env.DEST_CPU in ('x86','arm', 'thumb', 'm68k'):
- unit = 'long'
- align = 4
- else:
- raise Errors.WafError("Unsupported DEST_CPU, please report bug!")
-
- file = filename_c_escape(file)
- name = "_binary_" + "".join(name)
- rodata = ".section .rodata"
- if self.env.DEST_BINFMT == "mac-o":
- name = "_" + name
- rodata = ".section __TEXT,__const"
-
- with open(self.outputs[0].abspath(), 'w') as f:
- f.write(\
-"""
- .global %(name)s_start
- .global %(name)s_end
- .global %(name)s_size
- %(rodata)s
-%(name)s_start:
- .incbin "%(file)s"
-%(name)s_end:
- .align %(align)d
-%(name)s_size:
- .%(unit)s 0x%(size)x
-""" % locals())
-
-class file_to_object_c(Task.Task):
- color = 'CYAN'
- def run(self):
- name = []
- for i, x in enumerate(self.inputs[0].name):
- if x.isalnum():
- name.append(x)
- else:
- name.append('_')
- file = self.inputs[0].abspath()
- size = os.path.getsize(file)
-
- name = "_binary_" + "".join(name)
-
- data = self.inputs[0].read('rb')
- lines, line = [], []
- for idx_byte, byte in enumerate(data):
- line.append(byte)
- if len(line) > 15 or idx_byte == size-1:
- lines.append(", ".join(("0x%02x" % ord(x)) for x in line))
- line = []
- data = ",\n ".join(lines)
-
- self.outputs[0].write(\
-"""
-unsigned long %(name)s_size = %(size)dL;
-char const %(name)s_start[] = {
- %(data)s
-};
-char const %(name)s_end[] = {};
-""" % locals())
-
-@TaskGen.feature('file_to_object')
-@TaskGen.before_method('process_source')
-def tg_file_to_object(self):
- bld = self.bld
- sources = self.to_nodes(self.source)
- targets = []
- for src in sources:
- if bld.env.F2O_METHOD == ["asm"]:
- tgt = src.parent.find_or_declare(src.name + '.f2o.s')
- tsk = self.create_task('file_to_object_s', src, tgt)
- tsk.cwd = src.parent.abspath() # verify
- else:
- tgt = src.parent.find_or_declare(src.name + '.f2o.c')
- tsk = self.create_task('file_to_object_c', src, tgt)
- tsk.cwd = src.parent.abspath() # verify
- targets.append(tgt)
- self.source = targets
-
-def configure(conf):
- conf.load('gas')
- conf.env.F2O_METHOD = ["c"]
-
diff --git a/waflib/extras/fluid.py b/waflib/extras/fluid.py
deleted file mode 100644
index 4814a35..0000000
--- a/waflib/extras/fluid.py
+++ /dev/null
@@ -1,30 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# Grygoriy Fuchedzhy 2009
-
-"""
-Compile fluid files (fltk graphic library). Use the 'fluid' feature in conjunction with the 'cxx' feature.
-"""
-
-from waflib import Task
-from waflib.TaskGen import extension
-
-class fluid(Task.Task):
- color = 'BLUE'
- ext_out = ['.h']
- run_str = '${FLUID} -c -o ${TGT[0].abspath()} -h ${TGT[1].abspath()} ${SRC}'
-
-@extension('.fl')
-def process_fluid(self, node):
- """add the .fl to the source list; the cxx file generated will be compiled when possible"""
- cpp = node.change_ext('.cpp')
- hpp = node.change_ext('.hpp')
- self.create_task('fluid', node, [cpp, hpp])
-
- if 'cxx' in self.features:
- self.source.append(cpp)
-
-def configure(conf):
- conf.find_program('fluid', var='FLUID')
- conf.check_cfg(path='fltk-config', package='', args='--cxxflags --ldflags', uselib_store='FLTK', mandatory=True)
-
diff --git a/waflib/extras/freeimage.py b/waflib/extras/freeimage.py
deleted file mode 100644
index f27e525..0000000
--- a/waflib/extras/freeimage.py
+++ /dev/null
@@ -1,74 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2011
-
-'''
-To add the freeimage tool to the waf file:
-$ ./waf-light --tools=compat15,freeimage
- or, if you have waf >= 1.6.2
-$ ./waf update --files=freeimage
-
-The wscript will look like:
-
-def options(opt):
- opt.load('compiler_cxx freeimage')
-
-def configure(conf):
- conf.load('compiler_cxx freeimage')
-
- # you can call check_freeimage with some parameters.
- # It's optional on Linux, it's 'mandatory' on Windows if
- # you didn't use --fi-path on the command-line
-
- # conf.check_freeimage(path='FreeImage/Dist', fip=True)
-
-def build(bld):
- bld(source='main.cpp', target='app', use='FREEIMAGE')
-'''
-
-from waflib import Utils
-from waflib.Configure import conf
-
-
-def options(opt):
- opt.add_option('--fi-path', type='string', default='', dest='fi_path',
- help='''path to the FreeImage directory \
- where the files are e.g. /FreeImage/Dist''')
- opt.add_option('--fip', action='store_true', default=False, dest='fip',
- help='link with FreeImagePlus')
- opt.add_option('--fi-static', action='store_true',
- default=False, dest='fi_static',
- help="link as shared libraries")
-
-
-@conf
-def check_freeimage(self, path=None, fip=False):
- self.start_msg('Checking FreeImage')
- if not self.env['CXX']:
- self.fatal('you must load compiler_cxx before loading freeimage')
- prefix = self.options.fi_static and 'ST' or ''
- platform = Utils.unversioned_sys_platform()
- if platform == 'win32':
- if not path:
- self.fatal('you must specify the path to FreeImage. \
- use --fi-path=/FreeImage/Dist')
- else:
- self.env['INCLUDES_FREEIMAGE'] = path
- self.env['%sLIBPATH_FREEIMAGE' % prefix] = path
- libs = ['FreeImage']
- if self.options.fip:
- libs.append('FreeImagePlus')
- if platform == 'win32':
- self.env['%sLIB_FREEIMAGE' % prefix] = libs
- else:
- self.env['%sLIB_FREEIMAGE' % prefix] = [i.lower() for i in libs]
- self.end_msg('ok')
-
-
-def configure(conf):
- platform = Utils.unversioned_sys_platform()
- if platform == 'win32' and not conf.options.fi_path:
- return
- conf.check_freeimage(conf.options.fi_path, conf.options.fip)
-
diff --git a/waflib/extras/fsb.py b/waflib/extras/fsb.py
deleted file mode 100644
index 1b8f398..0000000
--- a/waflib/extras/fsb.py
+++ /dev/null
@@ -1,31 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Fully sequential builds
-
-The previous tasks from task generators are re-processed, and this may lead to speed issues
-Yet, if you are using this, speed is probably a minor concern
-"""
-
-from waflib import Build
-
-def options(opt):
- pass
-
-def configure(conf):
- pass
-
-class FSBContext(Build.BuildContext):
- def __call__(self, *k, **kw):
- ret = Build.BuildContext.__call__(self, *k, **kw)
-
- # evaluate the results immediately
- Build.BuildContext.compile(self)
-
- return ret
-
- def compile(self):
- pass
-
diff --git a/waflib/extras/fsc.py b/waflib/extras/fsc.py
deleted file mode 100644
index c67e70b..0000000
--- a/waflib/extras/fsc.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-Experimental F# stuff
-
-FSC="mono /path/to/fsc.exe" waf configure build
-"""
-
-from waflib import Utils, Task
-from waflib.TaskGen import before_method, after_method, feature
-from waflib.Tools import ccroot, cs
-
-ccroot.USELIB_VARS['fsc'] = set(['CSFLAGS', 'ASSEMBLIES', 'RESOURCES'])
-
-@feature('fs')
-@before_method('process_source')
-def apply_fsc(self):
- cs_nodes = []
- no_nodes = []
- for x in self.to_nodes(self.source):
- if x.name.endswith('.fs'):
- cs_nodes.append(x)
- else:
- no_nodes.append(x)
- self.source = no_nodes
-
- bintype = getattr(self, 'type', self.gen.endswith('.dll') and 'library' or 'exe')
- self.cs_task = tsk = self.create_task('fsc', cs_nodes, self.path.find_or_declare(self.gen))
- tsk.env.CSTYPE = '/target:%s' % bintype
- tsk.env.OUT = '/out:%s' % tsk.outputs[0].abspath()
-
- inst_to = getattr(self, 'install_path', bintype=='exe' and '${BINDIR}' or '${LIBDIR}')
- if inst_to:
- # note: we are making a copy, so the files added to cs_task.outputs won't be installed automatically
- mod = getattr(self, 'chmod', bintype=='exe' and Utils.O755 or Utils.O644)
- self.install_task = self.add_install_files(install_to=inst_to, install_from=self.cs_task.outputs[:], chmod=mod)
-
-feature('fs')(cs.use_cs)
-after_method('apply_fsc')(cs.use_cs)
-
-feature('fs')(cs.debug_cs)
-after_method('apply_fsc', 'use_cs')(cs.debug_cs)
-
-class fsc(Task.Task):
- """
- Compile F# files
- """
- color = 'YELLOW'
- run_str = '${FSC} ${CSTYPE} ${CSFLAGS} ${ASS_ST:ASSEMBLIES} ${RES_ST:RESOURCES} ${OUT} ${SRC}'
-
-def configure(conf):
- """
- Find a F# compiler, set the variable FSC for the compiler and FS_NAME (mono or fsc)
- """
- conf.find_program(['fsc.exe', 'fsharpc'], var='FSC')
- conf.env.ASS_ST = '/r:%s'
- conf.env.RES_ST = '/resource:%s'
-
- conf.env.FS_NAME = 'fsc'
- if str(conf.env.FSC).lower().find('fsharpc') > -1:
- conf.env.FS_NAME = 'mono'
-
diff --git a/waflib/extras/gccdeps.py b/waflib/extras/gccdeps.py
deleted file mode 100644
index d9758ab..0000000
--- a/waflib/extras/gccdeps.py
+++ /dev/null
@@ -1,214 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2008-2010 (ita)
-
-"""
-Execute the tasks with gcc -MD, read the dependencies from the .d file
-and prepare the dependency calculation for the next run.
-This affects the cxx class, so make sure to load Qt5 after this tool.
-
-Usage::
-
- def options(opt):
- opt.load('compiler_cxx')
- def configure(conf):
- conf.load('compiler_cxx gccdeps')
-"""
-
-import os, re, threading
-from waflib import Task, Logs, Utils, Errors
-from waflib.Tools import c_preproc
-from waflib.TaskGen import before_method, feature
-
-lock = threading.Lock()
-
-gccdeps_flags = ['-MD']
-if not c_preproc.go_absolute:
- gccdeps_flags = ['-MMD']
-
-# Third-party tools are allowed to add extra names in here with append()
-supported_compilers = ['gcc', 'icc', 'clang']
-
-def scan(self):
- if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
- return super(self.derived_gccdeps, self).scan()
- nodes = self.generator.bld.node_deps.get(self.uid(), [])
- names = []
- return (nodes, names)
-
-re_o = re.compile("\.o$")
-re_splitter = re.compile(r'(?<!\\)\s+') # split by space, except when spaces are escaped
-
-def remove_makefile_rule_lhs(line):
- # Splitting on a plain colon would accidentally match inside a
- # Windows absolute-path filename, so we must search for a colon
- # followed by whitespace to find the divider between LHS and RHS
- # of the Makefile rule.
- rulesep = ': '
-
- sep_idx = line.find(rulesep)
- if sep_idx >= 0:
- return line[sep_idx + 2:]
- else:
- return line
-
-def path_to_node(base_node, path, cached_nodes):
- # Take the base node and the path and return a node
- # Results are cached because searching the node tree is expensive
- # The following code is executed by threads, it is not safe, so a lock is needed...
- if getattr(path, '__hash__'):
- node_lookup_key = (base_node, path)
- else:
- # Not hashable, assume it is a list and join into a string
- node_lookup_key = (base_node, os.path.sep.join(path))
- try:
- lock.acquire()
- node = cached_nodes[node_lookup_key]
- except KeyError:
- node = base_node.find_resource(path)
- cached_nodes[node_lookup_key] = node
- finally:
- lock.release()
- return node
-
-def post_run(self):
- if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
- return super(self.derived_gccdeps, self).post_run()
-
- name = self.outputs[0].abspath()
- name = re_o.sub('.d', name)
- try:
- txt = Utils.readf(name)
- except EnvironmentError:
- Logs.error('Could not find a .d dependency file, are cflags/cxxflags overwritten?')
- raise
- #os.remove(name)
-
- # Compilers have the choice to either output the file's dependencies
- # as one large Makefile rule:
- #
- # /path/to/file.o: /path/to/dep1.h \
- # /path/to/dep2.h \
- # /path/to/dep3.h \
- # ...
- #
- # or as many individual rules:
- #
- # /path/to/file.o: /path/to/dep1.h
- # /path/to/file.o: /path/to/dep2.h
- # /path/to/file.o: /path/to/dep3.h
- # ...
- #
- # So the first step is to sanitize the input by stripping out the left-
- # hand side of all these lines. After that, whatever remains are the
- # implicit dependencies of task.outputs[0]
- txt = '\n'.join([remove_makefile_rule_lhs(line) for line in txt.splitlines()])
-
- # Now join all the lines together
- txt = txt.replace('\\\n', '')
-
- val = txt.strip()
- val = [x.replace('\\ ', ' ') for x in re_splitter.split(val) if x]
-
- nodes = []
- bld = self.generator.bld
-
- # Dynamically bind to the cache
- try:
- cached_nodes = bld.cached_nodes
- except AttributeError:
- cached_nodes = bld.cached_nodes = {}
-
- for x in val:
-
- node = None
- if os.path.isabs(x):
- node = path_to_node(bld.root, x, cached_nodes)
- else:
- # TODO waf 1.9 - single cwd value
- path = getattr(bld, 'cwdx', bld.bldnode)
- # when calling find_resource, make sure the path does not contain '..'
- x = [k for k in Utils.split_path(x) if k and k != '.']
- while '..' in x:
- idx = x.index('..')
- if idx == 0:
- x = x[1:]
- path = path.parent
- else:
- del x[idx]
- del x[idx-1]
-
- node = path_to_node(path, x, cached_nodes)
-
- if not node:
- raise ValueError('could not find %r for %r' % (x, self))
- if id(node) == id(self.inputs[0]):
- # ignore the source file, it is already in the dependencies
- # this way, successful config tests may be retrieved from the cache
- continue
- nodes.append(node)
-
- Logs.debug('deps: gccdeps for %s returned %s', self, nodes)
-
- bld.node_deps[self.uid()] = nodes
- bld.raw_deps[self.uid()] = []
-
- try:
- del self.cache_sig
- except AttributeError:
- pass
-
- Task.Task.post_run(self)
-
-def sig_implicit_deps(self):
- if not self.__class__.__name__ in self.env.ENABLE_GCCDEPS:
- return super(self.derived_gccdeps, self).sig_implicit_deps()
- try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
-
-def wrap_compiled_task(classname):
- derived_class = type(classname, (Task.classes[classname],), {})
- derived_class.derived_gccdeps = derived_class
- derived_class.post_run = post_run
- derived_class.scan = scan
- derived_class.sig_implicit_deps = sig_implicit_deps
-
-for k in ('c', 'cxx'):
- if k in Task.classes:
- wrap_compiled_task(k)
-
-@before_method('process_source')
-@feature('force_gccdeps')
-def force_gccdeps(self):
- self.env.ENABLE_GCCDEPS = ['c', 'cxx']
-
-def configure(conf):
- # in case someone provides a --enable-gccdeps command-line option
- if not getattr(conf.options, 'enable_gccdeps', True):
- return
-
- global gccdeps_flags
- flags = conf.env.GCCDEPS_FLAGS or gccdeps_flags
- if conf.env.CC_NAME in supported_compilers:
- try:
- conf.check(fragment='int main() { return 0; }', features='c force_gccdeps', cflags=flags, msg='Checking for c flags %r' % ''.join(flags))
- except Errors.ConfigurationError:
- pass
- else:
- conf.env.append_value('CFLAGS', gccdeps_flags)
- conf.env.append_unique('ENABLE_GCCDEPS', 'c')
-
- if conf.env.CXX_NAME in supported_compilers:
- try:
- conf.check(fragment='int main() { return 0; }', features='cxx force_gccdeps', cxxflags=flags, msg='Checking for cxx flags %r' % ''.join(flags))
- except Errors.ConfigurationError:
- pass
- else:
- conf.env.append_value('CXXFLAGS', gccdeps_flags)
- conf.env.append_unique('ENABLE_GCCDEPS', 'cxx')
-
-def options(opt):
- raise ValueError('Do not load gccdeps options')
-
diff --git a/waflib/extras/gdbus.py b/waflib/extras/gdbus.py
deleted file mode 100644
index 0e0476e..0000000
--- a/waflib/extras/gdbus.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Copyright Garmin International or its subsidiaries, 2018
-#
-# Heavily based on dbus.py
-
-"""
-Compiles dbus files with **gdbus-codegen**
-Typical usage::
- def options(opt):
- opt.load('compiler_c gdbus')
- def configure(conf):
- conf.load('compiler_c gdbus')
- def build(bld):
- tg = bld.program(
- includes = '.',
- source = bld.path.ant_glob('*.c'),
- target = 'gnome-hello')
- tg.add_gdbus_file('test.xml', 'com.example.example.', 'Example')
-"""
-
-from waflib import Task, Errors, Utils
-from waflib.TaskGen import taskgen_method, before_method
-
-@taskgen_method
-def add_gdbus_file(self, filename, prefix, namespace, export=False):
- """
- Adds a dbus file to the list of dbus files to process. Store them in the attribute *dbus_lst*.
- :param filename: xml file to compile
- :type filename: string
- :param prefix: interface prefix (--interface-prefix=prefix)
- :type prefix: string
- :param mode: C namespace (--c-namespace=namespace)
- :type mode: string
- :param export: Export Headers?
- :type export: boolean
- """
- if not hasattr(self, 'gdbus_lst'):
- self.gdbus_lst = []
- if not 'process_gdbus' in self.meths:
- self.meths.append('process_gdbus')
- self.gdbus_lst.append([filename, prefix, namespace, export])
-
-@before_method('process_source')
-def process_gdbus(self):
- """
- Processes the dbus files stored in the attribute *gdbus_lst* to create :py:class:`gdbus_binding_tool` instances.
- """
- output_node = self.path.get_bld().make_node(['gdbus', self.get_name()])
- sources = []
-
- for filename, prefix, namespace, export in getattr(self, 'gdbus_lst', []):
- node = self.path.find_resource(filename)
- if not node:
- raise Errors.WafError('file not found ' + filename)
- c_file = output_node.find_or_declare(node.change_ext('.c').name)
- h_file = output_node.find_or_declare(node.change_ext('.h').name)
- tsk = self.create_task('gdbus_binding_tool', node, [c_file, h_file])
- tsk.cwd = output_node.abspath()
-
- tsk.env.GDBUS_CODEGEN_INTERFACE_PREFIX = prefix
- tsk.env.GDBUS_CODEGEN_NAMESPACE = namespace
- tsk.env.GDBUS_CODEGEN_OUTPUT = node.change_ext('').name
- sources.append(c_file)
-
- if sources:
- output_node.mkdir()
- self.source = Utils.to_list(self.source) + sources
- self.includes = [output_node] + self.to_incnodes(getattr(self, 'includes', []))
- if export:
- self.export_includes = [output_node] + self.to_incnodes(getattr(self, 'export_includes', []))
-
-class gdbus_binding_tool(Task.Task):
- """
- Compiles a dbus file
- """
- color = 'BLUE'
- ext_out = ['.h', '.c']
- run_str = '${GDBUS_CODEGEN} --interface-prefix ${GDBUS_CODEGEN_INTERFACE_PREFIX} --generate-c-code ${GDBUS_CODEGEN_OUTPUT} --c-namespace ${GDBUS_CODEGEN_NAMESPACE} --c-generate-object-manager ${SRC[0].abspath()}'
- shell = True
-
-def configure(conf):
- """
- Detects the program gdbus-codegen and sets ``conf.env.GDBUS_CODEGEN``
- """
- conf.find_program('gdbus-codegen', var='GDBUS_CODEGEN')
-
diff --git a/waflib/extras/gob2.py b/waflib/extras/gob2.py
deleted file mode 100644
index b4fa3b9..0000000
--- a/waflib/extras/gob2.py
+++ /dev/null
@@ -1,17 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Ali Sabil, 2007
-
-from waflib import TaskGen
-
-TaskGen.declare_chain(
- name = 'gob2',
- rule = '${GOB2} -o ${TGT[0].bld_dir()} ${GOB2FLAGS} ${SRC}',
- ext_in = '.gob',
- ext_out = '.c'
-)
-
-def configure(conf):
- conf.find_program('gob2', var='GOB2')
- conf.env['GOB2FLAGS'] = ''
-
diff --git a/waflib/extras/halide.py b/waflib/extras/halide.py
deleted file mode 100644
index 6078e38..0000000
--- a/waflib/extras/halide.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/python
-# -*- coding: utf-8 -*-
-# Halide code generation tool
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2014"
-
-"""
-
-Tool to run `Halide <http://halide-lang.org>`_ code generators.
-
-Usage::
-
- bld(
- name='pipeline',
- # ^ Reference this in use="..." for things using the generated code
- #target=['pipeline.o', 'pipeline.h']
- # ^ by default, name.{o,h} is added, but you can set the outputs here
- features='halide',
- halide_env="HL_TRACE=1 HL_TARGET=host-opencl-gpu_debug",
- # ^ Environment passed to the generator,
- # can be a dict, k/v list, or string.
- args=[],
- # ^ Command-line arguments to the generator (optional),
- # eg. to give parameters to the scheduling
- source='pipeline_gen',
- # ^ Name of the source executable
- )
-
-
-Known issues:
-
-
-- Currently only supports Linux (no ".exe")
-
-- Doesn't rerun on input modification when input is part of a build
- chain, and has been modified externally.
-
-"""
-
-import os
-from waflib import Task, Utils, Options, TaskGen, Errors
-
-class run_halide_gen(Task.Task):
- color = 'CYAN'
- vars = ['HALIDE_ENV', 'HALIDE_ARGS']
- run_str = "${SRC[0].abspath()} ${HALIDE_ARGS}"
- def __str__(self):
- stuff = "halide"
- stuff += ("[%s]" % (",".join(
- ('%s=%s' % (k,v)) for k, v in sorted(self.env.env.items()))))
- return Task.Task.__str__(self).replace(self.__class__.__name__,
- stuff)
-
-@TaskGen.feature('halide')
-@TaskGen.before_method('process_source')
-def halide(self):
- Utils.def_attrs(self,
- args=[],
- halide_env={},
- )
-
- bld = self.bld
-
- env = self.halide_env
- try:
- if isinstance(env, str):
- env = dict(x.split('=') for x in env.split())
- elif isinstance(env, list):
- env = dict(x.split('=') for x in env)
- assert isinstance(env, dict)
- except Exception as e:
- if not isinstance(e, ValueError) \
- and not isinstance(e, AssertionError):
- raise
- raise Errors.WafError(
- "halide_env must be under the form" \
- " {'HL_x':'a', 'HL_y':'b'}" \
- " or ['HL_x=y', 'HL_y=b']" \
- " or 'HL_x=y HL_y=b'")
-
- src = self.to_nodes(self.source)
- assert len(src) == 1, "Only one source expected"
- src = src[0]
-
- args = Utils.to_list(self.args)
-
- def change_ext(src, ext):
- # Return a node with a new extension, in an appropriate folder
- name = src.name
- xpos = src.name.rfind('.')
- if xpos == -1:
- xpos = len(src.name)
- newname = name[:xpos] + ext
- if src.is_child_of(bld.bldnode):
- node = src.get_src().parent.find_or_declare(newname)
- else:
- node = bld.bldnode.find_or_declare(newname)
- return node
-
- def to_nodes(self, lst, path=None):
- tmp = []
- path = path or self.path
- find = path.find_or_declare
-
- if isinstance(lst, self.path.__class__):
- lst = [lst]
-
- for x in Utils.to_list(lst):
- if isinstance(x, str):
- node = find(x)
- else:
- node = x
- tmp.append(node)
- return tmp
-
- tgt = to_nodes(self, self.target)
- if not tgt:
- tgt = [change_ext(src, '.o'), change_ext(src, '.h')]
- cwd = tgt[0].parent.abspath()
- task = self.create_task('run_halide_gen', src, tgt, cwd=cwd)
- task.env.append_unique('HALIDE_ARGS', args)
- if task.env.env == []:
- task.env.env = {}
- task.env.env.update(env)
- task.env.HALIDE_ENV = " ".join(("%s=%s" % (k,v)) for (k,v) in sorted(env.items()))
- task.env.HALIDE_ARGS = args
-
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- self.source = []
-
-def configure(conf):
- if Options.options.halide_root is None:
- conf.check_cfg(package='Halide', args='--cflags --libs')
- else:
- halide_root = Options.options.halide_root
- conf.env.INCLUDES_HALIDE = [ os.path.join(halide_root, "include") ]
- conf.env.LIBPATH_HALIDE = [ os.path.join(halide_root, "lib") ]
- conf.env.LIB_HALIDE = ["Halide"]
-
- # You might want to add this, while upstream doesn't fix it
- #conf.env.LIB_HALIDE += ['ncurses', 'dl', 'pthread']
-
-def options(opt):
- opt.add_option('--halide-root',
- help="path to Halide include and lib files",
- )
-
diff --git a/waflib/extras/javatest.py b/waflib/extras/javatest.py
deleted file mode 100755
index 979b8d8..0000000
--- a/waflib/extras/javatest.py
+++ /dev/null
@@ -1,118 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Federico Pellegrin, 2017 (fedepell)
-
-"""
-Provides Java Unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
-task via the **javatest** feature.
-
-This gives the possibility to run unit test and have them integrated into the
-standard waf unit test environment. It has been tested with TestNG and JUnit
-but should be easily expandable to other frameworks given the flexibility of
-ut_str provided by the standard waf unit test environment.
-
-Example usage:
-
-def options(opt):
- opt.load('java waf_unit_test javatest')
-
-def configure(conf):
- conf.load('java javatest')
-
-def build(bld):
-
- [ ... mainprog is built here ... ]
-
- bld(features = 'javac javatest',
- srcdir = 'test/',
- outdir = 'test',
- sourcepath = ['test'],
- classpath = [ 'src' ],
- basedir = 'test',
- use = ['JAVATEST', 'mainprog'], # mainprog is the program being tested in src/
- ut_str = 'java -cp ${CLASSPATH} ${JTRUNNER} ${SRC}',
- jtest_source = bld.path.ant_glob('test/*.xml'),
- )
-
-
-At command line the CLASSPATH where to find the testing environment and the
-test runner (default TestNG) that will then be seen in the environment as
-CLASSPATH_JAVATEST (then used for use) and JTRUNNER and can be used for
-dependencies and ut_str generation.
-
-Example configure for TestNG:
- waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar --jtrunner=org.testng.TestNG
- or as default runner is TestNG:
- waf configure --jtpath=/tmp/testng-6.12.jar:/tmp/jcommander-1.71.jar
-
-Example configure for JUnit:
- waf configure --jtpath=/tmp/junit.jar --jtrunner=org.junit.runner.JUnitCore
-
-The runner class presence on the system is checked for at configuration stage.
-
-"""
-
-import os
-from waflib import Task, TaskGen, Options
-
-@TaskGen.feature('javatest')
-@TaskGen.after_method('apply_java', 'use_javac_files', 'set_classpath')
-def make_javatest(self):
- """
- Creates a ``utest`` task with a populated environment for Java Unit test execution
-
- """
- tsk = self.create_task('utest')
- tsk.set_run_after(self.javac_task)
-
- # Put test input files as waf_unit_test relies on that for some prints and log generation
- # If jtest_source is there, this is specially useful for passing XML for TestNG
- # that contain test specification, use that as inputs, otherwise test sources
- if getattr(self, 'jtest_source', None):
- tsk.inputs = self.to_nodes(self.jtest_source)
- else:
- if self.javac_task.srcdir[0].exists():
- tsk.inputs = self.javac_task.srcdir[0].ant_glob('**/*.java', remove=False)
-
- if getattr(self, 'ut_str', None):
- self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
- tsk.vars = lst + tsk.vars
-
- if getattr(self, 'ut_cwd', None):
- if isinstance(self.ut_cwd, str):
- # we want a Node instance
- if os.path.isabs(self.ut_cwd):
- self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
- else:
- self.ut_cwd = self.path.make_node(self.ut_cwd)
- else:
- self.ut_cwd = self.bld.bldnode
-
- # Get parent CLASSPATH and add output dir of test, we run from wscript dir
- # We have to change it from list to the standard java -cp format (: separated)
- tsk.env.CLASSPATH = ':'.join(self.env.CLASSPATH) + ':' + self.outdir.abspath()
-
- if not self.ut_cwd.exists():
- self.ut_cwd.mkdir()
-
- if not hasattr(self, 'ut_env'):
- self.ut_env = dict(os.environ)
-
-def configure(ctx):
- cp = ctx.env.CLASSPATH or '.'
- if getattr(Options.options, 'jtpath', None):
- ctx.env.CLASSPATH_JAVATEST = getattr(Options.options, 'jtpath').split(':')
- cp += ':' + getattr(Options.options, 'jtpath')
-
- if getattr(Options.options, 'jtrunner', None):
- ctx.env.JTRUNNER = getattr(Options.options, 'jtrunner')
-
- if ctx.check_java_class(ctx.env.JTRUNNER, with_classpath=cp):
- ctx.fatal('Could not run test class %r' % ctx.env.JTRUNNER)
-
-def options(opt):
- opt.add_option('--jtpath', action='store', default='', dest='jtpath',
- help='Path to jar(s) needed for javatest execution, colon separated, if not in the system CLASSPATH')
- opt.add_option('--jtrunner', action='store', default='org.testng.TestNG', dest='jtrunner',
- help='Class to run javatest test [default: org.testng.TestNG]')
-
diff --git a/waflib/extras/kde4.py b/waflib/extras/kde4.py
deleted file mode 100644
index e49a9ec..0000000
--- a/waflib/extras/kde4.py
+++ /dev/null
@@ -1,93 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-Support for the KDE4 libraries and msgfmt
-"""
-
-import os, re
-from waflib import Task, Utils
-from waflib.TaskGen import feature
-
-@feature('msgfmt')
-def apply_msgfmt(self):
- """
- Process all languages to create .mo files and to install them::
-
- def build(bld):
- bld(features='msgfmt', langs='es de fr', appname='myapp', install_path='${KDE4_LOCALE_INSTALL_DIR}')
- """
- for lang in self.to_list(self.langs):
- node = self.path.find_resource(lang+'.po')
- task = self.create_task('msgfmt', node, node.change_ext('.mo'))
-
- langname = lang.split('/')
- langname = langname[-1]
-
- inst = getattr(self, 'install_path', '${KDE4_LOCALE_INSTALL_DIR}')
-
- self.add_install_as(
- inst_to = inst + os.sep + langname + os.sep + 'LC_MESSAGES' + os.sep + getattr(self, 'appname', 'set_your_appname') + '.mo',
- inst_from = task.outputs[0],
- chmod = getattr(self, 'chmod', Utils.O644))
-
-class msgfmt(Task.Task):
- """
- Transform .po files into .mo files
- """
- color = 'BLUE'
- run_str = '${MSGFMT} ${SRC} -o ${TGT}'
-
-def configure(self):
- """
- Detect kde4-config and set various variables for the *use* system::
-
- def options(opt):
- opt.load('compiler_cxx kde4')
- def configure(conf):
- conf.load('compiler_cxx kde4')
- def build(bld):
- bld.program(source='main.c', target='app', use='KDECORE KIO KHTML')
- """
- kdeconfig = self.find_program('kde4-config')
- prefix = self.cmd_and_log(kdeconfig + ['--prefix']).strip()
- fname = '%s/share/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
- try:
- os.stat(fname)
- except OSError:
- fname = '%s/share/kde4/apps/cmake/modules/KDELibsDependencies.cmake' % prefix
- try:
- os.stat(fname)
- except OSError:
- self.fatal('could not open %s' % fname)
-
- try:
- txt = Utils.readf(fname)
- except EnvironmentError:
- self.fatal('could not read %s' % fname)
-
- txt = txt.replace('\\\n', '\n')
- fu = re.compile('#(.*)\n')
- txt = fu.sub('', txt)
-
- setregexp = re.compile('([sS][eE][tT]\s*\()\s*([^\s]+)\s+\"([^"]+)\"\)')
- found = setregexp.findall(txt)
-
- for (_, key, val) in found:
- #print key, val
- self.env[key] = val
-
- # well well, i could just write an interpreter for cmake files
- self.env['LIB_KDECORE']= ['kdecore']
- self.env['LIB_KDEUI'] = ['kdeui']
- self.env['LIB_KIO'] = ['kio']
- self.env['LIB_KHTML'] = ['khtml']
- self.env['LIB_KPARTS'] = ['kparts']
-
- self.env['LIBPATH_KDECORE'] = [os.path.join(self.env.KDE4_LIB_INSTALL_DIR, 'kde4', 'devel'), self.env.KDE4_LIB_INSTALL_DIR]
- self.env['INCLUDES_KDECORE'] = [self.env['KDE4_INCLUDE_INSTALL_DIR']]
- self.env.append_value('INCLUDES_KDECORE', [self.env['KDE4_INCLUDE_INSTALL_DIR']+ os.sep + 'KDE'])
-
- self.find_program('msgfmt', var='MSGFMT')
-
diff --git a/waflib/extras/local_rpath.py b/waflib/extras/local_rpath.py
deleted file mode 100644
index b2507e1..0000000
--- a/waflib/extras/local_rpath.py
+++ /dev/null
@@ -1,19 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-from waflib.TaskGen import after_method, feature
-
-@after_method('propagate_uselib_vars')
-@feature('cprogram', 'cshlib', 'cxxprogram', 'cxxshlib', 'fcprogram', 'fcshlib')
-def add_rpath_stuff(self):
- all = self.to_list(getattr(self, 'use', []))
- while all:
- name = all.pop()
- try:
- tg = self.bld.get_tgen_by_name(name)
- except:
- continue
- self.env.append_value('RPATH', tg.link_task.outputs[0].parent.abspath())
- all.extend(self.to_list(getattr(tg, 'use', [])))
-
diff --git a/waflib/extras/lv2.py b/waflib/extras/lv2.py
deleted file mode 100644
index 815987f..0000000
--- a/waflib/extras/lv2.py
+++ /dev/null
@@ -1,47 +0,0 @@
-import os
-import sys
-
-from waflib import Logs
-from waflib import Options
-
-def options(opt):
- conf_opts = opt.get_option_group('Configuration options')
- conf_opts.add_option('--lv2-user', action='store_true', default=False, dest='lv2_user',
- help='install LV2 bundles to user location')
- conf_opts.add_option('--lv2dir', type='string',
- help='LV2 bundles [Default: LIBDIR/lv2]')
-
-def configure(conf):
- def env_path(parent_dir_var, name):
- parent = os.getenv(parent_dir_var)
- if parent:
- return os.path.join(parent, name)
- else:
- Logs.warn('Environment variable %s unset, using LIBDIR\n' % parent_dir_var)
- return os.path.join(conf.env['LIBDIR'], name)
-
- def normpath(path):
- if sys.platform == 'win32':
- return os.path.normpath(path).replace('\\', '/')
- else:
- return os.path.normpath(path)
-
- if Options.options.lv2dir:
- conf.env['LV2DIR'] = Options.options.lv2dir
- elif Options.options.lv2_user:
- if conf.env.DEST_OS == 'darwin':
- conf.env['LV2DIR'] = env_path('HOME', 'Library/Audio/Plug-Ins/LV2')
- elif conf.env.DEST_OS == 'win32':
- conf.env['LV2DIR'] = env_path('APPDATA', 'LV2')
- else:
- conf.env['LV2DIR'] = env_path('HOME', '.lv2')
- else:
- if conf.env.DEST_OS == 'darwin':
- conf.env['LV2DIR'] = '/Library/Audio/Plug-Ins/LV2'
- elif conf.env.DEST_OS == 'win32':
- conf.env['LV2DIR'] = env_path('COMMONPROGRAMFILES', 'LV2')
- else:
- conf.env['LV2DIR'] = os.path.join(conf.env['LIBDIR'], 'lv2')
-
- conf.env['LV2DIR'] = normpath(conf.env['LV2DIR'])
-
diff --git a/waflib/extras/make.py b/waflib/extras/make.py
deleted file mode 100644
index 933d9ca..0000000
--- a/waflib/extras/make.py
+++ /dev/null
@@ -1,142 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011 (ita)
-
-"""
-A make-like way of executing the build, following the relationships between inputs/outputs
-
-This algorithm will lead to slower builds, will not be as flexible as "waf build", but
-it might be useful for building data files (?)
-
-It is likely to break in the following cases:
-- files are created dynamically (no inputs or outputs)
-- headers
-- building two files from different groups
-"""
-
-import re
-from waflib import Options, Task
-from waflib.Build import BuildContext
-
-class MakeContext(BuildContext):
- '''executes tasks in a step-by-step manner, following dependencies between inputs/outputs'''
- cmd = 'make'
- fun = 'build'
-
- def __init__(self, **kw):
- super(MakeContext, self).__init__(**kw)
- self.files = Options.options.files
-
- def get_build_iterator(self):
- if not self.files:
- while 1:
- yield super(MakeContext, self).get_build_iterator()
-
- for g in self.groups:
- for tg in g:
- try:
- f = tg.post
- except AttributeError:
- pass
- else:
- f()
-
- provides = {}
- uses = {}
- all_tasks = []
- tasks = []
- for pat in self.files.split(','):
- matcher = self.get_matcher(pat)
- for tg in g:
- if isinstance(tg, Task.Task):
- lst = [tg]
- else:
- lst = tg.tasks
- for tsk in lst:
- all_tasks.append(tsk)
-
- do_exec = False
- for node in tsk.inputs:
- try:
- uses[node].append(tsk)
- except:
- uses[node] = [tsk]
-
- if matcher(node, output=False):
- do_exec = True
- break
-
- for node in tsk.outputs:
- try:
- provides[node].append(tsk)
- except:
- provides[node] = [tsk]
-
- if matcher(node, output=True):
- do_exec = True
- break
- if do_exec:
- tasks.append(tsk)
-
- # so we have the tasks that we need to process, the list of all tasks,
- # the map of the tasks providing nodes, and the map of tasks using nodes
-
- if not tasks:
- # if there are no tasks matching, return everything in the current group
- result = all_tasks
- else:
- # this is like a big filter...
- result = set()
- seen = set()
- cur = set(tasks)
- while cur:
- result |= cur
- tosee = set()
- for tsk in cur:
- for node in tsk.inputs:
- if node in seen:
- continue
- seen.add(node)
- tosee |= set(provides.get(node, []))
- cur = tosee
- result = list(result)
-
- Task.set_file_constraints(result)
- Task.set_precedence_constraints(result)
- yield result
-
- while 1:
- yield []
-
- def get_matcher(self, pat):
- # this returns a function
- inn = True
- out = True
- if pat.startswith('in:'):
- out = False
- pat = pat.replace('in:', '')
- elif pat.startswith('out:'):
- inn = False
- pat = pat.replace('out:', '')
-
- anode = self.root.find_node(pat)
- pattern = None
- if not anode:
- if not pat.startswith('^'):
- pat = '^.+?%s' % pat
- if not pat.endswith('$'):
- pat = '%s$' % pat
- pattern = re.compile(pat)
-
- def match(node, output):
- if output and not out:
- return False
- if not output and not inn:
- return False
-
- if anode:
- return anode == node
- else:
- return pattern.match(node.abspath())
- return match
-
diff --git a/waflib/extras/midl.py b/waflib/extras/midl.py
deleted file mode 100644
index 43e6cf9..0000000
--- a/waflib/extras/midl.py
+++ /dev/null
@@ -1,69 +0,0 @@
-#!/usr/bin/env python
-# Issue 1185 ultrix gmail com
-
-"""
-Microsoft Interface Definition Language support. Given ComObject.idl, this tool
-will generate ComObject.tlb ComObject_i.h ComObject_i.c ComObject_p.c and dlldata.c
-
-To declare targets using midl::
-
- def configure(conf):
- conf.load('msvc')
- conf.load('midl')
-
- def build(bld):
- bld(
- features='c cshlib',
- # Note: ComObject_i.c is generated from ComObject.idl
- source = 'main.c ComObject.idl ComObject_i.c',
- target = 'ComObject.dll')
-"""
-
-from waflib import Task, Utils
-from waflib.TaskGen import feature, before_method
-import os
-
-def configure(conf):
- conf.find_program(['midl'], var='MIDL')
-
- conf.env.MIDLFLAGS = [
- '/nologo',
- '/D',
- '_DEBUG',
- '/W1',
- '/char',
- 'signed',
- '/Oicf',
- ]
-
-@feature('c', 'cxx')
-@before_method('process_source')
-def idl_file(self):
- # Do this before process_source so that the generated header can be resolved
- # when scanning source dependencies.
- idl_nodes = []
- src_nodes = []
- for node in Utils.to_list(self.source):
- if str(node).endswith('.idl'):
- idl_nodes.append(node)
- else:
- src_nodes.append(node)
-
- for node in self.to_nodes(idl_nodes):
- t = node.change_ext('.tlb')
- h = node.change_ext('_i.h')
- c = node.change_ext('_i.c')
- p = node.change_ext('_p.c')
- d = node.parent.find_or_declare('dlldata.c')
- self.create_task('midl', node, [t, h, c, p, d])
-
- self.source = src_nodes
-
-class midl(Task.Task):
- """
- Compile idl files
- """
- color = 'YELLOW'
- run_str = '${MIDL} ${MIDLFLAGS} ${CPPPATH_ST:INCLUDES} /tlb ${TGT[0].bldpath()} /header ${TGT[1].bldpath()} /iid ${TGT[2].bldpath()} /proxy ${TGT[3].bldpath()} /dlldata ${TGT[4].bldpath()} ${SRC}'
- before = ['winrc']
-
diff --git a/waflib/extras/msvcdeps.py b/waflib/extras/msvcdeps.py
deleted file mode 100644
index fc1ecd4..0000000
--- a/waflib/extras/msvcdeps.py
+++ /dev/null
@@ -1,256 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Copyright Garmin International or its subsidiaries, 2012-2013
-
-'''
-Off-load dependency scanning from Python code to MSVC compiler
-
-This tool is safe to load in any environment; it will only activate the
-MSVC exploits when it finds that a particular taskgen uses MSVC to
-compile.
-
-Empirical testing shows about a 10% execution time savings from using
-this tool as compared to c_preproc.
-
-The technique of gutting scan() and pushing the dependency calculation
-down to post_run() is cribbed from gccdeps.py.
-
-This affects the cxx class, so make sure to load Qt5 after this tool.
-
-Usage::
-
- def options(opt):
- opt.load('compiler_cxx')
- def configure(conf):
- conf.load('compiler_cxx msvcdeps')
-'''
-
-import os, sys, tempfile, threading
-
-from waflib import Context, Errors, Logs, Task, Utils
-from waflib.Tools import c_preproc, c, cxx, msvc
-from waflib.TaskGen import feature, before_method
-
-lock = threading.Lock()
-nodes = {} # Cache the path -> Node lookup
-
-PREPROCESSOR_FLAG = '/showIncludes'
-INCLUDE_PATTERN = 'Note: including file:'
-
-# Extensible by outside tools
-supported_compilers = ['msvc']
-
-@feature('c', 'cxx')
-@before_method('process_source')
-def apply_msvcdeps_flags(taskgen):
- if taskgen.env.CC_NAME not in supported_compilers:
- return
-
- for flag in ('CFLAGS', 'CXXFLAGS'):
- if taskgen.env.get_flat(flag).find(PREPROCESSOR_FLAG) < 0:
- taskgen.env.append_value(flag, PREPROCESSOR_FLAG)
-
- # Figure out what casing conventions the user's shell used when
- # launching Waf
- (drive, _) = os.path.splitdrive(taskgen.bld.srcnode.abspath())
- taskgen.msvcdeps_drive_lowercase = drive == drive.lower()
-
-def path_to_node(base_node, path, cached_nodes):
- # Take the base node and the path and return a node
- # Results are cached because searching the node tree is expensive
- # The following code is executed by threads, it is not safe, so a lock is needed...
- if getattr(path, '__hash__'):
- node_lookup_key = (base_node, path)
- else:
- # Not hashable, assume it is a list and join into a string
- node_lookup_key = (base_node, os.path.sep.join(path))
- try:
- lock.acquire()
- node = cached_nodes[node_lookup_key]
- except KeyError:
- node = base_node.find_resource(path)
- cached_nodes[node_lookup_key] = node
- finally:
- lock.release()
- return node
-
-def post_run(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(self.derived_msvcdeps, self).post_run()
-
- # TODO this is unlikely to work with netcache
- if getattr(self, 'cached', None):
- return Task.Task.post_run(self)
-
- bld = self.generator.bld
- unresolved_names = []
- resolved_nodes = []
-
- lowercase = self.generator.msvcdeps_drive_lowercase
- correct_case_path = bld.path.abspath()
- correct_case_path_len = len(correct_case_path)
- correct_case_path_norm = os.path.normcase(correct_case_path)
-
- # Dynamically bind to the cache
- try:
- cached_nodes = bld.cached_nodes
- except AttributeError:
- cached_nodes = bld.cached_nodes = {}
-
- for path in self.msvcdeps_paths:
- node = None
- if os.path.isabs(path):
- # Force drive letter to match conventions of main source tree
- drive, tail = os.path.splitdrive(path)
-
- if os.path.normcase(path[:correct_case_path_len]) == correct_case_path_norm:
- # Path is in the sandbox, force it to be correct. MSVC sometimes returns a lowercase path.
- path = correct_case_path + path[correct_case_path_len:]
- else:
- # Check the drive letter
- if lowercase and (drive != drive.lower()):
- path = drive.lower() + tail
- elif (not lowercase) and (drive != drive.upper()):
- path = drive.upper() + tail
- node = path_to_node(bld.root, path, cached_nodes)
- else:
- base_node = bld.bldnode
- # when calling find_resource, make sure the path does not begin by '..'
- path = [k for k in Utils.split_path(path) if k and k != '.']
- while path[0] == '..':
- path = path[1:]
- base_node = base_node.parent
-
- node = path_to_node(base_node, path, cached_nodes)
-
- if not node:
- raise ValueError('could not find %r for %r' % (path, self))
- else:
- if not c_preproc.go_absolute:
- if not (node.is_child_of(bld.srcnode) or node.is_child_of(bld.bldnode)):
- # System library
- Logs.debug('msvcdeps: Ignoring system include %r', node)
- continue
-
- if id(node) == id(self.inputs[0]):
- # Self-dependency
- continue
-
- resolved_nodes.append(node)
-
- bld.node_deps[self.uid()] = resolved_nodes
- bld.raw_deps[self.uid()] = unresolved_names
-
- try:
- del self.cache_sig
- except AttributeError:
- pass
-
- Task.Task.post_run(self)
-
-def scan(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(self.derived_msvcdeps, self).scan()
-
- resolved_nodes = self.generator.bld.node_deps.get(self.uid(), [])
- unresolved_names = []
- return (resolved_nodes, unresolved_names)
-
-def sig_implicit_deps(self):
- if self.env.CC_NAME not in supported_compilers:
- return super(self.derived_msvcdeps, self).sig_implicit_deps()
-
- try:
- return Task.Task.sig_implicit_deps(self)
- except Errors.WafError:
- return Utils.SIG_NIL
-
-def exec_command(self, cmd, **kw):
- if self.env.CC_NAME not in supported_compilers:
- return super(self.derived_msvcdeps, self).exec_command(cmd, **kw)
-
- if not 'cwd' in kw:
- kw['cwd'] = self.get_cwd()
-
- if self.env.PATH:
- env = kw['env'] = dict(kw.get('env') or self.env.env or os.environ)
- env['PATH'] = self.env.PATH if isinstance(self.env.PATH, str) else os.pathsep.join(self.env.PATH)
-
- # The Visual Studio IDE adds an environment variable that causes
- # the MS compiler to send its textual output directly to the
- # debugging window rather than normal stdout/stderr.
- #
- # This is unrecoverably bad for this tool because it will cause
- # all the dependency scanning to see an empty stdout stream and
- # assume that the file being compiled uses no headers.
- #
- # See http://blogs.msdn.com/b/freik/archive/2006/04/05/569025.aspx
- #
- # Attempting to repair the situation by deleting the offending
- # envvar at this point in tool execution will not be good enough--
- # its presence poisons the 'waf configure' step earlier. We just
- # want to put a sanity check here in order to help developers
- # quickly diagnose the issue if an otherwise-good Waf tree
- # is then executed inside the MSVS IDE.
- assert 'VS_UNICODE_OUTPUT' not in kw['env']
-
- cmd, args = self.split_argfile(cmd)
- try:
- (fd, tmp) = tempfile.mkstemp()
- os.write(fd, '\r\n'.join(args).encode())
- os.close(fd)
-
- self.msvcdeps_paths = []
- kw['env'] = kw.get('env', os.environ.copy())
- kw['cwd'] = kw.get('cwd', os.getcwd())
- kw['quiet'] = Context.STDOUT
- kw['output'] = Context.STDOUT
-
- out = []
- if Logs.verbose:
- Logs.debug('argfile: @%r -> %r', tmp, args)
- try:
- raw_out = self.generator.bld.cmd_and_log(cmd + ['@' + tmp], **kw)
- ret = 0
- except Errors.WafError as e:
- raw_out = e.stdout
- ret = e.returncode
-
- for line in raw_out.splitlines():
- if line.startswith(INCLUDE_PATTERN):
- inc_path = line[len(INCLUDE_PATTERN):].strip()
- Logs.debug('msvcdeps: Regex matched %s', inc_path)
- self.msvcdeps_paths.append(inc_path)
- else:
- out.append(line)
-
- # Pipe through the remaining stdout content (not related to /showIncludes)
- if self.generator.bld.logger:
- self.generator.bld.logger.debug('out: %s' % os.linesep.join(out))
- else:
- sys.stdout.write(os.linesep.join(out) + os.linesep)
-
- return ret
- finally:
- try:
- os.remove(tmp)
- except OSError:
- # anti-virus and indexers can keep files open -_-
- pass
-
-
-def wrap_compiled_task(classname):
- derived_class = type(classname, (Task.classes[classname],), {})
- derived_class.derived_msvcdeps = derived_class
- derived_class.post_run = post_run
- derived_class.scan = scan
- derived_class.sig_implicit_deps = sig_implicit_deps
- derived_class.exec_command = exec_command
-
-for k in ('c', 'cxx'):
- if k in Task.classes:
- wrap_compiled_task(k)
-
-def options(opt):
- raise ValueError('Do not load msvcdeps options')
-
diff --git a/waflib/extras/msvs.py b/waflib/extras/msvs.py
deleted file mode 100644
index 8aa2db0..0000000
--- a/waflib/extras/msvs.py
+++ /dev/null
@@ -1,1048 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Avalanche Studios 2009-2011
-# Thomas Nagy 2011
-
-"""
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions
-are met:
-
-1. Redistributions of source code must retain the above copyright
- notice, this list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright
- notice, this list of conditions and the following disclaimer in the
- documentation and/or other materials provided with the distribution.
-
-3. The name of the author may not be used to endorse or promote products
- derived from this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR
-IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
-WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT,
-INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
-(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
-HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
-STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING
-IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-POSSIBILITY OF SUCH DAMAGE.
-"""
-
-"""
-To add this tool to your project:
-def options(conf):
- opt.load('msvs')
-
-It can be a good idea to add the sync_exec tool too.
-
-To generate solution files:
-$ waf configure msvs
-
-To customize the outputs, provide subclasses in your wscript files::
-
- from waflib.extras import msvs
- class vsnode_target(msvs.vsnode_target):
- def get_build_command(self, props):
- # likely to be required
- return "waf.bat build"
- def collect_source(self):
- # likely to be required
- ...
- class msvs_bar(msvs.msvs_generator):
- def init(self):
- msvs.msvs_generator.init(self)
- self.vsnode_target = vsnode_target
-
-The msvs class re-uses the same build() function for reading the targets (task generators),
-you may therefore specify msvs settings on the context object::
-
- def build(bld):
- bld.solution_name = 'foo.sln'
- bld.waf_command = 'waf.bat'
- bld.projects_dir = bld.srcnode.make_node('.depproj')
- bld.projects_dir.mkdir()
-
-For visual studio 2008, the command is called 'msvs2008', and the classes
-such as vsnode_target are wrapped by a decorator class 'wrap_2008' to
-provide special functionality.
-
-To customize platform toolsets, pass additional parameters, for example::
-
- class msvs_2013(msvs.msvs_generator):
- cmd = 'msvs2013'
- numver = '13.00'
- vsver = '2013'
- platform_toolset_ver = 'v120'
-
-ASSUMPTIONS:
-* a project can be either a directory or a target, vcxproj files are written only for targets that have source files
-* each project is a vcxproj file, therefore the project uuid needs only to be a hash of the absolute path
-"""
-
-import os, re, sys
-import uuid # requires python 2.5
-from waflib.Build import BuildContext
-from waflib import Utils, TaskGen, Logs, Task, Context, Node, Options
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-PROJECT_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
-<Project DefaultTargets="Build" ToolsVersion="4.0"
- xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
-
- <ItemGroup Label="ProjectConfigurations">
- ${for b in project.build_properties}
- <ProjectConfiguration Include="${b.configuration}|${b.platform}">
- <Configuration>${b.configuration}</Configuration>
- <Platform>${b.platform}</Platform>
- </ProjectConfiguration>
- ${endfor}
- </ItemGroup>
-
- <PropertyGroup Label="Globals">
- <ProjectGuid>{${project.uuid}}</ProjectGuid>
- <Keyword>MakeFileProj</Keyword>
- <ProjectName>${project.name}</ProjectName>
- </PropertyGroup>
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
-
- ${for b in project.build_properties}
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'" Label="Configuration">
- <ConfigurationType>Makefile</ConfigurationType>
- <OutDir>${b.outdir}</OutDir>
- <PlatformToolset>${project.platform_toolset_ver}</PlatformToolset>
- </PropertyGroup>
- ${endfor}
-
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
- <ImportGroup Label="ExtensionSettings">
- </ImportGroup>
-
- ${for b in project.build_properties}
- <ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
- <Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
- </ImportGroup>
- ${endfor}
-
- ${for b in project.build_properties}
- <PropertyGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
- <NMakeBuildCommandLine>${xml:project.get_build_command(b)}</NMakeBuildCommandLine>
- <NMakeReBuildCommandLine>${xml:project.get_rebuild_command(b)}</NMakeReBuildCommandLine>
- <NMakeCleanCommandLine>${xml:project.get_clean_command(b)}</NMakeCleanCommandLine>
- <NMakeIncludeSearchPath>${xml:b.includes_search_path}</NMakeIncludeSearchPath>
- <NMakePreprocessorDefinitions>${xml:b.preprocessor_definitions};$(NMakePreprocessorDefinitions)</NMakePreprocessorDefinitions>
- <IncludePath>${xml:b.includes_search_path}</IncludePath>
- <ExecutablePath>$(ExecutablePath)</ExecutablePath>
-
- ${if getattr(b, 'output_file', None)}
- <NMakeOutput>${xml:b.output_file}</NMakeOutput>
- ${endif}
- ${if getattr(b, 'deploy_dir', None)}
- <RemoteRoot>${xml:b.deploy_dir}</RemoteRoot>
- ${endif}
- </PropertyGroup>
- ${endfor}
-
- ${for b in project.build_properties}
- ${if getattr(b, 'deploy_dir', None)}
- <ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='${b.configuration}|${b.platform}'">
- <Deploy>
- <DeploymentType>CopyToHardDrive</DeploymentType>
- </Deploy>
- </ItemDefinitionGroup>
- ${endif}
- ${endfor}
-
- <ItemGroup>
- ${for x in project.source}
- <${project.get_key(x)} Include='${x.win32path()}' />
- ${endfor}
- </ItemGroup>
- <Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
- <ImportGroup Label="ExtensionTargets">
- </ImportGroup>
-</Project>
-'''
-
-FILTER_TEMPLATE = '''<?xml version="1.0" encoding="UTF-8"?>
-<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
- <ItemGroup>
- ${for x in project.source}
- <${project.get_key(x)} Include="${x.win32path()}">
- <Filter>${project.get_filter_name(x.parent)}</Filter>
- </${project.get_key(x)}>
- ${endfor}
- </ItemGroup>
- <ItemGroup>
- ${for x in project.dirs()}
- <Filter Include="${project.get_filter_name(x)}">
- <UniqueIdentifier>{${project.make_uuid(x.win32path())}}</UniqueIdentifier>
- </Filter>
- ${endfor}
- </ItemGroup>
-</Project>
-'''
-
-PROJECT_2008_TEMPLATE = r'''<?xml version="1.0" encoding="UTF-8"?>
-<VisualStudioProject ProjectType="Visual C++" Version="9,00"
- Name="${xml: project.name}" ProjectGUID="{${project.uuid}}"
- Keyword="MakeFileProj"
- TargetFrameworkVersion="196613">
- <Platforms>
- ${if project.build_properties}
- ${for b in project.build_properties}
- <Platform Name="${xml: b.platform}" />
- ${endfor}
- ${else}
- <Platform Name="Win32" />
- ${endif}
- </Platforms>
- <ToolFiles>
- </ToolFiles>
- <Configurations>
- ${if project.build_properties}
- ${for b in project.build_properties}
- <Configuration
- Name="${xml: b.configuration}|${xml: b.platform}"
- IntermediateDirectory="$ConfigurationName"
- OutputDirectory="${xml: b.outdir}"
- ConfigurationType="0">
- <Tool
- Name="VCNMakeTool"
- BuildCommandLine="${xml: project.get_build_command(b)}"
- ReBuildCommandLine="${xml: project.get_rebuild_command(b)}"
- CleanCommandLine="${xml: project.get_clean_command(b)}"
- ${if getattr(b, 'output_file', None)}
- Output="${xml: b.output_file}"
- ${endif}
- PreprocessorDefinitions="${xml: b.preprocessor_definitions}"
- IncludeSearchPath="${xml: b.includes_search_path}"
- ForcedIncludes=""
- ForcedUsingAssemblies=""
- AssemblySearchPath=""
- CompileAsManaged=""
- />
- </Configuration>
- ${endfor}
- ${else}
- <Configuration Name="Release|Win32" >
- </Configuration>
- ${endif}
- </Configurations>
- <References>
- </References>
- <Files>
-${project.display_filter()}
- </Files>
-</VisualStudioProject>
-'''
-
-SOLUTION_TEMPLATE = '''Microsoft Visual Studio Solution File, Format Version ${project.numver}
-# Visual Studio ${project.vsver}
-${for p in project.all_projects}
-Project("{${p.ptype()}}") = "${p.name}", "${p.title}", "{${p.uuid}}"
-EndProject${endfor}
-Global
- GlobalSection(SolutionConfigurationPlatforms) = preSolution
- ${if project.all_projects}
- ${for (configuration, platform) in project.all_projects[0].ctx.project_configurations()}
- ${configuration}|${platform} = ${configuration}|${platform}
- ${endfor}
- ${endif}
- EndGlobalSection
- GlobalSection(ProjectConfigurationPlatforms) = postSolution
- ${for p in project.all_projects}
- ${if hasattr(p, 'source')}
- ${for b in p.build_properties}
- {${p.uuid}}.${b.configuration}|${b.platform}.ActiveCfg = ${b.configuration}|${b.platform}
- ${if getattr(p, 'is_active', None)}
- {${p.uuid}}.${b.configuration}|${b.platform}.Build.0 = ${b.configuration}|${b.platform}
- ${endif}
- ${if getattr(p, 'is_deploy', None)}
- {${p.uuid}}.${b.configuration}|${b.platform}.Deploy.0 = ${b.configuration}|${b.platform}
- ${endif}
- ${endfor}
- ${endif}
- ${endfor}
- EndGlobalSection
- GlobalSection(SolutionProperties) = preSolution
- HideSolutionNode = FALSE
- EndGlobalSection
- GlobalSection(NestedProjects) = preSolution
- ${for p in project.all_projects}
- ${if p.parent}
- {${p.uuid}} = {${p.parent.uuid}}
- ${endif}
- ${endfor}
- EndGlobalSection
-EndGlobal
-'''
-
-COMPILE_TEMPLATE = '''def f(project):
- lst = []
- def xml_escape(value):
- return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
- %s
-
- #f = open('cmd.txt', 'w')
- #f.write(str(lst))
- #f.close()
- return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
- """
- Compile a template expression into a python function (like jsps, but way shorter)
- """
- extr = []
- def repl(match):
- g = match.group
- if g('dollar'):
- return "$"
- elif g('backslash'):
- return "\\"
- elif g('subst'):
- extr.append(g('code'))
- return "<<|@|>>"
- return None
-
- line2 = reg_act.sub(repl, line)
- params = line2.split('<<|@|>>')
- assert(extr)
-
-
- indent = 0
- buf = []
- app = buf.append
-
- def app(txt):
- buf.append(indent * '\t' + txt)
-
- for x in range(len(extr)):
- if params[x]:
- app("lst.append(%r)" % params[x])
-
- f = extr[x]
- if f.startswith(('if', 'for')):
- app(f + ':')
- indent += 1
- elif f.startswith('py:'):
- app(f[3:])
- elif f.startswith(('endif', 'endfor')):
- indent -= 1
- elif f.startswith(('else', 'elif')):
- indent -= 1
- app(f + ':')
- indent += 1
- elif f.startswith('xml:'):
- app('lst.append(xml_escape(%s))' % f[4:])
- else:
- #app('lst.append((%s) or "cannot find %s")' % (f, f))
- app('lst.append(%s)' % f)
-
- if extr:
- if params[-1]:
- app("lst.append(%r)" % params[-1])
-
- fun = COMPILE_TEMPLATE % "\n\t".join(buf)
- #print(fun)
- return Task.funex(fun)
-
-
-re_blank = re.compile('(\n|\r|\\s)*\n', re.M)
-def rm_blank_lines(txt):
- txt = re_blank.sub('\r\n', txt)
- return txt
-
-BOM = '\xef\xbb\xbf'
-try:
- BOM = bytes(BOM, 'latin-1') # python 3
-except TypeError:
- pass
-
-def stealth_write(self, data, flags='wb'):
- try:
- unicode
- except NameError:
- data = data.encode('utf-8') # python 3
- else:
- data = data.decode(sys.getfilesystemencoding(), 'replace')
- data = data.encode('utf-8')
-
- if self.name.endswith(('.vcproj', '.vcxproj')):
- data = BOM + data
-
- try:
- txt = self.read(flags='rb')
- if txt != data:
- raise ValueError('must write')
- except (IOError, ValueError):
- self.write(data, flags=flags)
- else:
- Logs.debug('msvs: skipping %s', self.win32path())
-Node.Node.stealth_write = stealth_write
-
-re_win32 = re.compile(r'^([/\\]cygdrive)?[/\\]([a-z])([^a-z0-9_-].*)', re.I)
-def win32path(self):
- p = self.abspath()
- m = re_win32.match(p)
- if m:
- return "%s:%s" % (m.group(2).upper(), m.group(3))
- return p
-Node.Node.win32path = win32path
-
-re_quote = re.compile("[^a-zA-Z0-9-]")
-def quote(s):
- return re_quote.sub("_", s)
-
-def xml_escape(value):
- return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
-def make_uuid(v, prefix = None):
- """
- simple utility function
- """
- if isinstance(v, dict):
- keys = list(v.keys())
- keys.sort()
- tmp = str([(k, v[k]) for k in keys])
- else:
- tmp = str(v)
- d = Utils.md5(tmp.encode()).hexdigest().upper()
- if prefix:
- d = '%s%s' % (prefix, d[8:])
- gid = uuid.UUID(d, version = 4)
- return str(gid).upper()
-
-def diff(node, fromnode):
- # difference between two nodes, but with "(..)" instead of ".."
- c1 = node
- c2 = fromnode
-
- c1h = c1.height()
- c2h = c2.height()
-
- lst = []
- up = 0
-
- while c1h > c2h:
- lst.append(c1.name)
- c1 = c1.parent
- c1h -= 1
-
- while c2h > c1h:
- up += 1
- c2 = c2.parent
- c2h -= 1
-
- while id(c1) != id(c2):
- lst.append(c1.name)
- up += 1
-
- c1 = c1.parent
- c2 = c2.parent
-
- for i in range(up):
- lst.append('(..)')
- lst.reverse()
- return tuple(lst)
-
-class build_property(object):
- pass
-
-class vsnode(object):
- """
- Abstract class representing visual studio elements
- We assume that all visual studio nodes have a uuid and a parent
- """
- def __init__(self, ctx):
- self.ctx = ctx # msvs context
- self.name = '' # string, mandatory
- self.vspath = '' # path in visual studio (name for dirs, absolute path for projects)
- self.uuid = '' # string, mandatory
- self.parent = None # parent node for visual studio nesting
-
- def get_waf(self):
- """
- Override in subclasses...
- """
- return 'cd /d "%s" & %s' % (self.ctx.srcnode.win32path(), getattr(self.ctx, 'waf_command', 'waf.bat'))
-
- def ptype(self):
- """
- Return a special uuid for projects written in the solution file
- """
- pass
-
- def write(self):
- """
- Write the project file, by default, do nothing
- """
- pass
-
- def make_uuid(self, val):
- """
- Alias for creating uuid values easily (the templates cannot access global variables)
- """
- return make_uuid(val)
-
-class vsnode_vsdir(vsnode):
- """
- Nodes representing visual studio folders (which do not match the filesystem tree!)
- """
- VS_GUID_SOLUTIONFOLDER = "2150E333-8FDC-42A3-9474-1A3956D46DE8"
- def __init__(self, ctx, uuid, name, vspath=''):
- vsnode.__init__(self, ctx)
- self.title = self.name = name
- self.uuid = uuid
- self.vspath = vspath or name
-
- def ptype(self):
- return self.VS_GUID_SOLUTIONFOLDER
-
-class vsnode_project(vsnode):
- """
- Abstract class representing visual studio project elements
- A project is assumed to be writable, and has a node representing the file to write to
- """
- VS_GUID_VCPROJ = "8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942"
- def ptype(self):
- return self.VS_GUID_VCPROJ
-
- def __init__(self, ctx, node):
- vsnode.__init__(self, ctx)
- self.path = node
- self.uuid = make_uuid(node.win32path())
- self.name = node.name
- self.platform_toolset_ver = getattr(ctx, 'platform_toolset_ver', None)
- self.title = self.path.win32path()
- self.source = [] # list of node objects
- self.build_properties = [] # list of properties (nmake commands, output dir, etc)
-
- def dirs(self):
- """
- Get the list of parent folders of the source files (header files included)
- for writing the filters
- """
- lst = []
- def add(x):
- if x.height() > self.tg.path.height() and x not in lst:
- lst.append(x)
- add(x.parent)
- for x in self.source:
- add(x.parent)
- return lst
-
- def write(self):
- Logs.debug('msvs: creating %r', self.path)
-
- # first write the project file
- template1 = compile_template(PROJECT_TEMPLATE)
- proj_str = template1(self)
- proj_str = rm_blank_lines(proj_str)
- self.path.stealth_write(proj_str)
-
- # then write the filter
- template2 = compile_template(FILTER_TEMPLATE)
- filter_str = template2(self)
- filter_str = rm_blank_lines(filter_str)
- tmp = self.path.parent.make_node(self.path.name + '.filters')
- tmp.stealth_write(filter_str)
-
- def get_key(self, node):
- """
- required for writing the source files
- """
- name = node.name
- if name.endswith(('.cpp', '.c')):
- return 'ClCompile'
- return 'ClInclude'
-
- def collect_properties(self):
- """
- Returns a list of triplet (configuration, platform, output_directory)
- """
- ret = []
- for c in self.ctx.configurations:
- for p in self.ctx.platforms:
- x = build_property()
- x.outdir = ''
-
- x.configuration = c
- x.platform = p
-
- x.preprocessor_definitions = ''
- x.includes_search_path = ''
-
- # can specify "deploy_dir" too
- ret.append(x)
- self.build_properties = ret
-
- def get_build_params(self, props):
- opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
- return (self.get_waf(), opt)
-
- def get_build_command(self, props):
- return "%s build %s" % self.get_build_params(props)
-
- def get_clean_command(self, props):
- return "%s clean %s" % self.get_build_params(props)
-
- def get_rebuild_command(self, props):
- return "%s clean build %s" % self.get_build_params(props)
-
- def get_filter_name(self, node):
- lst = diff(node, self.tg.path)
- return '\\'.join(lst) or '.'
-
-class vsnode_alias(vsnode_project):
- def __init__(self, ctx, node, name):
- vsnode_project.__init__(self, ctx, node)
- self.name = name
- self.output_file = ''
-
-class vsnode_build_all(vsnode_alias):
- """
- Fake target used to emulate the behaviour of "make all" (starting one process by target is slow)
- This is the only alias enabled by default
- """
- def __init__(self, ctx, node, name='build_all_projects'):
- vsnode_alias.__init__(self, ctx, node, name)
- self.is_active = True
-
-class vsnode_install_all(vsnode_alias):
- """
- Fake target used to emulate the behaviour of "make install"
- """
- def __init__(self, ctx, node, name='install_all_projects'):
- vsnode_alias.__init__(self, ctx, node, name)
-
- def get_build_command(self, props):
- return "%s build install %s" % self.get_build_params(props)
-
- def get_clean_command(self, props):
- return "%s clean %s" % self.get_build_params(props)
-
- def get_rebuild_command(self, props):
- return "%s clean build install %s" % self.get_build_params(props)
-
-class vsnode_project_view(vsnode_alias):
- """
- Fake target used to emulate a file system view
- """
- def __init__(self, ctx, node, name='project_view'):
- vsnode_alias.__init__(self, ctx, node, name)
- self.tg = self.ctx() # fake one, cannot remove
- self.exclude_files = Node.exclude_regs + '''
-waf-2*
-waf3-2*/**
-.waf-2*
-.waf3-2*/**
-**/*.sdf
-**/*.suo
-**/*.ncb
-**/%s
- ''' % Options.lockfile
-
- def collect_source(self):
- # this is likely to be slow
- self.source = self.ctx.srcnode.ant_glob('**', excl=self.exclude_files)
-
- def get_build_command(self, props):
- params = self.get_build_params(props) + (self.ctx.cmd,)
- return "%s %s %s" % params
-
- def get_clean_command(self, props):
- return ""
-
- def get_rebuild_command(self, props):
- return self.get_build_command(props)
-
-class vsnode_target(vsnode_project):
- """
- Visual studio project representing a targets (programs, libraries, etc) and bound
- to a task generator
- """
- def __init__(self, ctx, tg):
- """
- A project is more or less equivalent to a file/folder
- """
- base = getattr(ctx, 'projects_dir', None) or tg.path
- node = base.make_node(quote(tg.name) + ctx.project_extension) # the project file as a Node
- vsnode_project.__init__(self, ctx, node)
- self.name = quote(tg.name)
- self.tg = tg # task generator
-
- def get_build_params(self, props):
- """
- Override the default to add the target name
- """
- opt = '--execsolution=%s' % self.ctx.get_solution_node().win32path()
- if getattr(self, 'tg', None):
- opt += " --targets=%s" % self.tg.name
- return (self.get_waf(), opt)
-
- def collect_source(self):
- tg = self.tg
- source_files = tg.to_nodes(getattr(tg, 'source', []))
- include_dirs = Utils.to_list(getattr(tg, 'msvs_includes', []))
- include_files = []
- for x in include_dirs:
- if isinstance(x, str):
- x = tg.path.find_node(x)
- if x:
- lst = [y for y in x.ant_glob(HEADERS_GLOB, flat=False)]
- include_files.extend(lst)
-
- # remove duplicates
- self.source.extend(list(set(source_files + include_files)))
- self.source.sort(key=lambda x: x.win32path())
-
- def collect_properties(self):
- """
- Visual studio projects are associated with platforms and configurations (for building especially)
- """
- super(vsnode_target, self).collect_properties()
- for x in self.build_properties:
- x.outdir = self.path.parent.win32path()
- x.preprocessor_definitions = ''
- x.includes_search_path = ''
-
- try:
- tsk = self.tg.link_task
- except AttributeError:
- pass
- else:
- x.output_file = tsk.outputs[0].win32path()
- x.preprocessor_definitions = ';'.join(tsk.env.DEFINES)
- x.includes_search_path = ';'.join(self.tg.env.INCPATHS)
-
-class msvs_generator(BuildContext):
- '''generates a visual studio 2010 solution'''
- cmd = 'msvs'
- fun = 'build'
- numver = '11.00' # Visual Studio Version Number
- vsver = '2010' # Visual Studio Version Year
- platform_toolset_ver = 'v110' # Platform Toolset Version Number
-
- def init(self):
- """
- Some data that needs to be present
- """
- if not getattr(self, 'configurations', None):
- self.configurations = ['Release'] # LocalRelease, RemoteDebug, etc
- if not getattr(self, 'platforms', None):
- self.platforms = ['Win32']
- if not getattr(self, 'all_projects', None):
- self.all_projects = []
- if not getattr(self, 'project_extension', None):
- self.project_extension = '.vcxproj'
- if not getattr(self, 'projects_dir', None):
- self.projects_dir = self.srcnode.make_node('.depproj')
- self.projects_dir.mkdir()
-
- # bind the classes to the object, so that subclass can provide custom generators
- if not getattr(self, 'vsnode_vsdir', None):
- self.vsnode_vsdir = vsnode_vsdir
- if not getattr(self, 'vsnode_target', None):
- self.vsnode_target = vsnode_target
- if not getattr(self, 'vsnode_build_all', None):
- self.vsnode_build_all = vsnode_build_all
- if not getattr(self, 'vsnode_install_all', None):
- self.vsnode_install_all = vsnode_install_all
- if not getattr(self, 'vsnode_project_view', None):
- self.vsnode_project_view = vsnode_project_view
-
- self.numver = self.__class__.numver
- self.vsver = self.__class__.vsver
- self.platform_toolset_ver = self.__class__.platform_toolset_ver
-
- def execute(self):
- """
- Entry point
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
-
- # user initialization
- self.init()
-
- # two phases for creating the solution
- self.collect_projects() # add project objects into "self.all_projects"
- self.write_files() # write the corresponding project and solution files
-
- def collect_projects(self):
- """
- Fill the list self.all_projects with project objects
- Fill the list of build targets
- """
- self.collect_targets()
- self.add_aliases()
- self.collect_dirs()
- default_project = getattr(self, 'default_project', None)
- def sortfun(x):
- if x.name == default_project:
- return ''
- return getattr(x, 'path', None) and x.path.win32path() or x.name
- self.all_projects.sort(key=sortfun)
-
- def write_files(self):
- """
- Write the project and solution files from the data collected
- so far. It is unlikely that you will want to change this
- """
- for p in self.all_projects:
- p.write()
-
- # and finally write the solution file
- node = self.get_solution_node()
- node.parent.mkdir()
- Logs.warn('Creating %r', node)
- template1 = compile_template(SOLUTION_TEMPLATE)
- sln_str = template1(self)
- sln_str = rm_blank_lines(sln_str)
- node.stealth_write(sln_str)
-
- def get_solution_node(self):
- """
- The solution filename is required when writing the .vcproj files
- return self.solution_node and if it does not exist, make one
- """
- try:
- return self.solution_node
- except AttributeError:
- pass
-
- solution_name = getattr(self, 'solution_name', None)
- if not solution_name:
- solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '.sln'
- if os.path.isabs(solution_name):
- self.solution_node = self.root.make_node(solution_name)
- else:
- self.solution_node = self.srcnode.make_node(solution_name)
- return self.solution_node
-
- def project_configurations(self):
- """
- Helper that returns all the pairs (config,platform)
- """
- ret = []
- for c in self.configurations:
- for p in self.platforms:
- ret.append((c, p))
- return ret
-
- def collect_targets(self):
- """
- Process the list of task generators
- """
- for g in self.groups:
- for tg in g:
- if not isinstance(tg, TaskGen.task_gen):
- continue
-
- if not hasattr(tg, 'msvs_includes'):
- tg.msvs_includes = tg.to_list(getattr(tg, 'includes', [])) + tg.to_list(getattr(tg, 'export_includes', []))
- tg.post()
- if not getattr(tg, 'link_task', None):
- continue
-
- p = self.vsnode_target(self, tg)
- p.collect_source() # delegate this processing
- p.collect_properties()
- self.all_projects.append(p)
-
- def add_aliases(self):
- """
- Add a specific target that emulates the "make all" necessary for Visual studio when pressing F7
- We also add an alias for "make install" (disabled by default)
- """
- base = getattr(self, 'projects_dir', None) or self.tg.path
-
- node_project = base.make_node('build_all_projects' + self.project_extension) # Node
- p_build = self.vsnode_build_all(self, node_project)
- p_build.collect_properties()
- self.all_projects.append(p_build)
-
- node_project = base.make_node('install_all_projects' + self.project_extension) # Node
- p_install = self.vsnode_install_all(self, node_project)
- p_install.collect_properties()
- self.all_projects.append(p_install)
-
- node_project = base.make_node('project_view' + self.project_extension) # Node
- p_view = self.vsnode_project_view(self, node_project)
- p_view.collect_source()
- p_view.collect_properties()
- self.all_projects.append(p_view)
-
- n = self.vsnode_vsdir(self, make_uuid(self.srcnode.win32path() + 'build_aliases'), "build_aliases")
- p_build.parent = p_install.parent = p_view.parent = n
- self.all_projects.append(n)
-
- def collect_dirs(self):
- """
- Create the folder structure in the Visual studio project view
- """
- seen = {}
- def make_parents(proj):
- # look at a project, try to make a parent
- if getattr(proj, 'parent', None):
- # aliases already have parents
- return
- x = proj.iter_path
- if x in seen:
- proj.parent = seen[x]
- return
-
- # There is not vsnode_vsdir for x.
- # So create a project representing the folder "x"
- n = proj.parent = seen[x] = self.vsnode_vsdir(self, make_uuid(x.win32path()), x.name)
- n.iter_path = x.parent
- self.all_projects.append(n)
-
- # recurse up to the project directory
- if x.height() > self.srcnode.height() + 1:
- make_parents(n)
-
- for p in self.all_projects[:]: # iterate over a copy of all projects
- if not getattr(p, 'tg', None):
- # but only projects that have a task generator
- continue
-
- # make a folder for each task generator
- p.iter_path = p.tg.path
- make_parents(p)
-
-def wrap_2008(cls):
- class dec(cls):
- def __init__(self, *k, **kw):
- cls.__init__(self, *k, **kw)
- self.project_template = PROJECT_2008_TEMPLATE
-
- def display_filter(self):
-
- root = build_property()
- root.subfilters = []
- root.sourcefiles = []
- root.source = []
- root.name = ''
-
- @Utils.run_once
- def add_path(lst):
- if not lst:
- return root
- child = build_property()
- child.subfilters = []
- child.sourcefiles = []
- child.source = []
- child.name = lst[-1]
-
- par = add_path(lst[:-1])
- par.subfilters.append(child)
- return child
-
- for x in self.source:
- # this crap is for enabling subclasses to override get_filter_name
- tmp = self.get_filter_name(x.parent)
- tmp = tmp != '.' and tuple(tmp.split('\\')) or ()
- par = add_path(tmp)
- par.source.append(x)
-
- def display(n):
- buf = []
- for x in n.source:
- buf.append('<File RelativePath="%s" FileType="%s"/>\n' % (xml_escape(x.win32path()), self.get_key(x)))
- for x in n.subfilters:
- buf.append('<Filter Name="%s">' % xml_escape(x.name))
- buf.append(display(x))
- buf.append('</Filter>')
- return '\n'.join(buf)
-
- return display(root)
-
- def get_key(self, node):
- """
- If you do not want to let visual studio use the default file extensions,
- override this method to return a value:
- 0: C/C++ Code, 1: C++ Class, 2: C++ Header File, 3: C++ Form,
- 4: C++ Control, 5: Text File, 6: DEF File, 7: IDL File,
- 8: Makefile, 9: RGS File, 10: RC File, 11: RES File, 12: XSD File,
- 13: XML File, 14: HTML File, 15: CSS File, 16: Bitmap, 17: Icon,
- 18: Resx File, 19: BSC File, 20: XSX File, 21: C++ Web Service,
- 22: ASAX File, 23: Asp Page, 24: Document, 25: Discovery File,
- 26: C# File, 27: eFileTypeClassDiagram, 28: MHTML Document,
- 29: Property Sheet, 30: Cursor, 31: Manifest, 32: eFileTypeRDLC
- """
- return ''
-
- def write(self):
- Logs.debug('msvs: creating %r', self.path)
- template1 = compile_template(self.project_template)
- proj_str = template1(self)
- proj_str = rm_blank_lines(proj_str)
- self.path.stealth_write(proj_str)
-
- return dec
-
-class msvs_2008_generator(msvs_generator):
- '''generates a visual studio 2008 solution'''
- cmd = 'msvs2008'
- fun = msvs_generator.fun
- numver = '10.00'
- vsver = '2008'
-
- def init(self):
- if not getattr(self, 'project_extension', None):
- self.project_extension = '_2008.vcproj'
- if not getattr(self, 'solution_name', None):
- self.solution_name = getattr(Context.g_module, Context.APPNAME, 'project') + '_2008.sln'
-
- if not getattr(self, 'vsnode_target', None):
- self.vsnode_target = wrap_2008(vsnode_target)
- if not getattr(self, 'vsnode_build_all', None):
- self.vsnode_build_all = wrap_2008(vsnode_build_all)
- if not getattr(self, 'vsnode_install_all', None):
- self.vsnode_install_all = wrap_2008(vsnode_install_all)
- if not getattr(self, 'vsnode_project_view', None):
- self.vsnode_project_view = wrap_2008(vsnode_project_view)
-
- msvs_generator.init(self)
-
-def options(ctx):
- """
- If the msvs option is used, try to detect if the build is made from visual studio
- """
- ctx.add_option('--execsolution', action='store', help='when building with visual studio, use a build state file')
-
- old = BuildContext.execute
- def override_build_state(ctx):
- def lock(rm, add):
- uns = ctx.options.execsolution.replace('.sln', rm)
- uns = ctx.root.make_node(uns)
- try:
- uns.delete()
- except OSError:
- pass
-
- uns = ctx.options.execsolution.replace('.sln', add)
- uns = ctx.root.make_node(uns)
- try:
- uns.write('')
- except EnvironmentError:
- pass
-
- if ctx.options.execsolution:
- ctx.launch_dir = Context.top_dir # force a build for the whole project (invalid cwd when called by visual studio)
- lock('.lastbuildstate', '.unsuccessfulbuild')
- old(ctx)
- lock('.unsuccessfulbuild', '.lastbuildstate')
- else:
- old(ctx)
- BuildContext.execute = override_build_state
-
diff --git a/waflib/extras/netcache_client.py b/waflib/extras/netcache_client.py
deleted file mode 100644
index dc49048..0000000
--- a/waflib/extras/netcache_client.py
+++ /dev/null
@@ -1,390 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011-2015 (ita)
-
-"""
-A client for the network cache (playground/netcache/). Launch the server with:
-./netcache_server, then use it for the builds by adding the following:
-
- def build(bld):
- bld.load('netcache_client')
-
-The parameters should be present in the environment in the form:
- NETCACHE=host:port waf configure build
-
-Or in a more detailed way:
- NETCACHE_PUSH=host:port NETCACHE_PULL=host:port waf configure build
-
-where:
- host: host where the server resides, by default localhost
- port: by default push on 11001 and pull on 12001
-
-Use the server provided in playground/netcache/Netcache.java
-"""
-
-import os, socket, time, atexit, sys
-from waflib import Task, Logs, Utils, Build, Runner
-from waflib.Configure import conf
-
-BUF = 8192 * 16
-HEADER_SIZE = 128
-MODES = ['PUSH', 'PULL', 'PUSH_PULL']
-STALE_TIME = 30 # seconds
-
-GET = 'GET'
-PUT = 'PUT'
-LST = 'LST'
-BYE = 'BYE'
-
-all_sigs_in_cache = (0.0, [])
-
-def put_data(conn, data):
- if sys.hexversion > 0x3000000:
- data = data.encode('latin-1')
- cnt = 0
- while cnt < len(data):
- sent = conn.send(data[cnt:])
- if sent == 0:
- raise RuntimeError('connection ended')
- cnt += sent
-
-push_connections = Runner.Queue(0)
-pull_connections = Runner.Queue(0)
-def get_connection(push=False):
- # return a new connection... do not forget to release it!
- try:
- if push:
- ret = push_connections.get(block=False)
- else:
- ret = pull_connections.get(block=False)
- except Exception:
- ret = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
- if push:
- ret.connect(Task.push_addr)
- else:
- ret.connect(Task.pull_addr)
- return ret
-
-def release_connection(conn, msg='', push=False):
- if conn:
- if push:
- push_connections.put(conn)
- else:
- pull_connections.put(conn)
-
-def close_connection(conn, msg=''):
- if conn:
- data = '%s,%s' % (BYE, msg)
- try:
- put_data(conn, data.ljust(HEADER_SIZE))
- except:
- pass
- try:
- conn.close()
- except:
- pass
-
-def close_all():
- for q in (push_connections, pull_connections):
- while q.qsize():
- conn = q.get()
- try:
- close_connection(conn)
- except:
- # ignore errors when cleaning up
- pass
-atexit.register(close_all)
-
-def read_header(conn):
- cnt = 0
- buf = []
- while cnt < HEADER_SIZE:
- data = conn.recv(HEADER_SIZE - cnt)
- if not data:
- #import traceback
- #traceback.print_stack()
- raise ValueError('connection ended when reading a header %r' % buf)
- buf.append(data)
- cnt += len(data)
- if sys.hexversion > 0x3000000:
- ret = ''.encode('latin-1').join(buf)
- ret = ret.decode('latin-1')
- else:
- ret = ''.join(buf)
- return ret
-
-def check_cache(conn, ssig):
- """
- List the files on the server, this is an optimization because it assumes that
- concurrent builds are rare
- """
- global all_sigs_in_cache
- if not STALE_TIME:
- return
- if time.time() - all_sigs_in_cache[0] > STALE_TIME:
-
- params = (LST,'')
- put_data(conn, ','.join(params).ljust(HEADER_SIZE))
-
- # read what is coming back
- ret = read_header(conn)
- size = int(ret.split(',')[0])
-
- buf = []
- cnt = 0
- while cnt < size:
- data = conn.recv(min(BUF, size-cnt))
- if not data:
- raise ValueError('connection ended %r %r' % (cnt, size))
- buf.append(data)
- cnt += len(data)
-
- if sys.hexversion > 0x3000000:
- ret = ''.encode('latin-1').join(buf)
- ret = ret.decode('latin-1')
- else:
- ret = ''.join(buf)
-
- all_sigs_in_cache = (time.time(), ret.splitlines())
- Logs.debug('netcache: server cache has %r entries', len(all_sigs_in_cache[1]))
-
- if not ssig in all_sigs_in_cache[1]:
- raise ValueError('no file %s in cache' % ssig)
-
-class MissingFile(Exception):
- pass
-
-def recv_file(conn, ssig, count, p):
- check_cache(conn, ssig)
-
- params = (GET, ssig, str(count))
- put_data(conn, ','.join(params).ljust(HEADER_SIZE))
- data = read_header(conn)
-
- size = int(data.split(',')[0])
-
- if size == -1:
- raise MissingFile('no file %s - %s in cache' % (ssig, count))
-
- # get the file, writing immediately
- # TODO a tmp file would be better
- f = open(p, 'wb')
- cnt = 0
- while cnt < size:
- data = conn.recv(min(BUF, size-cnt))
- if not data:
- raise ValueError('connection ended %r %r' % (cnt, size))
- f.write(data)
- cnt += len(data)
- f.close()
-
-def sock_send(conn, ssig, cnt, p):
- #print "pushing %r %r %r" % (ssig, cnt, p)
- size = os.stat(p).st_size
- params = (PUT, ssig, str(cnt), str(size))
- put_data(conn, ','.join(params).ljust(HEADER_SIZE))
- f = open(p, 'rb')
- cnt = 0
- while cnt < size:
- r = f.read(min(BUF, size-cnt))
- while r:
- k = conn.send(r)
- if not k:
- raise ValueError('connection ended')
- cnt += k
- r = r[k:]
-
-def can_retrieve_cache(self):
- if not Task.pull_addr:
- return False
- if not self.outputs:
- return False
- self.cached = False
-
- cnt = 0
- sig = self.signature()
- ssig = Utils.to_hex(self.uid() + sig)
-
- conn = None
- err = False
- try:
- try:
- conn = get_connection()
- for node in self.outputs:
- p = node.abspath()
- recv_file(conn, ssig, cnt, p)
- cnt += 1
- except MissingFile as e:
- Logs.debug('netcache: file is not in the cache %r', e)
- err = True
- except Exception as e:
- Logs.debug('netcache: could not get the files %r', self.outputs)
- if Logs.verbose > 1:
- Logs.debug('netcache: exception %r', e)
- err = True
-
- # broken connection? remove this one
- close_connection(conn)
- conn = None
- else:
- Logs.debug('netcache: obtained %r from cache', self.outputs)
-
- finally:
- release_connection(conn)
- if err:
- return False
-
- self.cached = True
- return True
-
-@Utils.run_once
-def put_files_cache(self):
- if not Task.push_addr:
- return
- if not self.outputs:
- return
- if getattr(self, 'cached', None):
- return
-
- #print "called put_files_cache", id(self)
- bld = self.generator.bld
- sig = self.signature()
- ssig = Utils.to_hex(self.uid() + sig)
-
- conn = None
- cnt = 0
- try:
- for node in self.outputs:
- # We could re-create the signature of the task with the signature of the outputs
- # in practice, this means hashing the output files
- # this is unnecessary
- try:
- if not conn:
- conn = get_connection(push=True)
- sock_send(conn, ssig, cnt, node.abspath())
- Logs.debug('netcache: sent %r', node)
- except Exception as e:
- Logs.debug('netcache: could not push the files %r', e)
-
- # broken connection? remove this one
- close_connection(conn)
- conn = None
- cnt += 1
- finally:
- release_connection(conn, push=True)
-
- bld.task_sigs[self.uid()] = self.cache_sig
-
-def hash_env_vars(self, env, vars_lst):
- # reimplement so that the resulting hash does not depend on local paths
- if not env.table:
- env = env.parent
- if not env:
- return Utils.SIG_NIL
-
- idx = str(id(env)) + str(vars_lst)
- try:
- cache = self.cache_env
- except AttributeError:
- cache = self.cache_env = {}
- else:
- try:
- return self.cache_env[idx]
- except KeyError:
- pass
-
- v = str([env[a] for a in vars_lst])
- v = v.replace(self.srcnode.abspath().__repr__()[:-1], '')
- m = Utils.md5()
- m.update(v.encode())
- ret = m.digest()
-
- Logs.debug('envhash: %r %r', ret, v)
-
- cache[idx] = ret
-
- return ret
-
-def uid(self):
- # reimplement so that the signature does not depend on local paths
- try:
- return self.uid_
- except AttributeError:
- m = Utils.md5()
- src = self.generator.bld.srcnode
- up = m.update
- up(self.__class__.__name__.encode())
- for x in self.inputs + self.outputs:
- up(x.path_from(src).encode())
- self.uid_ = m.digest()
- return self.uid_
-
-
-def make_cached(cls):
- if getattr(cls, 'nocache', None):
- return
-
- m1 = cls.run
- def run(self):
- if getattr(self, 'nocache', False):
- return m1(self)
- if self.can_retrieve_cache():
- return 0
- return m1(self)
- cls.run = run
-
- m2 = cls.post_run
- def post_run(self):
- if getattr(self, 'nocache', False):
- return m2(self)
- bld = self.generator.bld
- ret = m2(self)
- if bld.cache_global:
- self.put_files_cache()
- if hasattr(self, 'chmod'):
- for node in self.outputs:
- os.chmod(node.abspath(), self.chmod)
- return ret
- cls.post_run = post_run
-
-@conf
-def setup_netcache(ctx, push_addr, pull_addr):
- Task.Task.can_retrieve_cache = can_retrieve_cache
- Task.Task.put_files_cache = put_files_cache
- Task.Task.uid = uid
- Task.push_addr = push_addr
- Task.pull_addr = pull_addr
- Build.BuildContext.hash_env_vars = hash_env_vars
- ctx.cache_global = True
-
- for x in Task.classes.values():
- make_cached(x)
-
-def build(bld):
- if not 'NETCACHE' in os.environ and not 'NETCACHE_PULL' in os.environ and not 'NETCACHE_PUSH' in os.environ:
- Logs.warn('Setting NETCACHE_PULL=127.0.0.1:11001 and NETCACHE_PUSH=127.0.0.1:12001')
- os.environ['NETCACHE_PULL'] = '127.0.0.1:12001'
- os.environ['NETCACHE_PUSH'] = '127.0.0.1:11001'
-
- if 'NETCACHE' in os.environ:
- if not 'NETCACHE_PUSH' in os.environ:
- os.environ['NETCACHE_PUSH'] = os.environ['NETCACHE']
- if not 'NETCACHE_PULL' in os.environ:
- os.environ['NETCACHE_PULL'] = os.environ['NETCACHE']
-
- v = os.environ['NETCACHE_PULL']
- if v:
- h, p = v.split(':')
- pull_addr = (h, int(p))
- else:
- pull_addr = None
-
- v = os.environ['NETCACHE_PUSH']
- if v:
- h, p = v.split(':')
- push_addr = (h, int(p))
- else:
- push_addr = None
-
- setup_netcache(bld, push_addr, pull_addr)
-
diff --git a/waflib/extras/objcopy.py b/waflib/extras/objcopy.py
deleted file mode 100644
index 82d8359..0000000
--- a/waflib/extras/objcopy.py
+++ /dev/null
@@ -1,50 +0,0 @@
-#!/usr/bin/python
-# Grygoriy Fuchedzhy 2010
-
-"""
-Support for converting linked targets to ihex, srec or binary files using
-objcopy. Use the 'objcopy' feature in conjunction with the 'cc' or 'cxx'
-feature. The 'objcopy' feature uses the following attributes:
-
-objcopy_bfdname Target object format name (eg. ihex, srec, binary).
- Defaults to ihex.
-objcopy_target File name used for objcopy output. This defaults to the
- target name with objcopy_bfdname as extension.
-objcopy_install_path Install path for objcopy_target file. Defaults to ${PREFIX}/fw.
-objcopy_flags Additional flags passed to objcopy.
-"""
-
-from waflib.Utils import def_attrs
-from waflib import Task
-from waflib.TaskGen import feature, after_method
-
-class objcopy(Task.Task):
- run_str = '${OBJCOPY} -O ${TARGET_BFDNAME} ${OBJCOPYFLAGS} ${SRC} ${TGT}'
- color = 'CYAN'
-
-@feature('objcopy')
-@after_method('apply_link')
-def map_objcopy(self):
- def_attrs(self,
- objcopy_bfdname = 'ihex',
- objcopy_target = None,
- objcopy_install_path = "${PREFIX}/firmware",
- objcopy_flags = '')
-
- link_output = self.link_task.outputs[0]
- if not self.objcopy_target:
- self.objcopy_target = link_output.change_ext('.' + self.objcopy_bfdname).name
- task = self.create_task('objcopy', src=link_output, tgt=self.path.find_or_declare(self.objcopy_target))
-
- task.env.append_unique('TARGET_BFDNAME', self.objcopy_bfdname)
- try:
- task.env.append_unique('OBJCOPYFLAGS', getattr(self, 'objcopy_flags'))
- except AttributeError:
- pass
-
- if self.objcopy_install_path:
- self.add_install_files(install_to=self.objcopy_install_path, install_from=task.outputs[0])
-
-def configure(ctx):
- ctx.find_program('objcopy', var='OBJCOPY', mandatory=True)
-
diff --git a/waflib/extras/ocaml.py b/waflib/extras/ocaml.py
deleted file mode 100644
index afe73c0..0000000
--- a/waflib/extras/ocaml.py
+++ /dev/null
@@ -1,348 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"ocaml support"
-
-import os, re
-from waflib import Utils, Task
-from waflib.Logs import error
-from waflib.TaskGen import feature, before_method, after_method, extension
-
-EXT_MLL = ['.mll']
-EXT_MLY = ['.mly']
-EXT_MLI = ['.mli']
-EXT_MLC = ['.c']
-EXT_ML = ['.ml']
-
-open_re = re.compile('^\s*open\s+([a-zA-Z]+)(;;){0,1}$', re.M)
-foo = re.compile(r"""(\(\*)|(\*\))|("(\\.|[^"\\])*"|'(\\.|[^'\\])*'|.[^()*"'\\]*)""", re.M)
-def filter_comments(txt):
- meh = [0]
- def repl(m):
- if m.group(1):
- meh[0] += 1
- elif m.group(2):
- meh[0] -= 1
- elif not meh[0]:
- return m.group()
- return ''
- return foo.sub(repl, txt)
-
-def scan(self):
- node = self.inputs[0]
- code = filter_comments(node.read())
-
- global open_re
- names = []
- import_iterator = open_re.finditer(code)
- if import_iterator:
- for import_match in import_iterator:
- names.append(import_match.group(1))
- found_lst = []
- raw_lst = []
- for name in names:
- nd = None
- for x in self.incpaths:
- nd = x.find_resource(name.lower()+'.ml')
- if not nd:
- nd = x.find_resource(name+'.ml')
- if nd:
- found_lst.append(nd)
- break
- else:
- raw_lst.append(name)
-
- return (found_lst, raw_lst)
-
-native_lst=['native', 'all', 'c_object']
-bytecode_lst=['bytecode', 'all']
-
-@feature('ocaml')
-def init_ml(self):
- Utils.def_attrs(self,
- type = 'all',
- incpaths_lst = [],
- bld_incpaths_lst = [],
- mlltasks = [],
- mlytasks = [],
- mlitasks = [],
- native_tasks = [],
- bytecode_tasks = [],
- linktasks = [],
- bytecode_env = None,
- native_env = None,
- compiled_tasks = [],
- includes = '',
- uselib = '',
- are_deps_set = 0)
-
-@feature('ocaml')
-@after_method('init_ml')
-def init_envs_ml(self):
-
- self.islibrary = getattr(self, 'islibrary', False)
-
- global native_lst, bytecode_lst
- self.native_env = None
- if self.type in native_lst:
- self.native_env = self.env.derive()
- if self.islibrary:
- self.native_env['OCALINKFLAGS'] = '-a'
-
- self.bytecode_env = None
- if self.type in bytecode_lst:
- self.bytecode_env = self.env.derive()
- if self.islibrary:
- self.bytecode_env['OCALINKFLAGS'] = '-a'
-
- if self.type == 'c_object':
- self.native_env.append_unique('OCALINKFLAGS_OPT', '-output-obj')
-
-@feature('ocaml')
-@before_method('apply_vars_ml')
-@after_method('init_envs_ml')
-def apply_incpaths_ml(self):
- inc_lst = self.includes.split()
- lst = self.incpaths_lst
- for dir in inc_lst:
- node = self.path.find_dir(dir)
- if not node:
- error("node not found: " + str(dir))
- continue
- if not node in lst:
- lst.append(node)
- self.bld_incpaths_lst.append(node)
- # now the nodes are added to self.incpaths_lst
-
-@feature('ocaml')
-@before_method('process_source')
-def apply_vars_ml(self):
- for i in self.incpaths_lst:
- if self.bytecode_env:
- app = self.bytecode_env.append_value
- app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
-
- if self.native_env:
- app = self.native_env.append_value
- app('OCAMLPATH', ['-I', i.bldpath(), '-I', i.srcpath()])
-
- varnames = ['INCLUDES', 'OCAMLFLAGS', 'OCALINKFLAGS', 'OCALINKFLAGS_OPT']
- for name in self.uselib.split():
- for vname in varnames:
- cnt = self.env[vname+'_'+name]
- if cnt:
- if self.bytecode_env:
- self.bytecode_env.append_value(vname, cnt)
- if self.native_env:
- self.native_env.append_value(vname, cnt)
-
-@feature('ocaml')
-@after_method('process_source')
-def apply_link_ml(self):
-
- if self.bytecode_env:
- ext = self.islibrary and '.cma' or '.run'
-
- linktask = self.create_task('ocalink')
- linktask.bytecode = 1
- linktask.set_outputs(self.path.find_or_declare(self.target + ext))
- linktask.env = self.bytecode_env
- self.linktasks.append(linktask)
-
- if self.native_env:
- if self.type == 'c_object':
- ext = '.o'
- elif self.islibrary:
- ext = '.cmxa'
- else:
- ext = ''
-
- linktask = self.create_task('ocalinkx')
- linktask.set_outputs(self.path.find_or_declare(self.target + ext))
- linktask.env = self.native_env
- self.linktasks.append(linktask)
-
- # we produce a .o file to be used by gcc
- self.compiled_tasks.append(linktask)
-
-@extension(*EXT_MLL)
-def mll_hook(self, node):
- mll_task = self.create_task('ocamllex', node, node.change_ext('.ml'))
- mll_task.env = self.native_env.derive()
- self.mlltasks.append(mll_task)
-
- self.source.append(mll_task.outputs[0])
-
-@extension(*EXT_MLY)
-def mly_hook(self, node):
- mly_task = self.create_task('ocamlyacc', node, [node.change_ext('.ml'), node.change_ext('.mli')])
- mly_task.env = self.native_env.derive()
- self.mlytasks.append(mly_task)
- self.source.append(mly_task.outputs[0])
-
- task = self.create_task('ocamlcmi', mly_task.outputs[1], mly_task.outputs[1].change_ext('.cmi'))
- task.env = self.native_env.derive()
-
-@extension(*EXT_MLI)
-def mli_hook(self, node):
- task = self.create_task('ocamlcmi', node, node.change_ext('.cmi'))
- task.env = self.native_env.derive()
- self.mlitasks.append(task)
-
-@extension(*EXT_MLC)
-def mlc_hook(self, node):
- task = self.create_task('ocamlcc', node, node.change_ext('.o'))
- task.env = self.native_env.derive()
- self.compiled_tasks.append(task)
-
-@extension(*EXT_ML)
-def ml_hook(self, node):
- if self.native_env:
- task = self.create_task('ocamlx', node, node.change_ext('.cmx'))
- task.env = self.native_env.derive()
- task.incpaths = self.bld_incpaths_lst
- self.native_tasks.append(task)
-
- if self.bytecode_env:
- task = self.create_task('ocaml', node, node.change_ext('.cmo'))
- task.env = self.bytecode_env.derive()
- task.bytecode = 1
- task.incpaths = self.bld_incpaths_lst
- self.bytecode_tasks.append(task)
-
-def compile_may_start(self):
-
- if not getattr(self, 'flag_deps', ''):
- self.flag_deps = 1
-
- # the evil part is that we can only compute the dependencies after the
- # source files can be read (this means actually producing the source files)
- if getattr(self, 'bytecode', ''):
- alltasks = self.generator.bytecode_tasks
- else:
- alltasks = self.generator.native_tasks
-
- self.signature() # ensure that files are scanned - unfortunately
- tree = self.generator.bld
- for node in self.inputs:
- lst = tree.node_deps[self.uid()]
- for depnode in lst:
- for t in alltasks:
- if t == self:
- continue
- if depnode in t.inputs:
- self.set_run_after(t)
-
- # TODO necessary to get the signature right - for now
- delattr(self, 'cache_sig')
- self.signature()
-
- return Task.Task.runnable_status(self)
-
-class ocamlx(Task.Task):
- """native caml compilation"""
- color = 'GREEN'
- run_str = '${OCAMLOPT} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
- scan = scan
- runnable_status = compile_may_start
-
-class ocaml(Task.Task):
- """bytecode caml compilation"""
- color = 'GREEN'
- run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLFLAGS} ${OCAMLINCLUDES} -c -o ${TGT} ${SRC}'
- scan = scan
- runnable_status = compile_may_start
-
-class ocamlcmi(Task.Task):
- """interface generator (the .i files?)"""
- color = 'BLUE'
- run_str = '${OCAMLC} ${OCAMLPATH} ${OCAMLINCLUDES} -o ${TGT} -c ${SRC}'
- before = ['ocamlcc', 'ocaml', 'ocamlcc']
-
-class ocamlcc(Task.Task):
- """ocaml to c interfaces"""
- color = 'GREEN'
- run_str = 'cd ${TGT[0].bld_dir()} && ${OCAMLOPT} ${OCAMLFLAGS} ${OCAMLPATH} ${OCAMLINCLUDES} -c ${SRC[0].abspath()}'
-
-class ocamllex(Task.Task):
- """lexical generator"""
- color = 'BLUE'
- run_str = '${OCAMLLEX} ${SRC} -o ${TGT}'
- before = ['ocamlcmi', 'ocaml', 'ocamlcc']
-
-class ocamlyacc(Task.Task):
- """parser generator"""
- color = 'BLUE'
- run_str = '${OCAMLYACC} -b ${tsk.base()} ${SRC}'
- before = ['ocamlcmi', 'ocaml', 'ocamlcc']
-
- def base(self):
- node = self.outputs[0]
- s = os.path.splitext(node.name)[0]
- return node.bld_dir() + os.sep + s
-
-def link_may_start(self):
-
- if getattr(self, 'bytecode', 0):
- alltasks = self.generator.bytecode_tasks
- else:
- alltasks = self.generator.native_tasks
-
- for x in alltasks:
- if not x.hasrun:
- return Task.ASK_LATER
-
- if not getattr(self, 'order', ''):
-
- # now reorder the inputs given the task dependencies
- # this part is difficult, we do not have a total order on the tasks
- # if the dependencies are wrong, this may not stop
- seen = []
- pendant = []+alltasks
- while pendant:
- task = pendant.pop(0)
- if task in seen:
- continue
- for x in task.run_after:
- if not x in seen:
- pendant.append(task)
- break
- else:
- seen.append(task)
- self.inputs = [x.outputs[0] for x in seen]
- self.order = 1
- return Task.Task.runnable_status(self)
-
-class ocalink(Task.Task):
- """bytecode caml link"""
- color = 'YELLOW'
- run_str = '${OCAMLC} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS} ${SRC}'
- runnable_status = link_may_start
- after = ['ocaml', 'ocamlcc']
-
-class ocalinkx(Task.Task):
- """native caml link"""
- color = 'YELLOW'
- run_str = '${OCAMLOPT} -o ${TGT} ${OCAMLINCLUDES} ${OCALINKFLAGS_OPT} ${SRC}'
- runnable_status = link_may_start
- after = ['ocamlx', 'ocamlcc']
-
-def configure(conf):
- opt = conf.find_program('ocamlopt', var='OCAMLOPT', mandatory=False)
- occ = conf.find_program('ocamlc', var='OCAMLC', mandatory=False)
- if (not opt) or (not occ):
- conf.fatal('The objective caml compiler was not found:\ninstall it or make it available in your PATH')
-
- v = conf.env
- v['OCAMLC'] = occ
- v['OCAMLOPT'] = opt
- v['OCAMLLEX'] = conf.find_program('ocamllex', var='OCAMLLEX', mandatory=False)
- v['OCAMLYACC'] = conf.find_program('ocamlyacc', var='OCAMLYACC', mandatory=False)
- v['OCAMLFLAGS'] = ''
- where = conf.cmd_and_log(conf.env.OCAMLC + ['-where']).strip()+os.sep
- v['OCAMLLIB'] = where
- v['LIBPATH_OCAML'] = where
- v['INCLUDES_OCAML'] = where
- v['LIB_OCAML'] = 'camlrun'
-
diff --git a/waflib/extras/package.py b/waflib/extras/package.py
deleted file mode 100644
index c06498e..0000000
--- a/waflib/extras/package.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2011
-
-"""
-Obtain packages, unpack them in a location, and add associated uselib variables
-(CFLAGS_pkgname, LIBPATH_pkgname, etc).
-
-The default is use a Dependencies.txt file in the source directory.
-
-This is a work in progress.
-
-Usage:
-
-def options(opt):
- opt.load('package')
-
-def configure(conf):
- conf.load_packages()
-"""
-
-from waflib import Logs
-from waflib.Configure import conf
-
-try:
- from urllib import request
-except ImportError:
- from urllib import urlopen
-else:
- urlopen = request.urlopen
-
-
-CACHEVAR = 'WAFCACHE_PACKAGE'
-
-@conf
-def get_package_cache_dir(self):
- cache = None
- if CACHEVAR in conf.environ:
- cache = conf.environ[CACHEVAR]
- cache = self.root.make_node(cache)
- elif self.env[CACHEVAR]:
- cache = self.env[CACHEVAR]
- cache = self.root.make_node(cache)
- else:
- cache = self.srcnode.make_node('.wafcache_package')
- cache.mkdir()
- return cache
-
-@conf
-def download_archive(self, src, dst):
- for x in self.env.PACKAGE_REPO:
- url = '/'.join((x, src))
- try:
- web = urlopen(url)
- try:
- if web.getcode() != 200:
- continue
- except AttributeError:
- pass
- except Exception:
- # on python3 urlopen throws an exception
- # python 2.3 does not have getcode and throws an exception to fail
- continue
- else:
- tmp = self.root.make_node(dst)
- tmp.write(web.read())
- Logs.warn('Downloaded %s from %s', tmp.abspath(), url)
- break
- else:
- self.fatal('Could not get the package %s' % src)
-
-@conf
-def load_packages(self):
- self.get_package_cache_dir()
- # read the dependencies, get the archives, ..
-
diff --git a/waflib/extras/parallel_debug.py b/waflib/extras/parallel_debug.py
deleted file mode 100644
index 35883a3..0000000
--- a/waflib/extras/parallel_debug.py
+++ /dev/null
@@ -1,459 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2007-2010 (ita)
-
-"""
-Debugging helper for parallel compilation, outputs
-a file named pdebug.svg in the source directory::
-
- def options(opt):
- opt.load('parallel_debug')
- def build(bld):
- ...
-"""
-
-import re, sys, threading, time, traceback
-try:
- from Queue import Queue
-except:
- from queue import Queue
-from waflib import Runner, Options, Task, Logs, Errors
-
-SVG_TEMPLATE = """<?xml version="1.0" encoding="UTF-8" standalone="no"?>
-<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.0//EN" "http://www.w3.org/TR/2001/REC-SVG-20010904/DTD/svg10.dtd">
-<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.0"
- x="${project.x}" y="${project.y}" width="${project.width}" height="${project.height}" id="svg602" xml:space="preserve">
-
-<style type='text/css' media='screen'>
- g.over rect { stroke:#FF0000; fill-opacity:0.4 }
-</style>
-
-<script type='text/javascript'><![CDATA[
-var svg = document.getElementsByTagName('svg')[0];
-
-svg.addEventListener('mouseover', function(e) {
- var g = e.target.parentNode;
- var x = document.getElementById('r_' + g.id);
- if (x) {
- g.setAttribute('class', g.getAttribute('class') + ' over');
- x.setAttribute('class', x.getAttribute('class') + ' over');
- showInfo(e, g.id, e.target.attributes.tooltip.value);
- }
-}, false);
-
-svg.addEventListener('mouseout', function(e) {
- var g = e.target.parentNode;
- var x = document.getElementById('r_' + g.id);
- if (x) {
- g.setAttribute('class', g.getAttribute('class').replace(' over', ''));
- x.setAttribute('class', x.getAttribute('class').replace(' over', ''));
- hideInfo(e);
- }
-}, false);
-
-function showInfo(evt, txt, details) {
-${if project.tooltip}
- tooltip = document.getElementById('tooltip');
-
- var t = document.getElementById('tooltiptext');
- t.firstChild.data = txt + " " + details;
-
- var x = evt.clientX + 9;
- if (x > 250) { x -= t.getComputedTextLength() + 16; }
- var y = evt.clientY + 20;
- tooltip.setAttribute("transform", "translate(" + x + "," + y + ")");
- tooltip.setAttributeNS(null, "visibility", "visible");
-
- var r = document.getElementById('tooltiprect');
- r.setAttribute('width', t.getComputedTextLength() + 6);
-${endif}
-}
-
-function hideInfo(evt) {
- var tooltip = document.getElementById('tooltip');
- tooltip.setAttributeNS(null,"visibility","hidden");
-}
-]]></script>
-
-<!-- inkscape requires a big rectangle or it will not export the pictures properly -->
-<rect
- x='${project.x}' y='${project.y}' width='${project.width}' height='${project.height}'
- style="font-size:10;fill:#ffffff;fill-opacity:0.01;fill-rule:evenodd;stroke:#ffffff;"></rect>
-
-${if project.title}
- <text x="${project.title_x}" y="${project.title_y}"
- style="font-size:15px; text-anchor:middle; font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans">${project.title}</text>
-${endif}
-
-
-${for cls in project.groups}
- <g id='${cls.classname}'>
- ${for rect in cls.rects}
- <rect x='${rect.x}' y='${rect.y}' width='${rect.width}' height='${rect.height}' tooltip='${rect.name}' style="font-size:10;fill:${rect.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
- ${endfor}
- </g>
-${endfor}
-
-${for info in project.infos}
- <g id='r_${info.classname}'>
- <rect x='${info.x}' y='${info.y}' width='${info.width}' height='${info.height}' style="font-size:10;fill:${info.color};fill-rule:evenodd;stroke:#000000;stroke-width:0.4;" />
- <text x="${info.text_x}" y="${info.text_y}"
- style="font-size:12px;font-style:normal;font-weight:normal;fill:#000000;fill-opacity:1;stroke:none;stroke-width:1px;stroke-linecap:butt;stroke-linejoin:miter;stroke-opacity:1;font-family:Bitstream Vera Sans"
- >${info.text}</text>
- </g>
-${endfor}
-
-${if project.tooltip}
- <g transform="translate(0,0)" visibility="hidden" id="tooltip">
- <rect id="tooltiprect" y="-15" x="-3" width="1" height="20" style="stroke:black;fill:#edefc2;stroke-width:1"/>
- <text id="tooltiptext" style="font-family:Arial; font-size:12;fill:black;"> </text>
- </g>
-${endif}
-
-</svg>
-"""
-
-COMPILE_TEMPLATE = '''def f(project):
- lst = []
- def xml_escape(value):
- return value.replace("&", "&amp;").replace('"', "&quot;").replace("'", "&apos;").replace("<", "&lt;").replace(">", "&gt;")
-
- %s
- return ''.join(lst)
-'''
-reg_act = re.compile(r"(?P<backslash>\\)|(?P<dollar>\$\$)|(?P<subst>\$\{(?P<code>[^}]*?)\})", re.M)
-def compile_template(line):
-
- extr = []
- def repl(match):
- g = match.group
- if g('dollar'):
- return "$"
- elif g('backslash'):
- return "\\"
- elif g('subst'):
- extr.append(g('code'))
- return "<<|@|>>"
- return None
-
- line2 = reg_act.sub(repl, line)
- params = line2.split('<<|@|>>')
- assert(extr)
-
-
- indent = 0
- buf = []
- app = buf.append
-
- def app(txt):
- buf.append(indent * '\t' + txt)
-
- for x in range(len(extr)):
- if params[x]:
- app("lst.append(%r)" % params[x])
-
- f = extr[x]
- if f.startswith(('if', 'for')):
- app(f + ':')
- indent += 1
- elif f.startswith('py:'):
- app(f[3:])
- elif f.startswith(('endif', 'endfor')):
- indent -= 1
- elif f.startswith(('else', 'elif')):
- indent -= 1
- app(f + ':')
- indent += 1
- elif f.startswith('xml:'):
- app('lst.append(xml_escape(%s))' % f[4:])
- else:
- #app('lst.append((%s) or "cannot find %s")' % (f, f))
- app('lst.append(str(%s))' % f)
-
- if extr:
- if params[-1]:
- app("lst.append(%r)" % params[-1])
-
- fun = COMPILE_TEMPLATE % "\n\t".join(buf)
- # uncomment the following to debug the template
- #for i, x in enumerate(fun.splitlines()):
- # print i, x
- return Task.funex(fun)
-
-# red #ff4d4d
-# green #4da74d
-# lila #a751ff
-
-color2code = {
- 'GREEN' : '#4da74d',
- 'YELLOW' : '#fefe44',
- 'PINK' : '#a751ff',
- 'RED' : '#cc1d1d',
- 'BLUE' : '#6687bb',
- 'CYAN' : '#34e2e2',
-}
-
-mp = {}
-info = [] # list of (text,color)
-
-def map_to_color(name):
- if name in mp:
- return mp[name]
- try:
- cls = Task.classes[name]
- except KeyError:
- return color2code['RED']
- if cls.color in mp:
- return mp[cls.color]
- if cls.color in color2code:
- return color2code[cls.color]
- return color2code['RED']
-
-def process(self):
- m = self.generator.bld.producer
- try:
- # TODO another place for this?
- del self.generator.bld.task_sigs[self.uid()]
- except KeyError:
- pass
-
- self.generator.bld.producer.set_running(1, self)
-
- try:
- ret = self.run()
- except Exception:
- self.err_msg = traceback.format_exc()
- self.hasrun = Task.EXCEPTION
-
- # TODO cleanup
- m.error_handler(self)
- return
-
- if ret:
- self.err_code = ret
- self.hasrun = Task.CRASHED
- else:
- try:
- self.post_run()
- except Errors.WafError:
- pass
- except Exception:
- self.err_msg = traceback.format_exc()
- self.hasrun = Task.EXCEPTION
- else:
- self.hasrun = Task.SUCCESS
- if self.hasrun != Task.SUCCESS:
- m.error_handler(self)
-
- self.generator.bld.producer.set_running(-1, self)
-
-Task.Task.process_back = Task.Task.process
-Task.Task.process = process
-
-old_start = Runner.Parallel.start
-def do_start(self):
- try:
- Options.options.dband
- except AttributeError:
- self.bld.fatal('use def options(opt): opt.load("parallel_debug")!')
-
- self.taskinfo = Queue()
- old_start(self)
- if self.dirty:
- make_picture(self)
-Runner.Parallel.start = do_start
-
-lock_running = threading.Lock()
-def set_running(self, by, tsk):
- with lock_running:
- try:
- cache = self.lock_cache
- except AttributeError:
- cache = self.lock_cache = {}
-
- i = 0
- if by > 0:
- vals = cache.values()
- for i in range(self.numjobs):
- if i not in vals:
- cache[tsk] = i
- break
- else:
- i = cache[tsk]
- del cache[tsk]
-
- self.taskinfo.put( (i, id(tsk), time.time(), tsk.__class__.__name__, self.processed, self.count, by, ",".join(map(str, tsk.outputs))) )
-Runner.Parallel.set_running = set_running
-
-def name2class(name):
- return name.replace(' ', '_').replace('.', '_')
-
-def make_picture(producer):
- # first, cast the parameters
- if not hasattr(producer.bld, 'path'):
- return
-
- tmp = []
- try:
- while True:
- tup = producer.taskinfo.get(False)
- tmp.append(list(tup))
- except:
- pass
-
- try:
- ini = float(tmp[0][2])
- except:
- return
-
- if not info:
- seen = []
- for x in tmp:
- name = x[3]
- if not name in seen:
- seen.append(name)
- else:
- continue
-
- info.append((name, map_to_color(name)))
- info.sort(key=lambda x: x[0])
-
- thread_count = 0
- acc = []
- for x in tmp:
- thread_count += x[6]
- acc.append("%d %d %f %r %d %d %d %s" % (x[0], x[1], x[2] - ini, x[3], x[4], x[5], thread_count, x[7]))
-
- data_node = producer.bld.path.make_node('pdebug.dat')
- data_node.write('\n'.join(acc))
-
- tmp = [lst[:2] + [float(lst[2]) - ini] + lst[3:] for lst in tmp]
-
- st = {}
- for l in tmp:
- if not l[0] in st:
- st[l[0]] = len(st.keys())
- tmp = [ [st[lst[0]]] + lst[1:] for lst in tmp ]
- THREAD_AMOUNT = len(st.keys())
-
- st = {}
- for l in tmp:
- if not l[1] in st:
- st[l[1]] = len(st.keys())
- tmp = [ [lst[0]] + [st[lst[1]]] + lst[2:] for lst in tmp ]
-
-
- BAND = Options.options.dband
-
- seen = {}
- acc = []
- for x in range(len(tmp)):
- line = tmp[x]
- id = line[1]
-
- if id in seen:
- continue
- seen[id] = True
-
- begin = line[2]
- thread_id = line[0]
- for y in range(x + 1, len(tmp)):
- line = tmp[y]
- if line[1] == id:
- end = line[2]
- #print id, thread_id, begin, end
- #acc.append( ( 10*thread_id, 10*(thread_id+1), 10*begin, 10*end ) )
- acc.append( (BAND * begin, BAND*thread_id, BAND*end - BAND*begin, BAND, line[3], line[7]) )
- break
-
- if Options.options.dmaxtime < 0.1:
- gwidth = 1
- for x in tmp:
- m = BAND * x[2]
- if m > gwidth:
- gwidth = m
- else:
- gwidth = BAND * Options.options.dmaxtime
-
- ratio = float(Options.options.dwidth) / gwidth
- gwidth = Options.options.dwidth
- gheight = BAND * (THREAD_AMOUNT + len(info) + 1.5)
-
-
- # simple data model for our template
- class tobject(object):
- pass
-
- model = tobject()
- model.x = 0
- model.y = 0
- model.width = gwidth + 4
- model.height = gheight + 4
-
- model.tooltip = not Options.options.dnotooltip
-
- model.title = Options.options.dtitle
- model.title_x = gwidth / 2
- model.title_y = gheight + - 5
-
- groups = {}
- for (x, y, w, h, clsname, name) in acc:
- try:
- groups[clsname].append((x, y, w, h, name))
- except:
- groups[clsname] = [(x, y, w, h, name)]
-
- # groups of rectangles (else js highlighting is slow)
- model.groups = []
- for cls in groups:
- g = tobject()
- model.groups.append(g)
- g.classname = name2class(cls)
- g.rects = []
- for (x, y, w, h, name) in groups[cls]:
- r = tobject()
- g.rects.append(r)
- r.x = 2 + x * ratio
- r.y = 2 + y
- r.width = w * ratio
- r.height = h
- r.name = name
- r.color = map_to_color(cls)
-
- cnt = THREAD_AMOUNT
-
- # caption
- model.infos = []
- for (text, color) in info:
- inf = tobject()
- model.infos.append(inf)
- inf.classname = name2class(text)
- inf.x = 2 + BAND
- inf.y = 5 + (cnt + 0.5) * BAND
- inf.width = BAND/2
- inf.height = BAND/2
- inf.color = color
-
- inf.text = text
- inf.text_x = 2 + 2 * BAND
- inf.text_y = 5 + (cnt + 0.5) * BAND + 10
-
- cnt += 1
-
- # write the file...
- template1 = compile_template(SVG_TEMPLATE)
- txt = template1(model)
-
- node = producer.bld.path.make_node('pdebug.svg')
- node.write(txt)
- Logs.warn('Created the diagram %r', node)
-
-def options(opt):
- opt.add_option('--dtitle', action='store', default='Parallel build representation for %r' % ' '.join(sys.argv),
- help='title for the svg diagram', dest='dtitle')
- opt.add_option('--dwidth', action='store', type='int', help='diagram width', default=800, dest='dwidth')
- opt.add_option('--dtime', action='store', type='float', help='recording interval in seconds', default=0.009, dest='dtime')
- opt.add_option('--dband', action='store', type='int', help='band width', default=22, dest='dband')
- opt.add_option('--dmaxtime', action='store', type='float', help='maximum time, for drawing fair comparisons', default=0, dest='dmaxtime')
- opt.add_option('--dnotooltip', action='store_true', help='disable tooltips', default=False, dest='dnotooltip')
-
diff --git a/waflib/extras/pch.py b/waflib/extras/pch.py
deleted file mode 100644
index 103e752..0000000
--- a/waflib/extras/pch.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Alexander Afanasyev (UCLA), 2014
-
-"""
-Enable precompiled C++ header support (currently only clang++ and g++ are supported)
-
-To use this tool, wscript should look like:
-
- def options(opt):
- opt.load('pch')
- # This will add `--with-pch` configure option.
- # Unless --with-pch during configure stage specified, the precompiled header support is disabled
-
- def configure(conf):
- conf.load('pch')
- # this will set conf.env.WITH_PCH if --with-pch is specified and the supported compiler is used
- # Unless conf.env.WITH_PCH is set, the precompiled header support is disabled
-
- def build(bld):
- bld(features='cxx pch',
- target='precompiled-headers',
- name='precompiled-headers',
- headers='a.h b.h c.h', # headers to pre-compile into `precompiled-headers`
-
- # Other parameters to compile precompiled headers
- # includes=...,
- # export_includes=...,
- # use=...,
- # ...
-
- # Exported parameters will be propagated even if precompiled headers are disabled
- )
-
- bld(
- target='test',
- features='cxx cxxprogram',
- source='a.cpp b.cpp d.cpp main.cpp',
- use='precompiled-headers',
- )
-
- # or
-
- bld(
- target='test',
- features='pch cxx cxxprogram',
- source='a.cpp b.cpp d.cpp main.cpp',
- headers='a.h b.h c.h',
- )
-
-Note that precompiled header must have multiple inclusion guards. If the guards are missing, any benefit of precompiled header will be voided and compilation may fail in some cases.
-"""
-
-import os
-from waflib import Task, TaskGen, Utils
-from waflib.Tools import c_preproc, cxx
-
-
-PCH_COMPILER_OPTIONS = {
- 'clang++': [['-include'], '.pch', ['-x', 'c++-header']],
- 'g++': [['-include'], '.gch', ['-x', 'c++-header']],
-}
-
-
-def options(opt):
- opt.add_option('--without-pch', action='store_false', default=True, dest='with_pch', help='''Try to use precompiled header to speed up compilation (only g++ and clang++)''')
-
-def configure(conf):
- if (conf.options.with_pch and conf.env['COMPILER_CXX'] in PCH_COMPILER_OPTIONS.keys()):
- conf.env.WITH_PCH = True
- flags = PCH_COMPILER_OPTIONS[conf.env['COMPILER_CXX']]
- conf.env.CXXPCH_F = flags[0]
- conf.env.CXXPCH_EXT = flags[1]
- conf.env.CXXPCH_FLAGS = flags[2]
-
-
-@TaskGen.feature('pch')
-@TaskGen.before('process_source')
-def apply_pch(self):
- if not self.env.WITH_PCH:
- return
-
- if getattr(self.bld, 'pch_tasks', None) is None:
- self.bld.pch_tasks = {}
-
- if getattr(self, 'headers', None) is None:
- return
-
- self.headers = self.to_nodes(self.headers)
-
- if getattr(self, 'name', None):
- try:
- task = self.bld.pch_tasks["%s.%s" % (self.name, self.idx)]
- self.bld.fatal("Duplicated 'pch' task with name %r" % "%s.%s" % (self.name, self.idx))
- except KeyError:
- pass
-
- out = '%s.%d%s' % (self.target, self.idx, self.env['CXXPCH_EXT'])
- out = self.path.find_or_declare(out)
- task = self.create_task('gchx', self.headers, out)
-
- # target should be an absolute path of `out`, but without precompiled header extension
- task.target = out.abspath()[:-len(out.suffix())]
-
- self.pch_task = task
- if getattr(self, 'name', None):
- self.bld.pch_tasks["%s.%s" % (self.name, self.idx)] = task
-
-@TaskGen.feature('cxx')
-@TaskGen.after_method('process_source', 'propagate_uselib_vars')
-def add_pch(self):
- if not (self.env['WITH_PCH'] and getattr(self, 'use', None) and getattr(self, 'compiled_tasks', None) and getattr(self.bld, 'pch_tasks', None)):
- return
-
- pch = None
- # find pch task, if any
-
- if getattr(self, 'pch_task', None):
- pch = self.pch_task
- else:
- for use in Utils.to_list(self.use):
- try:
- pch = self.bld.pch_tasks[use]
- except KeyError:
- pass
-
- if pch:
- for x in self.compiled_tasks:
- x.env.append_value('CXXFLAGS', self.env['CXXPCH_F'] + [pch.target])
-
-class gchx(Task.Task):
- run_str = '${CXX} ${ARCH_ST:ARCH} ${CXXFLAGS} ${CXXPCH_FLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${CXXPCH_F:SRC} ${CXX_SRC_F}${SRC[0].abspath()} ${CXX_TGT_F}${TGT[0].abspath()} ${CPPFLAGS}'
- scan = c_preproc.scan
- color = 'BLUE'
- ext_out=['.h']
-
- def runnable_status(self):
- try:
- node_deps = self.generator.bld.node_deps[self.uid()]
- except KeyError:
- node_deps = []
- ret = Task.Task.runnable_status(self)
- if ret == Task.SKIP_ME and self.env.CXX_NAME == 'clang':
- t = os.stat(self.outputs[0].abspath()).st_mtime
- for n in self.inputs + node_deps:
- if os.stat(n.abspath()).st_mtime > t:
- return Task.RUN_ME
- return ret
diff --git a/waflib/extras/pep8.py b/waflib/extras/pep8.py
deleted file mode 100644
index 676beed..0000000
--- a/waflib/extras/pep8.py
+++ /dev/null
@@ -1,106 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-#
-# written by Sylvain Rouquette, 2011
-
-'''
-Install pep8 module:
-$ easy_install pep8
- or
-$ pip install pep8
-
-To add the pep8 tool to the waf file:
-$ ./waf-light --tools=compat15,pep8
- or, if you have waf >= 1.6.2
-$ ./waf update --files=pep8
-
-
-Then add this to your wscript:
-
-[at]extension('.py', 'wscript')
-def run_pep8(self, node):
- self.create_task('Pep8', node)
-
-'''
-
-import threading
-from waflib import Task, Options
-
-pep8 = __import__('pep8')
-
-
-class Pep8(Task.Task):
- color = 'PINK'
- lock = threading.Lock()
-
- def check_options(self):
- if pep8.options:
- return
- pep8.options = Options.options
- pep8.options.prog = 'pep8'
- excl = pep8.options.exclude.split(',')
- pep8.options.exclude = [s.rstrip('/') for s in excl]
- if pep8.options.filename:
- pep8.options.filename = pep8.options.filename.split(',')
- if pep8.options.select:
- pep8.options.select = pep8.options.select.split(',')
- else:
- pep8.options.select = []
- if pep8.options.ignore:
- pep8.options.ignore = pep8.options.ignore.split(',')
- elif pep8.options.select:
- # Ignore all checks which are not explicitly selected
- pep8.options.ignore = ['']
- elif pep8.options.testsuite or pep8.options.doctest:
- # For doctest and testsuite, all checks are required
- pep8.options.ignore = []
- else:
- # The default choice: ignore controversial checks
- pep8.options.ignore = pep8.DEFAULT_IGNORE.split(',')
- pep8.options.physical_checks = pep8.find_checks('physical_line')
- pep8.options.logical_checks = pep8.find_checks('logical_line')
- pep8.options.counters = dict.fromkeys(pep8.BENCHMARK_KEYS, 0)
- pep8.options.messages = {}
-
- def run(self):
- with Pep8.lock:
- self.check_options()
- pep8.input_file(self.inputs[0].abspath())
- return 0 if not pep8.get_count() else -1
-
-
-def options(opt):
- opt.add_option('-q', '--quiet', default=0, action='count',
- help="report only file names, or nothing with -qq")
- opt.add_option('-r', '--repeat', action='store_true',
- help="show all occurrences of the same error")
- opt.add_option('--exclude', metavar='patterns',
- default=pep8.DEFAULT_EXCLUDE,
- help="exclude files or directories which match these "
- "comma separated patterns (default: %s)" %
- pep8.DEFAULT_EXCLUDE,
- dest='exclude')
- opt.add_option('--filename', metavar='patterns', default='*.py',
- help="when parsing directories, only check filenames "
- "matching these comma separated patterns (default: "
- "*.py)")
- opt.add_option('--select', metavar='errors', default='',
- help="select errors and warnings (e.g. E,W6)")
- opt.add_option('--ignore', metavar='errors', default='',
- help="skip errors and warnings (e.g. E4,W)")
- opt.add_option('--show-source', action='store_true',
- help="show source code for each error")
- opt.add_option('--show-pep8', action='store_true',
- help="show text of PEP 8 for each error")
- opt.add_option('--statistics', action='store_true',
- help="count errors and warnings")
- opt.add_option('--count', action='store_true',
- help="print total number of errors and warnings "
- "to standard error and set exit code to 1 if "
- "total is not null")
- opt.add_option('--benchmark', action='store_true',
- help="measure processing speed")
- opt.add_option('--testsuite', metavar='dir',
- help="run regression tests from dir")
- opt.add_option('--doctest', action='store_true',
- help="run doctest on myself")
diff --git a/waflib/extras/pgicc.py b/waflib/extras/pgicc.py
deleted file mode 100644
index 9790b9c..0000000
--- a/waflib/extras/pgicc.py
+++ /dev/null
@@ -1,75 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Antoine Dechaume 2011
-
-"""
-Detect the PGI C compiler
-"""
-
-import sys, re
-from waflib import Errors
-from waflib.Configure import conf
-from waflib.Tools.compiler_c import c_compiler
-c_compiler['linux'].append('pgicc')
-
-@conf
-def find_pgi_compiler(conf, var, name):
- """
- Find the program name, and execute it to ensure it really is itself.
- """
- if sys.platform == 'cygwin':
- conf.fatal('The PGI compiler does not work on Cygwin')
-
- v = conf.env
- cc = None
- if v[var]:
- cc = v[var]
- elif var in conf.environ:
- cc = conf.environ[var]
- if not cc:
- cc = conf.find_program(name, var=var)
- if not cc:
- conf.fatal('PGI Compiler (%s) was not found' % name)
-
- v[var + '_VERSION'] = conf.get_pgi_version(cc)
- v[var] = cc
- v[var + '_NAME'] = 'pgi'
-
-@conf
-def get_pgi_version(conf, cc):
- """Find the version of a pgi compiler."""
- version_re = re.compile(r"The Portland Group", re.I).search
- cmd = cc + ['-V', '-E'] # Issue 1078, prevent wrappers from linking
-
- try:
- out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError:
- conf.fatal('Could not find pgi compiler %r' % cmd)
-
- if out:
- match = version_re(out)
- else:
- match = version_re(err)
-
- if not match:
- conf.fatal('Could not verify PGI signature')
-
- cmd = cc + ['-help=variable']
- try:
- out, err = conf.cmd_and_log(cmd, output=0)
- except Errors.WafError:
- conf.fatal('Could not find pgi compiler %r' % cmd)
-
- version = re.findall('^COMPVER\s*=(.*)', out, re.M)
- if len(version) != 1:
- conf.fatal('Could not determine the compiler version')
- return version[0]
-
-def configure(conf):
- conf.find_pgi_compiler('CC', 'pgcc')
- conf.find_ar()
- conf.gcc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
-
diff --git a/waflib/extras/pgicxx.py b/waflib/extras/pgicxx.py
deleted file mode 100644
index eae121c..0000000
--- a/waflib/extras/pgicxx.py
+++ /dev/null
@@ -1,20 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Antoine Dechaume 2011
-
-"""
-Detect the PGI C++ compiler
-"""
-
-from waflib.Tools.compiler_cxx import cxx_compiler
-cxx_compiler['linux'].append('pgicxx')
-
-from waflib.extras import pgicc
-
-def configure(conf):
- conf.find_pgi_compiler('CXX', 'pgCC')
- conf.find_ar()
- conf.gxx_common_flags()
- conf.cxx_load_tools()
- conf.cxx_add_flags()
- conf.link_add_flags()
diff --git a/waflib/extras/proc.py b/waflib/extras/proc.py
deleted file mode 100644
index 764abec..0000000
--- a/waflib/extras/proc.py
+++ /dev/null
@@ -1,54 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from os import environ, path
-from waflib import TaskGen, Utils
-
-def options(opt):
- grp = opt.add_option_group('Oracle ProC Options')
- grp.add_option('--oracle_home', action='store', default=environ.get('PROC_ORACLE'), help='Path to Oracle installation home (has bin/lib)')
- grp.add_option('--tns_admin', action='store', default=environ.get('TNS_ADMIN'), help='Directory containing server list (TNS_NAMES.ORA)')
- grp.add_option('--connection', action='store', default='dummy-user/dummy-password@dummy-server', help='Format: user/password@server')
-
-def configure(cnf):
- env = cnf.env
- if not env.PROC_ORACLE:
- env.PROC_ORACLE = cnf.options.oracle_home
- if not env.PROC_TNS_ADMIN:
- env.PROC_TNS_ADMIN = cnf.options.tns_admin
- if not env.PROC_CONNECTION:
- env.PROC_CONNECTION = cnf.options.connection
- cnf.find_program('proc', var='PROC', path_list=env.PROC_ORACLE + path.sep + 'bin')
-
-def proc(tsk):
- env = tsk.env
- gen = tsk.generator
- inc_nodes = gen.to_incnodes(Utils.to_list(getattr(gen,'includes',[])) + env['INCLUDES'])
-
- cmd = (
- [env.PROC] +
- ['SQLCHECK=SEMANTICS'] +
- (['SYS_INCLUDE=(' + ','.join(env.PROC_INCLUDES) + ')']
- if env.PROC_INCLUDES else []) +
- ['INCLUDE=(' + ','.join(
- [i.bldpath() for i in inc_nodes]
- ) + ')'] +
- ['userid=' + env.PROC_CONNECTION] +
- ['INAME=' + tsk.inputs[0].bldpath()] +
- ['ONAME=' + tsk.outputs[0].bldpath()]
- )
- exec_env = {
- 'ORACLE_HOME': env.PROC_ORACLE,
- 'LD_LIBRARY_PATH': env.PROC_ORACLE + path.sep + 'lib',
- }
- if env.PROC_TNS_ADMIN:
- exec_env['TNS_ADMIN'] = env.PROC_TNS_ADMIN
- return tsk.exec_command(cmd, env=exec_env)
-
-TaskGen.declare_chain(
- name = 'proc',
- rule = proc,
- ext_in = '.pc',
- ext_out = '.c',
-)
-
diff --git a/waflib/extras/protoc.py b/waflib/extras/protoc.py
deleted file mode 100644
index f3cb4d8..0000000
--- a/waflib/extras/protoc.py
+++ /dev/null
@@ -1,243 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Philipp Bender, 2012
-# Matt Clarkson, 2012
-
-import re, os
-from waflib.Task import Task
-from waflib.TaskGen import extension
-from waflib import Errors, Context
-
-"""
-A simple tool to integrate protocol buffers into your build system.
-
-Example for C++:
-
- def configure(conf):
- conf.load('compiler_cxx cxx protoc')
-
- def build(bld):
- bld(
- features = 'cxx cxxprogram'
- source = 'main.cpp file1.proto proto/file2.proto',
- includes = '. proto',
- target = 'executable')
-
-Example for Python:
-
- def configure(conf):
- conf.load('python protoc')
-
- def build(bld):
- bld(
- features = 'py'
- source = 'main.py file1.proto proto/file2.proto',
- protoc_includes = 'proto')
-
-Example for both Python and C++ at same time:
-
- def configure(conf):
- conf.load('cxx python protoc')
-
- def build(bld):
- bld(
- features = 'cxx py'
- source = 'file1.proto proto/file2.proto',
- protoc_includes = 'proto') # or includes
-
-
-Example for Java:
-
- def options(opt):
- opt.load('java')
-
- def configure(conf):
- conf.load('python java protoc')
- # Here you have to point to your protobuf-java JAR and have it in classpath
- conf.env.CLASSPATH_PROTOBUF = ['protobuf-java-2.5.0.jar']
-
- def build(bld):
- bld(
- features = 'javac protoc',
- name = 'pbjava',
- srcdir = 'inc/ src', # directories used by javac
- source = ['inc/message_inc.proto', 'inc/message.proto'],
- # source is used by protoc for .proto files
- use = 'PROTOBUF',
- protoc_includes = ['inc']) # for protoc to search dependencies
-
-
-
-
-Notes when using this tool:
-
-- protoc command line parsing is tricky.
-
- The generated files can be put in subfolders which depend on
- the order of the include paths.
-
- Try to be simple when creating task generators
- containing protoc stuff.
-
-"""
-
-class protoc(Task):
- run_str = '${PROTOC} ${PROTOC_FL:PROTOC_FLAGS} ${PROTOC_ST:INCPATHS} ${PROTOC_ST:PROTOC_INCPATHS} ${SRC[0].bldpath()}'
- color = 'BLUE'
- ext_out = ['.h', 'pb.cc', '.py', '.java']
- def scan(self):
- """
- Scan .proto dependencies
- """
- node = self.inputs[0]
-
- nodes = []
- names = []
- seen = []
- search_nodes = []
-
- if not node:
- return (nodes, names)
-
- if 'cxx' in self.generator.features:
- search_nodes = self.generator.includes_nodes
-
- if 'py' in self.generator.features or 'javac' in self.generator.features:
- for incpath in getattr(self.generator, 'protoc_includes', []):
- search_nodes.append(self.generator.bld.path.find_node(incpath))
-
- def parse_node(node):
- if node in seen:
- return
- seen.append(node)
- code = node.read().splitlines()
- for line in code:
- m = re.search(r'^import\s+"(.*)";.*(//)?.*', line)
- if m:
- dep = m.groups()[0]
- for incnode in search_nodes:
- found = incnode.find_resource(dep)
- if found:
- nodes.append(found)
- parse_node(found)
- else:
- names.append(dep)
-
- parse_node(node)
- # Add also dependencies path to INCPATHS so protoc will find the included file
- for deppath in nodes:
- self.env.append_value('INCPATHS', deppath.parent.bldpath())
- return (nodes, names)
-
-@extension('.proto')
-def process_protoc(self, node):
- incdirs = []
- out_nodes = []
- protoc_flags = []
-
- # ensure PROTOC_FLAGS is a list; a copy is used below anyway
- self.env.PROTOC_FLAGS = self.to_list(self.env.PROTOC_FLAGS)
-
- if 'cxx' in self.features:
- cpp_node = node.change_ext('.pb.cc')
- hpp_node = node.change_ext('.pb.h')
- self.source.append(cpp_node)
- out_nodes.append(cpp_node)
- out_nodes.append(hpp_node)
- protoc_flags.append('--cpp_out=%s' % node.parent.get_bld().bldpath())
-
- if 'py' in self.features:
- py_node = node.change_ext('_pb2.py')
- self.source.append(py_node)
- out_nodes.append(py_node)
- protoc_flags.append('--python_out=%s' % node.parent.get_bld().bldpath())
-
- if 'javac' in self.features:
- pkgname, javapkg, javacn, nodename = None, None, None, None
- messages = []
-
- # .java file name is done with some rules depending on .proto file content:
- # -) package is either derived from option java_package if present
- # or from package directive
- # -) file name is either derived from option java_outer_classname if present
- # or the .proto file is converted to camelcase. If a message
- # is named the same then the behaviour depends on protoc version
- #
- # See also: https://developers.google.com/protocol-buffers/docs/reference/java-generated#invocation
-
- code = node.read().splitlines()
- for line in code:
- m = re.search(r'^package\s+(.*);', line)
- if m:
- pkgname = m.groups()[0]
- m = re.search(r'^option\s+(\S*)\s*=\s*"(\S*)";', line)
- if m:
- optname = m.groups()[0]
- if optname == 'java_package':
- javapkg = m.groups()[1]
- elif optname == 'java_outer_classname':
- javacn = m.groups()[1]
- if self.env.PROTOC_MAJOR > '2':
- m = re.search(r'^message\s+(\w*)\s*{*', line)
- if m:
- messages.append(m.groups()[0])
-
- if javapkg:
- nodename = javapkg
- elif pkgname:
- nodename = pkgname
- else:
- raise Errors.WafError('Cannot derive java name from protoc file')
-
- nodename = nodename.replace('.',os.sep) + os.sep
- if javacn:
- nodename += javacn + '.java'
- else:
- if self.env.PROTOC_MAJOR > '2' and node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title() in messages:
- nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title().replace('_','') + 'OuterClass.java'
- else:
- nodename += node.abspath()[node.abspath().rfind(os.sep)+1:node.abspath().rfind('.')].title().replace('_','') + '.java'
-
- java_node = node.parent.find_or_declare(nodename)
- out_nodes.append(java_node)
- protoc_flags.append('--java_out=%s' % node.parent.get_bld().bldpath())
-
- # Make javac get also pick java code generated in build
- if not node.parent.get_bld() in self.javac_task.srcdir:
- self.javac_task.srcdir.append(node.parent.get_bld())
-
- if not out_nodes:
- raise Errors.WafError('Feature %r not supported by protoc extra' % self.features)
-
- tsk = self.create_task('protoc', node, out_nodes)
- tsk.env.append_value('PROTOC_FLAGS', protoc_flags)
-
- if 'javac' in self.features:
- self.javac_task.set_run_after(tsk)
-
- # Instruct protoc where to search for .proto included files.
- # For C++ standard include files dirs are used,
- # but this doesn't apply to Python for example
- for incpath in getattr(self, 'protoc_includes', []):
- incdirs.append(self.path.find_node(incpath).bldpath())
- tsk.env.PROTOC_INCPATHS = incdirs
-
- # PR2115: protoc generates output of .proto files in nested
- # directories by canonicalizing paths. To avoid this we have to pass
- # as first include the full directory file of the .proto file
- tsk.env.prepend_value('INCPATHS', node.parent.bldpath())
-
- use = getattr(self, 'use', '')
- if not 'PROTOBUF' in use:
- self.use = self.to_list(use) + ['PROTOBUF']
-
-def configure(conf):
- conf.check_cfg(package='protobuf', uselib_store='PROTOBUF', args=['--cflags', '--libs'])
- conf.find_program('protoc', var='PROTOC')
- conf.start_msg('Checking for protoc version')
- protocver = conf.cmd_and_log(conf.env.PROTOC + ['--version'], output=Context.BOTH)
- protocver = ''.join(protocver).strip()[protocver[0].rfind(' ')+1:]
- conf.end_msg(protocver)
- conf.env.PROTOC_MAJOR = protocver[:protocver.find('.')]
- conf.env.PROTOC_ST = '-I%s'
- conf.env.PROTOC_FL = '%s'
diff --git a/waflib/extras/pyqt5.py b/waflib/extras/pyqt5.py
deleted file mode 100644
index c21dfa7..0000000
--- a/waflib/extras/pyqt5.py
+++ /dev/null
@@ -1,241 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Federico Pellegrin, 2016-2018 (fedepell) adapted for Python
-
-"""
-This tool helps with finding Python Qt5 tools and libraries,
-and provides translation from QT5 files to Python code.
-
-The following snippet illustrates the tool usage::
-
- def options(opt):
- opt.load('py pyqt5')
-
- def configure(conf):
- conf.load('py pyqt5')
-
- def build(bld):
- bld(
- features = 'py pyqt5',
- source = 'main.py textures.qrc aboutDialog.ui',
- )
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "pyqt5" tool.
-
-Add into the sources list also the qrc resources files or ui5
-definition files and they will be translated into python code
-with the system tools (PyQt5, pyside2, PyQt4 are searched in this
-order) and then compiled
-"""
-
-try:
- from xml.sax import make_parser
- from xml.sax.handler import ContentHandler
-except ImportError:
- has_xml = False
- ContentHandler = object
-else:
- has_xml = True
-
-import os
-from waflib.Tools import python
-from waflib import Task, Options
-from waflib.TaskGen import feature, extension
-from waflib.Configure import conf
-from waflib import Logs
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-
-class XMLHandler(ContentHandler):
- """
- Parses ``.qrc`` files
- """
- def __init__(self):
- self.buf = []
- self.files = []
- def startElement(self, name, attrs):
- if name == 'file':
- self.buf = []
- def endElement(self, name):
- if name == 'file':
- self.files.append(str(''.join(self.buf)))
- def characters(self, cars):
- self.buf.append(cars)
-
-@extension(*EXT_RCC)
-def create_pyrcc_task(self, node):
- "Creates rcc and py task for ``.qrc`` files"
- rcnode = node.change_ext('.py')
- self.create_task('pyrcc', node, rcnode)
- if getattr(self, 'install_from', None):
- self.install_from = self.install_from.get_bld()
- else:
- self.install_from = self.path.get_bld()
- self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
- self.process_py(rcnode)
-
-@extension(*EXT_UI)
-def create_pyuic_task(self, node):
- "Create uic tasks and py for user interface ``.ui`` definition files"
- uinode = node.change_ext('.py')
- self.create_task('ui5py', node, uinode)
- if getattr(self, 'install_from', None):
- self.install_from = self.install_from.get_bld()
- else:
- self.install_from = self.path.get_bld()
- self.install_path = getattr(self, 'install_path', '${PYTHONDIR}')
- self.process_py(uinode)
-
-@extension('.ts')
-def add_pylang(self, node):
- """Adds all the .ts file into ``self.lang``"""
- self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('pyqt5')
-def apply_pyqt5(self):
- """
- The additional parameters are:
-
- :param lang: list of translation files (\*.ts) to process
- :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
- :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
- :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
- """
- if getattr(self, 'lang', None):
- qmtasks = []
- for x in self.to_list(self.lang):
- if isinstance(x, str):
- x = self.path.find_resource(x + '.ts')
- qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
-
-
- if getattr(self, 'langname', None):
- qmnodes = [k.outputs[0] for k in qmtasks]
- rcnode = self.langname
- if isinstance(rcnode, str):
- rcnode = self.path.find_or_declare(rcnode + '.qrc')
- t = self.create_task('qm2rcc', qmnodes, rcnode)
- create_pyrcc_task(self, t.outputs[0])
-
-class pyrcc(Task.Task):
- """
- Processes ``.qrc`` files
- """
- color = 'BLUE'
- run_str = '${QT_PYRCC} ${SRC} -o ${TGT}'
- ext_out = ['.py']
-
- def rcname(self):
- return os.path.splitext(self.inputs[0].name)[0]
-
- def scan(self):
- """Parse the *.qrc* files"""
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
- return ([], [])
-
- parser = make_parser()
- curHandler = XMLHandler()
- parser.setContentHandler(curHandler)
- fi = open(self.inputs[0].abspath(), 'r')
- try:
- parser.parse(fi)
- finally:
- fi.close()
-
- nodes = []
- names = []
- root = self.inputs[0].parent
- for x in curHandler.files:
- nd = root.find_resource(x)
- if nd:
- nodes.append(nd)
- else:
- names.append(x)
- return (nodes, names)
-
-
-class ui5py(Task.Task):
- """
- Processes ``.ui`` files for python
- """
- color = 'BLUE'
- run_str = '${QT_PYUIC} ${SRC} -o ${TGT}'
- ext_out = ['.py']
-
-class ts2qm(Task.Task):
- """
- Generates ``.qm`` files from ``.ts`` files
- """
- color = 'BLUE'
- run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
- """
- Generates ``.qrc`` files from ``.qm`` files
- """
- color = 'BLUE'
- after = 'ts2qm'
- def run(self):
- """Create a qrc file including the inputs"""
- txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
- code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
- self.outputs[0].write(code)
-
-def configure(self):
- self.find_pyqt5_binaries()
-
- # warn about this during the configuration too
- if not has_xml:
- Logs.error('No xml.sax support was found, rcc dependencies will be incomplete!')
-
-@conf
-def find_pyqt5_binaries(self):
- """
- Detects PyQt5 or pyside2 programs such as pyuic5/pyside2-uic, pyrcc5/pyside2-rcc
- """
- env = self.env
-
- if getattr(Options.options, 'want_pyside2', True):
- self.find_program(['pyside2-uic'], var='QT_PYUIC')
- self.find_program(['pyside2-rcc'], var='QT_PYRCC')
- self.find_program(['pyside2-lupdate'], var='QT_PYLUPDATE')
- elif getattr(Options.options, 'want_pyqt4', True):
- self.find_program(['pyuic4'], var='QT_PYUIC')
- self.find_program(['pyrcc4'], var='QT_PYRCC')
- self.find_program(['pylupdate4'], var='QT_PYLUPDATE')
- else:
- self.find_program(['pyuic5','pyside2-uic','pyuic4'], var='QT_PYUIC')
- self.find_program(['pyrcc5','pyside2-rcc','pyrcc4'], var='QT_PYRCC')
- self.find_program(['pylupdate5', 'pyside2-lupdate','pylupdate4'], var='QT_PYLUPDATE')
-
- if not env.QT_PYUIC:
- self.fatal('cannot find the uic compiler for python for qt5')
-
- if not env.QT_PYUIC:
- self.fatal('cannot find the rcc compiler for python for qt5')
-
- self.find_program(['lrelease-qt5', 'lrelease'], var='QT_LRELEASE')
-
-def options(opt):
- """
- Command-line options
- """
- pyqt5opt=opt.add_option_group("Python QT5 Options")
- pyqt5opt.add_option('--pyqt5-pyside2', action='store_true', default=False, dest='want_pyside2', help='use pyside2 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after)')
- pyqt5opt.add_option('--pyqt5-pyqt4', action='store_true', default=False, dest='want_pyqt4', help='use PyQt4 bindings as python QT5 bindings (default PyQt5 is searched first, PySide2 after, PyQt4 last)')
diff --git a/waflib/extras/pytest.py b/waflib/extras/pytest.py
deleted file mode 100644
index 7dd5a1a..0000000
--- a/waflib/extras/pytest.py
+++ /dev/null
@@ -1,225 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Calle Rosenquist, 2016-2018 (xbreak)
-
-"""
-Provides Python unit test support using :py:class:`waflib.Tools.waf_unit_test.utest`
-task via the **pytest** feature.
-
-To use pytest the following is needed:
-
-1. Load `pytest` and the dependency `waf_unit_test` tools.
-2. Create a task generator with feature `pytest` (not `test`) and customize behaviour with
- the following attributes:
-
- - `pytest_source`: Test input files.
- - `ut_str`: Test runner command, e.g. ``${PYTHON} -B -m unittest discover`` or
- if nose is used: ``${NOSETESTS} --no-byte-compile ${SRC}``.
- - `ut_shell`: Determines if ``ut_str`` is executed in a shell. Default: False.
- - `ut_cwd`: Working directory for test runner. Defaults to directory of
- first ``pytest_source`` file.
-
- Additionally the following `pytest` specific attributes are used in dependent taskgens:
-
- - `pytest_path`: Node or string list of additional Python paths.
- - `pytest_libpath`: Node or string list of additional library paths.
-
-The `use` dependencies are used for both update calculation and to populate
-the following environment variables for the `pytest` test runner:
-
-1. `PYTHONPATH` (`sys.path`) of any dependent taskgen that has the feature `py`:
-
- - `install_from` attribute is used to determine where the root of the Python sources
- are located. If `install_from` is not specified the default is to use the taskgen path
- as the root.
-
- - `pytest_path` attribute is used to manually specify additional Python paths.
-
-2. Dynamic linker search path variable (e.g. `LD_LIBRARY_PATH`) of any dependent taskgen with
- non-static link_task.
-
- - `pytest_libpath` attribute is used to manually specify additional linker paths.
-
-Note: `pytest` cannot automatically determine the correct `PYTHONPATH` for `pyext` taskgens
- because the extension might be part of a Python package or used standalone:
-
- - When used as part of another `py` package, the `PYTHONPATH` is provided by
- that taskgen so no additional action is required.
-
- - When used as a standalone module, the user needs to specify the `PYTHONPATH` explicitly
- via the `pytest_path` attribute on the `pyext` taskgen.
-
- For details c.f. the pytest playground examples.
-
-
-For example::
-
- # A standalone Python C extension that demonstrates unit test environment population
- # of PYTHONPATH and LD_LIBRARY_PATH/PATH/DYLD_LIBRARY_PATH.
- #
- # Note: `pytest_path` is provided here because pytest cannot automatically determine
- # if the extension is part of another Python package or is used standalone.
- bld(name = 'foo_ext',
- features = 'c cshlib pyext',
- source = 'src/foo_ext.c',
- target = 'foo_ext',
- pytest_path = [ bld.path.get_bld() ])
-
- # Python package under test that also depend on the Python module `foo_ext`
- #
- # Note: `install_from` is added automatically to `PYTHONPATH`.
- bld(name = 'foo',
- features = 'py',
- use = 'foo_ext',
- source = bld.path.ant_glob('src/foo/*.py'),
- install_from = 'src')
-
- # Unit test example using the built in module unittest and let that discover
- # any test cases.
- bld(name = 'foo_test',
- features = 'pytest',
- use = 'foo',
- pytest_source = bld.path.ant_glob('test/*.py'),
- ut_str = '${PYTHON} -B -m unittest discover')
-
-"""
-
-import os
-from waflib import Task, TaskGen, Errors, Utils, Logs
-from waflib.Tools import ccroot
-
-def _process_use_rec(self, name):
- """
- Recursively process ``use`` for task generator with name ``name``..
- Used by pytest_process_use.
- """
- if name in self.pytest_use_not or name in self.pytest_use_seen:
- return
- try:
- tg = self.bld.get_tgen_by_name(name)
- except Errors.WafError:
- self.pytest_use_not.add(name)
- return
-
- self.pytest_use_seen.append(name)
- tg.post()
-
- for n in self.to_list(getattr(tg, 'use', [])):
- _process_use_rec(self, n)
-
-
-@TaskGen.feature('pytest')
-@TaskGen.after_method('process_source', 'apply_link')
-def pytest_process_use(self):
- """
- Process the ``use`` attribute which contains a list of task generator names and store
- paths that later is used to populate the unit test runtime environment.
- """
- self.pytest_use_not = set()
- self.pytest_use_seen = []
- self.pytest_paths = [] # strings or Nodes
- self.pytest_libpaths = [] # strings or Nodes
- self.pytest_dep_nodes = []
-
- names = self.to_list(getattr(self, 'use', []))
- for name in names:
- _process_use_rec(self, name)
-
- def extend_unique(lst, varlst):
- ext = []
- for x in varlst:
- if x not in lst:
- ext.append(x)
- lst.extend(ext)
-
- # Collect type specific info needed to construct a valid runtime environment
- # for the test.
- for name in self.pytest_use_seen:
- tg = self.bld.get_tgen_by_name(name)
-
- extend_unique(self.pytest_paths, Utils.to_list(getattr(tg, 'pytest_path', [])))
- extend_unique(self.pytest_libpaths, Utils.to_list(getattr(tg, 'pytest_libpath', [])))
-
- if 'py' in tg.features:
- # Python dependencies are added to PYTHONPATH
- pypath = getattr(tg, 'install_from', tg.path)
-
- if 'buildcopy' in tg.features:
- # Since buildcopy is used we assume that PYTHONPATH in build should be used,
- # not source
- extend_unique(self.pytest_paths, [pypath.get_bld().abspath()])
-
- # Add buildcopy output nodes to dependencies
- extend_unique(self.pytest_dep_nodes, [o for task in getattr(tg, 'tasks', []) \
- for o in getattr(task, 'outputs', [])])
- else:
- # If buildcopy is not used, depend on sources instead
- extend_unique(self.pytest_dep_nodes, tg.source)
- extend_unique(self.pytest_paths, [pypath.abspath()])
-
- if getattr(tg, 'link_task', None):
- # For tasks with a link_task (C, C++, D et.c.) include their library paths:
- if not isinstance(tg.link_task, ccroot.stlink_task):
- extend_unique(self.pytest_dep_nodes, tg.link_task.outputs)
- extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH)
-
- if 'pyext' in tg.features:
- # If the taskgen is extending Python we also want to add the interpreter libpath.
- extend_unique(self.pytest_libpaths, tg.link_task.env.LIBPATH_PYEXT)
- else:
- # Only add to libpath if the link task is not a Python extension
- extend_unique(self.pytest_libpaths, [tg.link_task.outputs[0].parent.abspath()])
-
-
-@TaskGen.feature('pytest')
-@TaskGen.after_method('pytest_process_use')
-def make_pytest(self):
- """
- Creates a ``utest`` task with a populated environment for Python if not specified in ``ut_env``:
-
- - Paths in `pytest_paths` attribute are used to populate PYTHONPATH
- - Paths in `pytest_libpaths` attribute are used to populate the system library path (e.g. LD_LIBRARY_PATH)
- """
- nodes = self.to_nodes(self.pytest_source)
- tsk = self.create_task('utest', nodes)
-
- tsk.dep_nodes.extend(self.pytest_dep_nodes)
- if getattr(self, 'ut_str', None):
- self.ut_run, lst = Task.compile_fun(self.ut_str, shell=getattr(self, 'ut_shell', False))
- tsk.vars = lst + tsk.vars
-
- if getattr(self, 'ut_cwd', None):
- if isinstance(self.ut_cwd, str):
- # we want a Node instance
- if os.path.isabs(self.ut_cwd):
- self.ut_cwd = self.bld.root.make_node(self.ut_cwd)
- else:
- self.ut_cwd = self.path.make_node(self.ut_cwd)
- else:
- if tsk.inputs:
- self.ut_cwd = tsk.inputs[0].parent
- else:
- raise Errors.WafError("no valid input files for pytest task, check pytest_source value")
-
- if not self.ut_cwd.exists():
- self.ut_cwd.mkdir()
-
- if not hasattr(self, 'ut_env'):
- self.ut_env = dict(os.environ)
- def add_paths(var, lst):
- # Add list of paths to a variable, lst can contain strings or nodes
- lst = [ str(n) for n in lst ]
- Logs.debug("ut: %s: Adding paths %s=%s", self, var, lst)
- self.ut_env[var] = os.pathsep.join(lst) + os.pathsep + self.ut_env.get(var, '')
-
- # Prepend dependency paths to PYTHONPATH and LD_LIBRARY_PATH
- add_paths('PYTHONPATH', self.pytest_paths)
-
- if Utils.is_win32:
- add_paths('PATH', self.pytest_libpaths)
- elif Utils.unversioned_sys_platform() == 'darwin':
- add_paths('DYLD_LIBRARY_PATH', self.pytest_libpaths)
- add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
- else:
- add_paths('LD_LIBRARY_PATH', self.pytest_libpaths)
-
diff --git a/waflib/extras/qnxnto.py b/waflib/extras/qnxnto.py
deleted file mode 100644
index 1158124..0000000
--- a/waflib/extras/qnxnto.py
+++ /dev/null
@@ -1,72 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero 2011 (zougloub)
-# QNX neutrino compatibility functions
-
-import sys, os
-from waflib import Utils
-
-class Popen(object):
- """
- Popen cannot work on QNX from a threaded program:
- Forking in threads is not implemented in neutrino.
-
- Python's os.popen / spawn / fork won't work when running in threads (they will if in the main program thread)
-
- In waf, this happens mostly in build.
- And the use cases can be replaced by os.system() calls.
- """
- __slots__ = ["prog", "kw", "popen", "verbose"]
- verbose = 0
- def __init__(self, prog, **kw):
- try:
- self.prog = prog
- self.kw = kw
- self.popen = None
- if Popen.verbose:
- sys.stdout.write("Popen created: %r, kw=%r..." % (prog, kw))
-
- do_delegate = kw.get('stdout') == -1 and kw.get('stderr') == -1
- if do_delegate:
- if Popen.verbose:
- print("Delegating to real Popen")
- self.popen = self.real_Popen(prog, **kw)
- else:
- if Popen.verbose:
- print("Emulating")
- except Exception as e:
- if Popen.verbose:
- print("Exception: %s" % e)
- raise
-
- def __getattr__(self, name):
- if Popen.verbose:
- sys.stdout.write("Getattr: %s..." % name)
- if name in Popen.__slots__:
- return object.__getattribute__(self, name)
- else:
- if self.popen is not None:
- if Popen.verbose:
- print("from Popen")
- return getattr(self.popen, name)
- else:
- if name == "wait":
- return self.emu_wait
- else:
- raise Exception("subprocess emulation: not implemented: %s" % name)
-
- def emu_wait(self):
- if Popen.verbose:
- print("emulated wait (%r kw=%r)" % (self.prog, self.kw))
- if isinstance(self.prog, str):
- cmd = self.prog
- else:
- cmd = " ".join(self.prog)
- if 'cwd' in self.kw:
- cmd = 'cd "%s" && %s' % (self.kw['cwd'], cmd)
- return os.system(cmd)
-
-if sys.platform == "qnx6":
- Popen.real_Popen = Utils.subprocess.Popen
- Utils.subprocess.Popen = Popen
-
diff --git a/waflib/extras/qt4.py b/waflib/extras/qt4.py
deleted file mode 100644
index 90cae7e..0000000
--- a/waflib/extras/qt4.py
+++ /dev/null
@@ -1,695 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2006-2010 (ita)
-
-"""
-
-Tool Description
-================
-
-This tool helps with finding Qt4 tools and libraries,
-and also provides syntactic sugar for using Qt4 tools.
-
-The following snippet illustrates the tool usage::
-
- def options(opt):
- opt.load('compiler_cxx qt4')
-
- def configure(conf):
- conf.load('compiler_cxx qt4')
-
- def build(bld):
- bld(
- features = 'qt4 cxx cxxprogram',
- uselib = 'QTCORE QTGUI QTOPENGL QTSVG',
- source = 'main.cpp textures.qrc aboutDialog.ui',
- target = 'window',
- )
-
-Here, the UI description and resource files will be processed
-to generate code.
-
-Usage
-=====
-
-Load the "qt4" tool.
-
-You also need to edit your sources accordingly:
-
-- the normal way of doing things is to have your C++ files
- include the .moc file.
- This is regarded as the best practice (and provides much faster
- compilations).
-  It also implies that the include paths have been set properly.
-
-- to have the include paths added automatically, use the following::
-
- from waflib.TaskGen import feature, before_method, after_method
- @feature('cxx')
- @after_method('process_source')
- @before_method('apply_incpaths')
- def add_includes_paths(self):
- incs = set(self.to_list(getattr(self, 'includes', '')))
- for x in self.compiled_tasks:
- incs.add(x.inputs[0].parent.path_from(self.path))
- self.includes = sorted(incs)
-
-Note: another tool provides Qt processing that does not require
-.moc includes, see 'playground/slow_qt/'.
-
-A few options (--qt{dir,bin,...}) and environment variables
-(QT4_{ROOT,DIR,MOC,UIC,XCOMPILE}) allow finer tuning of the tool,
-tool path selection, etc; please read the source for more info.
-
-"""
-
-try:
- from xml.sax import make_parser
- from xml.sax.handler import ContentHandler
-except ImportError:
- has_xml = False
- ContentHandler = object
-else:
- has_xml = True
-
-import os, sys
-from waflib.Tools import cxx
-from waflib import Task, Utils, Options, Errors, Context
-from waflib.TaskGen import feature, after_method, extension
-from waflib.Configure import conf
-from waflib import Logs
-
-MOC_H = ['.h', '.hpp', '.hxx', '.hh']
-"""
-File extensions associated to the .moc files
-"""
-
-EXT_RCC = ['.qrc']
-"""
-File extension for the resource (.qrc) files
-"""
-
-EXT_UI = ['.ui']
-"""
-File extension for the user interface (.ui) files
-"""
-
-EXT_QT4 = ['.cpp', '.cc', '.cxx', '.C']
-"""
-File extensions of C++ files that may require a .moc processing
-"""
-
-QT4_LIBS = "QtCore QtGui QtUiTools QtNetwork QtOpenGL QtSql QtSvg QtTest QtXml QtXmlPatterns QtWebKit Qt3Support QtHelp QtScript QtDeclarative QtDesigner"
-
-class qxx(Task.classes['cxx']):
- """
- Each C++ file can have zero or several .moc files to create.
- They are known only when the files are scanned (preprocessor)
- To avoid scanning the c++ files each time (parsing C/C++), the results
- are retrieved from the task cache (bld.node_deps/bld.raw_deps).
- The moc tasks are also created *dynamically* during the build.
- """
-
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.moc_done = 0
-
- def runnable_status(self):
- """
- Compute the task signature to make sure the scanner was executed. Create the
- moc tasks by using :py:meth:`waflib.Tools.qt4.qxx.add_moc_tasks` (if necessary),
- then postpone the task execution (there is no need to recompute the task signature).
- """
- if self.moc_done:
- return Task.Task.runnable_status(self)
- else:
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
- self.add_moc_tasks()
- return Task.Task.runnable_status(self)
-
- def create_moc_task(self, h_node, m_node):
- """
- If several libraries use the same classes, it is possible that moc will run several times (Issue 1318)
- It is not possible to change the file names, but we can assume that the moc transformation will be identical,
- and the moc tasks can be shared in a global cache.
-
- The defines passed to moc will then depend on task generator order. If this is not acceptable, then
- use the tool slow_qt4 instead (and enjoy the slow builds... :-( )
- """
- try:
- moc_cache = self.generator.bld.moc_cache
- except AttributeError:
- moc_cache = self.generator.bld.moc_cache = {}
-
- try:
- return moc_cache[h_node]
- except KeyError:
- tsk = moc_cache[h_node] = Task.classes['moc'](env=self.env, generator=self.generator)
- tsk.set_inputs(h_node)
- tsk.set_outputs(m_node)
-
- if self.generator:
- self.generator.tasks.append(tsk)
-
- # direct injection in the build phase (safe because called from the main thread)
- gen = self.generator.bld.producer
- gen.outstanding.append(tsk)
- gen.total += 1
-
- return tsk
-
- def moc_h_ext(self):
- ext = []
- try:
- ext = Options.options.qt_header_ext.split()
- except AttributeError:
- pass
- if not ext:
- ext = MOC_H
- return ext
-
- def add_moc_tasks(self):
- """
- Create the moc tasks by looking in ``bld.raw_deps[self.uid()]``
- """
- node = self.inputs[0]
- bld = self.generator.bld
-
- try:
- # compute the signature once to know if there is a moc file to create
- self.signature()
- except KeyError:
- # the moc file may be referenced somewhere else
- pass
- else:
- # remove the signature, it must be recomputed with the moc task
- delattr(self, 'cache_sig')
-
- include_nodes = [node.parent] + self.generator.includes_nodes
-
- moctasks = []
- mocfiles = set()
- for d in bld.raw_deps.get(self.uid(), []):
- if not d.endswith('.moc'):
- continue
-
- # process that base.moc only once
- if d in mocfiles:
- continue
- mocfiles.add(d)
-
- # find the source associated with the moc file
- h_node = None
-
- base2 = d[:-4]
- for x in include_nodes:
- for e in self.moc_h_ext():
- h_node = x.find_node(base2 + e)
- if h_node:
- break
- if h_node:
- m_node = h_node.change_ext('.moc')
- break
- else:
- # foo.cpp -> foo.cpp.moc
- for k in EXT_QT4:
- if base2.endswith(k):
- for x in include_nodes:
- h_node = x.find_node(base2)
- if h_node:
- break
- if h_node:
- m_node = h_node.change_ext(k + '.moc')
- break
-
- if not h_node:
- raise Errors.WafError('No source found for %r which is a moc file' % d)
-
- # create the moc task
- task = self.create_moc_task(h_node, m_node)
- moctasks.append(task)
-
- # simple scheduler dependency: run the moc task before others
- self.run_after.update(set(moctasks))
- self.moc_done = 1
-
-class trans_update(Task.Task):
- """Update a .ts files from a list of C++ files"""
- run_str = '${QT_LUPDATE} ${SRC} -ts ${TGT}'
- color = 'BLUE'
-
-class XMLHandler(ContentHandler):
- """
- Parser for *.qrc* files
- """
- def __init__(self):
- self.buf = []
- self.files = []
- def startElement(self, name, attrs):
- if name == 'file':
- self.buf = []
- def endElement(self, name):
- if name == 'file':
- self.files.append(str(''.join(self.buf)))
- def characters(self, cars):
- self.buf.append(cars)
-
-@extension(*EXT_RCC)
-def create_rcc_task(self, node):
- "Create rcc and cxx tasks for *.qrc* files"
- rcnode = node.change_ext('_rc.cpp')
- self.create_task('rcc', node, rcnode)
- cpptask = self.create_task('cxx', rcnode, rcnode.change_ext('.o'))
- try:
- self.compiled_tasks.append(cpptask)
- except AttributeError:
- self.compiled_tasks = [cpptask]
- return cpptask
-
-@extension(*EXT_UI)
-def create_uic_task(self, node):
- "hook for uic tasks"
- uictask = self.create_task('ui4', node)
- uictask.outputs = [self.path.find_or_declare(self.env['ui_PATTERN'] % node.name[:-3])]
-
-@extension('.ts')
-def add_lang(self, node):
- """add all the .ts file into self.lang"""
- self.lang = self.to_list(getattr(self, 'lang', [])) + [node]
-
-@feature('qt4')
-@after_method('apply_link')
-def apply_qt4(self):
- """
- Add MOC_FLAGS which may be necessary for moc::
-
- def build(bld):
- bld.program(features='qt4', source='main.cpp', target='app', use='QTCORE')
-
- The additional parameters are:
-
- :param lang: list of translation files (\*.ts) to process
- :type lang: list of :py:class:`waflib.Node.Node` or string without the .ts extension
- :param update: whether to process the C++ files to update the \*.ts files (use **waf --translate**)
- :type update: bool
- :param langname: if given, transform the \*.ts files into a .qrc files to include in the binary file
- :type langname: :py:class:`waflib.Node.Node` or string without the .qrc extension
- """
- if getattr(self, 'lang', None):
- qmtasks = []
- for x in self.to_list(self.lang):
- if isinstance(x, str):
- x = self.path.find_resource(x + '.ts')
- qmtasks.append(self.create_task('ts2qm', x, x.change_ext('.qm')))
-
- if getattr(self, 'update', None) and Options.options.trans_qt4:
- cxxnodes = [a.inputs[0] for a in self.compiled_tasks] + [
- a.inputs[0] for a in self.tasks if getattr(a, 'inputs', None) and a.inputs[0].name.endswith('.ui')]
- for x in qmtasks:
- self.create_task('trans_update', cxxnodes, x.inputs)
-
- if getattr(self, 'langname', None):
- qmnodes = [x.outputs[0] for x in qmtasks]
- rcnode = self.langname
- if isinstance(rcnode, str):
- rcnode = self.path.find_or_declare(rcnode + '.qrc')
- t = self.create_task('qm2rcc', qmnodes, rcnode)
- k = create_rcc_task(self, t.outputs[0])
- self.link_task.inputs.append(k.outputs[0])
-
- lst = []
- for flag in self.to_list(self.env['CXXFLAGS']):
- if len(flag) < 2:
- continue
- f = flag[0:2]
- if f in ('-D', '-I', '/D', '/I'):
- if (f[0] == '/'):
- lst.append('-' + flag[1:])
- else:
- lst.append(flag)
- self.env.append_value('MOC_FLAGS', lst)
-
-@extension(*EXT_QT4)
-def cxx_hook(self, node):
- """
- Re-map C++ file extensions to the :py:class:`waflib.Tools.qt4.qxx` task.
- """
- return self.create_compiled_task('qxx', node)
-
-class rcc(Task.Task):
- """
- Process *.qrc* files
- """
- color = 'BLUE'
- run_str = '${QT_RCC} -name ${tsk.rcname()} ${SRC[0].abspath()} ${RCC_ST} -o ${TGT}'
- ext_out = ['.h']
-
- def rcname(self):
- return os.path.splitext(self.inputs[0].name)[0]
-
- def scan(self):
- """Parse the *.qrc* files"""
- if not has_xml:
- Logs.error('no xml support was found, the rcc dependencies will be incomplete!')
- return ([], [])
-
- parser = make_parser()
- curHandler = XMLHandler()
- parser.setContentHandler(curHandler)
- fi = open(self.inputs[0].abspath(), 'r')
- try:
- parser.parse(fi)
- finally:
- fi.close()
-
- nodes = []
- names = []
- root = self.inputs[0].parent
- for x in curHandler.files:
- nd = root.find_resource(x)
- if nd:
- nodes.append(nd)
- else:
- names.append(x)
- return (nodes, names)
-
-class moc(Task.Task):
- """
- Create *.moc* files
- """
- color = 'BLUE'
- run_str = '${QT_MOC} ${MOC_FLAGS} ${MOCCPPPATH_ST:INCPATHS} ${MOCDEFINES_ST:DEFINES} ${SRC} ${MOC_ST} ${TGT}'
- def keyword(self):
- return "Creating"
- def __str__(self):
- return self.outputs[0].path_from(self.generator.bld.launch_node())
-
-class ui4(Task.Task):
- """
- Process *.ui* files
- """
- color = 'BLUE'
- run_str = '${QT_UIC} ${SRC} -o ${TGT}'
- ext_out = ['.h']
-
-class ts2qm(Task.Task):
- """
- Create *.qm* files from *.ts* files
- """
- color = 'BLUE'
- run_str = '${QT_LRELEASE} ${QT_LRELEASE_FLAGS} ${SRC} -qm ${TGT}'
-
-class qm2rcc(Task.Task):
- """
- Transform *.qm* files into *.rc* files
- """
- color = 'BLUE'
- after = 'ts2qm'
-
- def run(self):
- """Create a qrc file including the inputs"""
- txt = '\n'.join(['<file>%s</file>' % k.path_from(self.outputs[0].parent) for k in self.inputs])
- code = '<!DOCTYPE RCC><RCC version="1.0">\n<qresource>\n%s\n</qresource>\n</RCC>' % txt
- self.outputs[0].write(code)
-
-def configure(self):
- """
- Besides the configuration options, the environment variable QT4_ROOT may be used
- to give the location of the qt4 libraries (absolute path).
-
- The detection will use the program *pkg-config* through :py:func:`waflib.Tools.config_c.check_cfg`
- """
- self.find_qt4_binaries()
- self.set_qt4_libs_to_check()
- self.set_qt4_defines()
- self.find_qt4_libraries()
- self.add_qt4_rpath()
- self.simplify_qt4_libs()
-
-@conf
-def find_qt4_binaries(self):
- env = self.env
- opt = Options.options
-
- qtdir = getattr(opt, 'qtdir', '')
- qtbin = getattr(opt, 'qtbin', '')
-
- paths = []
-
- if qtdir:
- qtbin = os.path.join(qtdir, 'bin')
-
- # the qt directory has been given from QT4_ROOT - deduce the qt binary path
- if not qtdir:
- qtdir = os.environ.get('QT4_ROOT', '')
- qtbin = os.environ.get('QT4_BIN') or os.path.join(qtdir, 'bin')
-
- if qtbin:
- paths = [qtbin]
-
- # no qtdir, look in the path and in /usr/local/Trolltech
- if not qtdir:
- paths = os.environ.get('PATH', '').split(os.pathsep)
- paths.append('/usr/share/qt4/bin/')
- try:
- lst = Utils.listdir('/usr/local/Trolltech/')
- except OSError:
- pass
- else:
- if lst:
- lst.sort()
- lst.reverse()
-
- # keep the highest version
- qtdir = '/usr/local/Trolltech/%s/' % lst[0]
- qtbin = os.path.join(qtdir, 'bin')
- paths.append(qtbin)
-
- # at the end, try to find qmake in the paths given
- # keep the one with the highest version
- cand = None
- prev_ver = ['4', '0', '0']
- for qmk in ('qmake-qt4', 'qmake4', 'qmake'):
- try:
- qmake = self.find_program(qmk, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- try:
- version = self.cmd_and_log(qmake + ['-query', 'QT_VERSION']).strip()
- except self.errors.WafError:
- pass
- else:
- if version:
- new_ver = version.split('.')
- if new_ver > prev_ver:
- cand = qmake
- prev_ver = new_ver
- if cand:
- self.env.QMAKE = cand
- else:
- self.fatal('Could not find qmake for qt4')
-
- qtbin = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_BINS']).strip() + os.sep
-
- def find_bin(lst, var):
- if var in env:
- return
- for f in lst:
- try:
- ret = self.find_program(f, path_list=paths)
- except self.errors.ConfigurationError:
- pass
- else:
- env[var]=ret
- break
-
- find_bin(['uic-qt3', 'uic3'], 'QT_UIC3')
- find_bin(['uic-qt4', 'uic'], 'QT_UIC')
- if not env.QT_UIC:
- self.fatal('cannot find the uic compiler for qt4')
-
- self.start_msg('Checking for uic version')
- uicver = self.cmd_and_log(env.QT_UIC + ["-version"], output=Context.BOTH)
- uicver = ''.join(uicver).strip()
- uicver = uicver.replace('Qt User Interface Compiler ','').replace('User Interface Compiler for Qt', '')
- self.end_msg(uicver)
- if uicver.find(' 3.') != -1:
- self.fatal('this uic compiler is for qt3, add uic for qt4 to your path')
-
- find_bin(['moc-qt4', 'moc'], 'QT_MOC')
- find_bin(['rcc-qt4', 'rcc'], 'QT_RCC')
- find_bin(['lrelease-qt4', 'lrelease'], 'QT_LRELEASE')
- find_bin(['lupdate-qt4', 'lupdate'], 'QT_LUPDATE')
-
- env['UIC3_ST']= '%s -o %s'
- env['UIC_ST'] = '%s -o %s'
- env['MOC_ST'] = '-o'
- env['ui_PATTERN'] = 'ui_%s.h'
- env['QT_LRELEASE_FLAGS'] = ['-silent']
- env.MOCCPPPATH_ST = '-I%s'
- env.MOCDEFINES_ST = '-D%s'
-
-@conf
-def find_qt4_libraries(self):
- qtlibs = getattr(Options.options, 'qtlibs', None) or os.environ.get("QT4_LIBDIR")
- if not qtlibs:
- try:
- qtlibs = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_LIBS']).strip()
- except Errors.WafError:
- qtdir = self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_PREFIX']).strip() + os.sep
- qtlibs = os.path.join(qtdir, 'lib')
- self.msg('Found the Qt4 libraries in', qtlibs)
-
- qtincludes = os.environ.get("QT4_INCLUDES") or self.cmd_and_log(self.env.QMAKE + ['-query', 'QT_INSTALL_HEADERS']).strip()
- env = self.env
- if not 'PKG_CONFIG_PATH' in os.environ:
- os.environ['PKG_CONFIG_PATH'] = '%s:%s/pkgconfig:/usr/lib/qt4/lib/pkgconfig:/opt/qt4/lib/pkgconfig:/usr/lib/qt4/lib:/opt/qt4/lib' % (qtlibs, qtlibs)
-
- try:
- if os.environ.get("QT4_XCOMPILE"):
- raise self.errors.ConfigurationError()
- self.check_cfg(atleast_pkgconfig_version='0.1')
- except self.errors.ConfigurationError:
- for i in self.qt4_vars:
- uselib = i.upper()
- if Utils.unversioned_sys_platform() == "darwin":
- # Since at least qt 4.7.3 each library locates in separate directory
- frameworkName = i + ".framework"
- qtDynamicLib = os.path.join(qtlibs, frameworkName, i)
- if os.path.exists(qtDynamicLib):
- env.append_unique('FRAMEWORK_' + uselib, i)
- self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtlibs, frameworkName, 'Headers'))
- elif env.DEST_OS != "win32":
- qtDynamicLib = os.path.join(qtlibs, "lib" + i + ".so")
- qtStaticLib = os.path.join(qtlibs, "lib" + i + ".a")
- if os.path.exists(qtDynamicLib):
- env.append_unique('LIB_' + uselib, i)
- self.msg('Checking for %s' % i, qtDynamicLib, 'GREEN')
- elif os.path.exists(qtStaticLib):
- env.append_unique('LIB_' + uselib, i)
- self.msg('Checking for %s' % i, qtStaticLib, 'GREEN')
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
-
- env.append_unique('LIBPATH_' + uselib, qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
- else:
- # Release library names are like QtCore4
- for k in ("lib%s.a", "lib%s4.a", "%s.lib", "%s4.lib"):
- lib = os.path.join(qtlibs, k % i)
- if os.path.exists(lib):
- env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
- self.msg('Checking for %s' % i, lib, 'GREEN')
- break
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
-
- env.append_unique('LIBPATH_' + uselib, qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
-
- # Debug library names are like QtCore4d
- uselib = i.upper() + "_debug"
- for k in ("lib%sd.a", "lib%sd4.a", "%sd.lib", "%sd4.lib"):
- lib = os.path.join(qtlibs, k % i)
- if os.path.exists(lib):
- env.append_unique('LIB_' + uselib, i + k[k.find("%s") + 2 : k.find('.')])
- self.msg('Checking for %s' % i, lib, 'GREEN')
- break
- else:
- self.msg('Checking for %s' % i, False, 'YELLOW')
-
- env.append_unique('LIBPATH_' + uselib, qtlibs)
- env.append_unique('INCLUDES_' + uselib, qtincludes)
- env.append_unique('INCLUDES_' + uselib, os.path.join(qtincludes, i))
- else:
- for i in self.qt4_vars_debug + self.qt4_vars:
- self.check_cfg(package=i, args='--cflags --libs', mandatory=False)
-
-@conf
-def simplify_qt4_libs(self):
- # the libpaths make really long command-lines
- # remove the qtcore ones from qtgui, etc
- env = self.env
- def process_lib(vars_, coreval):
- for d in vars_:
- var = d.upper()
- if var == 'QTCORE':
- continue
-
- value = env['LIBPATH_'+var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if lib in core:
- continue
- accu.append(lib)
- env['LIBPATH_'+var] = accu
-
- process_lib(self.qt4_vars, 'LIBPATH_QTCORE')
- process_lib(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-@conf
-def add_qt4_rpath(self):
- # rpath if wanted
- env = self.env
- if getattr(Options.options, 'want_rpath', False):
- def process_rpath(vars_, coreval):
- for d in vars_:
- var = d.upper()
- value = env['LIBPATH_'+var]
- if value:
- core = env[coreval]
- accu = []
- for lib in value:
- if var != 'QTCORE':
- if lib in core:
- continue
- accu.append('-Wl,--rpath='+lib)
- env['RPATH_'+var] = accu
- process_rpath(self.qt4_vars, 'LIBPATH_QTCORE')
- process_rpath(self.qt4_vars_debug, 'LIBPATH_QTCORE_DEBUG')
-
-@conf
-def set_qt4_libs_to_check(self):
- if not hasattr(self, 'qt4_vars'):
- self.qt4_vars = QT4_LIBS
- self.qt4_vars = Utils.to_list(self.qt4_vars)
- if not hasattr(self, 'qt4_vars_debug'):
- self.qt4_vars_debug = [a + '_debug' for a in self.qt4_vars]
- self.qt4_vars_debug = Utils.to_list(self.qt4_vars_debug)
-
-@conf
-def set_qt4_defines(self):
- if sys.platform != 'win32':
- return
- for x in self.qt4_vars:
- y = x[2:].upper()
- self.env.append_unique('DEFINES_%s' % x.upper(), 'QT_%s_LIB' % y)
- self.env.append_unique('DEFINES_%s_DEBUG' % x.upper(), 'QT_%s_LIB' % y)
-
-def options(opt):
- """
- Command-line options
- """
- opt.add_option('--want-rpath', action='store_true', default=False, dest='want_rpath', help='enable the rpath for qt libraries')
-
- opt.add_option('--header-ext',
- type='string',
- default='',
- help='header extension for moc files',
- dest='qt_header_ext')
-
- for i in 'qtdir qtbin qtlibs'.split():
- opt.add_option('--'+i, type='string', default='', dest=i)
-
- opt.add_option('--translate', action="store_true", help="collect translation strings", dest="trans_qt4", default=False)
-
diff --git a/waflib/extras/relocation.py b/waflib/extras/relocation.py
deleted file mode 100644
index 7e821f4..0000000
--- a/waflib/extras/relocation.py
+++ /dev/null
@@ -1,85 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Waf 1.6
-
-Try to detect if the project directory was relocated, and if it was,
-change the node representing the project directory. Just call:
-
- waf configure build
-
-Note that if the project directory name changes, the signatures for the tasks using
-files in that directory will change, causing a partial build.
-"""
-
-import os
-from waflib import Build, ConfigSet, Task, Utils, Errors
-from waflib.TaskGen import feature, after_method
-
-EXTRA_LOCK = '.old_srcdir'
-
-old1 = Build.BuildContext.store
-def store(self):
- old1(self)
- db = os.path.join(self.variant_dir, EXTRA_LOCK)
- env = ConfigSet.ConfigSet()
- env.SRCDIR = self.srcnode.abspath()
- env.store(db)
-Build.BuildContext.store = store
-
-old2 = Build.BuildContext.init_dirs
-def init_dirs(self):
-
- if not (os.path.isabs(self.top_dir) and os.path.isabs(self.out_dir)):
- raise Errors.WafError('The project was not configured: run "waf configure" first!')
-
- srcdir = None
- db = os.path.join(self.variant_dir, EXTRA_LOCK)
- env = ConfigSet.ConfigSet()
- try:
- env.load(db)
- srcdir = env.SRCDIR
- except:
- pass
-
- if srcdir:
- d = self.root.find_node(srcdir)
- if d and srcdir != self.top_dir and getattr(d, 'children', ''):
- srcnode = self.root.make_node(self.top_dir)
- print("relocating the source directory %r -> %r" % (srcdir, self.top_dir))
- srcnode.children = {}
-
- for (k, v) in d.children.items():
- srcnode.children[k] = v
- v.parent = srcnode
- d.children = {}
-
- old2(self)
-
-Build.BuildContext.init_dirs = init_dirs
-
-
-def uid(self):
- try:
- return self.uid_
- except AttributeError:
- # this is not a real hot zone, but we want to avoid surprises here
- m = Utils.md5()
- up = m.update
- up(self.__class__.__name__.encode())
- for x in self.inputs + self.outputs:
- up(x.path_from(x.ctx.srcnode).encode())
- self.uid_ = m.digest()
- return self.uid_
-Task.Task.uid = uid
-
-@feature('c', 'cxx', 'd', 'go', 'asm', 'fc', 'includes')
-@after_method('propagate_uselib_vars', 'process_source')
-def apply_incpaths(self):
- lst = self.to_incnodes(self.to_list(getattr(self, 'includes', [])) + self.env['INCLUDES'])
- self.includes_nodes = lst
- bld = self.bld
- self.env['INCPATHS'] = [x.is_child_of(bld.srcnode) and x.path_from(bld.bldnode) or x.abspath() for x in lst]
-
-
diff --git a/waflib/extras/remote.py b/waflib/extras/remote.py
deleted file mode 100644
index 3b038f7..0000000
--- a/waflib/extras/remote.py
+++ /dev/null
@@ -1,327 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Remote Builds tool using rsync+ssh
-
-__author__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2013"
-
-"""
-Simple Remote Builds
-********************
-
-This tool is an *experimental* tool (meaning, do not even try to pollute
-the waf bug tracker with bugs in here, contact me directly) providing simple
-remote builds.
-
-It uses rsync and ssh to perform the remote builds.
-It is intended for performing cross-compilation on platforms where
-a cross-compiler is either unavailable (eg. MacOS, QNX), a specific product
-does not exist (eg. Windows builds using Visual Studio), or is simply not installed.
-This tool sends the sources and the waf script to the remote host,
-and commands the usual waf execution.
-
-There are alternatives to using this tool, such as setting up shared folders,
-logging on to remote machines, and building on the shared folders.
-Electing one method or another depends on the size of the program.
-
-
-Usage
-=====
-
-1. Set your wscript file so it includes a list of variants,
- e.g.::
-
- from waflib import Utils
- top = '.'
- out = 'build'
-
- variants = [
- 'linux_64_debug',
- 'linux_64_release',
- 'linux_32_debug',
- 'linux_32_release',
- ]
-
- from waflib.extras import remote
-
- def options(opt):
- # normal stuff from here on
- opt.load('compiler_c')
-
- def configure(conf):
- if not conf.variant:
- return
- # normal stuff from here on
- conf.load('compiler_c')
-
- def build(bld):
- if not bld.variant:
- return
- # normal stuff from here on
- bld(features='c cprogram', target='app', source='main.c')
-
-
-2. Build the waf file, so it includes this tool, and put it in the current
- directory
-
- .. code:: bash
-
- ./waf-light --tools=remote
-
-3. Set the host names to access the hosts:
-
- .. code:: bash
-
- export REMOTE_QNX=user@kiunix
-
-4. Setup the ssh server and ssh keys
-
- The ssh key should not be protected by a password, or it will prompt for it every time.
- Create the key on the client:
-
- .. code:: bash
-
- ssh-keygen -t rsa -f foo.rsa
-
- Then copy foo.rsa.pub to the remote machine (user@kiunix:/home/user/.ssh/authorized_keys),
- and make sure the permissions are correct (chmod go-w ~ ~/.ssh ~/.ssh/authorized_keys)
-
- A separate key for the build processes can be set in the environment variable WAF_SSH_KEY.
- The tool will then use 'ssh-keyscan' to avoid prompting for remote hosts, so
- be warned to use this feature on internal networks only (MITM).
-
- .. code:: bash
-
- export WAF_SSH_KEY=~/foo.rsa
-
-5. Perform the build:
-
- .. code:: bash
-
- waf configure_all build_all --remote
-
-"""
-
-
-import getpass, os, re, sys
-from collections import OrderedDict
-from waflib import Context, Options, Utils, ConfigSet
-
-from waflib.Build import BuildContext, CleanContext, InstallContext, UninstallContext
-from waflib.Configure import ConfigurationContext
-
-
-is_remote = False
-if '--remote' in sys.argv:
- is_remote = True
- sys.argv.remove('--remote')
-
-class init(Context.Context):
- """
- Generates the *_all commands
- """
- cmd = 'init'
- fun = 'init'
- def execute(self):
- for x in list(Context.g_module.variants):
- self.make_variant(x)
- lst = ['remote']
- for k in Options.commands:
- if k.endswith('_all'):
- name = k.replace('_all', '')
- for x in Context.g_module.variants:
- lst.append('%s_%s' % (name, x))
- else:
- lst.append(k)
- del Options.commands[:]
- Options.commands += lst
-
- def make_variant(self, x):
- for y in (BuildContext, CleanContext, InstallContext, UninstallContext):
- name = y.__name__.replace('Context','').lower()
- class tmp(y):
- cmd = name + '_' + x
- fun = 'build'
- variant = x
- class tmp(ConfigurationContext):
- cmd = 'configure_' + x
- fun = 'configure'
- variant = x
- def __init__(self, **kw):
- ConfigurationContext.__init__(self, **kw)
- self.setenv(x)
-
-class remote(BuildContext):
- cmd = 'remote'
- fun = 'build'
-
- def get_ssh_hosts(self):
- lst = []
- for v in Context.g_module.variants:
- self.env.HOST = self.login_to_host(self.variant_to_login(v))
- cmd = Utils.subst_vars('${SSH_KEYSCAN} -t rsa,ecdsa ${HOST}', self.env)
- out, err = self.cmd_and_log(cmd, output=Context.BOTH, quiet=Context.BOTH)
- lst.append(out.strip())
- return lst
-
- def setup_private_ssh_key(self):
- """
- When WAF_SSH_KEY points to a private key, a .ssh directory will be created in the build directory
- Make sure that the ssh key does not prompt for a password
- """
- key = os.environ.get('WAF_SSH_KEY', '')
- if not key:
- return
- if not os.path.isfile(key):
- self.fatal('Key in WAF_SSH_KEY must point to a valid file')
- self.ssh_dir = os.path.join(self.path.abspath(), 'build', '.ssh')
- self.ssh_hosts = os.path.join(self.ssh_dir, 'known_hosts')
- self.ssh_key = os.path.join(self.ssh_dir, os.path.basename(key))
- self.ssh_config = os.path.join(self.ssh_dir, 'config')
- for x in self.ssh_hosts, self.ssh_key, self.ssh_config:
- if not os.path.isfile(x):
- if not os.path.isdir(self.ssh_dir):
- os.makedirs(self.ssh_dir)
- Utils.writef(self.ssh_key, Utils.readf(key), 'wb')
- os.chmod(self.ssh_key, 448)
-
- Utils.writef(self.ssh_hosts, '\n'.join(self.get_ssh_hosts()))
- os.chmod(self.ssh_key, 448)
-
- Utils.writef(self.ssh_config, 'UserKnownHostsFile %s' % self.ssh_hosts, 'wb')
- os.chmod(self.ssh_config, 448)
- self.env.SSH_OPTS = ['-F', self.ssh_config, '-i', self.ssh_key]
- self.env.append_value('RSYNC_SEND_OPTS', '--exclude=build/.ssh')
-
- def skip_unbuildable_variant(self):
- # skip variants that cannot be built on this OS
- for k in Options.commands:
- a, _, b = k.partition('_')
- if b in Context.g_module.variants:
- c, _, _ = b.partition('_')
- if c != Utils.unversioned_sys_platform():
- Options.commands.remove(k)
-
- def login_to_host(self, login):
- return re.sub('(\w+@)', '', login)
-
- def variant_to_login(self, variant):
- """linux_32_debug -> search env.LINUX_32 and then env.LINUX"""
- x = variant[:variant.rfind('_')]
- ret = os.environ.get('REMOTE_' + x.upper(), '')
- if not ret:
- x = x[:x.find('_')]
- ret = os.environ.get('REMOTE_' + x.upper(), '')
- if not ret:
- ret = '%s@localhost' % getpass.getuser()
- return ret
-
- def execute(self):
- global is_remote
- if not is_remote:
- self.skip_unbuildable_variant()
- else:
- BuildContext.execute(self)
-
- def restore(self):
- self.top_dir = os.path.abspath(Context.g_module.top)
- self.srcnode = self.root.find_node(self.top_dir)
- self.path = self.srcnode
-
- self.out_dir = os.path.join(self.top_dir, Context.g_module.out)
- self.bldnode = self.root.make_node(self.out_dir)
- self.bldnode.mkdir()
-
- self.env = ConfigSet.ConfigSet()
-
- def extract_groups_of_builds(self):
- """Return a dict mapping each variants to the commands to build"""
- self.vgroups = {}
- for x in reversed(Options.commands):
- _, _, variant = x.partition('_')
- if variant in Context.g_module.variants:
- try:
- dct = self.vgroups[variant]
- except KeyError:
- dct = self.vgroups[variant] = OrderedDict()
- try:
- dct[variant].append(x)
- except KeyError:
- dct[variant] = [x]
- Options.commands.remove(x)
-
- def custom_options(self, login):
- try:
- return Context.g_module.host_options[login]
- except (AttributeError, KeyError):
- return {}
-
- def recurse(self, *k, **kw):
- self.env.RSYNC = getattr(Context.g_module, 'rsync', 'rsync -a --chmod=u+rwx')
- self.env.SSH = getattr(Context.g_module, 'ssh', 'ssh')
- self.env.SSH_KEYSCAN = getattr(Context.g_module, 'ssh_keyscan', 'ssh-keyscan')
- try:
- self.env.WAF = getattr(Context.g_module, 'waf')
- except AttributeError:
- try:
- os.stat('waf')
- except KeyError:
- self.fatal('Put a waf file in the directory (./waf-light --tools=remote)')
- else:
- self.env.WAF = './waf'
-
- self.extract_groups_of_builds()
- self.setup_private_ssh_key()
- for k, v in self.vgroups.items():
- task = self(rule=rsync_and_ssh, always=True)
- task.env.login = self.variant_to_login(k)
-
- task.env.commands = []
- for opt, value in v.items():
- task.env.commands += value
- task.env.variant = task.env.commands[0].partition('_')[2]
- for opt, value in self.custom_options(k):
- task.env[opt] = value
- self.jobs = len(self.vgroups)
-
- def make_mkdir_command(self, task):
- return Utils.subst_vars('${SSH} ${SSH_OPTS} ${login} "rm -fr ${remote_dir} && mkdir -p ${remote_dir}"', task.env)
-
- def make_send_command(self, task):
- return Utils.subst_vars('${RSYNC} ${RSYNC_SEND_OPTS} -e "${SSH} ${SSH_OPTS}" ${local_dir} ${login}:${remote_dir}', task.env)
-
- def make_exec_command(self, task):
- txt = '''${SSH} ${SSH_OPTS} ${login} "cd ${remote_dir} && ${WAF} ${commands}"'''
- return Utils.subst_vars(txt, task.env)
-
- def make_save_command(self, task):
- return Utils.subst_vars('${RSYNC} ${RSYNC_SAVE_OPTS} -e "${SSH} ${SSH_OPTS}" ${login}:${remote_dir_variant} ${build_dir}', task.env)
-
-def rsync_and_ssh(task):
-
- # remove a warning
- task.uid_ = id(task)
-
- bld = task.generator.bld
-
- task.env.user, _, _ = task.env.login.partition('@')
- task.env.hdir = Utils.to_hex(Utils.h_list((task.generator.path.abspath(), task.env.variant)))
- task.env.remote_dir = '~%s/wafremote/%s' % (task.env.user, task.env.hdir)
- task.env.local_dir = bld.srcnode.abspath() + '/'
-
- task.env.remote_dir_variant = '%s/%s/%s' % (task.env.remote_dir, Context.g_module.out, task.env.variant)
- task.env.build_dir = bld.bldnode.abspath()
-
- ret = task.exec_command(bld.make_mkdir_command(task))
- if ret:
- return ret
- ret = task.exec_command(bld.make_send_command(task))
- if ret:
- return ret
- ret = task.exec_command(bld.make_exec_command(task))
- if ret:
- return ret
- ret = task.exec_command(bld.make_save_command(task))
- if ret:
- return ret
-
diff --git a/waflib/extras/resx.py b/waflib/extras/resx.py
deleted file mode 100644
index caf4d31..0000000
--- a/waflib/extras/resx.py
+++ /dev/null
@@ -1,35 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-import os
-from waflib import Task
-from waflib.TaskGen import extension
-
-def configure(conf):
- conf.find_program(['resgen'], var='RESGEN')
- conf.env.RESGENFLAGS = '/useSourcePath'
-
-@extension('.resx')
-def resx_file(self, node):
- """
- Bind the .resx extension to a resgen task
- """
- if not getattr(self, 'cs_task', None):
- self.bld.fatal('resx_file has no link task for use %r' % self)
-
- # Given assembly 'Foo' and file 'Sub/Dir/File.resx', create 'Foo.Sub.Dir.File.resources'
- assembly = getattr(self, 'namespace', os.path.splitext(self.gen)[0])
- res = os.path.splitext(node.path_from(self.path))[0].replace('/', '.').replace('\\', '.')
- out = self.path.find_or_declare(assembly + '.' + res + '.resources')
-
- tsk = self.create_task('resgen', node, out)
-
- self.cs_task.dep_nodes.extend(tsk.outputs) # dependency
- self.env.append_value('RESOURCES', tsk.outputs[0].bldpath())
-
-class resgen(Task.Task):
- """
- Compile C# resource files
- """
- color = 'YELLOW'
- run_str = '${RESGEN} ${RESGENFLAGS} ${SRC} ${TGT}'
diff --git a/waflib/extras/review.py b/waflib/extras/review.py
deleted file mode 100644
index 561e062..0000000
--- a/waflib/extras/review.py
+++ /dev/null
@@ -1,325 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Laurent Birtz, 2011
-# moved the code into a separate tool (ita)
-
-"""
-There are several things here:
-- a different command-line option management making options persistent
-- the review command to display the options set
-
-Assumptions:
-- configuration options are not always added to the right group (and do not count on the users to do it...)
-- the options are persistent between the executions (waf options are NOT persistent by design), even for the configuration
-- when the options change, the build is invalidated (forcing a reconfiguration)
-"""
-
-import os, textwrap, shutil
-from waflib import Logs, Context, ConfigSet, Options, Build, Configure
-
-class Odict(dict):
- """Ordered dictionary"""
- def __init__(self, data=None):
- self._keys = []
- dict.__init__(self)
- if data:
- # we were provided a regular dict
- if isinstance(data, dict):
- self.append_from_dict(data)
-
- # we were provided a tuple list
- elif type(data) == list:
- self.append_from_plist(data)
-
- # we were provided invalid input
- else:
- raise Exception("expected a dict or a tuple list")
-
- def append_from_dict(self, dict):
- map(self.__setitem__, dict.keys(), dict.values())
-
- def append_from_plist(self, plist):
- for pair in plist:
- if len(pair) != 2:
- raise Exception("invalid pairs list")
- for (k, v) in plist:
- self.__setitem__(k, v)
-
- def __delitem__(self, key):
- if not key in self._keys:
- raise KeyError(key)
- dict.__delitem__(self, key)
- self._keys.remove(key)
-
- def __setitem__(self, key, item):
- dict.__setitem__(self, key, item)
- if key not in self._keys:
- self._keys.append(key)
-
- def clear(self):
- dict.clear(self)
- self._keys = []
-
- def copy(self):
- return Odict(self.plist())
-
- def items(self):
- return zip(self._keys, self.values())
-
- def keys(self):
- return list(self._keys) # return a copy of the list
-
- def values(self):
- return map(self.get, self._keys)
-
- def plist(self):
- p = []
- for k, v in self.items():
- p.append( (k, v) )
- return p
-
- def __str__(self):
- buf = []
- buf.append("{ ")
- for k, v in self.items():
- buf.append('%r : %r, ' % (k, v))
- buf.append("}")
- return ''.join(buf)
-
-review_options = Odict()
-"""
-Ordered dictionary mapping configuration option names to their optparse option.
-"""
-
-review_defaults = {}
-"""
-Dictionary mapping configuration option names to their default value.
-"""
-
-old_review_set = None
-"""
-Review set containing the configuration values before parsing the command line.
-"""
-
-new_review_set = None
-"""
-Review set containing the configuration values after parsing the command line.
-"""
-
-class OptionsReview(Options.OptionsContext):
- def __init__(self, **kw):
- super(self.__class__, self).__init__(**kw)
-
- def prepare_config_review(self):
- """
- Find the configuration options that are reviewable, detach
- their default value from their optparse object and store them
- into the review dictionaries.
- """
- gr = self.get_option_group('configure options')
- for opt in gr.option_list:
- if opt.action != 'store' or opt.dest in ("out", "top"):
- continue
- review_options[opt.dest] = opt
- review_defaults[opt.dest] = opt.default
- if gr.defaults.has_key(opt.dest):
- del gr.defaults[opt.dest]
- opt.default = None
-
- def parse_args(self):
- self.prepare_config_review()
- self.parser.get_option('--prefix').help = 'installation prefix'
- super(OptionsReview, self).parse_args()
- Context.create_context('review').refresh_review_set()
-
-class ReviewContext(Context.Context):
- '''reviews the configuration values'''
-
- cmd = 'review'
-
- def __init__(self, **kw):
- super(self.__class__, self).__init__(**kw)
-
- out = Options.options.out
- if not out:
- out = getattr(Context.g_module, Context.OUT, None)
- if not out:
- out = Options.lockfile.replace('.lock-waf', '')
- self.build_path = (os.path.isabs(out) and self.root or self.path).make_node(out).abspath()
- """Path to the build directory"""
-
- self.cache_path = os.path.join(self.build_path, Build.CACHE_DIR)
- """Path to the cache directory"""
-
- self.review_path = os.path.join(self.cache_path, 'review.cache')
- """Path to the review cache file"""
-
- def execute(self):
- """
- Display and store the review set. Invalidate the cache as required.
- """
- if not self.compare_review_set(old_review_set, new_review_set):
- self.invalidate_cache()
- self.store_review_set(new_review_set)
- print(self.display_review_set(new_review_set))
-
- def invalidate_cache(self):
- """Invalidate the cache to prevent bad builds."""
- try:
- Logs.warn("Removing the cached configuration since the options have changed")
- shutil.rmtree(self.cache_path)
- except:
- pass
-
- def refresh_review_set(self):
- """
- Obtain the old review set and the new review set, and import the new set.
- """
- global old_review_set, new_review_set
- old_review_set = self.load_review_set()
- new_review_set = self.update_review_set(old_review_set)
- self.import_review_set(new_review_set)
-
- def load_review_set(self):
- """
- Load and return the review set from the cache if it exists.
- Otherwise, return an empty set.
- """
- if os.path.isfile(self.review_path):
- return ConfigSet.ConfigSet(self.review_path)
- return ConfigSet.ConfigSet()
-
- def store_review_set(self, review_set):
- """
- Store the review set specified in the cache.
- """
- if not os.path.isdir(self.cache_path):
- os.makedirs(self.cache_path)
- review_set.store(self.review_path)
-
- def update_review_set(self, old_set):
- """
- Merge the options passed on the command line with those imported
- from the previous review set and return the corresponding
- preview set.
- """
-
- # Convert value to string. It's important that 'None' maps to
- # the empty string.
- def val_to_str(val):
- if val == None or val == '':
- return ''
- return str(val)
-
- new_set = ConfigSet.ConfigSet()
- opt_dict = Options.options.__dict__
-
- for name in review_options.keys():
- # the option is specified explicitly on the command line
- if name in opt_dict:
- # if the option is the default, pretend it was never specified
- if val_to_str(opt_dict[name]) != val_to_str(review_defaults[name]):
- new_set[name] = opt_dict[name]
- # the option was explicitly specified in a previous command
- elif name in old_set:
- new_set[name] = old_set[name]
-
- return new_set
-
- def import_review_set(self, review_set):
- """
- Import the actual value of the reviewable options in the option
- dictionary, given the current review set.
- """
- for name in review_options.keys():
- if name in review_set:
- value = review_set[name]
- else:
- value = review_defaults[name]
- setattr(Options.options, name, value)
-
- def compare_review_set(self, set1, set2):
- """
- Return true if the review sets specified are equal.
- """
- if len(set1.keys()) != len(set2.keys()):
- return False
- for key in set1.keys():
- if not key in set2 or set1[key] != set2[key]:
- return False
- return True
-
- def display_review_set(self, review_set):
- """
- Return the string representing the review set specified.
- """
- term_width = Logs.get_term_cols()
- lines = []
- for dest in review_options.keys():
- opt = review_options[dest]
- name = ", ".join(opt._short_opts + opt._long_opts)
- help = opt.help
- actual = None
- if dest in review_set:
- actual = review_set[dest]
- default = review_defaults[dest]
- lines.append(self.format_option(name, help, actual, default, term_width))
- return "Configuration:\n\n" + "\n\n".join(lines) + "\n"
-
- def format_option(self, name, help, actual, default, term_width):
- """
- Return the string representing the option specified.
- """
- def val_to_str(val):
- if val == None or val == '':
- return "(void)"
- return str(val)
-
- max_name_len = 20
- sep_len = 2
-
- w = textwrap.TextWrapper()
- w.width = term_width - 1
- if w.width < 60:
- w.width = 60
-
- out = ""
-
- # format the help
- out += w.fill(help) + "\n"
-
- # format the name
- name_len = len(name)
- out += Logs.colors.CYAN + name + Logs.colors.NORMAL
-
- # set the indentation used when the value wraps to the next line
- w.subsequent_indent = " ".rjust(max_name_len + sep_len)
- w.width -= (max_name_len + sep_len)
-
- # the name string is too long, switch to the next line
- if name_len > max_name_len:
- out += "\n" + w.subsequent_indent
-
- # fill the remaining of the line with spaces
- else:
- out += " ".rjust(max_name_len + sep_len - name_len)
-
- # format the actual value, if there is one
- if actual != None:
- out += Logs.colors.BOLD + w.fill(val_to_str(actual)) + Logs.colors.NORMAL + "\n" + w.subsequent_indent
-
- # format the default value
- default_fmt = val_to_str(default)
- if actual != None:
- default_fmt = "default: " + default_fmt
- out += Logs.colors.NORMAL + w.fill(default_fmt) + Logs.colors.NORMAL
-
- return out
-
-# Monkey-patch ConfigurationContext.execute() to have it store the review set.
-old_configure_execute = Configure.ConfigurationContext.execute
-def new_configure_execute(self):
- old_configure_execute(self)
- Context.create_context('review').store_review_set(new_review_set)
-Configure.ConfigurationContext.execute = new_configure_execute
-
diff --git a/waflib/extras/rst.py b/waflib/extras/rst.py
deleted file mode 100644
index f3c3a5e..0000000
--- a/waflib/extras/rst.py
+++ /dev/null
@@ -1,260 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Jérôme Carretero, 2013 (zougloub)
-
-"""
-reStructuredText support (experimental)
-
-Example::
-
- def configure(conf):
- conf.load('rst')
- if not conf.env.RST2HTML:
- conf.fatal('The program rst2html is required')
-
- def build(bld):
- bld(
- features = 'rst',
- type = 'rst2html', # rst2html, rst2pdf, ...
- source = 'index.rst', # mandatory, the source
- deps = 'image.png', # to give additional non-trivial dependencies
- )
-
-By default the tool looks for a set of programs in PATH.
-The tools are defined in `rst_progs`.
-To configure with a special program use::
-
- $ RST2HTML=/path/to/rst2html waf configure
-
-This tool is experimental; don't hesitate to contribute to it.
-
-"""
-
-import re
-from waflib import Node, Utils, Task, Errors, Logs
-from waflib.TaskGen import feature, before_method
-
-rst_progs = "rst2html rst2xetex rst2latex rst2xml rst2pdf rst2s5 rst2man rst2odt rst2rtf".split()
-
-def parse_rst_node(task, node, nodes, names, seen, dirs=None):
- # TODO add extensibility, to handle custom rst include tags...
- if dirs is None:
- dirs = (node.parent,node.get_bld().parent)
-
- if node in seen:
- return
- seen.append(node)
- code = node.read()
- re_rst = re.compile(r'^\s*.. ((?P<subst>\|\S+\|) )?(?P<type>include|image|figure):: (?P<file>.*)$', re.M)
- for match in re_rst.finditer(code):
- ipath = match.group('file')
- itype = match.group('type')
- Logs.debug('rst: visiting %s: %s', itype, ipath)
- found = False
- for d in dirs:
- Logs.debug('rst: looking for %s in %s', ipath, d.abspath())
- found = d.find_node(ipath)
- if found:
- Logs.debug('rst: found %s as %s', ipath, found.abspath())
- nodes.append((itype, found))
- if itype == 'include':
- parse_rst_node(task, found, nodes, names, seen)
- break
- if not found:
- names.append((itype, ipath))
-
-class docutils(Task.Task):
- """
- Compile a rst file.
- """
-
- def scan(self):
- """
- A recursive regex-based scanner that finds rst dependencies.
- """
-
- nodes = []
- names = []
- seen = []
-
- node = self.inputs[0]
-
- if not node:
- return (nodes, names)
-
- parse_rst_node(self, node, nodes, names, seen)
-
- Logs.debug('rst: %r: found the following file deps: %r', self, nodes)
- if names:
- Logs.warn('rst: %r: could not find the following file deps: %r', self, names)
-
- return ([v for (t,v) in nodes], [v for (t,v) in names])
-
- def check_status(self, msg, retcode):
- """
- Check an exit status and raise an error with a particular message
-
- :param msg: message to display if the code is non-zero
- :type msg: string
- :param retcode: condition
- :type retcode: boolean
- """
- if retcode != 0:
- raise Errors.WafError('%r command exit status %r' % (msg, retcode))
-
- def run(self):
- """
- Runs the rst compilation using docutils
- """
- raise NotImplementedError()
-
-class rst2html(docutils):
- color = 'BLUE'
-
- def __init__(self, *args, **kw):
- docutils.__init__(self, *args, **kw)
- self.command = self.generator.env.RST2HTML
- self.attributes = ['stylesheet']
-
- def scan(self):
- nodes, names = docutils.scan(self)
-
- for attribute in self.attributes:
- stylesheet = getattr(self.generator, attribute, None)
- if stylesheet is not None:
- ssnode = self.generator.to_nodes(stylesheet)[0]
- nodes.append(ssnode)
- Logs.debug('rst: adding dep to %s %s', attribute, stylesheet)
-
- return nodes, names
-
- def run(self):
- cwdn = self.outputs[0].parent
- src = self.inputs[0].path_from(cwdn)
- dst = self.outputs[0].path_from(cwdn)
-
- cmd = self.command + [src, dst]
- cmd += Utils.to_list(getattr(self.generator, 'options', []))
- for attribute in self.attributes:
- stylesheet = getattr(self.generator, attribute, None)
- if stylesheet is not None:
- stylesheet = self.generator.to_nodes(stylesheet)[0]
- cmd += ['--%s' % attribute, stylesheet.path_from(cwdn)]
-
- return self.exec_command(cmd, cwd=cwdn.abspath())
-
-class rst2s5(rst2html):
- def __init__(self, *args, **kw):
- rst2html.__init__(self, *args, **kw)
- self.command = self.generator.env.RST2S5
- self.attributes = ['stylesheet']
-
-class rst2latex(rst2html):
- def __init__(self, *args, **kw):
- rst2html.__init__(self, *args, **kw)
- self.command = self.generator.env.RST2LATEX
- self.attributes = ['stylesheet']
-
-class rst2xetex(rst2html):
- def __init__(self, *args, **kw):
- rst2html.__init__(self, *args, **kw)
- self.command = self.generator.env.RST2XETEX
- self.attributes = ['stylesheet']
-
-class rst2pdf(docutils):
- color = 'BLUE'
- def run(self):
- cwdn = self.outputs[0].parent
- src = self.inputs[0].path_from(cwdn)
- dst = self.outputs[0].path_from(cwdn)
-
- cmd = self.generator.env.RST2PDF + [src, '-o', dst]
- cmd += Utils.to_list(getattr(self.generator, 'options', []))
-
- return self.exec_command(cmd, cwd=cwdn.abspath())
-
-
-@feature('rst')
-@before_method('process_source')
-def apply_rst(self):
- """
- Create :py:class:`rst` or other rst-related task objects
- """
-
- if self.target:
- if isinstance(self.target, Node.Node):
- tgt = self.target
- elif isinstance(self.target, str):
- tgt = self.path.get_bld().make_node(self.target)
- else:
- self.bld.fatal("rst: Don't know how to build target name %s which is not a string or Node for %s" % (self.target, self))
- else:
- tgt = None
-
- tsk_type = getattr(self, 'type', None)
-
- src = self.to_nodes(self.source)
- assert len(src) == 1
- src = src[0]
-
- if tsk_type is not None and tgt is None:
- if tsk_type.startswith('rst2'):
- ext = tsk_type[4:]
- else:
- self.bld.fatal("rst: Could not detect the output file extension for %s" % self)
- tgt = src.change_ext('.%s' % ext)
- elif tsk_type is None and tgt is not None:
- out = tgt.name
- ext = out[out.rfind('.')+1:]
- self.type = 'rst2' + ext
- elif tsk_type is not None and tgt is not None:
- # the user knows what he wants
- pass
- else:
- self.bld.fatal("rst: Need to indicate task type or target name for %s" % self)
-
- deps_lst = []
-
- if getattr(self, 'deps', None):
- deps = self.to_list(self.deps)
- for filename in deps:
- n = self.path.find_resource(filename)
- if not n:
- self.bld.fatal('Could not find %r for %r' % (filename, self))
- if not n in deps_lst:
- deps_lst.append(n)
-
- try:
- task = self.create_task(self.type, src, tgt)
- except KeyError:
- self.bld.fatal("rst: Task of type %s not implemented (created by %s)" % (self.type, self))
-
- task.env = self.env
-
- # add the manual dependencies
- if deps_lst:
- try:
- lst = self.bld.node_deps[task.uid()]
- for n in deps_lst:
- if not n in lst:
- lst.append(n)
- except KeyError:
- self.bld.node_deps[task.uid()] = deps_lst
-
- inst_to = getattr(self, 'install_path', None)
- if inst_to:
- self.install_task = self.add_install_files(install_to=inst_to, install_from=task.outputs[:])
-
- self.source = []
-
-def configure(self):
- """
- Try to find the rst programs.
-
- Do not raise any error if they are not found.
- You'll have to use additional code in configure() to die
- if programs were not found.
- """
- for p in rst_progs:
- self.find_program(p, mandatory=False)
-
diff --git a/waflib/extras/run_do_script.py b/waflib/extras/run_do_script.py
deleted file mode 100644
index f3c5812..0000000
--- a/waflib/extras/run_do_script.py
+++ /dev/null
@@ -1,139 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Stata do-script in the directory specified by **ctx.bldnode**. The
-first and only argument will be the name of the do-script (no extension),
-which can be accessed inside the do-script by the local macro `1'. Useful
-for keeping a log file.
-
-The tool uses the log file that is automatically kept by Stata only
-for error-catching purposes, it will be destroyed if the task finished
-without error. In case of an error in **some_script.do**, you can inspect
-it as **some_script.log** in the **ctx.bldnode** directory.
-
-Note that Stata will not return an error code if it exits abnormally --
-catching errors relies on parsing the log file mentioned before. Should
-the parser behave incorrectly please send an email to hmgaudecker [at] gmail.
-
-**WARNING**
-
- The tool will not work if multiple do-scripts of the same name---but in
- different directories---are run at the same time! Avoid this situation.
-
-Usage::
-
- ctx(features='run_do_script',
- source='some_script.do',
- target=['some_table.tex', 'some_figure.eps'],
- deps='some_data.csv')
-"""
-
-
-import os, re, sys
-from waflib import Task, TaskGen, Logs
-
-if sys.platform == 'darwin':
- STATA_COMMANDS = ['Stata64MP', 'StataMP',
- 'Stata64SE', 'StataSE',
- 'Stata64', 'Stata']
- STATAFLAGS = '-e -q do'
- STATAENCODING = 'MacRoman'
-elif sys.platform.startswith('linux'):
- STATA_COMMANDS = ['stata-mp', 'stata-se', 'stata']
- STATAFLAGS = '-b -q do'
- # Not sure whether this is correct...
- STATAENCODING = 'Latin-1'
-elif sys.platform.lower().startswith('win'):
- STATA_COMMANDS = ['StataMP-64', 'StataMP-ia',
- 'StataMP', 'StataSE-64',
- 'StataSE-ia', 'StataSE',
- 'Stata-64', 'Stata-ia',
- 'Stata.e', 'WMPSTATA',
- 'WSESTATA', 'WSTATA']
- STATAFLAGS = '/e do'
- STATAENCODING = 'Latin-1'
-else:
- raise Exception("Unknown sys.platform: %s " % sys.platform)
-
-def configure(ctx):
- ctx.find_program(STATA_COMMANDS, var='STATACMD', errmsg="""\n
-No Stata executable found!\n\n
-If Stata is needed:\n
- 1) Check the settings of your system path.
- 2) Note we are looking for Stata executables called: %s
- If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
- Do not load the 'run_do_script' tool in the main wscript.\n\n""" % STATA_COMMANDS)
- ctx.env.STATAFLAGS = STATAFLAGS
- ctx.env.STATAENCODING = STATAENCODING
-
-class run_do_script_base(Task.Task):
- """Run a Stata do-script from the bldnode directory."""
- run_str = '"${STATACMD}" ${STATAFLAGS} "${SRC[0].abspath()}" "${DOFILETRUNK}"'
- shell = True
-
-class run_do_script(run_do_script_base):
- """Use the log file automatically kept by Stata for error-catching.
- Erase it if the task finished without error. If not, it will show
- up as do_script.log in the bldnode directory.
- """
- def run(self):
- run_do_script_base.run(self)
- ret, log_tail = self.check_erase_log_file()
- if ret:
- Logs.error("""Running Stata on %r failed with code %r.\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
- self.inputs[0], ret, self.env.LOGFILEPATH, log_tail)
- return ret
-
- def check_erase_log_file(self):
- """Parse Stata's default log file and erase it if everything okay.
-
- Parser is based on Brendan Halpin's shell script found here:
- http://teaching.sociology.ul.ie/bhalpin/wordpress/?p=122
- """
-
- if sys.version_info.major >= 3:
- kwargs = {'file': self.env.LOGFILEPATH, 'mode': 'r', 'encoding': self.env.STATAENCODING}
- else:
- kwargs = {'name': self.env.LOGFILEPATH, 'mode': 'r'}
- with open(**kwargs) as log:
- log_tail = log.readlines()[-10:]
- for line in log_tail:
- error_found = re.match("r\(([0-9]+)\)", line)
- if error_found:
- return error_found.group(1), ''.join(log_tail)
- else:
- pass
- # Only end up here if the parser did not identify an error.
- os.remove(self.env.LOGFILEPATH)
- return None, None
-
-
-@TaskGen.feature('run_do_script')
-@TaskGen.before_method('process_source')
-def apply_run_do_script(tg):
- """Task generator customising the options etc. to call Stata in batch
- mode for running a do-script.
- """
-
- # Convert sources and targets to nodes
- src_node = tg.path.find_resource(tg.source)
- tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
- tsk = tg.create_task('run_do_script', src=src_node, tgt=tgt_nodes)
- tsk.env.DOFILETRUNK = os.path.splitext(src_node.name)[0]
- tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s.log' % (tsk.env.DOFILETRUNK))
-
- # dependencies (if the attribute 'deps' changes, trigger a recompilation)
- for x in tg.to_list(getattr(tg, 'deps', [])):
- node = tg.path.find_resource(x)
- if not node:
- tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
- tsk.dep_nodes.append(node)
- Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
- # Bypass the execution of process_source by setting the source to an empty list
- tg.source = []
-
diff --git a/waflib/extras/run_m_script.py b/waflib/extras/run_m_script.py
deleted file mode 100644
index b5f27eb..0000000
--- a/waflib/extras/run_m_script.py
+++ /dev/null
@@ -1,88 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Matlab script.
-
-Note that the script is run in the directory where it lives -- Matlab won't
-allow it any other way.
-
-For error-catching purposes, keep an own log-file that is destroyed if the
-task finished without error. If not, it will show up as mscript_[index].log
-in the bldnode directory.
-
-Usage::
-
- ctx(features='run_m_script',
- source='some_script.m',
- target=['some_table.tex', 'some_figure.eps'],
- deps='some_data.mat')
-"""
-
-import os, sys
-from waflib import Task, TaskGen, Logs
-
-MATLAB_COMMANDS = ['matlab']
-
-def configure(ctx):
- ctx.find_program(MATLAB_COMMANDS, var='MATLABCMD', errmsg = """\n
-No Matlab executable found!\n\n
-If Matlab is needed:\n
- 1) Check the settings of your system path.
- 2) Note we are looking for Matlab executables called: %s
- If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
- Do not load the 'run_m_script' tool in the main wscript.\n\n""" % MATLAB_COMMANDS)
- ctx.env.MATLABFLAGS = '-wait -nojvm -nosplash -minimize'
-
-class run_m_script_base(Task.Task):
- """Run a Matlab script."""
- run_str = '"${MATLABCMD}" ${MATLABFLAGS} -logfile "${LOGFILEPATH}" -r "try, ${MSCRIPTTRUNK}, exit(0), catch err, disp(err.getReport()), exit(1), end"'
- shell = True
-
-class run_m_script(run_m_script_base):
- """Erase the Matlab overall log file if everything went okay, else raise an
- error and print its 10 last lines.
- """
- def run(self):
- ret = run_m_script_base.run(self)
- logfile = self.env.LOGFILEPATH
- if ret:
- mode = 'r'
- if sys.version_info.major >= 3:
- mode = 'rb'
- with open(logfile, mode=mode) as f:
- tail = f.readlines()[-10:]
- Logs.error("""Running Matlab on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
- self.inputs[0], ret, logfile, '\n'.join(tail))
- else:
- os.remove(logfile)
- return ret
-
-@TaskGen.feature('run_m_script')
-@TaskGen.before_method('process_source')
-def apply_run_m_script(tg):
- """Task generator customising the options etc. to call Matlab in batch
- mode for running a m-script.
- """
-
- # Convert sources and targets to nodes
- src_node = tg.path.find_resource(tg.source)
- tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
- tsk = tg.create_task('run_m_script', src=src_node, tgt=tgt_nodes)
- tsk.cwd = src_node.parent.abspath()
- tsk.env.MSCRIPTTRUNK = os.path.splitext(src_node.name)[0]
- tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (tsk.env.MSCRIPTTRUNK, tg.idx))
-
- # dependencies (if the attribute 'deps' changes, trigger a recompilation)
- for x in tg.to_list(getattr(tg, 'deps', [])):
- node = tg.path.find_resource(x)
- if not node:
- tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
- tsk.dep_nodes.append(node)
- Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
- # Bypass the execution of process_source by setting the source to an empty list
- tg.source = []
diff --git a/waflib/extras/run_py_script.py b/waflib/extras/run_py_script.py
deleted file mode 100644
index 3670381..0000000
--- a/waflib/extras/run_py_script.py
+++ /dev/null
@@ -1,104 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a Python script in the directory specified by **ctx.bldnode**.
-
-Select a Python version by specifying the **version** keyword for
-the task generator instance as integer 2 or 3. Default is 3.
-
-If the build environment has an attribute "PROJECT_PATHS" with
-a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
-Same a string passed to the optional **add_to_pythonpath**
-keyword (appended after the PROJECT_ROOT).
-
-Usage::
-
- ctx(features='run_py_script', version=3,
- source='some_script.py',
- target=['some_table.tex', 'some_figure.eps'],
- deps='some_data.csv',
- add_to_pythonpath='src/some/library')
-"""
-
-import os, re
-from waflib import Task, TaskGen, Logs
-
-
-def configure(conf):
- """TODO: Might need to be updated for Windows once
- "PEP 397":http://www.python.org/dev/peps/pep-0397/ is settled.
- """
- conf.find_program('python', var='PY2CMD', mandatory=False)
- conf.find_program('python3', var='PY3CMD', mandatory=False)
- if not conf.env.PY2CMD and not conf.env.PY3CMD:
- conf.fatal("No Python interpreter found!")
-
-class run_py_2_script(Task.Task):
- """Run a Python 2 script."""
- run_str = '${PY2CMD} ${SRC[0].abspath()}'
- shell=True
-
-class run_py_3_script(Task.Task):
- """Run a Python 3 script."""
- run_str = '${PY3CMD} ${SRC[0].abspath()}'
- shell=True
-
-@TaskGen.feature('run_py_script')
-@TaskGen.before_method('process_source')
-def apply_run_py_script(tg):
- """Task generator for running either Python 2 or Python 3 on a single
- script.
-
- Attributes:
-
- * source -- A **single** source node or string. (required)
- * target -- A single target or list of targets (nodes or strings)
- * deps -- A single dependency or list of dependencies (nodes or strings)
- * add_to_pythonpath -- A string that will be appended to the PYTHONPATH environment variable
-
- If the build environment has an attribute "PROJECT_PATHS" with
- a key "PROJECT_ROOT", its value will be appended to the PYTHONPATH.
- """
-
- # Set the Python version to use, default to 3.
- v = getattr(tg, 'version', 3)
- if v not in (2, 3):
- raise ValueError("Specify the 'version' attribute for run_py_script task generator as integer 2 or 3.\n Got: %s" %v)
-
- # Convert sources and targets to nodes
- src_node = tg.path.find_resource(tg.source)
- tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
- # Create the task.
- tsk = tg.create_task('run_py_%d_script' %v, src=src_node, tgt=tgt_nodes)
-
- # custom execution environment
- # TODO use a list and os.sep.join(lst) at the end instead of concatenating strings
- tsk.env.env = dict(os.environ)
- tsk.env.env['PYTHONPATH'] = tsk.env.env.get('PYTHONPATH', '')
- project_paths = getattr(tsk.env, 'PROJECT_PATHS', None)
- if project_paths and 'PROJECT_ROOT' in project_paths:
- tsk.env.env['PYTHONPATH'] += os.pathsep + project_paths['PROJECT_ROOT'].abspath()
- if getattr(tg, 'add_to_pythonpath', None):
- tsk.env.env['PYTHONPATH'] += os.pathsep + tg.add_to_pythonpath
-
- # Clean up the PYTHONPATH -- replace double occurrences of path separator
- tsk.env.env['PYTHONPATH'] = re.sub(os.pathsep + '+', os.pathsep, tsk.env.env['PYTHONPATH'])
-
- # Clean up the PYTHONPATH -- doesn't like starting with path separator
- if tsk.env.env['PYTHONPATH'].startswith(os.pathsep):
- tsk.env.env['PYTHONPATH'] = tsk.env.env['PYTHONPATH'][1:]
-
- # dependencies (if the attribute 'deps' changes, trigger a recompilation)
- for x in tg.to_list(getattr(tg, 'deps', [])):
- node = tg.path.find_resource(x)
- if not node:
- tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
- tsk.dep_nodes.append(node)
- Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
- # Bypass the execution of process_source by setting the source to an empty list
- tg.source = []
-
diff --git a/waflib/extras/run_r_script.py b/waflib/extras/run_r_script.py
deleted file mode 100644
index b0d8f2b..0000000
--- a/waflib/extras/run_r_script.py
+++ /dev/null
@@ -1,86 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Hans-Martin von Gaudecker, 2012
-
-"""
-Run a R script in the directory specified by **ctx.bldnode**.
-
-For error-catching purposes, keep an own log-file that is destroyed if the
-task finished without error. If not, it will show up as rscript_[index].log
-in the bldnode directory.
-
-Usage::
-
- ctx(features='run_r_script',
- source='some_script.r',
- target=['some_table.tex', 'some_figure.eps'],
- deps='some_data.csv')
-"""
-
-
-import os, sys
-from waflib import Task, TaskGen, Logs
-
-R_COMMANDS = ['RTerm', 'R', 'r']
-
-def configure(ctx):
- ctx.find_program(R_COMMANDS, var='RCMD', errmsg = """\n
-No R executable found!\n\n
-If R is needed:\n
- 1) Check the settings of your system path.
- 2) Note we are looking for R executables called: %s
- If yours has a different name, please report to hmgaudecker [at] gmail\n
-Else:\n
- Do not load the 'run_r_script' tool in the main wscript.\n\n""" % R_COMMANDS)
- ctx.env.RFLAGS = 'CMD BATCH --slave'
-
-class run_r_script_base(Task.Task):
- """Run a R script."""
- run_str = '"${RCMD}" ${RFLAGS} "${SRC[0].abspath()}" "${LOGFILEPATH}"'
- shell = True
-
-class run_r_script(run_r_script_base):
- """Erase the R overall log file if everything went okay, else raise an
- error and print its 10 last lines.
- """
- def run(self):
- ret = run_r_script_base.run(self)
- logfile = self.env.LOGFILEPATH
- if ret:
- mode = 'r'
- if sys.version_info.major >= 3:
- mode = 'rb'
- with open(logfile, mode=mode) as f:
- tail = f.readlines()[-10:]
- Logs.error("""Running R on %r returned the error %r\n\nCheck the log file %s, last 10 lines\n\n%s\n\n\n""",
- self.inputs[0], ret, logfile, '\n'.join(tail))
- else:
- os.remove(logfile)
- return ret
-
-
-@TaskGen.feature('run_r_script')
-@TaskGen.before_method('process_source')
-def apply_run_r_script(tg):
- """Task generator customising the options etc. to call R in batch
- mode for running a R script.
- """
-
- # Convert sources and targets to nodes
- src_node = tg.path.find_resource(tg.source)
- tgt_nodes = [tg.path.find_or_declare(t) for t in tg.to_list(tg.target)]
-
- tsk = tg.create_task('run_r_script', src=src_node, tgt=tgt_nodes)
- tsk.env.LOGFILEPATH = os.path.join(tg.bld.bldnode.abspath(), '%s_%d.log' % (os.path.splitext(src_node.name)[0], tg.idx))
-
- # dependencies (if the attribute 'deps' changes, trigger a recompilation)
- for x in tg.to_list(getattr(tg, 'deps', [])):
- node = tg.path.find_resource(x)
- if not node:
- tg.bld.fatal('Could not find dependency %r for running %r' % (x, src_node.abspath()))
- tsk.dep_nodes.append(node)
- Logs.debug('deps: found dependencies %r for running %r', tsk.dep_nodes, src_node.abspath())
-
- # Bypass the execution of process_source by setting the source to an empty list
- tg.source = []
-
diff --git a/waflib/extras/sas.py b/waflib/extras/sas.py
deleted file mode 100644
index 754c614..0000000
--- a/waflib/extras/sas.py
+++ /dev/null
@@ -1,71 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Mark Coggeshall, 2010
-
-"SAS support"
-
-import os
-from waflib import Task, Errors, Logs
-from waflib.TaskGen import feature, before_method
-
-sas_fun, _ = Task.compile_fun('sas -sysin ${SRCFILE} -log ${LOGFILE} -print ${LSTFILE}', shell=False)
-
-class sas(Task.Task):
- vars = ['SAS', 'SASFLAGS']
- def run(task):
- command = 'SAS'
- fun = sas_fun
-
- node = task.inputs[0]
- logfilenode = node.change_ext('.log')
- lstfilenode = node.change_ext('.lst')
-
- # set the cwd
- task.cwd = task.inputs[0].parent.get_src().abspath()
- Logs.debug('runner: %r on %r', command, node)
-
- SASINPUTS = node.parent.get_bld().abspath() + os.pathsep + node.parent.get_src().abspath() + os.pathsep
- task.env.env = {'SASINPUTS': SASINPUTS}
-
- task.env.SRCFILE = node.abspath()
- task.env.LOGFILE = logfilenode.abspath()
- task.env.LSTFILE = lstfilenode.abspath()
- ret = fun(task)
- if ret:
- Logs.error('Running %s on %r returned a non-zero exit', command, node)
- Logs.error('SRCFILE = %r', node)
- Logs.error('LOGFILE = %r', logfilenode)
- Logs.error('LSTFILE = %r', lstfilenode)
- return ret
-
-@feature('sas')
-@before_method('process_source')
-def apply_sas(self):
- if not getattr(self, 'type', None) in ('sas',):
- self.type = 'sas'
-
- self.env['logdir'] = getattr(self, 'logdir', 'log')
- self.env['lstdir'] = getattr(self, 'lstdir', 'lst')
-
- deps_lst = []
-
- if getattr(self, 'deps', None):
- deps = self.to_list(self.deps)
- for filename in deps:
- n = self.path.find_resource(filename)
- if not n:
- n = self.bld.root.find_resource(filename)
- if not n:
- raise Errors.WafError('cannot find input file %s for processing' % filename)
- if not n in deps_lst:
- deps_lst.append(n)
-
- for node in self.to_nodes(self.source):
- if self.type == 'sas':
- task = self.create_task('sas', src=node)
- task.dep_nodes = deps_lst
- self.source = []
-
-def configure(self):
- self.find_program('sas', var='SAS', mandatory=False)
-
diff --git a/waflib/extras/satellite_assembly.py b/waflib/extras/satellite_assembly.py
deleted file mode 100644
index 005eb07..0000000
--- a/waflib/extras/satellite_assembly.py
+++ /dev/null
@@ -1,57 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# vim: tabstop=4 noexpandtab
-
-"""
-Create a satellite assembly from "*.??.txt" files. ?? stands for a language code.
-
-The projects Resources subfolder contains resources.??.txt string files for several languages.
-The build folder will hold the satellite assemblies as ./??/ExeName.resources.dll
-
-#gen becomes template (It is called gen because it also uses resx.py).
-bld(source='Resources/resources.de.txt',gen=ExeName)
-"""
-
-import os, re
-from waflib import Task
-from waflib.TaskGen import feature,before_method
-
-class al(Task.Task):
- run_str = '${AL} ${ALFLAGS}'
-
-@feature('satellite_assembly')
-@before_method('process_source')
-def satellite_assembly(self):
- if not getattr(self, 'gen', None):
- self.bld.fatal('satellite_assembly needs a template assembly provided with the "gen" parameter')
- res_lang = re.compile(r'(.*)\.(\w\w)\.(?:resx|txt)',flags=re.I)
-
- # self.source can contain node objects, so this will break in one way or another
- self.source = self.to_list(self.source)
- for i, x in enumerate(self.source):
- #x = 'resources/resources.de.resx'
- #x = 'resources/resources.de.txt'
- mo = res_lang.match(x)
- if mo:
- template = os.path.splitext(self.gen)[0]
- templatedir, templatename = os.path.split(template)
- res = mo.group(1)
- lang = mo.group(2)
- #./Resources/resources.de.resources
- resources = self.path.find_or_declare(res+ '.' + lang + '.resources')
- self.create_task('resgen', self.to_nodes(x), [resources])
- #./de/Exename.resources.dll
- satellite = self.path.find_or_declare(os.path.join(templatedir,lang,templatename) + '.resources.dll')
- tsk = self.create_task('al',[resources],[satellite])
- tsk.env.append_value('ALFLAGS','/template:'+os.path.join(self.path.relpath(),self.gen))
- tsk.env.append_value('ALFLAGS','/embed:'+resources.relpath())
- tsk.env.append_value('ALFLAGS','/culture:'+lang)
- tsk.env.append_value('ALFLAGS','/out:'+satellite.relpath())
- self.source[i] = None
- # remove the None elements that we just substituted
- self.source = list(filter(lambda x:x, self.source))
-
-def configure(ctx):
- ctx.find_program('al', var='AL', mandatory=True)
- ctx.load('resx')
-
diff --git a/waflib/extras/scala.py b/waflib/extras/scala.py
deleted file mode 100644
index a9880f0..0000000
--- a/waflib/extras/scala.py
+++ /dev/null
@@ -1,128 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-Scala support
-
-scalac outputs files a bit where it wants to
-"""
-
-import os
-from waflib import Task, Utils, Node
-from waflib.TaskGen import feature, before_method, after_method
-
-from waflib.Tools import ccroot
-ccroot.USELIB_VARS['scalac'] = set(['CLASSPATH', 'SCALACFLAGS'])
-
-from waflib.Tools import javaw
-
-@feature('scalac')
-@before_method('process_source')
-def apply_scalac(self):
-
- Utils.def_attrs(self, jarname='', classpath='',
- sourcepath='.', srcdir='.',
- jar_mf_attributes={}, jar_mf_classpath=[])
-
- outdir = getattr(self, 'outdir', None)
- if outdir:
- if not isinstance(outdir, Node.Node):
- outdir = self.path.get_bld().make_node(self.outdir)
- else:
- outdir = self.path.get_bld()
- outdir.mkdir()
- self.env['OUTDIR'] = outdir.abspath()
-
- self.scalac_task = tsk = self.create_task('scalac')
- tmp = []
-
- srcdir = getattr(self, 'srcdir', '')
- if isinstance(srcdir, Node.Node):
- srcdir = [srcdir]
- for x in Utils.to_list(srcdir):
- if isinstance(x, Node.Node):
- y = x
- else:
- y = self.path.find_dir(x)
- if not y:
- self.bld.fatal('Could not find the folder %s from %s' % (x, self.path))
- tmp.append(y)
- tsk.srcdir = tmp
-
-# reuse some code
-feature('scalac')(javaw.use_javac_files)
-after_method('apply_scalac')(javaw.use_javac_files)
-
-feature('scalac')(javaw.set_classpath)
-after_method('apply_scalac', 'use_scalac_files')(javaw.set_classpath)
-
-
-SOURCE_RE = '**/*.scala'
-class scalac(javaw.javac):
- color = 'GREEN'
- vars = ['CLASSPATH', 'SCALACFLAGS', 'SCALAC', 'OUTDIR']
-
- def runnable_status(self):
- """
- Wait for dependent tasks to be complete, then read the file system to find the input nodes.
- """
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- if not self.inputs:
- global SOURCE_RE
- self.inputs = []
- for x in self.srcdir:
- self.inputs.extend(x.ant_glob(SOURCE_RE, remove=False))
- return super(javaw.javac, self).runnable_status()
-
- def run(self):
- """
- Execute the scalac compiler
- """
- env = self.env
- gen = self.generator
- bld = gen.bld
- wd = bld.bldnode.abspath()
- def to_list(xx):
- if isinstance(xx, str):
- return [xx]
- return xx
- self.last_cmd = lst = []
- lst.extend(to_list(env['SCALAC']))
- lst.extend(['-classpath'])
- lst.extend(to_list(env['CLASSPATH']))
- lst.extend(['-d'])
- lst.extend(to_list(env['OUTDIR']))
- lst.extend(to_list(env['SCALACFLAGS']))
- lst.extend([a.abspath() for a in self.inputs])
- lst = [x for x in lst if x]
- try:
- self.out = self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None, output=0, quiet=0)[1]
- except:
- self.generator.bld.cmd_and_log(lst, cwd=wd, env=env.env or None)
-
-def configure(self):
- """
- Detect the scalac program
- """
- # If SCALA_HOME is set, we prepend it to the path list
- java_path = self.environ['PATH'].split(os.pathsep)
- v = self.env
-
- if 'SCALA_HOME' in self.environ:
- java_path = [os.path.join(self.environ['SCALA_HOME'], 'bin')] + java_path
- self.env['SCALA_HOME'] = [self.environ['SCALA_HOME']]
-
- for x in 'scalac scala'.split():
- self.find_program(x, var=x.upper(), path_list=java_path)
-
- if 'CLASSPATH' in self.environ:
- v['CLASSPATH'] = self.environ['CLASSPATH']
-
- v.SCALACFLAGS = ['-verbose']
- if not v['SCALAC']:
- self.fatal('scalac is required for compiling scala classes')
-
diff --git a/waflib/extras/slow_qt4.py b/waflib/extras/slow_qt4.py
deleted file mode 100644
index ec7880b..0000000
--- a/waflib/extras/slow_qt4.py
+++ /dev/null
@@ -1,96 +0,0 @@
-#! /usr/bin/env python
-# Thomas Nagy, 2011 (ita)
-
-"""
-Create _moc.cpp files
-
-The builds are 30-40% faster when .moc files are included,
-you should NOT use this tool. If you really
-really want it:
-
-def configure(conf):
- conf.load('compiler_cxx qt4')
- conf.load('slow_qt4')
-
-See playground/slow_qt/wscript for a complete example.
-"""
-
-from waflib.TaskGen import extension
-from waflib import Task
-import waflib.Tools.qt4
-import waflib.Tools.cxx
-
-@extension(*waflib.Tools.qt4.EXT_QT4)
-def cxx_hook(self, node):
- return self.create_compiled_task('cxx_qt', node)
-
-class cxx_qt(Task.classes['cxx']):
- def runnable_status(self):
- ret = Task.classes['cxx'].runnable_status(self)
- if ret != Task.ASK_LATER and not getattr(self, 'moc_done', None):
-
- try:
- cache = self.generator.moc_cache
- except AttributeError:
- cache = self.generator.moc_cache = {}
-
- deps = self.generator.bld.node_deps[self.uid()]
- for x in [self.inputs[0]] + deps:
- if x.read().find('Q_OBJECT') > 0:
-
- # process "foo.h -> foo.moc" only if "foo.cpp" is in the sources for the current task generator
- # this code will work because it is in the main thread (runnable_status)
- if x.name.rfind('.') > -1: # a .h file...
- name = x.name[:x.name.rfind('.')]
- for tsk in self.generator.compiled_tasks:
- if tsk.inputs and tsk.inputs[0].name.startswith(name):
- break
- else:
- # no corresponding file, continue
- continue
-
- # the file foo.cpp could be compiled for a static and a shared library - hence the %number in the name
- cxx_node = x.parent.get_bld().make_node(x.name.replace('.', '_') + '_%d_moc.cpp' % self.generator.idx)
- if cxx_node in cache:
- continue
- cache[cxx_node] = self
-
- tsk = Task.classes['moc'](env=self.env, generator=self.generator)
- tsk.set_inputs(x)
- tsk.set_outputs(cxx_node)
-
- if x.name.endswith('.cpp'):
- # moc is trying to be too smart but it is too dumb:
- # why forcing the #include when Q_OBJECT is in the cpp file?
- gen = self.generator.bld.producer
- gen.outstanding.append(tsk)
- gen.total += 1
- self.set_run_after(tsk)
- else:
- cxxtsk = Task.classes['cxx'](env=self.env, generator=self.generator)
- cxxtsk.set_inputs(tsk.outputs)
- cxxtsk.set_outputs(cxx_node.change_ext('.o'))
- cxxtsk.set_run_after(tsk)
-
- try:
- self.more_tasks.extend([tsk, cxxtsk])
- except AttributeError:
- self.more_tasks = [tsk, cxxtsk]
-
- try:
- link = self.generator.link_task
- except AttributeError:
- pass
- else:
- link.set_run_after(cxxtsk)
- link.inputs.extend(cxxtsk.outputs)
- link.inputs.sort(key=lambda x: x.abspath())
-
- self.moc_done = True
-
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- return ret
-
diff --git a/waflib/extras/softlink_libs.py b/waflib/extras/softlink_libs.py
deleted file mode 100644
index 50c777f..0000000
--- a/waflib/extras/softlink_libs.py
+++ /dev/null
@@ -1,76 +0,0 @@
-#! /usr/bin/env python
-# per rosengren 2011
-
-from waflib.TaskGen import feature, after_method
-from waflib.Task import Task, always_run
-from os.path import basename, isabs
-from os import tmpfile, linesep
-
-def options(opt):
- grp = opt.add_option_group('Softlink Libraries Options')
- grp.add_option('--exclude', default='/usr/lib,/lib', help='No symbolic links are created for libs within [%default]')
-
-def configure(cnf):
- cnf.find_program('ldd')
- if not cnf.env.SOFTLINK_EXCLUDE:
- cnf.env.SOFTLINK_EXCLUDE = cnf.options.exclude.split(',')
-
-@feature('softlink_libs')
-@after_method('process_rule')
-def add_finder(self):
- tgt = self.path.find_or_declare(self.target)
- self.create_task('sll_finder', tgt=tgt)
- self.create_task('sll_installer', tgt=tgt)
- always_run(sll_installer)
-
-class sll_finder(Task):
- ext_out = 'softlink_libs'
- def run(self):
- bld = self.generator.bld
- linked=[]
- target_paths = []
- for g in bld.groups:
- for tgen in g:
- # FIXME it might be better to check if there is a link_task (getattr?)
- target_paths += [tgen.path.get_bld().bldpath()]
- linked += [t.outputs[0].bldpath()
- for t in getattr(tgen, 'tasks', [])
- if t.__class__.__name__ in
- ['cprogram', 'cshlib', 'cxxprogram', 'cxxshlib']]
- lib_list = []
- if len(linked):
- cmd = [self.env.LDD] + linked
- # FIXME add DYLD_LIBRARY_PATH+PATH for osx+win32
- ldd_env = {'LD_LIBRARY_PATH': ':'.join(target_paths + self.env.LIBPATH)}
- # FIXME the with syntax will not work in python 2
- with tmpfile() as result:
- self.exec_command(cmd, env=ldd_env, stdout=result)
- result.seek(0)
- for line in result.readlines():
- words = line.split()
- if len(words) < 3 or words[1] != '=>':
- continue
- lib = words[2]
- if lib == 'not':
- continue
- if any([lib.startswith(p) for p in
- [bld.bldnode.abspath(), '('] +
- self.env.SOFTLINK_EXCLUDE]):
- continue
- if not isabs(lib):
- continue
- lib_list.append(lib)
- lib_list = sorted(set(lib_list))
- self.outputs[0].write(linesep.join(lib_list + self.env.DYNAMIC_LIBS))
- return 0
-
-class sll_installer(Task):
- ext_in = 'softlink_libs'
- def run(self):
- tgt = self.outputs[0]
- self.generator.bld.install_files('${LIBDIR}', tgt, postpone=False)
- lib_list=tgt.read().split()
- for lib in lib_list:
- self.generator.bld.symlink_as('${LIBDIR}/'+basename(lib), lib, postpone=False)
- return 0
-
diff --git a/waflib/extras/stale.py b/waflib/extras/stale.py
deleted file mode 100644
index cac3f46..0000000
--- a/waflib/extras/stale.py
+++ /dev/null
@@ -1,98 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Thomas Nagy, 2006-2015 (ita)
-
-"""
-Add a pre-build hook to remove build files (declared in the system)
-that do not have a corresponding target
-
-This can be used for example to remove the targets
-that have changed name without performing
-a full 'waf clean'
-
-Of course, it will only work if there are no dynamically generated
-nodes/tasks, in which case the method will have to be modified
-to exclude some folders for example.
-
-Make sure to set bld.post_mode = waflib.Build.POST_AT_ONCE
-"""
-
-from waflib import Logs, Build
-from waflib.Runner import Parallel
-
-DYNAMIC_EXT = [] # add your non-cleanable files/extensions here
-MOC_H_EXTS = '.cpp .cxx .hpp .hxx .h'.split()
-
-def can_delete(node):
- """Imperfect moc cleanup which does not look for a Q_OBJECT macro in the files"""
- if not node.name.endswith('.moc'):
- return True
- base = node.name[:-4]
- p1 = node.parent.get_src()
- p2 = node.parent.get_bld()
- for k in MOC_H_EXTS:
- h_name = base + k
- n = p1.search_node(h_name)
- if n:
- return False
- n = p2.search_node(h_name)
- if n:
- return False
-
- # foo.cpp.moc, foo.h.moc, etc.
- if base.endswith(k):
- return False
-
- return True
-
-# recursion over the nodes to find the stale files
-def stale_rec(node, nodes):
- if node.abspath() in node.ctx.env[Build.CFG_FILES]:
- return
-
- if getattr(node, 'children', []):
- for x in node.children.values():
- if x.name != "c4che":
- stale_rec(x, nodes)
- else:
- for ext in DYNAMIC_EXT:
- if node.name.endswith(ext):
- break
- else:
- if not node in nodes:
- if can_delete(node):
- Logs.warn('Removing stale file -> %r', node)
- node.delete()
-
-old = Parallel.refill_task_list
-def refill_task_list(self):
- iit = old(self)
- bld = self.bld
-
- # execute this operation only once
- if getattr(self, 'stale_done', False):
- return iit
- self.stale_done = True
-
- # this does not work in partial builds
- if bld.targets != '*':
- return iit
-
- # this does not work in dynamic builds
- if getattr(bld, 'post_mode') == Build.POST_AT_ONCE:
- return iit
-
- # obtain the nodes to use during the build
- nodes = []
- for tasks in bld.groups:
- for x in tasks:
- try:
- nodes.extend(x.outputs)
- except AttributeError:
- pass
-
- stale_rec(bld.bldnode, nodes)
- return iit
-
-Parallel.refill_task_list = refill_task_list
-
diff --git a/waflib/extras/stracedeps.py b/waflib/extras/stracedeps.py
deleted file mode 100644
index 37d82cb..0000000
--- a/waflib/extras/stracedeps.py
+++ /dev/null
@@ -1,174 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2015 (ita)
-
-"""
-Execute tasks through strace to obtain dependencies after the process is run. This
-scheme is similar to that of the Fabricate script.
-
-To use::
-
- def configure(conf):
- conf.load('strace')
-
-WARNING:
-* This will not work when advanced scanners are needed (qt4/qt5)
-* The overhead of running 'strace' is significant (56s -> 1m29s)
-* It will not work on Windows :-)
-"""
-
-import os, re, threading
-from waflib import Task, Logs, Utils
-
-#TRACECALLS = 'trace=access,chdir,clone,creat,execve,exit_group,fork,lstat,lstat64,mkdir,open,rename,stat,stat64,symlink,vfork'
-TRACECALLS = 'trace=process,file'
-
-BANNED = ('/tmp', '/proc', '/sys', '/dev')
-
-s_process = r'(?:clone|fork|vfork)\(.*?(?P<npid>\d+)'
-s_file = r'(?P<call>\w+)\("(?P<path>([^"\\]|\\.)*)"(.*)'
-re_lines = re.compile(r'^(?P<pid>\d+)\s+(?:(?:%s)|(?:%s))\r*$' % (s_file, s_process), re.IGNORECASE | re.MULTILINE)
-strace_lock = threading.Lock()
-
-def configure(conf):
- conf.find_program('strace')
-
-def task_method(func):
- # Decorator function to bind/replace methods on the base Task class
- #
- # The methods Task.exec_command and Task.sig_implicit_deps already exists and are rarely overridden
- # we thus expect that we are the only ones doing this
- try:
- setattr(Task.Task, 'nostrace_%s' % func.__name__, getattr(Task.Task, func.__name__))
- except AttributeError:
- pass
- setattr(Task.Task, func.__name__, func)
- return func
-
-@task_method
-def get_strace_file(self):
- try:
- return self.strace_file
- except AttributeError:
- pass
-
- if self.outputs:
- ret = self.outputs[0].abspath() + '.strace'
- else:
- ret = '%s%s%d%s' % (self.generator.bld.bldnode.abspath(), os.sep, id(self), '.strace')
- self.strace_file = ret
- return ret
-
-@task_method
-def get_strace_args(self):
- return (self.env.STRACE or ['strace']) + ['-e', TRACECALLS, '-f', '-o', self.get_strace_file()]
-
-@task_method
-def exec_command(self, cmd, **kw):
- bld = self.generator.bld
- if not 'cwd' in kw:
- kw['cwd'] = self.get_cwd()
-
- args = self.get_strace_args()
- fname = self.get_strace_file()
- if isinstance(cmd, list):
- cmd = args + cmd
- else:
- cmd = '%s %s' % (' '.join(args), cmd)
-
- try:
- ret = bld.exec_command(cmd, **kw)
- finally:
- if not ret:
- self.parse_strace_deps(fname, kw['cwd'])
- return ret
-
-@task_method
-def sig_implicit_deps(self):
- # bypass the scanner functions
- return
-
-@task_method
-def parse_strace_deps(self, path, cwd):
- # uncomment the following line to disable the dependencies and force a file scan
- # return
- try:
- cnt = Utils.readf(path)
- finally:
- try:
- os.remove(path)
- except OSError:
- pass
-
- if not isinstance(cwd, str):
- cwd = cwd.abspath()
-
- nodes = []
- bld = self.generator.bld
- try:
- cache = bld.strace_cache
- except AttributeError:
- cache = bld.strace_cache = {}
-
- # chdir and relative paths
- pid_to_cwd = {}
-
- global BANNED
- done = set()
- for m in re.finditer(re_lines, cnt):
- # scraping the output of strace
- pid = m.group('pid')
- if m.group('npid'):
- npid = m.group('npid')
- pid_to_cwd[npid] = pid_to_cwd.get(pid, cwd)
- continue
-
- p = m.group('path').replace('\\"', '"')
-
- if p == '.' or m.group().find('= -1 ENOENT') > -1:
- # just to speed it up a bit
- continue
-
- if not os.path.isabs(p):
- p = os.path.join(pid_to_cwd.get(pid, cwd), p)
-
- call = m.group('call')
- if call == 'chdir':
- pid_to_cwd[pid] = p
- continue
-
- if p in done:
- continue
- done.add(p)
-
- for x in BANNED:
- if p.startswith(x):
- break
- else:
- if p.endswith('/') or os.path.isdir(p):
- continue
-
- try:
- node = cache[p]
- except KeyError:
- strace_lock.acquire()
- try:
- cache[p] = node = bld.root.find_node(p)
- if not node:
- continue
- finally:
- strace_lock.release()
- nodes.append(node)
-
- # record the dependencies then force the task signature recalculation for next time
- if Logs.verbose:
- Logs.debug('deps: real scanner for %r returned %r', self, nodes)
- bld = self.generator.bld
- bld.node_deps[self.uid()] = nodes
- bld.raw_deps[self.uid()] = []
- try:
- del self.cache_sig
- except AttributeError:
- pass
- self.signature()
-
diff --git a/waflib/extras/swig.py b/waflib/extras/swig.py
deleted file mode 100644
index fd3d6d2..0000000
--- a/waflib/extras/swig.py
+++ /dev/null
@@ -1,237 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Petar Forai
-# Thomas Nagy 2008-2010 (ita)
-
-import re
-from waflib import Task, Logs
-from waflib.TaskGen import extension, feature, after_method
-from waflib.Configure import conf
-from waflib.Tools import c_preproc
-
-"""
-tasks have to be added dynamically:
-- swig interface files may be created at runtime
-- the module name may be unknown in advance
-"""
-
-SWIG_EXTS = ['.swig', '.i']
-
-re_module = re.compile('%module(?:\s*\(.*\))?\s+(.+)', re.M)
-
-re_1 = re.compile(r'^%module.*?\s+([\w]+)\s*?$', re.M)
-re_2 = re.compile('[#%]include [<"](.*)[">]', re.M)
-
-class swig(Task.Task):
- color = 'BLUE'
- run_str = '${SWIG} ${SWIGFLAGS} ${SWIGPATH_ST:INCPATHS} ${SWIGDEF_ST:DEFINES} ${SRC}'
- ext_out = ['.h'] # might produce .h files although it is not mandatory
- vars = ['SWIG_VERSION', 'SWIGDEPS']
-
- def runnable_status(self):
- for t in self.run_after:
- if not t.hasrun:
- return Task.ASK_LATER
-
- if not getattr(self, 'init_outputs', None):
- self.init_outputs = True
- if not getattr(self, 'module', None):
- # search the module name
- txt = self.inputs[0].read()
- m = re_module.search(txt)
- if not m:
- raise ValueError("could not find the swig module name")
- self.module = m.group(1)
-
- swig_c(self)
-
- # add the language-specific output files as nodes
- # call funs in the dict swig_langs
- for x in self.env['SWIGFLAGS']:
- # obtain the language
- x = x[1:]
- try:
- fun = swig_langs[x]
- except KeyError:
- pass
- else:
- fun(self)
-
- return super(swig, self).runnable_status()
-
- def scan(self):
- "scan for swig dependencies, climb the .i files"
- lst_src = []
-
- seen = []
- missing = []
- to_see = [self.inputs[0]]
-
- while to_see:
- node = to_see.pop(0)
- if node in seen:
- continue
- seen.append(node)
- lst_src.append(node)
-
- # read the file
- code = node.read()
- code = c_preproc.re_nl.sub('', code)
- code = c_preproc.re_cpp.sub(c_preproc.repl, code)
-
- # find .i files and project headers
- names = re_2.findall(code)
- for n in names:
- for d in self.generator.includes_nodes + [node.parent]:
- u = d.find_resource(n)
- if u:
- to_see.append(u)
- break
- else:
- missing.append(n)
- return (lst_src, missing)
-
-# provide additional language processing
-swig_langs = {}
-def swigf(fun):
- swig_langs[fun.__name__.replace('swig_', '')] = fun
- return fun
-swig.swigf = swigf
-
-def swig_c(self):
- ext = '.swigwrap_%d.c' % self.generator.idx
- flags = self.env['SWIGFLAGS']
- if '-c++' in flags:
- ext += 'xx'
- out_node = self.inputs[0].parent.find_or_declare(self.module + ext)
-
- if '-c++' in flags:
- c_tsk = self.generator.cxx_hook(out_node)
- else:
- c_tsk = self.generator.c_hook(out_node)
-
- c_tsk.set_run_after(self)
-
- # transfer weights from swig task to c task
- if getattr(self, 'weight', None):
- c_tsk.weight = self.weight
- if getattr(self, 'tree_weight', None):
- c_tsk.tree_weight = self.tree_weight
-
- try:
- self.more_tasks.append(c_tsk)
- except AttributeError:
- self.more_tasks = [c_tsk]
-
- try:
- ltask = self.generator.link_task
- except AttributeError:
- pass
- else:
- ltask.set_run_after(c_tsk)
- # setting input nodes does not declare the build order
- # because the build already started, but it sets
- # the dependency to enable rebuilds
- ltask.inputs.append(c_tsk.outputs[0])
-
- self.outputs.append(out_node)
-
- if not '-o' in self.env['SWIGFLAGS']:
- self.env.append_value('SWIGFLAGS', ['-o', self.outputs[0].abspath()])
-
-@swigf
-def swig_python(tsk):
- node = tsk.inputs[0].parent
- if tsk.outdir:
- node = tsk.outdir
- tsk.set_outputs(node.find_or_declare(tsk.module+'.py'))
-
-@swigf
-def swig_ocaml(tsk):
- node = tsk.inputs[0].parent
- if tsk.outdir:
- node = tsk.outdir
- tsk.set_outputs(node.find_or_declare(tsk.module+'.ml'))
- tsk.set_outputs(node.find_or_declare(tsk.module+'.mli'))
-
-@extension(*SWIG_EXTS)
-def i_file(self, node):
- # the task instance
- tsk = self.create_task('swig')
- tsk.set_inputs(node)
- tsk.module = getattr(self, 'swig_module', None)
-
- flags = self.to_list(getattr(self, 'swig_flags', []))
- tsk.env.append_value('SWIGFLAGS', flags)
-
- tsk.outdir = None
- if '-outdir' in flags:
- outdir = flags[flags.index('-outdir')+1]
- outdir = tsk.generator.bld.bldnode.make_node(outdir)
- outdir.mkdir()
- tsk.outdir = outdir
-
-@feature('c', 'cxx', 'd', 'fc', 'asm')
-@after_method('apply_link', 'process_source')
-def enforce_swig_before_link(self):
- try:
- link_task = self.link_task
- except AttributeError:
- pass
- else:
- for x in self.tasks:
- if x.__class__.__name__ == 'swig':
- link_task.run_after.add(x)
-
-@conf
-def check_swig_version(conf, minver=None):
- """
- Check if the swig tool is found matching a given minimum version.
- minver should be a tuple, eg. to check for swig >= 1.3.28 pass (1,3,28) as minver.
-
- If successful, SWIG_VERSION is defined as 'MAJOR.MINOR'
- (eg. '1.3') of the actual swig version found.
-
- :param minver: minimum version
- :type minver: tuple of int
- :return: swig version
- :rtype: tuple of int
- """
- assert minver is None or isinstance(minver, tuple)
- swigbin = conf.env['SWIG']
- if not swigbin:
- conf.fatal('could not find the swig executable')
-
- # Get swig version string
- cmd = swigbin + ['-version']
- Logs.debug('swig: Running swig command %r', cmd)
- reg_swig = re.compile(r'SWIG Version\s(.*)', re.M)
- swig_out = conf.cmd_and_log(cmd)
- swigver_tuple = tuple([int(s) for s in reg_swig.findall(swig_out)[0].split('.')])
-
- # Compare swig version with the minimum required
- result = (minver is None) or (swigver_tuple >= minver)
-
- if result:
- # Define useful environment variables
- swigver = '.'.join([str(x) for x in swigver_tuple[:2]])
- conf.env['SWIG_VERSION'] = swigver
-
- # Feedback
- swigver_full = '.'.join(map(str, swigver_tuple[:3]))
- if minver is None:
- conf.msg('Checking for swig version', swigver_full)
- else:
- minver_str = '.'.join(map(str, minver))
- conf.msg('Checking for swig version >= %s' % (minver_str,), swigver_full, color=result and 'GREEN' or 'YELLOW')
-
- if not result:
- conf.fatal('The swig version is too old, expecting %r' % (minver,))
-
- return swigver_tuple
-
-def configure(conf):
- conf.find_program('swig', var='SWIG')
- conf.env.SWIGPATH_ST = '-I%s'
- conf.env.SWIGDEF_ST = '-D%s'
-
diff --git a/waflib/extras/syms.py b/waflib/extras/syms.py
deleted file mode 100644
index dfa0059..0000000
--- a/waflib/extras/syms.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-This tool supports the export_symbols_regex to export the symbols in a shared library.
-by default, all symbols are exported by gcc, and nothing by msvc.
-to use the tool, do something like:
-
-def build(ctx):
- ctx(features='c cshlib syms', source='a.c b.c', export_symbols_regex='mylib_.*', target='testlib')
-
-only the symbols starting with 'mylib_' will be exported.
-"""
-
-import re
-from waflib.Context import STDOUT
-from waflib.Task import Task
-from waflib.Errors import WafError
-from waflib.TaskGen import feature, after_method
-
-class gen_sym(Task):
- def run(self):
- obj = self.inputs[0]
- kw = {}
-
- reg = getattr(self.generator, 'export_symbols_regex', '.+?')
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- re_nm = re.compile(r'External\s+\|\s+_(?P<symbol>%s)\b' % reg)
- cmd = (self.env.DUMPBIN or ['dumpbin']) + ['/symbols', obj.abspath()]
- else:
- if self.env.DEST_BINFMT == 'pe': #gcc uses nm, and has a preceding _ on windows
- re_nm = re.compile(r'(T|D)\s+_(?P<symbol>%s)\b' % reg)
- elif self.env.DEST_BINFMT=='mac-o':
- re_nm=re.compile(r'(T|D)\s+(?P<symbol>_?%s)\b' % reg)
- else:
- re_nm = re.compile(r'(T|D)\s+(?P<symbol>%s)\b' % reg)
- cmd = (self.env.NM or ['nm']) + ['-g', obj.abspath()]
- syms = [m.group('symbol') for m in re_nm.finditer(self.generator.bld.cmd_and_log(cmd, quiet=STDOUT, **kw))]
- self.outputs[0].write('%r' % syms)
-
-class compile_sym(Task):
- def run(self):
- syms = {}
- for x in self.inputs:
- slist = eval(x.read())
- for s in slist:
- syms[s] = 1
- lsyms = list(syms.keys())
- lsyms.sort()
- if self.env.DEST_BINFMT == 'pe':
- self.outputs[0].write('EXPORTS\n' + '\n'.join(lsyms))
- elif self.env.DEST_BINFMT == 'elf':
- self.outputs[0].write('{ global:\n' + ';\n'.join(lsyms) + ";\nlocal: *; };\n")
- elif self.env.DEST_BINFMT=='mac-o':
- self.outputs[0].write('\n'.join(lsyms) + '\n')
- else:
- raise WafError('NotImplemented')
-
-@feature('syms')
-@after_method('process_source', 'process_use', 'apply_link', 'process_uselib_local', 'propagate_uselib_vars')
-def do_the_symbol_stuff(self):
- def_node = self.path.find_or_declare(getattr(self, 'sym_file', self.target + '.def'))
- compiled_tasks = getattr(self, 'compiled_tasks', None)
- if compiled_tasks:
- ins = [x.outputs[0] for x in compiled_tasks]
- self.gen_sym_tasks = [self.create_task('gen_sym', x, x.change_ext('.%d.sym' % self.idx)) for x in ins]
- self.create_task('compile_sym', [x.outputs[0] for x in self.gen_sym_tasks], def_node)
-
- link_task = getattr(self, 'link_task', None)
- if link_task:
- self.link_task.dep_nodes.append(def_node)
-
- if 'msvc' in (self.env.CC_NAME, self.env.CXX_NAME):
- self.link_task.env.append_value('LINKFLAGS', ['/def:' + def_node.bldpath()])
- elif self.env.DEST_BINFMT == 'pe':
- # gcc on windows takes *.def as an additional input
- self.link_task.inputs.append(def_node)
- elif self.env.DEST_BINFMT == 'elf':
- self.link_task.env.append_value('LINKFLAGS', ['-Wl,-version-script', '-Wl,' + def_node.bldpath()])
- elif self.env.DEST_BINFMT=='mac-o':
- self.link_task.env.append_value('LINKFLAGS',['-Wl,-exported_symbols_list,' + def_node.bldpath()])
- else:
- raise WafError('NotImplemented')
-
diff --git a/waflib/extras/ticgt.py b/waflib/extras/ticgt.py
deleted file mode 100644
index f43a7ea..0000000
--- a/waflib/extras/ticgt.py
+++ /dev/null
@@ -1,300 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-
-# Texas Instruments code generator support (experimental)
-# When reporting issues, please directly assign the bug to the maintainer.
-
-__author__ = __maintainer__ = "Jérôme Carretero <cJ-waf@zougloub.eu>"
-__copyright__ = "Jérôme Carretero, 2012"
-
-"""
-TI cgt6x is a compiler suite for TI DSPs.
-
-The toolchain does pretty weird things, and I'm sure I'm missing some of them.
-But still, the tool saves time.
-
-What this tool does is:
-
-- create a TI compiler environment
-- create TI compiler features, to handle some specifics about this compiler
- It has a few idiosyncracies, such as not giving the liberty of the .o file names
-- automatically activate them when using the TI compiler
-- handle the tconf tool
- The tool
-
-TODO:
-
-- the set_platform_flags() function is not nice
-- more tests
-- broaden tool scope, if needed
-
-"""
-
-import os, re
-
-from waflib import Options, Utils, Task, TaskGen
-from waflib.Tools import c, ccroot, c_preproc
-from waflib.Configure import conf
-from waflib.TaskGen import feature, before_method
-from waflib.Tools.c import cprogram
-
-opj = os.path.join
-
-@conf
-def find_ticc(conf):
- conf.find_program(['cl6x'], var='CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
- conf.env.CC_NAME = 'ticc'
-
-@conf
-def find_tild(conf):
- conf.find_program(['lnk6x'], var='LINK_CC', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
- conf.env.LINK_CC_NAME = 'tild'
-
-@conf
-def find_tiar(conf):
- conf.find_program(['ar6x'], var='AR', path_list=opj(getattr(Options.options, 'ti-cgt-dir', ""), 'bin'))
- conf.env.AR_NAME = 'tiar'
- conf.env.ARFLAGS = 'qru'
-
-@conf
-def ticc_common_flags(conf):
- v = conf.env
-
- if not v['LINK_CC']:
- v['LINK_CC'] = v['CC']
- v['CCLNK_SRC_F'] = []
- v['CCLNK_TGT_F'] = ['-o']
- v['CPPPATH_ST'] = '-I%s'
- v['DEFINES_ST'] = '-d%s'
-
- v['LIB_ST'] = '-l%s' # template for adding libs
- v['LIBPATH_ST'] = '-i%s' # template for adding libpaths
- v['STLIB_ST'] = '-l=%s.lib'
- v['STLIBPATH_ST'] = '-i%s'
-
- # program
- v['cprogram_PATTERN'] = '%s.out'
-
- # static lib
- #v['LINKFLAGS_cstlib'] = ['-Wl,-Bstatic']
- v['cstlib_PATTERN'] = '%s.lib'
-
-def configure(conf):
- v = conf.env
- v.TI_CGT_DIR = getattr(Options.options, 'ti-cgt-dir', "")
- v.TI_DSPLINK_DIR = getattr(Options.options, 'ti-dsplink-dir', "")
- v.TI_BIOSUTILS_DIR = getattr(Options.options, 'ti-biosutils-dir', "")
- v.TI_DSPBIOS_DIR = getattr(Options.options, 'ti-dspbios-dir', "")
- v.TI_XDCTOOLS_DIR = getattr(Options.options, 'ti-xdctools-dir', "")
- conf.find_ticc()
- conf.find_tiar()
- conf.find_tild()
- conf.ticc_common_flags()
- conf.cc_load_tools()
- conf.cc_add_flags()
- conf.link_add_flags()
- conf.find_program(['tconf'], var='TCONF', path_list=v.TI_XDCTOOLS_DIR)
-
- conf.env.TCONF_INCLUDES += [
- opj(conf.env.TI_DSPBIOS_DIR, 'packages'),
- ]
-
- conf.env.INCLUDES += [
- opj(conf.env.TI_CGT_DIR, 'include'),
- ]
-
- conf.env.LIBPATH += [
- opj(conf.env.TI_CGT_DIR, "lib"),
- ]
-
- conf.env.INCLUDES_DSPBIOS += [
- opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'include'),
- ]
-
- conf.env.LIBPATH_DSPBIOS += [
- opj(conf.env.TI_DSPBIOS_DIR, 'packages', 'ti', 'bios', 'lib'),
- ]
-
- conf.env.INCLUDES_DSPLINK += [
- opj(conf.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc'),
- ]
-
-@conf
-def ti_set_debug(cfg, debug=1):
- """
- Sets debug flags for the compiler.
-
- TODO:
- - for each TI CFLAG/INCLUDES/LINKFLAGS/LIBPATH replace RELEASE by DEBUG
- - -g --no_compress
- """
- if debug:
- cfg.env.CFLAGS += "-d_DEBUG -dDEBUG -dDDSP_DEBUG".split()
-
-@conf
-def ti_dsplink_set_platform_flags(cfg, splat, dsp, dspbios_ver, board):
- """
- Sets the INCLUDES, LINKFLAGS for DSPLINK and TCONF_INCLUDES
- For the specific hardware.
-
- Assumes that DSPLINK was built in its own folder.
-
- :param splat: short platform name (eg. OMAPL138)
- :param dsp: DSP name (eg. 674X)
- :param dspbios_ver: string identifying DspBios version (eg. 5.XX)
- :param board: board name (eg. OMAPL138GEM)
-
- """
- d1 = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver)
- d = opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', 'DspBios', dspbios_ver, board)
- cfg.env.TCONF_INCLUDES += [d1, d]
- cfg.env.INCLUDES_DSPLINK += [
- opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'inc', dsp),
- d,
- ]
-
- cfg.env.LINKFLAGS_DSPLINK += [
- opj(cfg.env.TI_DSPLINK_DIR, 'dsplink', 'dsp', 'export', 'BIN', 'DspBios', splat, board+'_0', 'RELEASE', 'dsplink%s.lib' % x)
- for x in ('', 'pool', 'mpcs', 'mplist', 'msg', 'data', 'notify', 'ringio')
- ]
-
-
-def options(opt):
- opt.add_option('--with-ti-cgt', type='string', dest='ti-cgt-dir', help = 'Specify alternate cgt root folder', default="")
- opt.add_option('--with-ti-biosutils', type='string', dest='ti-biosutils-dir', help = 'Specify alternate biosutils folder', default="")
- opt.add_option('--with-ti-dspbios', type='string', dest='ti-dspbios-dir', help = 'Specify alternate dspbios folder', default="")
- opt.add_option('--with-ti-dsplink', type='string', dest='ti-dsplink-dir', help = 'Specify alternate dsplink folder', default="")
- opt.add_option('--with-ti-xdctools', type='string', dest='ti-xdctools-dir', help = 'Specify alternate xdctools folder', default="")
-
-class ti_cprogram(cprogram):
- """
- Link object files into a c program
-
- Changes:
-
- - the linked executable to have a relative path (because we can)
- - put the LIBPATH first
- """
- run_str = '${LINK_CC} ${LIBPATH_ST:LIBPATH} ${LIB_ST:LIB} ${LINKFLAGS} ${CCLNK_SRC_F}${SRC} ${CCLNK_TGT_F}${TGT[0].bldpath()} ${RPATH_ST:RPATH} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${FRAMEWORK_ST:FRAMEWORK} ${ARCH_ST:ARCH} ${STLIB_MARKER} ${STLIBPATH_ST:STLIBPATH} ${STLIB_ST:STLIB} ${SHLIB_MARKER} '
-
-@feature("c")
-@before_method('apply_link')
-def use_ti_cprogram(self):
- """
- Automatically uses ti_cprogram link process
- """
- if 'cprogram' in self.features and self.env.CC_NAME == 'ticc':
- self.features.insert(0, "ti_cprogram")
-
-class ti_c(Task.Task):
- """
- Compile task for the TI codegen compiler
-
- This compiler does not allow specifying the output file name, only the output path.
-
- """
- "Compile C files into object files"
- run_str = '${CC} ${ARCH_ST:ARCH} ${CFLAGS} ${FRAMEWORKPATH_ST:FRAMEWORKPATH} ${CPPPATH_ST:INCPATHS} ${DEFINES_ST:DEFINES} ${SRC} -c ${OUT} ${CPPFLAGS}'
- vars = ['CCDEPS'] # unused variable to depend on, just in case
- ext_in = ['.h'] # set the build order easily by using ext_out=['.h']
- scan = c_preproc.scan
-
-def create_compiled_task(self, name, node):
- """
- Overrides ccroot.create_compiled_task to support ti_c
- """
- out = '%s' % (node.change_ext('.obj').name)
- if self.env.CC_NAME == 'ticc':
- name = 'ti_c'
- task = self.create_task(name, node, node.parent.find_or_declare(out))
- self.env.OUT = '-fr%s' % (node.parent.get_bld().abspath())
- try:
- self.compiled_tasks.append(task)
- except AttributeError:
- self.compiled_tasks = [task]
- return task
-
-@TaskGen.extension('.c')
-def c_hook(self, node):
- "Bind the c file extension to the creation of a :py:class:`waflib.Tools.c.c` instance"
- if self.env.CC_NAME == 'ticc':
- return create_compiled_task(self, 'ti_c', node)
- else:
- return self.create_compiled_task('c', node)
-
-
-@feature("ti-tconf")
-@before_method('process_source')
-def apply_tconf(self):
- sources = [x.get_src() for x in self.to_nodes(self.source, path=self.path.get_src())]
- node = sources[0]
- assert(sources[0].name.endswith(".tcf"))
- if len(sources) > 1:
- assert(sources[1].name.endswith(".cmd"))
-
- target = getattr(self, 'target', self.source)
- target_node = node.get_bld().parent.find_or_declare(node.name)
-
- procid = "%d" % int(getattr(self, 'procid', 0))
-
- importpaths = []
- includes = Utils.to_list(getattr(self, 'includes', []))
- for x in includes + self.env.TCONF_INCLUDES:
- if x == os.path.abspath(x):
- importpaths.append(x)
- else:
- relpath = self.path.find_node(x).path_from(target_node.parent)
- importpaths.append(relpath)
-
- task = self.create_task('ti_tconf', sources, target_node.change_ext('.cdb'))
- task.path = self.path
- task.includes = includes
- task.cwd = target_node.parent.abspath()
- task.env = self.env.derive()
- task.env["TCONFSRC"] = node.path_from(target_node.parent)
- task.env["TCONFINC"] = '-Dconfig.importPath=%s' % ";".join(importpaths)
- task.env['TCONFPROGNAME'] = '-Dconfig.programName=%s' % target
- task.env['PROCID'] = procid
- task.outputs = [
- target_node.change_ext("cfg_c.c"),
- target_node.change_ext("cfg.s62"),
- target_node.change_ext("cfg.cmd"),
- ]
-
- create_compiled_task(self, 'ti_c', task.outputs[1])
- ctask = create_compiled_task(self, 'ti_c', task.outputs[0])
- ctask.env = self.env.derive()
-
- self.add_those_o_files(target_node.change_ext("cfg.cmd"))
- if len(sources) > 1:
- self.add_those_o_files(sources[1])
- self.source = []
-
-re_tconf_include = re.compile(r'(?P<type>utils\.importFile)\("(?P<file>.*)"\)',re.M)
-class ti_tconf(Task.Task):
- run_str = '${TCONF} ${TCONFINC} ${TCONFPROGNAME} ${TCONFSRC} ${PROCID}'
- color = 'PINK'
-
- def scan(self):
- includes = Utils.to_list(getattr(self, 'includes', []))
-
- def deps(node):
- nodes, names = [], []
- if node:
- code = Utils.readf(node.abspath())
- for match in re_tconf_include.finditer(code):
- path = match.group('file')
- if path:
- for x in includes:
- filename = opj(x, path)
- fi = self.path.find_resource(filename)
- if fi:
- subnodes, subnames = deps(fi)
- nodes += subnodes
- names += subnames
- nodes.append(fi)
- names.append(path)
- break
- return nodes, names
- return deps(self.inputs[0])
-
diff --git a/waflib/extras/unity.py b/waflib/extras/unity.py
deleted file mode 100644
index 78128ed..0000000
--- a/waflib/extras/unity.py
+++ /dev/null
@@ -1,108 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Compile whole groups of C/C++ files at once
-(C and C++ files are processed independently though).
-
-To enable globally::
-
- def options(opt):
- opt.load('compiler_cxx')
- def build(bld):
- bld.load('compiler_cxx unity')
-
-To enable for specific task generators only::
-
- def build(bld):
- bld(features='c cprogram unity', source='main.c', ...)
-
-The file order is often significant in such builds, so it can be
-necessary to adjust the order of source files and the batch sizes.
-To control the amount of files processed in a batch per target
-(the default is 50)::
-
- def build(bld):
- bld(features='c cprogram', unity_size=20)
-
-"""
-
-from waflib import Task, Options
-from waflib.Tools import c_preproc
-from waflib import TaskGen
-
-MAX_BATCH = 50
-
-EXTS_C = ('.c',)
-EXTS_CXX = ('.cpp','.cc','.cxx','.C','.c++')
-
-def options(opt):
- global MAX_BATCH
- opt.add_option('--batchsize', action='store', dest='batchsize', type='int', default=MAX_BATCH,
- help='default unity batch size (0 disables unity builds)')
-
-@TaskGen.taskgen_method
-def batch_size(self):
- default = getattr(Options.options, 'batchsize', MAX_BATCH)
- if default < 1:
- return 0
- return getattr(self, 'unity_size', default)
-
-
-class unity(Task.Task):
- color = 'BLUE'
- scan = c_preproc.scan
- def to_include(self, node):
- ret = node.path_from(self.outputs[0].parent)
- ret = ret.replace('\\', '\\\\').replace('"', '\\"')
- return ret
- def run(self):
- lst = ['#include "%s"\n' % self.to_include(node) for node in self.inputs]
- txt = ''.join(lst)
- self.outputs[0].write(txt)
- def __str__(self):
- node = self.outputs[0]
- return node.path_from(node.ctx.launch_node())
-
-def bind_unity(obj, cls_name, exts):
- if not 'mappings' in obj.__dict__:
- obj.mappings = dict(obj.mappings)
-
- for j in exts:
- fun = obj.mappings[j]
- if fun.__name__ == 'unity_fun':
- raise ValueError('Attempt to bind unity mappings multiple times %r' % j)
-
- def unity_fun(self, node):
- cnt = self.batch_size()
- if cnt <= 1:
- return fun(self, node)
- x = getattr(self, 'master_%s' % cls_name, None)
- if not x or len(x.inputs) >= cnt:
- x = self.create_task('unity')
- setattr(self, 'master_%s' % cls_name, x)
-
- cnt_cur = getattr(self, 'cnt_%s' % cls_name, 0)
- c_node = node.parent.find_or_declare('unity_%s_%d_%d.%s' % (self.idx, cnt_cur, cnt, cls_name))
- x.outputs = [c_node]
- setattr(self, 'cnt_%s' % cls_name, cnt_cur + 1)
- fun(self, c_node)
- x.inputs.append(node)
-
- obj.mappings[j] = unity_fun
-
-@TaskGen.feature('unity')
-@TaskGen.before('process_source')
-def single_unity(self):
- lst = self.to_list(self.features)
- if 'c' in lst:
- bind_unity(self, 'c', EXTS_C)
- if 'cxx' in lst:
- bind_unity(self, 'cxx', EXTS_CXX)
-
-def build(bld):
- if bld.env.CC_NAME:
- bind_unity(TaskGen.task_gen, 'c', EXTS_C)
- if bld.env.CXX_NAME:
- bind_unity(TaskGen.task_gen, 'cxx', EXTS_CXX)
-
diff --git a/waflib/extras/use_config.py b/waflib/extras/use_config.py
deleted file mode 100644
index ef5129f..0000000
--- a/waflib/extras/use_config.py
+++ /dev/null
@@ -1,185 +0,0 @@
-#!/usr/bin/env python
-# coding=utf-8
-# Mathieu Courtois - EDF R&D, 2013 - http://www.code-aster.org
-
-"""
-When a project has a lot of options the 'waf configure' command line can be
-very long and it becomes a cause of error.
-This tool provides a convenient way to load a set of configuration parameters
-from a local file or from a remote url.
-
-The configuration parameters are stored in a Python file that is imported as
-an extra waf tool can be.
-
-Example:
-$ waf configure --use-config-dir=http://www.anywhere.org --use-config=myconf1 ...
-
-The file 'myconf1' will be downloaded from 'http://www.anywhere.org'
-(or 'http://www.anywhere.org/wafcfg').
-If the files are available locally, it could be:
-$ waf configure --use-config-dir=/somewhere/myconfigurations --use-config=myconf1 ...
-
-The configuration of 'myconf1.py' is automatically loaded by calling
-its 'configure' function. In this example, it defines environment variables and
-set options:
-
-def configure(self):
- self.env['CC'] = 'gcc-4.8'
- self.env.append_value('LIBPATH', [...])
- self.options.perlbinary = '/usr/local/bin/perl'
- self.options.pyc = False
-
-The corresponding command line should have been:
-$ CC=gcc-4.8 LIBPATH=... waf configure --nopyc --with-perl-binary=/usr/local/bin/perl
-
-
-This is an extra tool, not bundled with the default waf binary.
-To add the use_config tool to the waf file:
-$ ./waf-light --tools=use_config
-
-When using this tool, the wscript will look like:
-
- def options(opt):
- opt.load('use_config')
-
- def configure(conf):
- conf.load('use_config')
-"""
-
-import sys
-import os.path as osp
-import os
-
-local_repo = ''
-"""Local repository containing additional Waf tools (plugins)"""
-remote_repo = 'https://gitlab.com/ita1024/waf/raw/master/'
-"""
-Remote directory containing downloadable waf tools. The missing tools can be downloaded by using::
-
- $ waf configure --download
-"""
-
-remote_locs = ['waflib/extras', 'waflib/Tools']
-"""
-Remote directories for use with :py:const:`waflib.extras.use_config.remote_repo`
-"""
-
-
-try:
- from urllib import request
-except ImportError:
- from urllib import urlopen
-else:
- urlopen = request.urlopen
-
-
-from waflib import Errors, Context, Logs, Utils, Options, Configure
-
-try:
- from urllib.parse import urlparse
-except ImportError:
- from urlparse import urlparse
-
-
-
-
-DEFAULT_DIR = 'wafcfg'
-# add first the current wafcfg subdirectory
-sys.path.append(osp.abspath(DEFAULT_DIR))
-
-def options(self):
- group = self.add_option_group('configure options')
- group.add_option('--download', dest='download', default=False, action='store_true', help='try to download the tools if missing')
-
- group.add_option('--use-config', action='store', default=None,
- metavar='CFG', dest='use_config',
- help='force the configuration parameters by importing '
- 'CFG.py. Several modules may be provided (comma '
- 'separated).')
- group.add_option('--use-config-dir', action='store', default=DEFAULT_DIR,
- metavar='CFG_DIR', dest='use_config_dir',
- help='path or url where to find the configuration file')
-
-def download_check(node):
- """
- Hook to check for the tools which are downloaded. Replace with your function if necessary.
- """
- pass
-
-
-def download_tool(tool, force=False, ctx=None):
- """
- Download a Waf tool from the remote repository defined in :py:const:`waflib.extras.use_config.remote_repo`::
-
- $ waf configure --download
- """
- for x in Utils.to_list(remote_repo):
- for sub in Utils.to_list(remote_locs):
- url = '/'.join((x, sub, tool + '.py'))
- try:
- web = urlopen(url)
- try:
- if web.getcode() != 200:
- continue
- except AttributeError:
- pass
- except Exception:
- # on python3 urlopen throws an exception
- # python 2.3 does not have getcode and throws an exception to fail
- continue
- else:
- tmp = ctx.root.make_node(os.sep.join((Context.waf_dir, 'waflib', 'extras', tool + '.py')))
- tmp.write(web.read(), 'wb')
- Logs.warn('Downloaded %s from %s', tool, url)
- download_check(tmp)
- try:
- module = Context.load_tool(tool)
- except Exception:
- Logs.warn('The tool %s from %s is unusable', tool, url)
- try:
- tmp.delete()
- except Exception:
- pass
- continue
- return module
-
- raise Errors.WafError('Could not load the Waf tool')
-
-def load_tool(tool, tooldir=None, ctx=None, with_sys_path=True):
- try:
- module = Context.load_tool_default(tool, tooldir, ctx, with_sys_path)
- except ImportError as e:
- if not ctx or not hasattr(Options.options, 'download'):
- Logs.error('Could not load %r during options phase (download unavailable at this point)' % tool)
- raise
- if Options.options.download:
- module = download_tool(tool, ctx=ctx)
- if not module:
- ctx.fatal('Could not load the Waf tool %r or download a suitable replacement from the repository (sys.path %r)\n%s' % (tool, sys.path, e))
- else:
- ctx.fatal('Could not load the Waf tool %r from %r (try the --download option?):\n%s' % (tool, sys.path, e))
- return module
-
-Context.load_tool_default = Context.load_tool
-Context.load_tool = load_tool
-Configure.download_tool = download_tool
-
-def configure(self):
- opts = self.options
- use_cfg = opts.use_config
- if use_cfg is None:
- return
- url = urlparse(opts.use_config_dir)
- kwargs = {}
- if url.scheme:
- kwargs['download'] = True
- kwargs['remote_url'] = url.geturl()
- # search first with the exact url, else try with +'/wafcfg'
- kwargs['remote_locs'] = ['', DEFAULT_DIR]
- tooldir = url.geturl() + ' ' + DEFAULT_DIR
- for cfg in use_cfg.split(','):
- Logs.pprint('NORMAL', "Searching configuration '%s'..." % cfg)
- self.load(cfg, tooldir=tooldir, **kwargs)
- self.start_msg('Checking for configuration')
- self.end_msg(use_cfg)
-
diff --git a/waflib/extras/valadoc.py b/waflib/extras/valadoc.py
deleted file mode 100644
index c50f69e..0000000
--- a/waflib/extras/valadoc.py
+++ /dev/null
@@ -1,140 +0,0 @@
-#! /usr/bin/env python
-# encoding: UTF-8
-# Nicolas Joseph 2009
-
-"""
-ported from waf 1.5:
-TODO: tabs vs spaces
-"""
-
-from waflib import Task, Utils, Errors, Logs
-from waflib.TaskGen import feature
-
-VALADOC_STR = '${VALADOC}'
-
-class valadoc(Task.Task):
- vars = ['VALADOC', 'VALADOCFLAGS']
- color = 'BLUE'
- after = ['cprogram', 'cstlib', 'cshlib', 'cxxprogram', 'cxxstlib', 'cxxshlib']
- quiet = True # no outputs .. this is weird
-
- def __init__(self, *k, **kw):
- Task.Task.__init__(self, *k, **kw)
- self.output_dir = ''
- self.doclet = ''
- self.package_name = ''
- self.package_version = ''
- self.files = []
- self.vapi_dirs = []
- self.protected = True
- self.private = False
- self.inherit = False
- self.deps = False
- self.vala_defines = []
- self.vala_target_glib = None
- self.enable_non_null_experimental = False
- self.force = False
-
- def run(self):
- if not self.env['VALADOCFLAGS']:
- self.env['VALADOCFLAGS'] = ''
- cmd = [Utils.subst_vars(VALADOC_STR, self.env)]
- cmd.append ('-o %s' % self.output_dir)
- if getattr(self, 'doclet', None):
- cmd.append ('--doclet %s' % self.doclet)
- cmd.append ('--package-name %s' % self.package_name)
- if getattr(self, 'package_version', None):
- cmd.append ('--package-version %s' % self.package_version)
- if getattr(self, 'packages', None):
- for package in self.packages:
- cmd.append ('--pkg %s' % package)
- if getattr(self, 'vapi_dirs', None):
- for vapi_dir in self.vapi_dirs:
- cmd.append ('--vapidir %s' % vapi_dir)
- if not getattr(self, 'protected', None):
- cmd.append ('--no-protected')
- if getattr(self, 'private', None):
- cmd.append ('--private')
- if getattr(self, 'inherit', None):
- cmd.append ('--inherit')
- if getattr(self, 'deps', None):
- cmd.append ('--deps')
- if getattr(self, 'vala_defines', None):
- for define in self.vala_defines:
- cmd.append ('--define %s' % define)
- if getattr(self, 'vala_target_glib', None):
- cmd.append ('--target-glib=%s' % self.vala_target_glib)
- if getattr(self, 'enable_non_null_experimental', None):
- cmd.append ('--enable-non-null-experimental')
- if getattr(self, 'force', None):
- cmd.append ('--force')
- cmd.append (' '.join ([x.abspath() for x in self.files]))
- return self.generator.bld.exec_command(' '.join(cmd))
-
-@feature('valadoc')
-def process_valadoc(self):
- """
- Generate API documentation from Vala source code with valadoc
-
- doc = bld(
- features = 'valadoc',
- output_dir = '../doc/html',
- package_name = 'vala-gtk-example',
- package_version = '1.0.0',
- packages = 'gtk+-2.0',
- vapi_dirs = '../vapi',
- force = True
- )
-
- path = bld.path.find_dir ('../src')
- doc.files = path.ant_glob (incl='**/*.vala')
- """
-
- task = self.create_task('valadoc')
- if getattr(self, 'output_dir', None):
- task.output_dir = self.path.find_or_declare(self.output_dir).abspath()
- else:
- Errors.WafError('no output directory')
- if getattr(self, 'doclet', None):
- task.doclet = self.doclet
- else:
- Errors.WafError('no doclet directory')
- if getattr(self, 'package_name', None):
- task.package_name = self.package_name
- else:
- Errors.WafError('no package name')
- if getattr(self, 'package_version', None):
- task.package_version = self.package_version
- if getattr(self, 'packages', None):
- task.packages = Utils.to_list(self.packages)
- if getattr(self, 'vapi_dirs', None):
- vapi_dirs = Utils.to_list(self.vapi_dirs)
- for vapi_dir in vapi_dirs:
- try:
- task.vapi_dirs.append(self.path.find_dir(vapi_dir).abspath())
- except AttributeError:
- Logs.warn('Unable to locate Vala API directory: %r', vapi_dir)
- if getattr(self, 'files', None):
- task.files = self.files
- else:
- Errors.WafError('no input file')
- if getattr(self, 'protected', None):
- task.protected = self.protected
- if getattr(self, 'private', None):
- task.private = self.private
- if getattr(self, 'inherit', None):
- task.inherit = self.inherit
- if getattr(self, 'deps', None):
- task.deps = self.deps
- if getattr(self, 'vala_defines', None):
- task.vala_defines = Utils.to_list(self.vala_defines)
- if getattr(self, 'vala_target_glib', None):
- task.vala_target_glib = self.vala_target_glib
- if getattr(self, 'enable_non_null_experimental', None):
- task.enable_non_null_experimental = self.enable_non_null_experimental
- if getattr(self, 'force', None):
- task.force = self.force
-
-def configure(conf):
- conf.find_program('valadoc', errmsg='You must install valadoc <http://live.gnome.org/Valadoc> for generate the API documentation')
-
diff --git a/waflib/extras/waf_xattr.py b/waflib/extras/waf_xattr.py
deleted file mode 100644
index 351dd63..0000000
--- a/waflib/extras/waf_xattr.py
+++ /dev/null
@@ -1,150 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Use extended attributes instead of database files
-
-1. Input files will be made writable
-2. This is only for systems providing extended filesystem attributes
-3. By default, hashes are calculated only if timestamp/size change (HASH_CACHE below)
-4. The module enables "deep_inputs" on all tasks by propagating task signatures
-5. This module also skips task signature comparisons for task code changes due to point 4.
-6. This module is for Python3/Linux only, but it could be extended to Python2/other systems
- using the xattr library
-7. For projects in which tasks always declare output files, it should be possible to
- store the rest of build context attributes on output files (imp_sigs, raw_deps and node_deps)
- but this is not done here
-
-On a simple C++ project benchmark, the variations before and after adding waf_xattr.py were observed:
-total build time: 20s -> 22s
-no-op build time: 2.4s -> 1.8s
-pickle file size: 2.9MB -> 2.6MB
-"""
-
-import os
-from waflib import Logs, Node, Task, Utils, Errors
-from waflib.Task import SKIP_ME, RUN_ME, CANCEL_ME, ASK_LATER, SKIPPED, MISSING
-
-HASH_CACHE = True
-SIG_VAR = 'user.waf.sig'
-SEP = ','.encode()
-TEMPLATE = '%b%d,%d'.encode()
-
-try:
- PermissionError
-except NameError:
- PermissionError = IOError
-
-def getxattr(self):
- return os.getxattr(self.abspath(), SIG_VAR)
-
-def setxattr(self, val):
- os.setxattr(self.abspath(), SIG_VAR, val)
-
-def h_file(self):
- try:
- ret = getxattr(self)
- except OSError:
- if HASH_CACHE:
- st = os.stat(self.abspath())
- mtime = st.st_mtime
- size = st.st_size
- else:
- if len(ret) == 16:
- # for build directory files
- return ret
-
- if HASH_CACHE:
- # check if timestamp and mtime match to avoid re-hashing
- st = os.stat(self.abspath())
- mtime, size = ret[16:].split(SEP)
- if int(1000 * st.st_mtime) == int(mtime) and st.st_size == int(size):
- return ret[:16]
-
- ret = Utils.h_file(self.abspath())
- if HASH_CACHE:
- val = TEMPLATE % (ret, int(1000 * st.st_mtime), int(st.st_size))
- try:
- setxattr(self, val)
- except PermissionError:
- os.chmod(self.abspath(), st.st_mode | 128)
- setxattr(self, val)
- return ret
-
-def runnable_status(self):
- bld = self.generator.bld
- if bld.is_install < 0:
- return SKIP_ME
-
- for t in self.run_after:
- if not t.hasrun:
- return ASK_LATER
- elif t.hasrun < SKIPPED:
- # a dependency has an error
- return CANCEL_ME
-
- # first compute the signature
- try:
- new_sig = self.signature()
- except Errors.TaskNotReady:
- return ASK_LATER
-
- if not self.outputs:
- # compare the signature to a signature computed previously
- # this part is only for tasks with no output files
- key = self.uid()
- try:
- prev_sig = bld.task_sigs[key]
- except KeyError:
- Logs.debug('task: task %r must run: it was never run before or the task code changed', self)
- return RUN_ME
- if new_sig != prev_sig:
- Logs.debug('task: task %r must run: the task signature changed', self)
- return RUN_ME
-
- # compare the signatures of the outputs to make a decision
- for node in self.outputs:
- try:
- sig = node.h_file()
- except EnvironmentError:
- Logs.debug('task: task %r must run: an output node does not exist', self)
- return RUN_ME
- if sig != new_sig:
- Logs.debug('task: task %r must run: an output node is stale', self)
- return RUN_ME
-
- return (self.always_run and RUN_ME) or SKIP_ME
-
-def post_run(self):
- bld = self.generator.bld
- sig = self.signature()
- for node in self.outputs:
- if not node.exists():
- self.hasrun = MISSING
- self.err_msg = '-> missing file: %r' % node.abspath()
- raise Errors.WafError(self.err_msg)
- os.setxattr(node.abspath(), 'user.waf.sig', sig)
- if not self.outputs:
- # only for task with no outputs
- bld.task_sigs[self.uid()] = sig
- if not self.keep_last_cmd:
- try:
- del self.last_cmd
- except AttributeError:
- pass
-
-try:
- os.getxattr
-except AttributeError:
- pass
-else:
- h_file.__doc__ = Node.Node.h_file.__doc__
-
- # keep file hashes as file attributes
- Node.Node.h_file = h_file
-
- # enable "deep_inputs" on all tasks
- Task.Task.runnable_status = runnable_status
- Task.Task.post_run = post_run
- Task.Task.sig_deep_inputs = Utils.nada
-
diff --git a/waflib/extras/why.py b/waflib/extras/why.py
deleted file mode 100644
index 1bb941f..0000000
--- a/waflib/extras/why.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010 (ita)
-
-"""
-This tool modifies the task signature scheme to store and obtain
-information about the task execution (why it must run, etc)::
-
- def configure(conf):
- conf.load('why')
-
-After adding the tool, a full rebuild is necessary:
-waf clean build --zones=task
-"""
-
-from waflib import Task, Utils, Logs, Errors
-
-def signature(self):
- # compute the result one time, and suppose the scan_signature will give the good result
- try:
- return self.cache_sig
- except AttributeError:
- pass
-
- self.m = Utils.md5()
- self.m.update(self.hcode)
- id_sig = self.m.digest()
-
- # explicit deps
- self.m = Utils.md5()
- self.sig_explicit_deps()
- exp_sig = self.m.digest()
-
- # env vars
- self.m = Utils.md5()
- self.sig_vars()
- var_sig = self.m.digest()
-
- # implicit deps / scanner results
- self.m = Utils.md5()
- if self.scan:
- try:
- self.sig_implicit_deps()
- except Errors.TaskRescan:
- return self.signature()
- impl_sig = self.m.digest()
-
- ret = self.cache_sig = impl_sig + id_sig + exp_sig + var_sig
- return ret
-
-
-Task.Task.signature = signature
-
-old = Task.Task.runnable_status
-def runnable_status(self):
- ret = old(self)
- if ret == Task.RUN_ME:
- try:
- old_sigs = self.generator.bld.task_sigs[self.uid()]
- except (KeyError, AttributeError):
- Logs.debug("task: task must run as no previous signature exists")
- else:
- new_sigs = self.cache_sig
- def v(x):
- return Utils.to_hex(x)
-
- Logs.debug('Task %r', self)
- msgs = ['* Implicit or scanner dependency', '* Task code', '* Source file, explicit or manual dependency', '* Configuration data variable']
- tmp = 'task: -> %s: %s %s'
- for x in range(len(msgs)):
- l = len(Utils.SIG_NIL)
- a = new_sigs[x*l : (x+1)*l]
- b = old_sigs[x*l : (x+1)*l]
- if (a != b):
- Logs.debug(tmp, msgs[x].ljust(35), v(a), v(b))
- return ret
-Task.Task.runnable_status = runnable_status
-
diff --git a/waflib/extras/win32_opts.py b/waflib/extras/win32_opts.py
deleted file mode 100644
index 9f7443c..0000000
--- a/waflib/extras/win32_opts.py
+++ /dev/null
@@ -1,170 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-
-"""
-Windows-specific optimizations
-
-This module can help reducing the overhead of listing files on windows
-(more than 10000 files). Python 3.5 already provides the listdir
-optimization though.
-"""
-
-import os
-from waflib import Utils, Build, Node, Logs
-
-try:
- TP = '%s\\*'.decode('ascii')
-except AttributeError:
- TP = '%s\\*'
-
-if Utils.is_win32:
- from waflib.Tools import md5_tstamp
- import ctypes, ctypes.wintypes
-
- FindFirstFile = ctypes.windll.kernel32.FindFirstFileW
- FindNextFile = ctypes.windll.kernel32.FindNextFileW
- FindClose = ctypes.windll.kernel32.FindClose
- FILE_ATTRIBUTE_DIRECTORY = 0x10
- INVALID_HANDLE_VALUE = -1
- UPPER_FOLDERS = ('.', '..')
- try:
- UPPER_FOLDERS = [unicode(x) for x in UPPER_FOLDERS]
- except NameError:
- pass
-
- def cached_hash_file(self):
- try:
- cache = self.ctx.cache_listdir_cache_hash_file
- except AttributeError:
- cache = self.ctx.cache_listdir_cache_hash_file = {}
-
- if id(self.parent) in cache:
- try:
- t = cache[id(self.parent)][self.name]
- except KeyError:
- raise IOError('Not a file')
- else:
- # an opportunity to list the files and the timestamps at once
- findData = ctypes.wintypes.WIN32_FIND_DATAW()
- find = FindFirstFile(TP % self.parent.abspath(), ctypes.byref(findData))
-
- if find == INVALID_HANDLE_VALUE:
- cache[id(self.parent)] = {}
- raise IOError('Not a file')
-
- cache[id(self.parent)] = lst_files = {}
- try:
- while True:
- if findData.cFileName not in UPPER_FOLDERS:
- thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
- if not thatsadir:
- ts = findData.ftLastWriteTime
- d = (ts.dwLowDateTime << 32) | ts.dwHighDateTime
- lst_files[str(findData.cFileName)] = d
- if not FindNextFile(find, ctypes.byref(findData)):
- break
- except Exception:
- cache[id(self.parent)] = {}
- raise IOError('Not a file')
- finally:
- FindClose(find)
- t = lst_files[self.name]
-
- fname = self.abspath()
- if fname in Build.hashes_md5_tstamp:
- if Build.hashes_md5_tstamp[fname][0] == t:
- return Build.hashes_md5_tstamp[fname][1]
-
- try:
- fd = os.open(fname, os.O_BINARY | os.O_RDONLY | os.O_NOINHERIT)
- except OSError:
- raise IOError('Cannot read from %r' % fname)
- f = os.fdopen(fd, 'rb')
- m = Utils.md5()
- rb = 1
- try:
- while rb:
- rb = f.read(200000)
- m.update(rb)
- finally:
- f.close()
-
- # ensure that the cache is overwritten
- Build.hashes_md5_tstamp[fname] = (t, m.digest())
- return m.digest()
- Node.Node.cached_hash_file = cached_hash_file
-
- def get_bld_sig_win32(self):
- try:
- return self.ctx.hash_cache[id(self)]
- except KeyError:
- pass
- except AttributeError:
- self.ctx.hash_cache = {}
- self.ctx.hash_cache[id(self)] = ret = Utils.h_file(self.abspath())
- return ret
- Node.Node.get_bld_sig = get_bld_sig_win32
-
- def isfile_cached(self):
- # optimize for nt.stat calls, assuming there are many files for few folders
- try:
- cache = self.__class__.cache_isfile_cache
- except AttributeError:
- cache = self.__class__.cache_isfile_cache = {}
-
- try:
- c1 = cache[id(self.parent)]
- except KeyError:
- c1 = cache[id(self.parent)] = []
-
- curpath = self.parent.abspath()
- findData = ctypes.wintypes.WIN32_FIND_DATAW()
- find = FindFirstFile(TP % curpath, ctypes.byref(findData))
-
- if find == INVALID_HANDLE_VALUE:
- Logs.error("invalid win32 handle isfile_cached %r", self.abspath())
- return os.path.isfile(self.abspath())
-
- try:
- while True:
- if findData.cFileName not in UPPER_FOLDERS:
- thatsadir = findData.dwFileAttributes & FILE_ATTRIBUTE_DIRECTORY
- if not thatsadir:
- c1.append(str(findData.cFileName))
- if not FindNextFile(find, ctypes.byref(findData)):
- break
- except Exception as e:
- Logs.error('exception while listing a folder %r %r', self.abspath(), e)
- return os.path.isfile(self.abspath())
- finally:
- FindClose(find)
- return self.name in c1
- Node.Node.isfile_cached = isfile_cached
-
- def find_or_declare_win32(self, lst):
- # assuming that "find_or_declare" is called before the build starts, remove the calls to os.path.isfile
- if isinstance(lst, str):
- lst = [x for x in Utils.split_path(lst) if x and x != '.']
-
- node = self.get_bld().search_node(lst)
- if node:
- if not node.isfile_cached():
- try:
- node.parent.mkdir()
- except OSError:
- pass
- return node
- self = self.get_src()
- node = self.find_node(lst)
- if node:
- if not node.isfile_cached():
- try:
- node.parent.mkdir()
- except OSError:
- pass
- return node
- node = self.get_bld().make_node(lst)
- node.parent.mkdir()
- return node
- Node.Node.find_or_declare = find_or_declare_win32
-
diff --git a/waflib/extras/wix.py b/waflib/extras/wix.py
deleted file mode 100644
index d87bfbb..0000000
--- a/waflib/extras/wix.py
+++ /dev/null
@@ -1,87 +0,0 @@
-#!/usr/bin/python
-# encoding: utf-8
-# vim: tabstop=4 noexpandtab
-
-"""
-Windows Installer XML Tool (WiX)
-
-.wxs --- candle ---> .wxobj --- light ---> .msi
-
-bld(features='wix', some.wxs, gen='some.msi', candleflags=[..], lightflags=[..])
-
-bld(features='wix', source=['bundle.wxs','WixBalExtension'], gen='setup.exe', candleflags=[..])
-"""
-
-import os, copy
-from waflib import TaskGen
-from waflib import Task
-from waflib.Utils import winreg
-
-class candle(Task.Task):
- run_str = '${CANDLE} -nologo ${CANDLEFLAGS} -out ${TGT} ${SRC[0].abspath()}',
-
-class light(Task.Task):
- run_str = "${LIGHT} -nologo -b ${SRC[0].parent.abspath()} ${LIGHTFLAGS} -out ${TGT} ${SRC[0].abspath()}"
-
-@TaskGen.feature('wix')
-@TaskGen.before_method('process_source')
-def wix(self):
- #X.wxs -> ${SRC} for CANDLE
- #X.wxobj -> ${SRC} for LIGHT
- #X.dll -> -ext X in ${LIGHTFLAGS}
- #X.wxl -> wixui.wixlib -loc X.wxl in ${LIGHTFLAGS}
- wxobj = []
- wxs = []
- exts = []
- wxl = []
- rest = []
- for x in self.source:
- if x.endswith('.wxobj'):
- wxobj.append(x)
- elif x.endswith('.wxs'):
- wxobj.append(self.path.find_or_declare(x[:-4]+'.wxobj'))
- wxs.append(x)
- elif x.endswith('.dll'):
- exts.append(x[:-4])
- elif '.' not in x:
- exts.append(x)
- elif x.endswith('.wxl'):
- wxl.append(x)
- else:
- rest.append(x)
- self.source = self.to_nodes(rest) #.wxs
-
- cndl = self.create_task('candle', self.to_nodes(wxs), self.to_nodes(wxobj))
- lght = self.create_task('light', self.to_nodes(wxobj), self.path.find_or_declare(self.gen))
-
- cndl.env.CANDLEFLAGS = copy.copy(getattr(self,'candleflags',[]))
- lght.env.LIGHTFLAGS = copy.copy(getattr(self,'lightflags',[]))
-
- for x in wxl:
- lght.env.append_value('LIGHTFLAGS','wixui.wixlib')
- lght.env.append_value('LIGHTFLAGS','-loc')
- lght.env.append_value('LIGHTFLAGS',x)
- for x in exts:
- cndl.env.append_value('CANDLEFLAGS','-ext')
- cndl.env.append_value('CANDLEFLAGS',x)
- lght.env.append_value('LIGHTFLAGS','-ext')
- lght.env.append_value('LIGHTFLAGS',x)
-
-#wix_bin_path()
-def wix_bin_path():
- basekey = r"SOFTWARE\Microsoft\.NETFramework\AssemblyFolders"
- query = winreg.OpenKey(winreg.HKEY_LOCAL_MACHINE, basekey)
- cnt=winreg.QueryInfoKey(query)[0]
- thiskey = r'C:\Program Files (x86)\WiX Toolset v3.10\SDK'
- for i in range(cnt-1,-1,-1):
- thiskey = winreg.EnumKey(query,i)
- if 'WiX' in thiskey:
- break
- winreg.CloseKey(query)
- return os.path.normpath(winreg.QueryValue(winreg.HKEY_LOCAL_MACHINE, basekey+r'\\'+thiskey)+'..\\bin')
-
-def configure(ctx):
- path_list=[wix_bin_path()]
- ctx.find_program('candle', var='CANDLE', mandatory=True, path_list = path_list)
- ctx.find_program('light', var='LIGHT', mandatory=True, path_list = path_list)
-
diff --git a/waflib/extras/xcode6.py b/waflib/extras/xcode6.py
deleted file mode 100644
index 91bbff1..0000000
--- a/waflib/extras/xcode6.py
+++ /dev/null
@@ -1,727 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# XCode 3/XCode 4/XCode 6/Xcode 7 generator for Waf
-# Based on work by Nicolas Mercier 2011
-# Extended by Simon Warg 2015, https://github.com/mimon
-# XCode project file format based on http://www.monobjc.net/xcode-project-file-format.html
-
-"""
-See playground/xcode6/ for usage examples.
-
-"""
-
-from waflib import Context, TaskGen, Build, Utils, Errors, Logs
-import os, sys
-
-# FIXME too few extensions
-XCODE_EXTS = ['.c', '.cpp', '.m', '.mm']
-
-HEADERS_GLOB = '**/(*.h|*.hpp|*.H|*.inl)'
-
-MAP_EXT = {
- '': "folder",
- '.h' : "sourcecode.c.h",
-
- '.hh': "sourcecode.cpp.h",
- '.inl': "sourcecode.cpp.h",
- '.hpp': "sourcecode.cpp.h",
-
- '.c': "sourcecode.c.c",
-
- '.m': "sourcecode.c.objc",
-
- '.mm': "sourcecode.cpp.objcpp",
-
- '.cc': "sourcecode.cpp.cpp",
-
- '.cpp': "sourcecode.cpp.cpp",
- '.C': "sourcecode.cpp.cpp",
- '.cxx': "sourcecode.cpp.cpp",
- '.c++': "sourcecode.cpp.cpp",
-
- '.l': "sourcecode.lex", # luthor
- '.ll': "sourcecode.lex",
-
- '.y': "sourcecode.yacc",
- '.yy': "sourcecode.yacc",
-
- '.plist': "text.plist.xml",
- ".nib": "wrapper.nib",
- ".xib": "text.xib",
-}
-
-# Used in PBXNativeTarget elements
-PRODUCT_TYPE_APPLICATION = 'com.apple.product-type.application'
-PRODUCT_TYPE_FRAMEWORK = 'com.apple.product-type.framework'
-PRODUCT_TYPE_EXECUTABLE = 'com.apple.product-type.tool'
-PRODUCT_TYPE_LIB_STATIC = 'com.apple.product-type.library.static'
-PRODUCT_TYPE_LIB_DYNAMIC = 'com.apple.product-type.library.dynamic'
-PRODUCT_TYPE_EXTENSION = 'com.apple.product-type.kernel-extension'
-PRODUCT_TYPE_IOKIT = 'com.apple.product-type.kernel-extension.iokit'
-
-# Used in PBXFileReference elements
-FILE_TYPE_APPLICATION = 'wrapper.cfbundle'
-FILE_TYPE_FRAMEWORK = 'wrapper.framework'
-FILE_TYPE_LIB_DYNAMIC = 'compiled.mach-o.dylib'
-FILE_TYPE_LIB_STATIC = 'archive.ar'
-FILE_TYPE_EXECUTABLE = 'compiled.mach-o.executable'
-
-# Tuple packs of the above
-TARGET_TYPE_FRAMEWORK = (PRODUCT_TYPE_FRAMEWORK, FILE_TYPE_FRAMEWORK, '.framework')
-TARGET_TYPE_APPLICATION = (PRODUCT_TYPE_APPLICATION, FILE_TYPE_APPLICATION, '.app')
-TARGET_TYPE_DYNAMIC_LIB = (PRODUCT_TYPE_LIB_DYNAMIC, FILE_TYPE_LIB_DYNAMIC, '.dylib')
-TARGET_TYPE_STATIC_LIB = (PRODUCT_TYPE_LIB_STATIC, FILE_TYPE_LIB_STATIC, '.a')
-TARGET_TYPE_EXECUTABLE = (PRODUCT_TYPE_EXECUTABLE, FILE_TYPE_EXECUTABLE, '')
-
-# Maps target type string to its data
-TARGET_TYPES = {
- 'framework': TARGET_TYPE_FRAMEWORK,
- 'app': TARGET_TYPE_APPLICATION,
- 'dylib': TARGET_TYPE_DYNAMIC_LIB,
- 'stlib': TARGET_TYPE_STATIC_LIB,
- 'exe' :TARGET_TYPE_EXECUTABLE,
-}
-
-def delete_invalid_values(dct):
- """ Deletes entries that are dictionaries or sets """
- for k, v in list(dct.items()):
- if isinstance(v, dict) or isinstance(v, set):
- del dct[k]
- return dct
-
-"""
-Configuration of the global project settings. Sets an environment variable 'PROJ_CONFIGURATION'
-which is a dictionary of configuration name and buildsettings pair.
-E.g.:
-env.PROJ_CONFIGURATION = {
- 'Debug': {
- 'ARCHS': 'x86',
- ...
- }
- 'Release': {
- 'ARCHS' x86_64'
- ...
- }
-}
-The user can define a completely customized dictionary in configure() stage. Otherwise a default Debug/Release will be created
-based on env variable
-"""
-def configure(self):
- if not self.env.PROJ_CONFIGURATION:
- self.to_log("A default project configuration was created since no custom one was given in the configure(conf) stage. Define your custom project settings by adding PROJ_CONFIGURATION to env. The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.\n")
-
- # Check for any added config files added by the tool 'c_config'.
- if 'cfg_files' in self.env:
- self.env.INCLUDES = Utils.to_list(self.env.INCLUDES) + [os.path.abspath(os.path.dirname(f)) for f in self.env.cfg_files]
-
- # Create default project configuration?
- if 'PROJ_CONFIGURATION' not in self.env:
- defaults = delete_invalid_values(self.env.get_merged_dict())
- self.env.PROJ_CONFIGURATION = {
- "Debug": defaults,
- "Release": defaults,
- }
-
- # Some build settings are required to be present by XCode. We will supply default values
- # if user hasn't defined any.
- defaults_required = [('PRODUCT_NAME', '$(TARGET_NAME)')]
- for cfgname,settings in self.env.PROJ_CONFIGURATION.items():
- for default_var, default_val in defaults_required:
- if default_var not in settings:
- settings[default_var] = default_val
-
- # Error check customization
- if not isinstance(self.env.PROJ_CONFIGURATION, dict):
- raise Errors.ConfigurationError("The env.PROJ_CONFIGURATION must be a dictionary with at least one key, where each key is the configuration name, and the value is a dictionary of key/value settings.")
-
-part1 = 0
-part2 = 10000
-part3 = 0
-id = 562000999
-def newid():
- global id
- id += 1
- return "%04X%04X%04X%012d" % (0, 10000, 0, id)
-
-"""
-Represents a tree node in the XCode project plist file format.
-When written to a file, all attributes of XCodeNode are stringified together with
-its value. However, attributes starting with an underscore _ are ignored
-during that process and allows you to store arbitrary values that are not supposed
-to be written out.
-"""
-class XCodeNode(object):
- def __init__(self):
- self._id = newid()
- self._been_written = False
-
- def tostring(self, value):
- if isinstance(value, dict):
- result = "{\n"
- for k,v in value.items():
- result = result + "\t\t\t%s = %s;\n" % (k, self.tostring(v))
- result = result + "\t\t}"
- return result
- elif isinstance(value, str):
- return "\"%s\"" % value
- elif isinstance(value, list):
- result = "(\n"
- for i in value:
- result = result + "\t\t\t%s,\n" % self.tostring(i)
- result = result + "\t\t)"
- return result
- elif isinstance(value, XCodeNode):
- return value._id
- else:
- return str(value)
-
- def write_recursive(self, value, file):
- if isinstance(value, dict):
- for k,v in value.items():
- self.write_recursive(v, file)
- elif isinstance(value, list):
- for i in value:
- self.write_recursive(i, file)
- elif isinstance(value, XCodeNode):
- value.write(file)
-
- def write(self, file):
- if not self._been_written:
- self._been_written = True
- for attribute,value in self.__dict__.items():
- if attribute[0] != '_':
- self.write_recursive(value, file)
- w = file.write
- w("\t%s = {\n" % self._id)
- w("\t\tisa = %s;\n" % self.__class__.__name__)
- for attribute,value in self.__dict__.items():
- if attribute[0] != '_':
- w("\t\t%s = %s;\n" % (attribute, self.tostring(value)))
- w("\t};\n\n")
-
-# Configurations
-class XCBuildConfiguration(XCodeNode):
- def __init__(self, name, settings = {}, env=None):
- XCodeNode.__init__(self)
- self.baseConfigurationReference = ""
- self.buildSettings = settings
- self.name = name
- if env and env.ARCH:
- settings['ARCHS'] = " ".join(env.ARCH)
-
-
-class XCConfigurationList(XCodeNode):
- def __init__(self, configlst):
- """ :param configlst: list of XCConfigurationList """
- XCodeNode.__init__(self)
- self.buildConfigurations = configlst
- self.defaultConfigurationIsVisible = 0
- self.defaultConfigurationName = configlst and configlst[0].name or ""
-
-# Group/Files
-class PBXFileReference(XCodeNode):
- def __init__(self, name, path, filetype = '', sourcetree = "SOURCE_ROOT"):
-
- XCodeNode.__init__(self)
- self.fileEncoding = 4
- if not filetype:
- _, ext = os.path.splitext(name)
- filetype = MAP_EXT.get(ext, 'text')
- self.lastKnownFileType = filetype
- self.explicitFileType = filetype
- self.name = name
- self.path = path
- self.sourceTree = sourcetree
-
- def __hash__(self):
- return (self.path+self.name).__hash__()
-
- def __eq__(self, other):
- return (self.path, self.name) == (other.path, other.name)
-
-class PBXBuildFile(XCodeNode):
- """ This element indicate a file reference that is used in a PBXBuildPhase (either as an include or resource). """
- def __init__(self, fileRef, settings={}):
- XCodeNode.__init__(self)
-
- # fileRef is a reference to a PBXFileReference object
- self.fileRef = fileRef
-
- # A map of key/value pairs for additional settings.
- self.settings = settings
-
- def __hash__(self):
- return (self.fileRef).__hash__()
-
- def __eq__(self, other):
- return self.fileRef == other.fileRef
-
-class PBXGroup(XCodeNode):
- def __init__(self, name, sourcetree = 'SOURCE_TREE'):
- XCodeNode.__init__(self)
- self.children = []
- self.name = name
- self.sourceTree = sourcetree
-
- # Maintain a lookup table for all PBXFileReferences
- # that are contained in this group.
- self._filerefs = {}
-
- def add(self, sources):
- """
- Add a list of PBXFileReferences to this group
-
- :param sources: list of PBXFileReferences objects
- """
- self._filerefs.update(dict(zip(sources, sources)))
- self.children.extend(sources)
-
- def get_sub_groups(self):
- """
- Returns all child PBXGroup objects contained in this group
- """
- return list(filter(lambda x: isinstance(x, PBXGroup), self.children))
-
- def find_fileref(self, fileref):
- """
- Recursively search this group for an existing PBXFileReference. Returns None
- if none were found.
-
- The reason you'd want to reuse existing PBXFileReferences from a PBXGroup is that XCode doesn't like PBXFileReferences that aren't part of a PBXGroup hierarchy.
- If it isn't, the consequence is that certain UI features like 'Reveal in Finder'
- stops working.
- """
- if fileref in self._filerefs:
- return self._filerefs[fileref]
- elif self.children:
- for childgroup in self.get_sub_groups():
- f = childgroup.find_fileref(fileref)
- if f:
- return f
- return None
-
-class PBXContainerItemProxy(XCodeNode):
- """ This is the element for to decorate a target item. """
- def __init__(self, containerPortal, remoteGlobalIDString, remoteInfo='', proxyType=1):
- XCodeNode.__init__(self)
- self.containerPortal = containerPortal # PBXProject
- self.remoteGlobalIDString = remoteGlobalIDString # PBXNativeTarget
- self.remoteInfo = remoteInfo # Target name
- self.proxyType = proxyType
-
-class PBXTargetDependency(XCodeNode):
- """ This is the element for referencing other target through content proxies. """
- def __init__(self, native_target, proxy):
- XCodeNode.__init__(self)
- self.target = native_target
- self.targetProxy = proxy
-
-class PBXFrameworksBuildPhase(XCodeNode):
- """ This is the element for the framework link build phase, i.e. linking to frameworks """
- def __init__(self, pbxbuildfiles):
- XCodeNode.__init__(self)
- self.buildActionMask = 2147483647
- self.runOnlyForDeploymentPostprocessing = 0
- self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
-
-class PBXHeadersBuildPhase(XCodeNode):
- """ This is the element for adding header files to be packaged into the .framework """
- def __init__(self, pbxbuildfiles):
- XCodeNode.__init__(self)
- self.buildActionMask = 2147483647
- self.runOnlyForDeploymentPostprocessing = 0
- self.files = pbxbuildfiles #List of PBXBuildFile (.o, .framework, .dylib)
-
-class PBXCopyFilesBuildPhase(XCodeNode):
- """
- Represents the PBXCopyFilesBuildPhase section. PBXBuildFile
- can be added to this node to copy files after build is done.
- """
- def __init__(self, pbxbuildfiles, dstpath, dstSubpathSpec=0, *args, **kwargs):
- XCodeNode.__init__(self)
- self.files = pbxbuildfiles
- self.dstPath = dstpath
- self.dstSubfolderSpec = dstSubpathSpec
-
-class PBXSourcesBuildPhase(XCodeNode):
- """ Represents the 'Compile Sources' build phase in a Xcode target """
- def __init__(self, buildfiles):
- XCodeNode.__init__(self)
- self.files = buildfiles # List of PBXBuildFile objects
-
-class PBXLegacyTarget(XCodeNode):
- def __init__(self, action, target=''):
- XCodeNode.__init__(self)
- self.buildConfigurationList = XCConfigurationList([XCBuildConfiguration('waf', {})])
- if not target:
- self.buildArgumentsString = "%s %s" % (sys.argv[0], action)
- else:
- self.buildArgumentsString = "%s %s --targets=%s" % (sys.argv[0], action, target)
- self.buildPhases = []
- self.buildToolPath = sys.executable
- self.buildWorkingDirectory = ""
- self.dependencies = []
- self.name = target or action
- self.productName = target or action
- self.passBuildSettingsInEnvironment = 0
-
-class PBXShellScriptBuildPhase(XCodeNode):
- def __init__(self, action, target):
- XCodeNode.__init__(self)
- self.buildActionMask = 2147483647
- self.files = []
- self.inputPaths = []
- self.outputPaths = []
- self.runOnlyForDeploymentPostProcessing = 0
- self.shellPath = "/bin/sh"
- self.shellScript = "%s %s %s --targets=%s" % (sys.executable, sys.argv[0], action, target)
-
-class PBXNativeTarget(XCodeNode):
- """ Represents a target in XCode, e.g. App, DyLib, Framework etc. """
- def __init__(self, target, node, target_type=TARGET_TYPE_APPLICATION, configlist=[], buildphases=[]):
- XCodeNode.__init__(self)
- product_type = target_type[0]
- file_type = target_type[1]
-
- self.buildConfigurationList = XCConfigurationList(configlist)
- self.buildPhases = buildphases
- self.buildRules = []
- self.dependencies = []
- self.name = target
- self.productName = target
- self.productType = product_type # See TARGET_TYPE_ tuples constants
- self.productReference = PBXFileReference(node.name, node.abspath(), file_type, '')
-
- def add_configuration(self, cf):
- """ :type cf: XCBuildConfiguration """
- self.buildConfigurationList.buildConfigurations.append(cf)
-
- def add_build_phase(self, phase):
- # Some build phase types may appear only once. If a phase type already exists, then merge them.
- if ( (phase.__class__ == PBXFrameworksBuildPhase)
- or (phase.__class__ == PBXSourcesBuildPhase) ):
- for b in self.buildPhases:
- if b.__class__ == phase.__class__:
- b.files.extend(phase.files)
- return
- self.buildPhases.append(phase)
-
- def add_dependency(self, depnd):
- self.dependencies.append(depnd)
-
-# Root project object
-class PBXProject(XCodeNode):
- def __init__(self, name, version, env):
- XCodeNode.__init__(self)
-
- if not isinstance(env.PROJ_CONFIGURATION, dict):
- raise Errors.WafError("Error: env.PROJ_CONFIGURATION must be a dictionary. This is done for you if you do not define one yourself. However, did you load the xcode module at the end of your wscript configure() ?")
-
- # Retrieve project configuration
- configurations = []
- for config_name, settings in env.PROJ_CONFIGURATION.items():
- cf = XCBuildConfiguration(config_name, settings)
- configurations.append(cf)
-
- self.buildConfigurationList = XCConfigurationList(configurations)
- self.compatibilityVersion = version[0]
- self.hasScannedForEncodings = 1
- self.mainGroup = PBXGroup(name)
- self.projectRoot = ""
- self.projectDirPath = ""
- self.targets = []
- self._objectVersion = version[1]
-
- def create_target_dependency(self, target, name):
- """ : param target : PXBNativeTarget """
- proxy = PBXContainerItemProxy(self, target, name)
- dependency = PBXTargetDependency(target, proxy)
- return dependency
-
- def write(self, file):
-
- # Make sure this is written only once
- if self._been_written:
- return
-
- w = file.write
- w("// !$*UTF8*$!\n")
- w("{\n")
- w("\tarchiveVersion = 1;\n")
- w("\tclasses = {\n")
- w("\t};\n")
- w("\tobjectVersion = %d;\n" % self._objectVersion)
- w("\tobjects = {\n\n")
-
- XCodeNode.write(self, file)
-
- w("\t};\n")
- w("\trootObject = %s;\n" % self._id)
- w("}\n")
-
- def add_target(self, target):
- self.targets.append(target)
-
- def get_target(self, name):
- """ Get a reference to PBXNativeTarget if it exists """
- for t in self.targets:
- if t.name == name:
- return t
- return None
-
-@TaskGen.feature('c', 'cxx')
-@TaskGen.after('propagate_uselib_vars', 'apply_incpaths')
-def process_xcode(self):
- bld = self.bld
- try:
- p = bld.project
- except AttributeError:
- return
-
- if not hasattr(self, 'target_type'):
- return
-
- products_group = bld.products_group
-
- target_group = PBXGroup(self.name)
- p.mainGroup.children.append(target_group)
-
- # Determine what type to build - framework, app bundle etc.
- target_type = getattr(self, 'target_type', 'app')
- if target_type not in TARGET_TYPES:
- raise Errors.WafError("Target type '%s' does not exists. Available options are '%s'. In target '%s'" % (target_type, "', '".join(TARGET_TYPES.keys()), self.name))
- else:
- target_type = TARGET_TYPES[target_type]
- file_ext = target_type[2]
-
- # Create the output node
- target_node = self.path.find_or_declare(self.name+file_ext)
- target = PBXNativeTarget(self.name, target_node, target_type, [], [])
-
- products_group.children.append(target.productReference)
-
- # Pull source files from the 'source' attribute and assign them to a UI group.
- # Use a default UI group named 'Source' unless the user
- # provides a 'group_files' dictionary to customize the UI grouping.
- sources = getattr(self, 'source', [])
- if hasattr(self, 'group_files'):
- group_files = getattr(self, 'group_files', [])
- for grpname,files in group_files.items():
- group = bld.create_group(grpname, files)
- target_group.children.append(group)
- else:
- group = bld.create_group('Source', sources)
- target_group.children.append(group)
-
- # Create a PBXFileReference for each source file.
- # If the source file already exists as a PBXFileReference in any of the UI groups, then
- # reuse that PBXFileReference object (XCode does not like it if we don't reuse)
- for idx, path in enumerate(sources):
- fileref = PBXFileReference(path.name, path.abspath())
- existing_fileref = target_group.find_fileref(fileref)
- if existing_fileref:
- sources[idx] = existing_fileref
- else:
- sources[idx] = fileref
-
- # If the 'source' attribute contains any file extension that XCode can't work with,
- # then remove it. The allowed file extensions are defined in XCODE_EXTS.
- is_valid_file_extension = lambda file: os.path.splitext(file.path)[1] in XCODE_EXTS
- sources = list(filter(is_valid_file_extension, sources))
-
- buildfiles = [bld.unique_buildfile(PBXBuildFile(x)) for x in sources]
- target.add_build_phase(PBXSourcesBuildPhase(buildfiles))
-
- # Check if any framework to link against is some other target we've made
- libs = getattr(self, 'tmp_use_seen', [])
- for lib in libs:
- use_target = p.get_target(lib)
- if use_target:
- # Create an XCode dependency so that XCode knows to build the other target before this target
- dependency = p.create_target_dependency(use_target, use_target.name)
- target.add_dependency(dependency)
-
- buildphase = PBXFrameworksBuildPhase([PBXBuildFile(use_target.productReference)])
- target.add_build_phase(buildphase)
- if lib in self.env.LIB:
- self.env.LIB = list(filter(lambda x: x != lib, self.env.LIB))
-
- # If 'export_headers' is present, add files to the Headers build phase in xcode.
- # These are files that'll get packed into the Framework for instance.
- exp_hdrs = getattr(self, 'export_headers', [])
- hdrs = bld.as_nodes(Utils.to_list(exp_hdrs))
- files = [p.mainGroup.find_fileref(PBXFileReference(n.name, n.abspath())) for n in hdrs]
- files = [PBXBuildFile(f, {'ATTRIBUTES': ('Public',)}) for f in files]
- buildphase = PBXHeadersBuildPhase(files)
- target.add_build_phase(buildphase)
-
- # Merge frameworks and libs into one list, and prefix the frameworks
- frameworks = Utils.to_list(self.env.FRAMEWORK)
- frameworks = ' '.join(['-framework %s' % (f.split('.framework')[0]) for f in frameworks])
-
- libs = Utils.to_list(self.env.STLIB) + Utils.to_list(self.env.LIB)
- libs = ' '.join(bld.env['STLIB_ST'] % t for t in libs)
-
- # Override target specific build settings
- bldsettings = {
- 'HEADER_SEARCH_PATHS': ['$(inherited)'] + self.env['INCPATHS'],
- 'LIBRARY_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.LIBPATH) + Utils.to_list(self.env.STLIBPATH) + Utils.to_list(self.env.LIBDIR) ,
- 'FRAMEWORK_SEARCH_PATHS': ['$(inherited)'] + Utils.to_list(self.env.FRAMEWORKPATH),
- 'OTHER_LDFLAGS': libs + ' ' + frameworks,
- 'OTHER_LIBTOOLFLAGS': bld.env['LINKFLAGS'],
- 'OTHER_CPLUSPLUSFLAGS': Utils.to_list(self.env['CXXFLAGS']),
- 'OTHER_CFLAGS': Utils.to_list(self.env['CFLAGS']),
- 'INSTALL_PATH': []
- }
-
- # Install path
- installpaths = Utils.to_list(getattr(self, 'install', []))
- prodbuildfile = PBXBuildFile(target.productReference)
- for instpath in installpaths:
- bldsettings['INSTALL_PATH'].append(instpath)
- target.add_build_phase(PBXCopyFilesBuildPhase([prodbuildfile], instpath))
-
- if not bldsettings['INSTALL_PATH']:
- del bldsettings['INSTALL_PATH']
-
- # Create build settings which can override the project settings. Defaults to none if user
- # did not pass argument. This will be filled up with target specific
- # search paths, libs to link etc.
- settings = getattr(self, 'settings', {})
-
- # The keys represents different build configuration, e.g. Debug, Release and so on..
- # Insert our generated build settings to all configuration names
- keys = set(settings.keys() + bld.env.PROJ_CONFIGURATION.keys())
- for k in keys:
- if k in settings:
- settings[k].update(bldsettings)
- else:
- settings[k] = bldsettings
-
- for k,v in settings.items():
- target.add_configuration(XCBuildConfiguration(k, v))
-
- p.add_target(target)
-
-
-class xcode(Build.BuildContext):
- cmd = 'xcode6'
- fun = 'build'
-
- def as_nodes(self, files):
- """ Returns a list of waflib.Nodes from a list of string of file paths """
- nodes = []
- for x in files:
- if not isinstance(x, str):
- d = x
- else:
- d = self.srcnode.find_node(x)
- if not d:
- raise Errors.WafError('File \'%s\' was not found' % x)
- nodes.append(d)
- return nodes
-
- def create_group(self, name, files):
- """
- Returns a new PBXGroup containing the files (paths) passed in the files arg
- :type files: string
- """
- group = PBXGroup(name)
- """
- Do not use unique file reference here, since XCode seem to allow only one file reference
- to be referenced by a group.
- """
- files_ = []
- for d in self.as_nodes(Utils.to_list(files)):
- fileref = PBXFileReference(d.name, d.abspath())
- files_.append(fileref)
- group.add(files_)
- return group
-
- def unique_buildfile(self, buildfile):
- """
- Returns a unique buildfile, possibly an existing one.
- Use this after you've constructed a PBXBuildFile to make sure there is
- only one PBXBuildFile for the same file in the same project.
- """
- try:
- build_files = self.build_files
- except AttributeError:
- build_files = self.build_files = {}
-
- if buildfile not in build_files:
- build_files[buildfile] = buildfile
- return build_files[buildfile]
-
- def execute(self):
- """
- Entry point
- """
- self.restore()
- if not self.all_envs:
- self.load_envs()
- self.recurse([self.run_dir])
-
- appname = getattr(Context.g_module, Context.APPNAME, os.path.basename(self.srcnode.abspath()))
-
- p = PBXProject(appname, ('Xcode 3.2', 46), self.env)
-
- # If we don't create a Products group, then
- # XCode will create one, which entails that
- # we'll start to see duplicate files in the UI
- # for some reason.
- products_group = PBXGroup('Products')
- p.mainGroup.children.append(products_group)
-
- self.project = p
- self.products_group = products_group
-
- # post all task generators
- # the process_xcode method above will be called for each target
- if self.targets and self.targets != '*':
- (self._min_grp, self._exact_tg) = self.get_targets()
-
- self.current_group = 0
- while self.current_group < len(self.groups):
- self.post_group()
- self.current_group += 1
-
- node = self.bldnode.make_node('%s.xcodeproj' % appname)
- node.mkdir()
- node = node.make_node('project.pbxproj')
- with open(node.abspath(), 'w') as f:
- p.write(f)
- Logs.pprint('GREEN', 'Wrote %r' % node.abspath())
-
-def bind_fun(tgtype):
- def fun(self, *k, **kw):
- tgtype = fun.__name__
- if tgtype == 'shlib' or tgtype == 'dylib':
- features = 'cxx cxxshlib'
- tgtype = 'dylib'
- elif tgtype == 'framework':
- features = 'cxx cxxshlib'
- tgtype = 'framework'
- elif tgtype == 'program':
- features = 'cxx cxxprogram'
- tgtype = 'exe'
- elif tgtype == 'app':
- features = 'cxx cxxprogram'
- tgtype = 'app'
- elif tgtype == 'stlib':
- features = 'cxx cxxstlib'
- tgtype = 'stlib'
- lst = kw['features'] = Utils.to_list(kw.get('features', []))
- for x in features.split():
- if not x in kw['features']:
- lst.append(x)
-
- kw['target_type'] = tgtype
- return self(*k, **kw)
- fun.__name__ = tgtype
- setattr(Build.BuildContext, tgtype, fun)
- return fun
-
-for xx in 'app framework dylib shlib stlib program'.split():
- bind_fun(xx)
-
diff --git a/waflib/fixpy2.py b/waflib/fixpy2.py
deleted file mode 100644
index 24176e0..0000000
--- a/waflib/fixpy2.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#!/usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2010-2018 (ita)
-
-from __future__ import with_statement
-
-import os
-
-all_modifs = {}
-
-def fixdir(dir):
- """Call all substitution functions on Waf folders"""
- for k in all_modifs:
- for v in all_modifs[k]:
- modif(os.path.join(dir, 'waflib'), k, v)
-
-def modif(dir, name, fun):
- """Call a substitution function"""
- if name == '*':
- lst = []
- for y in '. Tools extras'.split():
- for x in os.listdir(os.path.join(dir, y)):
- if x.endswith('.py'):
- lst.append(y + os.sep + x)
- for x in lst:
- modif(dir, x, fun)
- return
-
- filename = os.path.join(dir, name)
- with open(filename, 'r') as f:
- txt = f.read()
-
- txt = fun(txt)
-
- with open(filename, 'w') as f:
- f.write(txt)
-
-def subst(*k):
- """register a substitution function"""
- def do_subst(fun):
- for x in k:
- try:
- all_modifs[x].append(fun)
- except KeyError:
- all_modifs[x] = [fun]
- return fun
- return do_subst
-
-@subst('*')
-def r1(code):
- "utf-8 fixes for python < 2.6"
- code = code.replace('as e:', ',e:')
- code = code.replace(".decode(sys.stdout.encoding or'latin-1',errors='replace')", '')
- return code.replace('.encode()', '')
-
-@subst('Runner.py')
-def r4(code):
- "generator syntax"
- return code.replace('next(self.biter)', 'self.biter.next()')
-
-@subst('Context.py')
-def r5(code):
- return code.replace("('Execution failure: %s'%str(e),ex=e)", "('Execution failure: %s'%str(e),ex=e),None,sys.exc_info()[2]")
-
diff --git a/waflib/processor.py b/waflib/processor.py
deleted file mode 100755
index 2eecf3b..0000000
--- a/waflib/processor.py
+++ /dev/null
@@ -1,64 +0,0 @@
-#! /usr/bin/env python
-# encoding: utf-8
-# Thomas Nagy, 2016-2018 (ita)
-
-import os, sys, traceback, base64, signal
-try:
- import cPickle
-except ImportError:
- import pickle as cPickle
-
-try:
- import subprocess32 as subprocess
-except ImportError:
- import subprocess
-
-try:
- TimeoutExpired = subprocess.TimeoutExpired
-except AttributeError:
- class TimeoutExpired(Exception):
- pass
-
-def run():
- txt = sys.stdin.readline().strip()
- if not txt:
- # parent process probably ended
- sys.exit(1)
- [cmd, kwargs, cargs] = cPickle.loads(base64.b64decode(txt))
- cargs = cargs or {}
-
- ret = 1
- out, err, ex, trace = (None, None, None, None)
- try:
- proc = subprocess.Popen(cmd, **kwargs)
- try:
- out, err = proc.communicate(**cargs)
- except TimeoutExpired:
- if kwargs.get('start_new_session') and hasattr(os, 'killpg'):
- os.killpg(proc.pid, signal.SIGKILL)
- else:
- proc.kill()
- out, err = proc.communicate()
- exc = TimeoutExpired(proc.args, timeout=cargs['timeout'], output=out)
- exc.stderr = err
- raise exc
- ret = proc.returncode
- except Exception as e:
- exc_type, exc_value, tb = sys.exc_info()
- exc_lines = traceback.format_exception(exc_type, exc_value, tb)
- trace = str(cmd) + '\n' + ''.join(exc_lines)
- ex = e.__class__.__name__
-
- # it is just text so maybe we do not need to pickle()
- tmp = [ret, out, err, ex, trace]
- obj = base64.b64encode(cPickle.dumps(tmp))
- sys.stdout.write(obj.decode())
- sys.stdout.write('\n')
- sys.stdout.flush()
-
-while 1:
- try:
- run()
- except KeyboardInterrupt:
- break
-
diff --git a/waflib/waf b/waflib/waf
deleted file mode 100755
index e22930a..0000000
--- a/waflib/waf
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/usr/bin/env python
-
-# Minimal waf script for projects that include waflib directly
-
-from waflib import Context, Scripting
-
-import inspect
-import os
-
-def main():
- script_path = os.path.abspath(inspect.getfile(inspect.getmodule(main)))
- project_path = os.path.dirname(script_path)
- Scripting.waf_entry_point(os.getcwd(), Context.WAFVERSION, project_path)
-
-if __name__ == '__main__':
- main()
diff --git a/wscript b/wscript
deleted file mode 100644
index 88bf7c2..0000000
--- a/wscript
+++ /dev/null
@@ -1,225 +0,0 @@
-#!/usr/bin/env python
-# Licensed under the GNU GPL v3 or later, see COPYING file for details.
-# Copyright 2008-2013 David Robillard
-# Copyright 2008 Nedko Arnaudov
-
-import os
-
-from waflib import Options, Utils
-from waflib.extras import autowaf
-
-# Version of this package (even if built as a child)
-PATCHAGE_VERSION = '1.0.1'
-
-# Variables for 'waf dist'
-APPNAME = 'patchage'
-VERSION = PATCHAGE_VERSION
-APP_HUMAN_NAME = 'Patchage'
-
-# Mandatory variables
-top = '.'
-out = 'build'
-
-def options(ctx):
- ctx.load('compiler_cxx')
- autowaf.set_options(ctx)
- opt = ctx.get_option_group('Configuration options')
-
- opt.add_option('--patchage-install-name', type='string', default=APPNAME,
- dest='patchage_install_name',
- help='patchage install name. [default: '' + APPNAME + '']')
- opt.add_option('--patchage-human-name', type='string', default=APP_HUMAN_NAME,
- dest='patchage_human_name',
- help='patchage human name [default: '' + APP_HUMAN_NAME + '']')
-
- autowaf.add_flags(
- opt,
- {'jack-dbus': 'use Jack via D-Bus',
- 'jack-session-manage': 'include JACK session management support',
- 'no-alsa': 'do not build Alsa Sequencer support',
- 'no-binloc': 'do not try to read files from executable location',
- 'light-theme': 'use light coloured theme'})
-
-def configure(conf):
- autowaf.display_header('Patchage Configuration')
- conf.load('compiler_cxx', cache=True)
- conf.load('autowaf', cache=True)
- autowaf.set_cxx_lang(conf, 'c++11')
-
- autowaf.check_pkg(conf, 'dbus-1', uselib_store='DBUS',
- mandatory=False)
- autowaf.check_pkg(conf, 'dbus-glib-1', uselib_store='DBUS_GLIB',
- mandatory=False)
- autowaf.check_pkg(conf, 'gthread-2.0', uselib_store='GTHREAD',
- atleast_version='2.14.0', mandatory=True)
- autowaf.check_pkg(conf, 'glibmm-2.4', uselib_store='GLIBMM',
- atleast_version='2.14.0', mandatory=True)
- autowaf.check_pkg(conf, 'gtkmm-2.4', uselib_store='GTKMM',
- atleast_version='2.12.0', mandatory=True)
- autowaf.check_pkg(conf, 'ganv-1', uselib_store='GANV',
- atleast_version='1.5.2', mandatory=True)
-
- if conf.env.DEST_OS == 'darwin':
- autowaf.check_pkg(conf, 'gtk-mac-integration', uselib_store='GTK_OSX',
- atleast_version='1.0.0', mandatory=False)
- if conf.env.HAVE_GTK_OSX:
- autowaf.define(conf, 'PATCHAGE_GTK_OSX', 1)
-
- # Check for dladdr
- autowaf.check_function(conf, 'cxx', 'dladdr',
- header_name = 'dlfcn.h',
- defines = ['_GNU_SOURCE'],
- lib = ['dl'],
- define_name = 'HAVE_DLADDR',
- mandatory = False)
-
- # Use Jack D-Bus if requested (only one jack driver is allowed)
- if Options.options.jack_dbus and conf.env.HAVE_DBUS and conf.env.HAVE_DBUS_GLIB:
- autowaf.define(conf, 'HAVE_JACK_DBUS', 1)
- else:
- autowaf.check_pkg(conf, 'jack', uselib_store='JACK',
- atleast_version='0.120.0', mandatory=False)
- if conf.env.HAVE_JACK:
- autowaf.define(conf, 'PATCHAGE_LIBJACK', 1)
- if Options.options.jack_session_manage:
- autowaf.define(conf, 'PATCHAGE_JACK_SESSION', 1)
- autowaf.check_function(conf, 'cxx', 'jack_get_property',
- header_name = 'jack/metadata.h',
- define_name = 'HAVE_JACK_METADATA',
- uselib = 'JACK',
- mandatory = False)
-
- # Use Alsa if present unless --no-alsa
- if not Options.options.no_alsa:
- autowaf.check_pkg(conf, 'alsa', uselib_store='ALSA', mandatory=False)
-
- # Find files at binary location if we have dladdr unless --no-binloc
- if not Options.options.no_binloc and conf.is_defined('HAVE_DLADDR'):
- autowaf.define(conf, 'PATCHAGE_BINLOC', 1)
-
- if Options.options.light_theme:
- autowaf.define(conf, 'PATCHAGE_USE_LIGHT_THEME', 1)
-
- # Boost headers
- conf.check_cxx(header_name='boost/format.hpp')
- conf.check_cxx(header_name='boost/shared_ptr.hpp')
- conf.check_cxx(header_name='boost/utility.hpp')
- conf.check_cxx(header_name='boost/weak_ptr.hpp')
-
- conf.env.PATCHAGE_VERSION = PATCHAGE_VERSION
-
- conf.env.APP_INSTALL_NAME = Options.options.patchage_install_name
- conf.env.APP_HUMAN_NAME = Options.options.patchage_human_name
- autowaf.define(conf, 'PATCHAGE_DATA_DIR', os.path.join(
- conf.env.DATADIR, conf.env.APP_INSTALL_NAME))
-
- conf.write_config_header('patchage_config.h', remove=False)
-
- autowaf.display_summary(
- conf,
- {'Install name': conf.env.APP_INSTALL_NAME,
- 'App human name': conf.env.APP_HUMAN_NAME,
- 'Jack (D-Bus)': bool(conf.env.HAVE_JACK_DBUS),
- 'Jack (libjack)': conf.is_defined('PATCHAGE_LIBJACK'),
- 'Jack Session Management': conf.is_defined('PATCHAGE_JACK_SESSION'),
- 'Jack Metadata': conf.is_defined('HAVE_JACK_METADATA'),
- 'Alsa Sequencer': bool(conf.env.HAVE_ALSA)})
-
- if conf.env.DEST_OS == 'darwin':
- autowaf.display_msg(conf, "Mac Integration", bool(conf.env.HAVE_GTK_OSX))
-
-def build(bld):
- out_base = ''
- if bld.env.DEST_OS == 'darwin':
- out_base = 'Patchage.app/Contents/'
-
- # Program
- prog = bld(features = 'cxx cxxprogram',
- includes = ['.', 'src'],
- target = out_base + bld.env.APP_INSTALL_NAME,
- install_path = '${BINDIR}')
- autowaf.use_lib(bld, prog, 'DBUS GANV DBUS_GLIB GTKMM GNOMECANVAS GTHREAD GTK_OSX')
- prog.source = '''
- src/Configuration.cpp
- src/Patchage.cpp
- src/PatchageCanvas.cpp
- src/PatchageEvent.cpp
- src/PatchageModule.cpp
- src/main.cpp
- '''
- if bld.env.HAVE_JACK_DBUS:
- prog.source += ' src/JackDbusDriver.cpp '
- if bld.is_defined('PATCHAGE_LIBJACK'):
- prog.source += ' src/JackDriver.cpp '
- prog.uselib += ' JACK NEWJACK '
- if bld.env.HAVE_ALSA:
- prog.source += ' src/AlsaDriver.cpp '
- prog.uselib += ' ALSA '
- if bld.is_defined('PATCHAGE_BINLOC') and bld.is_defined('HAVE_DLADDR'):
- prog.lib = ['dl']
-
- # XML UI definition
- bld(features = 'subst',
- source = 'src/patchage.ui',
- target = out_base + 'patchage.ui',
- install_path = '${DATADIR}/' + bld.env.APP_INSTALL_NAME,
- chmod = Utils.O644,
- PATCHAGE_VERSION = PATCHAGE_VERSION)
-
- # 'Desktop' file (menu entry, icon, etc)
- bld(features = 'subst',
- source = 'patchage.desktop.in',
- target = 'patchage.desktop',
- install_path = '${DATADIR}/applications',
- chmod = Utils.O644,
- BINDIR = os.path.normpath(bld.env.BINDIR),
- APP_INSTALL_NAME = bld.env.APP_INSTALL_NAME,
- APP_HUMAN_NAME = bld.env.APP_HUMAN_NAME)
-
- if bld.env.DEST_OS == 'darwin':
- # Property list
- bld(features = 'subst',
- source = 'osx/Info.plist.in',
- target = out_base + 'Info.plist',
- install_path = '',
- chmod = Utils.O644)
-
- # Icons
- bld(rule = 'cp ${SRC} ${TGT}',
- source = 'osx/Patchage.icns',
- target = out_base + 'Resources/Patchage.icns')
-
- # Gtk/Pango/etc configuration files
- for i in ['pangorc', 'pango.modules', 'loaders.cache', 'gtkrc']:
- bld(rule = 'cp ${SRC} ${TGT}',
- source = 'osx/' + i,
- target = out_base + 'Resources/' + i)
-
- # Icons
- # After installation, icon cache should be updated using:
- # gtk-update-icon-cache -f -t $(datadir)/icons/hicolor
- icon_sizes = [16, 22, 24, 32, 48, 128, 256]
- for s in icon_sizes:
- d = '%dx%d' % (s, s)
- bld.install_as(
- os.path.join(bld.env.DATADIR, 'icons', 'hicolor', d, 'apps',
- bld.env.APP_INSTALL_NAME + '.png'),
- os.path.join('icons', d, 'patchage.png'))
-
- bld.install_as(
- os.path.join(bld.env.DATADIR, 'icons', 'hicolor', 'scalable', 'apps',
- bld.env.APP_INSTALL_NAME + '.svg'),
- os.path.join('icons', 'scalable', 'patchage.svg'))
-
- bld.install_files('${MANDIR}/man1', bld.path.ant_glob('doc/*.1'))
-
-def posts(ctx):
- path = str(ctx.path.abspath())
- autowaf.news_to_posts(
- os.path.join(path, 'NEWS'),
- {'title' : 'Patchage',
- 'description' : autowaf.get_blurb(os.path.join(path, 'README')),
- 'dist_pattern' : 'http://download.drobilla.net/patchage-%s.tar.bz2'},
- { 'Author' : 'drobilla',
- 'Tags' : 'Hacking, LAD, Patchage' },
- os.path.join(out, 'posts'))