path: root/bindings
Diffstat (limited to 'bindings')
-rw-r--r--  bindings/lilv.i                               66
-rw-r--r--  bindings/numpy.i                            1746
-rw-r--r--  bindings/python/Makefile                     186
-rw-r--r--  bindings/python/conf.py                      263
-rw-r--r--  bindings/python/index.rst                      9
-rw-r--r--  bindings/python/lilv.py                     1775
-rwxr-xr-x  bindings/python/lv2_apply.py                 159
-rwxr-xr-x  bindings/python/lv2_list.py                    9
-rw-r--r--  bindings/test/bindings_test_plugin.c         196
-rw-r--r--  bindings/test/bindings_test_plugin.ttl.in     62
-rw-r--r--  bindings/test/manifest.ttl.in                  7
-rw-r--r--  bindings/test/python/test_api.py             290
12 files changed, 4768 insertions, 0 deletions
diff --git a/bindings/lilv.i b/bindings/lilv.i
new file mode 100644
index 0000000..f6254a7
--- /dev/null
+++ b/bindings/lilv.i
@@ -0,0 +1,66 @@
+%module lilv
+%typedef unsigned uint32_t;
+%{
+#define SWIG_FILE_WITH_INIT
+#include "lilv/lilv.h"
+#include "lilv/lilvmm.hpp"
+%}
+
+%include "numpy.i"
+%init %{
+ import_array();
+%}
+%apply (float* INPLACE_ARRAY1) {(void* data_location)}
+
+%feature("compactdefaultargs") %{
+ lilv_plugin_get_num_ports_of_class;
+ get_num_ports_of_class;
+%}
+%varargs(3, LilvNode* node = NULL) lilv_plugin_get_num_ports_of_class;
+%varargs(3, LilvNode* node = NULL) get_num_ports_of_class;
+%typemap(in, numinputs=0) LilvNode *node3 ""; // Make sure it's NULL terminated
+
+%include "lilv/lilv.h"
+%include "lilv/lilvmm.hpp"
+
+namespace Lilv {
+
+%extend Plugins {
+%pythoncode %{
+ def __iter__(self):
+ class Iterator(object):
+ def __init__(self, plugins):
+ self.plugins = plugins
+ self.iter = plugins.begin()
+
+ def __next__(self):
+ if self.plugins.is_end(self.iter):
+ raise StopIteration
+ plugin = self.plugins.get(self.iter)
+ self.iter = self.plugins.next(self.iter)
+ return plugin
+
+ def next(self):
+ "Python 2 iterator protocol"
+ return Iterator.__next__(self)
+
+ return Iterator(self)
+
+ def get_by_uri(self, *args):
+        """get_by_uri(self, LilvNode uri) -> Plugin"""
+ ret = _lilv.Plugins_get_by_uri(self, *args)
+ if ret.me is None:
+ return None
+ else:
+ return ret
+%}
+};
+
+%extend Node {
+%pythoncode %{
+ def __str__(self):
+ return self.get_turtle_token()
+%}
+};
+
+} /* namespace Lilv */
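
A minimal Python-side sketch of what the %extend blocks above provide (assumptions: the module builds as `lilv`, World.load_all() and get_all_plugins() from lilvmm.hpp are wrapped unchanged, and the LILV_URI_AUDIO_PORT constant is exposed by SWIG):

    import lilv

    world = lilv.World()
    world.load_all()

    for plugin in world.get_all_plugins():                     # uses the Plugins.__iter__ added above
        print(plugin.get_uri())                                # Node.__str__ prints the Turtle token
        # get_num_ports_of_class is variadic; the typemap above supplies the NULL terminator
        audio_class = world.new_uri(lilv.LILV_URI_AUDIO_PORT)  # constant name is an assumption
        print(plugin.get_num_ports_of_class(audio_class))
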
diff --git a/bindings/numpy.i b/bindings/numpy.i
new file mode 100644
index 0000000..d695b36
--- /dev/null
+++ b/bindings/numpy.i
@@ -0,0 +1,1746 @@
+/* -*- C -*- (not really, but good for syntax highlighting) */
+#ifdef SWIGPYTHON
+
+%{
+#ifndef SWIG_FILE_WITH_INIT
+# define NO_IMPORT_ARRAY
+#endif
+#include "stdio.h"
+#include <numpy/arrayobject.h>
+%}
+
+/**********************************************************************/
+
+%fragment("NumPy_Backward_Compatibility", "header")
+{
+/* Support older NumPy data type names
+*/
+%#if NDARRAY_VERSION < 0x01000000
+%#define NPY_BOOL PyArray_BOOL
+%#define NPY_BYTE PyArray_BYTE
+%#define NPY_UBYTE PyArray_UBYTE
+%#define NPY_SHORT PyArray_SHORT
+%#define NPY_USHORT PyArray_USHORT
+%#define NPY_INT PyArray_INT
+%#define NPY_UINT PyArray_UINT
+%#define NPY_LONG PyArray_LONG
+%#define NPY_ULONG PyArray_ULONG
+%#define NPY_LONGLONG PyArray_LONGLONG
+%#define NPY_ULONGLONG PyArray_ULONGLONG
+%#define NPY_FLOAT PyArray_FLOAT
+%#define NPY_DOUBLE PyArray_DOUBLE
+%#define NPY_LONGDOUBLE PyArray_LONGDOUBLE
+%#define NPY_CFLOAT PyArray_CFLOAT
+%#define NPY_CDOUBLE PyArray_CDOUBLE
+%#define NPY_CLONGDOUBLE PyArray_CLONGDOUBLE
+%#define NPY_OBJECT PyArray_OBJECT
+%#define NPY_STRING PyArray_STRING
+%#define NPY_UNICODE PyArray_UNICODE
+%#define NPY_VOID PyArray_VOID
+%#define NPY_NTYPES PyArray_NTYPES
+%#define NPY_NOTYPE PyArray_NOTYPE
+%#define NPY_CHAR PyArray_CHAR
+%#define NPY_USERDEF PyArray_USERDEF
+%#define npy_intp intp
+
+%#define NPY_MAX_BYTE MAX_BYTE
+%#define NPY_MIN_BYTE MIN_BYTE
+%#define NPY_MAX_UBYTE MAX_UBYTE
+%#define NPY_MAX_SHORT MAX_SHORT
+%#define NPY_MIN_SHORT MIN_SHORT
+%#define NPY_MAX_USHORT MAX_USHORT
+%#define NPY_MAX_INT MAX_INT
+%#define NPY_MIN_INT MIN_INT
+%#define NPY_MAX_UINT MAX_UINT
+%#define NPY_MAX_LONG MAX_LONG
+%#define NPY_MIN_LONG MIN_LONG
+%#define NPY_MAX_ULONG MAX_ULONG
+%#define NPY_MAX_LONGLONG MAX_LONGLONG
+%#define NPY_MIN_LONGLONG MIN_LONGLONG
+%#define NPY_MAX_ULONGLONG MAX_ULONGLONG
+%#define NPY_MAX_INTP MAX_INTP
+%#define NPY_MIN_INTP MIN_INTP
+
+%#define NPY_FARRAY FARRAY
+%#define NPY_F_CONTIGUOUS F_CONTIGUOUS
+%#endif
+}
+
+/**********************************************************************/
+
+/* The following code originally appeared in
+ * enthought/kiva/agg/src/numeric.i written by Eric Jones. It was
+ * translated from C++ to C by John Hunter. Bill Spotz has modified
+ * it to fix some minor bugs, upgrade from Numeric to numpy (all
+ * versions), add some comments and functionality, and convert from
+ * direct code insertion to SWIG fragments.
+ */
+
+%fragment("NumPy_Macros", "header")
+{
+/* Macros to extract array attributes.
+ */
+%#define is_array(a) ((a) && PyArray_Check((PyArrayObject *)a))
+%#define array_type(a) (int)(PyArray_TYPE(a))
+%#define array_numdims(a) (((PyArrayObject *)a)->nd)
+%#define array_dimensions(a) (((PyArrayObject *)a)->dimensions)
+%#define array_size(a,i) (((PyArrayObject *)a)->dimensions[i])
+%#define array_data(a) (((PyArrayObject *)a)->data)
+%#define array_is_contiguous(a) (PyArray_ISCONTIGUOUS(a))
+%#define array_is_native(a) (PyArray_ISNOTSWAPPED(a))
+%#define array_is_fortran(a) (PyArray_ISFORTRAN(a))
+}
+
+/**********************************************************************/
+
+%fragment("NumPy_Utilities", "header")
+{
+ /* Given a PyObject, return a string describing its type.
+ */
+ const char* pytype_string(PyObject* py_obj) {
+ if (py_obj == NULL ) return "C NULL value";
+ if (py_obj == Py_None ) return "Python None" ;
+ if (PyCallable_Check(py_obj)) return "callable" ;
+ if (PyString_Check( py_obj)) return "string" ;
+ if (PyInt_Check( py_obj)) return "int" ;
+ if (PyFloat_Check( py_obj)) return "float" ;
+ if (PyDict_Check( py_obj)) return "dict" ;
+ if (PyList_Check( py_obj)) return "list" ;
+ if (PyTuple_Check( py_obj)) return "tuple" ;
+ if (PyModule_Check( py_obj)) return "module" ;
+%#if PY_MAJOR_VERSION < 3
+ if (PyFile_Check( py_obj)) return "file" ;
+ if (PyInstance_Check(py_obj)) return "instance" ;
+%#endif
+
+    return "unknown type";
+ }
+
+ /* Given a NumPy typecode, return a string describing the type.
+ */
+ const char* typecode_string(int typecode) {
+ static const char* type_names[25] = {"bool", "byte", "unsigned byte",
+ "short", "unsigned short", "int",
+ "unsigned int", "long", "unsigned long",
+ "long long", "unsigned long long",
+ "float", "double", "long double",
+ "complex float", "complex double",
+ "complex long double", "object",
+ "string", "unicode", "void", "ntypes",
+ "notype", "char", "unknown"};
+ return typecode < 24 ? type_names[typecode] : type_names[24];
+ }
+
+ /* Make sure input has correct numpy type. Allow character and byte
+ * to match. Also allow int and long to match. This is deprecated.
+ * You should use PyArray_EquivTypenums() instead.
+ */
+ int type_match(int actual_type, int desired_type) {
+ return PyArray_EquivTypenums(actual_type, desired_type);
+ }
+}
+
+/**********************************************************************/
+
+%fragment("NumPy_Object_to_Array", "header",
+ fragment="NumPy_Backward_Compatibility",
+ fragment="NumPy_Macros",
+ fragment="NumPy_Utilities")
+{
+ /* Given a PyObject pointer, cast it to a PyArrayObject pointer if
+ * legal. If not, set the python error string appropriately and
+ * return NULL.
+ */
+ PyArrayObject* obj_to_array_no_conversion(PyObject* input, int typecode)
+ {
+ PyArrayObject* ary = NULL;
+ if (is_array(input) && (typecode == NPY_NOTYPE ||
+ PyArray_EquivTypenums(array_type(input), typecode)))
+ {
+ ary = (PyArrayObject*) input;
+ }
+ else if is_array(input)
+ {
+ const char* desired_type = typecode_string(typecode);
+ const char* actual_type = typecode_string(array_type(input));
+ PyErr_Format(PyExc_TypeError,
+ "Array of type '%s' required. Array of type '%s' given",
+ desired_type, actual_type);
+ ary = NULL;
+ }
+ else
+ {
+ const char * desired_type = typecode_string(typecode);
+ const char * actual_type = pytype_string(input);
+ PyErr_Format(PyExc_TypeError,
+ "Array of type '%s' required. A '%s' was given",
+ desired_type, actual_type);
+ ary = NULL;
+ }
+ return ary;
+ }
+
+ /* Convert the given PyObject to a NumPy array with the given
+ * typecode. On success, return a valid PyArrayObject* with the
+ * correct type. On failure, the python error string will be set and
+ * the routine returns NULL.
+ */
+ PyArrayObject* obj_to_array_allow_conversion(PyObject* input, int typecode,
+ int* is_new_object)
+ {
+ PyArrayObject* ary = NULL;
+ PyObject* py_obj;
+ if (is_array(input) && (typecode == NPY_NOTYPE ||
+ PyArray_EquivTypenums(array_type(input),typecode)))
+ {
+ ary = (PyArrayObject*) input;
+ *is_new_object = 0;
+ }
+ else
+ {
+ py_obj = PyArray_FROMANY(input, typecode, 0, 0, NPY_DEFAULT);
+ /* If NULL, PyArray_FromObject will have set python error value.*/
+ ary = (PyArrayObject*) py_obj;
+ *is_new_object = 1;
+ }
+ return ary;
+ }
+
+ /* Given a PyArrayObject, check to see if it is contiguous. If so,
+ * return the input pointer and flag it as not a new object. If it is
+ * not contiguous, create a new PyArrayObject using the original data,
+ * flag it as a new object and return the pointer.
+ */
+ PyArrayObject* make_contiguous(PyArrayObject* ary, int* is_new_object,
+ int min_dims, int max_dims)
+ {
+ PyArrayObject* result;
+ if (array_is_contiguous(ary))
+ {
+ result = ary;
+ *is_new_object = 0;
+ }
+ else
+ {
+ result = (PyArrayObject*) PyArray_ContiguousFromObject((PyObject*)ary,
+ array_type(ary),
+ min_dims,
+ max_dims);
+ *is_new_object = 1;
+ }
+ return result;
+ }
+
+ /* Given a PyArrayObject, check to see if it is Fortran-contiguous.
+   * If so, return the input pointer and do not flag it as a new
+   * object. If it is not Fortran-contiguous, create a new
+ * PyArrayObject using the original data, flag it as a new object
+ * and return the pointer.
+ */
+ PyArrayObject* make_fortran(PyArrayObject* ary, int* is_new_object,
+ int min_dims, int max_dims)
+ {
+ PyArrayObject* result;
+ if (array_is_fortran(ary))
+ {
+ result = ary;
+ *is_new_object = 0;
+ }
+ else
+ {
+ Py_INCREF(ary->descr);
+ result = (PyArrayObject*) PyArray_FromArray(ary, ary->descr, NPY_FORTRAN);
+ *is_new_object = 1;
+ }
+ return result;
+ }
+
+ /* Convert a given PyObject to a contiguous PyArrayObject of the
+ * specified type. If the input object is not a contiguous
+ * PyArrayObject, a new one will be created and the new object flag
+ * will be set.
+ */
+ PyArrayObject* obj_to_array_contiguous_allow_conversion(PyObject* input,
+ int typecode,
+ int* is_new_object)
+ {
+ int is_new1 = 0;
+ int is_new2 = 0;
+ PyArrayObject* ary2;
+ PyArrayObject* ary1 = obj_to_array_allow_conversion(input, typecode,
+ &is_new1);
+ if (ary1)
+ {
+ ary2 = make_contiguous(ary1, &is_new2, 0, 0);
+ if ( is_new1 && is_new2)
+ {
+ Py_DECREF(ary1);
+ }
+ ary1 = ary2;
+ }
+ *is_new_object = is_new1 || is_new2;
+ return ary1;
+ }
+
+ /* Convert a given PyObject to a Fortran-ordered PyArrayObject of the
+ * specified type. If the input object is not a Fortran-ordered
+ * PyArrayObject, a new one will be created and the new object flag
+ * will be set.
+ */
+ PyArrayObject* obj_to_array_fortran_allow_conversion(PyObject* input,
+ int typecode,
+ int* is_new_object)
+ {
+ int is_new1 = 0;
+ int is_new2 = 0;
+ PyArrayObject* ary2;
+ PyArrayObject* ary1 = obj_to_array_allow_conversion(input, typecode,
+ &is_new1);
+ if (ary1)
+ {
+ ary2 = make_fortran(ary1, &is_new2, 0, 0);
+ if (is_new1 && is_new2)
+ {
+ Py_DECREF(ary1);
+ }
+ ary1 = ary2;
+ }
+ *is_new_object = is_new1 || is_new2;
+ return ary1;
+ }
+
+ /* The following code was added by Ilmar M. Wilbers for forcing a copy of the
+ * object even when it is a NumPy array. This is meant for use with the
+ * IN_ARRAY typemaps, and allows the user to perform changes on an array
+   * without these changes being reflected in the calling code.
+ */
+
+ /* Convert the given PyObject to a NumPy array with the given
+ * typecode as a copy. On success, return a valid PyArrayObject* with the
+ * correct type. On failure, the python error string will be set and
+ * the routine returns NULL.
+ */
+ PyArrayObject* obj_to_array_force_conversion(PyObject* input, int typecode,
+ int* is_new_object)
+ {
+ PyArrayObject* ary = NULL;
+ PyObject* py_obj;
+ if (is_array(input) && (typecode == NPY_NOTYPE ||
+ PyArray_EquivTypenums(array_type(input),typecode)))
+ {
+ py_obj = PyArray_Copy((PyArrayObject*) input);
+ ary = (PyArrayObject*) py_obj;
+ *is_new_object = 1;
+ }
+ else
+ {
+ py_obj = PyArray_FROMANY(input, typecode, 0, 0, NPY_DEFAULT);
+ /* If NULL, PyArray_FromObject will have set python error value.*/
+ ary = (PyArrayObject*) py_obj;
+ *is_new_object = 1;
+ }
+ return ary;
+ }
+
+ /* Convert a given PyObject to a contiguous PyArrayObject of the
+ * specified type. If the input object is not a contiguous
+ * PyArrayObject, a new one will be created and the new object flag
+ * will be set.
+ */
+ PyArrayObject* obj_to_array_contiguous_force_conversion(PyObject* input,
+ int typecode,
+ int* is_new_object)
+ {
+ int is_new1 = 0;
+ int is_new2 = 0;
+ PyArrayObject* ary2;
+ PyArrayObject* ary1 = obj_to_array_force_conversion(input, typecode,
+ &is_new1);
+ if (ary1)
+ {
+ ary2 = make_contiguous(ary1, &is_new2, 0, 0);
+ if ( is_new1 && is_new2)
+ {
+ Py_DECREF(ary1);
+ }
+ ary1 = ary2;
+ }
+ *is_new_object = is_new1 || is_new2;
+ return ary1;
+ }
+
+ /* Convert a given PyObject to a Fortran-ordered PyArrayObject of the
+ * specified type. If the input object is not a Fortran-ordered
+ * PyArrayObject, a new one will be created and the new object flag
+ * will be set.
+ */
+ PyArrayObject* obj_to_array_fortran_force_conversion(PyObject* input,
+ int typecode,
+ int* is_new_object)
+ {
+ int is_new1 = 0;
+ int is_new2 = 0;
+ PyArrayObject* ary2;
+ PyArrayObject* ary1 = obj_to_array_force_conversion(input, typecode,
+ &is_new1);
+ if (ary1)
+ {
+ ary2 = make_fortran(ary1, &is_new2, 0, 0);
+ if (is_new1 && is_new2)
+ {
+ Py_DECREF(ary1);
+ }
+ ary1 = ary2;
+ }
+ *is_new_object = is_new1 || is_new2;
+ return ary1;
+ }
+ /* End modifications by Ilmar M. Wilbers
+ */
+
+} /* end fragment */
+
+
+/**********************************************************************/
+
+%fragment("NumPy_Array_Requirements", "header",
+ fragment="NumPy_Backward_Compatibility",
+ fragment="NumPy_Macros")
+{
+ /* Test whether a python object is contiguous. If array is
+ * contiguous, return 1. Otherwise, set the python error string and
+ * return 0.
+ */
+ int require_contiguous(PyArrayObject* ary)
+ {
+ int contiguous = 1;
+ if (!array_is_contiguous(ary))
+ {
+ PyErr_SetString(PyExc_TypeError,
+ "Array must be contiguous. A non-contiguous array was given");
+ contiguous = 0;
+ }
+ return contiguous;
+ }
+
+ /* Require that a numpy array is not byte-swapped. If the array is
+ * not byte-swapped, return 1. Otherwise, set the python error string
+ * and return 0.
+ */
+ int require_native(PyArrayObject* ary)
+ {
+ int native = 1;
+ if (!array_is_native(ary))
+ {
+ PyErr_SetString(PyExc_TypeError,
+ "Array must have native byteorder. "
+ "A byte-swapped array was given");
+ native = 0;
+ }
+ return native;
+ }
+
+ /* Require the given PyArrayObject to have a specified number of
+ * dimensions. If the array has the specified number of dimensions,
+ * return 1. Otherwise, set the python error string and return 0.
+ */
+ int require_dimensions(PyArrayObject* ary, int exact_dimensions)
+ {
+ int success = 1;
+ if (array_numdims(ary) != exact_dimensions)
+ {
+ PyErr_Format(PyExc_TypeError,
+ "Array must have %d dimensions. Given array has %d dimensions",
+ exact_dimensions, array_numdims(ary));
+ success = 0;
+ }
+ return success;
+ }
+
+ /* Require the given PyArrayObject to have one of a list of specified
+ * number of dimensions. If the array has one of the specified number
+ * of dimensions, return 1. Otherwise, set the python error string
+ * and return 0.
+ */
+ int require_dimensions_n(PyArrayObject* ary, int* exact_dimensions, int n)
+ {
+ int success = 0;
+ int i;
+ char dims_str[255] = "";
+ char s[255];
+ for (i = 0; i < n && !success; i++)
+ {
+ if (array_numdims(ary) == exact_dimensions[i])
+ {
+ success = 1;
+ }
+ }
+ if (!success)
+ {
+ for (i = 0; i < n-1; i++)
+ {
+ sprintf(s, "%d, ", exact_dimensions[i]);
+ strcat(dims_str,s);
+ }
+ sprintf(s, " or %d", exact_dimensions[n-1]);
+ strcat(dims_str,s);
+ PyErr_Format(PyExc_TypeError,
+ "Array must have %s dimensions. Given array has %d dimensions",
+ dims_str, array_numdims(ary));
+ }
+ return success;
+ }
+
+ /* Require the given PyArrayObject to have a specified shape. If the
+ * array has the specified shape, return 1. Otherwise, set the python
+ * error string and return 0.
+ */
+ int require_size(PyArrayObject* ary, npy_intp* size, int n)
+ {
+ int i;
+ int success = 1;
+ int len;
+ char desired_dims[255] = "[";
+ char s[255];
+ char actual_dims[255] = "[";
+ for(i=0; i < n;i++)
+ {
+ if (size[i] != -1 && size[i] != array_size(ary,i))
+ {
+ success = 0;
+ }
+ }
+ if (!success)
+ {
+ for (i = 0; i < n; i++)
+ {
+ if (size[i] == -1)
+ {
+ sprintf(s, "*,");
+ }
+ else
+ {
+ sprintf(s, "%ld,", (long int)size[i]);
+ }
+ strcat(desired_dims,s);
+ }
+ len = strlen(desired_dims);
+ desired_dims[len-1] = ']';
+ for (i = 0; i < n; i++)
+ {
+ sprintf(s, "%ld,", (long int)array_size(ary,i));
+ strcat(actual_dims,s);
+ }
+ len = strlen(actual_dims);
+ actual_dims[len-1] = ']';
+ PyErr_Format(PyExc_TypeError,
+ "Array must have shape of %s. Given array has shape of %s",
+ desired_dims, actual_dims);
+ }
+ return success;
+ }
+
+  /* Require the given PyArrayObject to be FORTRAN ordered. If the
+   * PyArrayObject is already FORTRAN ordered, do nothing. Else,
+ * set the FORTRAN ordering flag and recompute the strides.
+ */
+ int require_fortran(PyArrayObject* ary)
+ {
+ int success = 1;
+ int nd = array_numdims(ary);
+ int i;
+ if (array_is_fortran(ary)) return success;
+ /* Set the FORTRAN ordered flag */
+ ary->flags = NPY_FARRAY;
+ /* Recompute the strides */
+ ary->strides[0] = ary->strides[nd-1];
+ for (i=1; i < nd; ++i)
+ ary->strides[i] = ary->strides[i-1] * array_size(ary,i-1);
+ return success;
+ }
+}
+
+/* Combine all NumPy fragments into one for convenience */
+%fragment("NumPy_Fragments", "header",
+ fragment="NumPy_Backward_Compatibility",
+ fragment="NumPy_Macros",
+ fragment="NumPy_Utilities",
+ fragment="NumPy_Object_to_Array",
+ fragment="NumPy_Array_Requirements") { }
+
+/* End John Hunter translation (with modifications by Bill Spotz)
+ */
+
+/* %numpy_typemaps() macro
+ *
+ * This macro defines a family of 42 typemaps that allow C arguments
+ * of the form
+ *
+ * (DATA_TYPE IN_ARRAY1[ANY])
+ * (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1)
+ * (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1)
+ *
+ * (DATA_TYPE IN_ARRAY2[ANY][ANY])
+ * (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2)
+ * (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2)
+ *
+ * (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY])
+ * (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3)
+ * (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3)
+ *
+ * (DATA_TYPE INPLACE_ARRAY1[ANY])
+ * (DATA_TYPE* INPLACE_ARRAY1)
+ * (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1)
+ * (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1)
+ *
+ * (DATA_TYPE INPLACE_ARRAY2[ANY][ANY])
+ * (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2)
+ * (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2)
+ *
+ * (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY])
+ * (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3)
+ * (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ * (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3)
+ *
+ * (DATA_TYPE ARGOUT_ARRAY1[ANY])
+ * (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1)
+ * (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1)
+ *
+ * (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY])
+ *
+ * (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY])
+ *
+ * (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1)
+ * (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1)
+ *
+ * (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+ * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2)
+ * (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+ * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2)
+ *
+ * (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+ * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3)
+ * (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+ * (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3)
+ *
+ * where "DATA_TYPE" is any type supported by the NumPy module, and
+ * "DIM_TYPE" is any int-like type suitable for specifying dimensions.
+ * The difference between "ARRAY" typemaps and "FARRAY" typemaps is
+ * that the "FARRAY" typemaps expect FORTRAN ordering of
+ * multidimensional arrays. In python, the dimensions will not need
+ * to be specified (except for the "DATA_TYPE* ARGOUT_ARRAY1"
+ * typemaps). The IN_ARRAYs can be a numpy array or any sequence that
+ * can be converted to a numpy array of the specified type. The
+ * INPLACE_ARRAYs must be numpy arrays of the appropriate type. The
+ * ARGOUT_ARRAYs will be returned as new numpy arrays of the
+ * appropriate type.
+ *
+ * These typemaps can be applied to existing functions using the
+ * %apply directive. For example:
+ *
+ * %apply (double* IN_ARRAY1, int DIM1) {(double* series, int length)};
+ * double prod(double* series, int length);
+ *
+ * %apply (int DIM1, int DIM2, double* INPLACE_ARRAY2)
+ * {(int rows, int cols, double* matrix )};
+ * void floor(int rows, int cols, double* matrix, double f);
+ *
+ * %apply (double IN_ARRAY3[ANY][ANY][ANY])
+ * {(double tensor[2][2][2] )};
+ * %apply (double ARGOUT_ARRAY3[ANY][ANY][ANY])
+ * {(double low[2][2][2] )};
+ * %apply (double ARGOUT_ARRAY3[ANY][ANY][ANY])
+ * {(double upp[2][2][2] )};
+ * void luSplit(double tensor[2][2][2],
+ * double low[2][2][2],
+ * double upp[2][2][2] );
+ *
+ * or directly with
+ *
+ * double prod(double* IN_ARRAY1, int DIM1);
+ *
+ * void floor(int DIM1, int DIM2, double* INPLACE_ARRAY2, double f);
+ *
+ * void luSplit(double IN_ARRAY3[ANY][ANY][ANY],
+ * double ARGOUT_ARRAY3[ANY][ANY][ANY],
+ * double ARGOUT_ARRAY3[ANY][ANY][ANY]);
+ */
+
+%define %numpy_typemaps(DATA_TYPE, DATA_TYPECODE, DIM_TYPE)
+
+/************************/
+/* Input Array Typemaps */
+/************************/
+
+/* Typemap suite for (DATA_TYPE IN_ARRAY1[ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE IN_ARRAY1[ANY])
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE IN_ARRAY1[ANY])
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[1] = { $1_dim0 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 1) ||
+ !require_size(array, size, 1)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(freearg)
+ (DATA_TYPE IN_ARRAY1[ANY])
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[1] = { -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 1) ||
+ !require_size(array, size, 1)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+}
+%typemap(freearg)
+ (DATA_TYPE* IN_ARRAY1, DIM_TYPE DIM1)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[1] = {-1};
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 1) ||
+ !require_size(array, size, 1)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DATA_TYPE*) array_data(array);
+}
+%typemap(freearg)
+ (DIM_TYPE DIM1, DATA_TYPE* IN_ARRAY1)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE IN_ARRAY2[ANY][ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE IN_ARRAY2[ANY][ANY])
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE IN_ARRAY2[ANY][ANY])
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[2] = { $1_dim0, $1_dim1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 2) ||
+ !require_size(array, size, 2)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(freearg)
+ (DATA_TYPE IN_ARRAY2[ANY][ANY])
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[2] = { -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 2) ||
+ !require_size(array, size, 2)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+}
+%typemap(freearg)
+ (DATA_TYPE* IN_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[2] = { -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 2) ||
+ !require_size(array, size, 2)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DATA_TYPE*) array_data(array);
+}
+%typemap(freearg)
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_ARRAY2)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[2] = { -1, -1 };
+ array = obj_to_array_fortran_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 2) ||
+ !require_size(array, size, 2) || !require_fortran(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+}
+%typemap(freearg)
+ (DATA_TYPE* IN_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[2] = { -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 2) ||
+ !require_size(array, size, 2) || !require_fortran(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DATA_TYPE*) array_data(array);
+}
+%typemap(freearg)
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* IN_FARRAY2)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY])
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY])
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[3] = { $1_dim0, $1_dim1, $1_dim2 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 3) ||
+ !require_size(array, size, 3)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(freearg)
+ (DATA_TYPE IN_ARRAY3[ANY][ANY][ANY])
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2,
+ * DIM_TYPE DIM3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[3] = { -1, -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 3) ||
+ !require_size(array, size, 3)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+ $4 = (DIM_TYPE) array_size(array,2);
+}
+%typemap(freearg)
+ (DATA_TYPE* IN_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3,
+ * DATA_TYPE* IN_ARRAY3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[3] = { -1, -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 3) ||
+ !require_size(array, size, 3)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DIM_TYPE) array_size(array,2);
+ $4 = (DATA_TYPE*) array_data(array);
+}
+%typemap(freearg)
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_ARRAY3)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2,
+ * DIM_TYPE DIM3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[3] = { -1, -1, -1 };
+ array = obj_to_array_fortran_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 3) ||
+      !require_size(array, size, 3) || !require_fortran(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+ $4 = (DIM_TYPE) array_size(array,2);
+}
+%typemap(freearg)
+ (DATA_TYPE* IN_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3,
+ * DATA_TYPE* IN_FARRAY3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3)
+{
+ $1 = is_array($input) || PySequence_Check($input);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3)
+ (PyArrayObject* array=NULL, int is_new_object=0)
+{
+ npy_intp size[3] = { -1, -1, -1 };
+ array = obj_to_array_contiguous_force_conversion($input, DATA_TYPECODE,
+ &is_new_object);
+ if (!array || !require_dimensions(array, 3) ||
+ !require_size(array, size, 3) || !require_fortran(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DIM_TYPE) array_size(array,2);
+ $4 = (DATA_TYPE*) array_data(array);
+}
+%typemap(freearg)
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* IN_FARRAY3)
+{
+ if (is_new_object$argnum && array$argnum)
+ { Py_DECREF(array$argnum); }
+}
+
+/***************************/
+/* In-Place Array Typemaps */
+/***************************/
+
+/* Typemap suite for (DATA_TYPE INPLACE_ARRAY1[ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE INPLACE_ARRAY1[ANY])
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE INPLACE_ARRAY1[ANY])
+ (PyArrayObject* array=NULL)
+{
+ npy_intp size[1] = { $1_dim0 };
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,1) || !require_size(array, size, 1) ||
+ !require_contiguous(array) || !require_native(array)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY1)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_ARRAY1)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_ARRAY1)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,1) || !require_contiguous(array)
+ || !require_native(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_ARRAY1, DIM_TYPE DIM1)
+ (PyArrayObject* array=NULL, int i=1)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,1) || !require_contiguous(array)
+ || !require_native(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = 1;
+ for (i=0; i < array_numdims(array); ++i) $2 *= array_size(array,i);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DATA_TYPE* INPLACE_ARRAY1)
+ (PyArrayObject* array=NULL, int i=0)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,1) || !require_contiguous(array)
+ || !require_native(array)) SWIG_fail;
+ $1 = 1;
+ for (i=0; i < array_numdims(array); ++i) $1 *= array_size(array,i);
+ $2 = (DATA_TYPE*) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE INPLACE_ARRAY2[ANY][ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE INPLACE_ARRAY2[ANY][ANY])
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE INPLACE_ARRAY2[ANY][ANY])
+ (PyArrayObject* array=NULL)
+{
+ npy_intp size[2] = { $1_dim0, $1_dim1 };
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,2) || !require_size(array, size, 2) ||
+ !require_contiguous(array) || !require_native(array)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_ARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,2) || !require_contiguous(array)
+ || !require_native(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_ARRAY2)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,2) || !require_contiguous(array) ||
+ !require_native(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DATA_TYPE*) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_FARRAY2, DIM_TYPE DIM1, DIM_TYPE DIM2)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,2) || !require_contiguous(array)
+ || !require_native(array) || !require_fortran(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DATA_TYPE* INPLACE_FARRAY2)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,2) || !require_contiguous(array) ||
+ !require_native(array) || !require_fortran(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DATA_TYPE*) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY])
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY])
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE INPLACE_ARRAY3[ANY][ANY][ANY])
+ (PyArrayObject* array=NULL)
+{
+ npy_intp size[3] = { $1_dim0, $1_dim1, $1_dim2 };
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,3) || !require_size(array, size, 3) ||
+ !require_contiguous(array) || !require_native(array)) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2,
+ * DIM_TYPE DIM3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_ARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,3) || !require_contiguous(array) ||
+ !require_native(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+ $4 = (DIM_TYPE) array_size(array,2);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3,
+ * DATA_TYPE* INPLACE_ARRAY3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_ARRAY3)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,3) || !require_contiguous(array)
+ || !require_native(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DIM_TYPE) array_size(array,2);
+ $4 = (DATA_TYPE*) array_data(array);
+}
+
+/* Typemap suite for (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2,
+ * DIM_TYPE DIM3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* INPLACE_FARRAY3, DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,3) || !require_contiguous(array) ||
+ !require_native(array) || !require_fortran(array)) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+ $2 = (DIM_TYPE) array_size(array,0);
+ $3 = (DIM_TYPE) array_size(array,1);
+ $4 = (DIM_TYPE) array_size(array,2);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3,
+ * DATA_TYPE* INPLACE_FARRAY3)
+ */
+%typecheck(SWIG_TYPECHECK_DOUBLE_ARRAY,
+ fragment="NumPy_Macros")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3)
+{
+ $1 = is_array($input) && PyArray_EquivTypenums(array_type($input),
+ DATA_TYPECODE);
+}
+%typemap(in,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DIM_TYPE DIM2, DIM_TYPE DIM3, DATA_TYPE* INPLACE_FARRAY3)
+ (PyArrayObject* array=NULL)
+{
+ array = obj_to_array_no_conversion($input, DATA_TYPECODE);
+ if (!array || !require_dimensions(array,3) || !require_contiguous(array)
+ || !require_native(array) || !require_fortran(array)) SWIG_fail;
+ $1 = (DIM_TYPE) array_size(array,0);
+ $2 = (DIM_TYPE) array_size(array,1);
+ $3 = (DIM_TYPE) array_size(array,2);
+ $4 = (DATA_TYPE*) array_data(array);
+}
+
+/*************************/
+/* Argout Array Typemaps */
+/*************************/
+
+/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY1[ANY])
+ */
+%typemap(in,numinputs=0,
+ fragment="NumPy_Backward_Compatibility,NumPy_Macros")
+ (DATA_TYPE ARGOUT_ARRAY1[ANY])
+ (PyObject * array = NULL)
+{
+ npy_intp dims[1] = { $1_dim0 };
+ array = PyArray_SimpleNew(1, dims, DATA_TYPECODE);
+ if (!array) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(argout)
+ (DATA_TYPE ARGOUT_ARRAY1[ANY])
+{
+ $result = SWIG_Python_AppendOutput($result,array$argnum);
+}
+
+/* Typemap suite for (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1)
+ */
+%typemap(in,numinputs=1,
+ fragment="NumPy_Fragments")
+ (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1)
+ (PyObject * array = NULL)
+{
+ npy_intp dims[1];
+ if (!PyInt_Check($input))
+ {
+ const char* typestring = pytype_string($input);
+ PyErr_Format(PyExc_TypeError,
+ "Int dimension expected. '%s' given.",
+ typestring);
+ SWIG_fail;
+ }
+ $2 = (DIM_TYPE) PyInt_AsLong($input);
+ dims[0] = (npy_intp) $2;
+ array = PyArray_SimpleNew(1, dims, DATA_TYPECODE);
+ if (!array) SWIG_fail;
+ $1 = (DATA_TYPE*) array_data(array);
+}
+%typemap(argout)
+ (DATA_TYPE* ARGOUT_ARRAY1, DIM_TYPE DIM1)
+{
+ $result = SWIG_Python_AppendOutput($result,array$argnum);
+}
+
+/* Typemap suite for (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1)
+ */
+%typemap(in,numinputs=1,
+ fragment="NumPy_Fragments")
+ (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1)
+ (PyObject * array = NULL)
+{
+ npy_intp dims[1];
+ if (!PyInt_Check($input))
+ {
+ const char* typestring = pytype_string($input);
+ PyErr_Format(PyExc_TypeError,
+ "Int dimension expected. '%s' given.",
+ typestring);
+ SWIG_fail;
+ }
+ $1 = (DIM_TYPE) PyInt_AsLong($input);
+ dims[0] = (npy_intp) $1;
+ array = PyArray_SimpleNew(1, dims, DATA_TYPECODE);
+ if (!array) SWIG_fail;
+ $2 = (DATA_TYPE*) array_data(array);
+}
+%typemap(argout)
+ (DIM_TYPE DIM1, DATA_TYPE* ARGOUT_ARRAY1)
+{
+ $result = SWIG_Python_AppendOutput($result,array$argnum);
+}
+
+/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY])
+ */
+%typemap(in,numinputs=0,
+ fragment="NumPy_Backward_Compatibility,NumPy_Macros")
+ (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY])
+ (PyObject * array = NULL)
+{
+ npy_intp dims[2] = { $1_dim0, $1_dim1 };
+ array = PyArray_SimpleNew(2, dims, DATA_TYPECODE);
+ if (!array) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(argout)
+ (DATA_TYPE ARGOUT_ARRAY2[ANY][ANY])
+{
+ $result = SWIG_Python_AppendOutput($result,array$argnum);
+}
+
+/* Typemap suite for (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY])
+ */
+%typemap(in,numinputs=0,
+ fragment="NumPy_Backward_Compatibility,NumPy_Macros")
+ (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY])
+ (PyObject * array = NULL)
+{
+ npy_intp dims[3] = { $1_dim0, $1_dim1, $1_dim2 };
+ array = PyArray_SimpleNew(3, dims, DATA_TYPECODE);
+ if (!array) SWIG_fail;
+ $1 = ($1_ltype) array_data(array);
+}
+%typemap(argout)
+ (DATA_TYPE ARGOUT_ARRAY3[ANY][ANY][ANY])
+{
+ $result = SWIG_Python_AppendOutput($result,array$argnum);
+}
+
+/*****************************/
+/* Argoutview Array Typemaps */
+/*****************************/
+
+/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1)
+ */
+%typemap(in,numinputs=0)
+ (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1 )
+ (DATA_TYPE* data_temp , DIM_TYPE dim_temp)
+{
+ $1 = &data_temp;
+ $2 = &dim_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DATA_TYPE** ARGOUTVIEW_ARRAY1, DIM_TYPE* DIM1)
+{
+ npy_intp dims[1] = { *$2 };
+ PyObject * array = PyArray_SimpleNewFromData(1, dims, DATA_TYPECODE, (void*)(*$1));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1)
+ */
+%typemap(in,numinputs=0)
+ (DIM_TYPE* DIM1 , DATA_TYPE** ARGOUTVIEW_ARRAY1)
+ (DIM_TYPE dim_temp, DATA_TYPE* data_temp )
+{
+ $1 = &dim_temp;
+ $2 = &data_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DIM_TYPE* DIM1, DATA_TYPE** ARGOUTVIEW_ARRAY1)
+{
+ npy_intp dims[1] = { *$1 };
+ PyObject * array = PyArray_SimpleNewFromData(1, dims, DATA_TYPECODE, (void*)(*$2));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+ */
+%typemap(in,numinputs=0)
+ (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1 , DIM_TYPE* DIM2 )
+ (DATA_TYPE* data_temp , DIM_TYPE dim1_temp, DIM_TYPE dim2_temp)
+{
+ $1 = &data_temp;
+ $2 = &dim1_temp;
+ $3 = &dim2_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DATA_TYPE** ARGOUTVIEW_ARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+{
+ npy_intp dims[2] = { *$2, *$3 };
+ PyObject * array = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$1));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2)
+ */
+%typemap(in,numinputs=0)
+ (DIM_TYPE* DIM1 , DIM_TYPE* DIM2 , DATA_TYPE** ARGOUTVIEW_ARRAY2)
+ (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DATA_TYPE* data_temp )
+{
+ $1 = &dim1_temp;
+ $2 = &dim2_temp;
+ $3 = &data_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_ARRAY2)
+{
+ npy_intp dims[2] = { *$1, *$2 };
+ PyObject * array = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$3));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+ */
+%typemap(in,numinputs=0)
+ (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1 , DIM_TYPE* DIM2 )
+ (DATA_TYPE* data_temp , DIM_TYPE dim1_temp, DIM_TYPE dim2_temp)
+{
+ $1 = &data_temp;
+ $2 = &dim1_temp;
+ $3 = &dim2_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements")
+ (DATA_TYPE** ARGOUTVIEW_FARRAY2, DIM_TYPE* DIM1, DIM_TYPE* DIM2)
+{
+ npy_intp dims[2] = { *$2, *$3 };
+ PyObject * obj = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$1));
+ PyArrayObject * array = (PyArrayObject*) obj;
+ if (!array || !require_fortran(array)) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,obj);
+}
+
+/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2)
+ */
+%typemap(in,numinputs=0)
+ (DIM_TYPE* DIM1 , DIM_TYPE* DIM2 , DATA_TYPE** ARGOUTVIEW_FARRAY2)
+ (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DATA_TYPE* data_temp )
+{
+ $1 = &dim1_temp;
+ $2 = &dim2_temp;
+ $3 = &data_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements")
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DATA_TYPE** ARGOUTVIEW_FARRAY2)
+{
+ npy_intp dims[2] = { *$1, *$2 };
+ PyObject * obj = PyArray_SimpleNewFromData(2, dims, DATA_TYPECODE, (void*)(*$3));
+ PyArrayObject * array = (PyArrayObject*) obj;
+ if (!array || !require_fortran(array)) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,obj);
+}
+
+/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2,
+ DIM_TYPE* DIM3)
+ */
+%typemap(in,numinputs=0)
+ (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+ (DATA_TYPE* data_temp, DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp)
+{
+ $1 = &data_temp;
+ $2 = &dim1_temp;
+ $3 = &dim2_temp;
+ $4 = &dim3_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DATA_TYPE** ARGOUTVIEW_ARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+{
+ npy_intp dims[3] = { *$2, *$3, *$4 };
+ PyObject * array = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$1));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3,
+ DATA_TYPE** ARGOUTVIEW_ARRAY3)
+ */
+%typemap(in,numinputs=0)
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3)
+ (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp, DATA_TYPE* data_temp)
+{
+ $1 = &dim1_temp;
+ $2 = &dim2_temp;
+ $3 = &dim3_temp;
+ $4 = &data_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility")
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_ARRAY3)
+{
+ npy_intp dims[3] = { *$1, *$2, *$3 };
+  PyObject * array = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$4));
+ if (!array) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,array);
+}
+
+/* Typemap suite for (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2,
+ DIM_TYPE* DIM3)
+ */
+%typemap(in,numinputs=0)
+ (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+ (DATA_TYPE* data_temp, DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp)
+{
+ $1 = &data_temp;
+ $2 = &dim1_temp;
+ $3 = &dim2_temp;
+ $4 = &dim3_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements")
+ (DATA_TYPE** ARGOUTVIEW_FARRAY3, DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3)
+{
+ npy_intp dims[3] = { *$2, *$3, *$4 };
+ PyObject * obj = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$1));
+ PyArrayObject * array = (PyArrayObject*) obj;
+  if (!array || !require_fortran(array)) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,obj);
+}
+
+/* Typemap suite for (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3,
+ DATA_TYPE** ARGOUTVIEW_FARRAY3)
+ */
+%typemap(in,numinputs=0)
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3)
+ (DIM_TYPE dim1_temp, DIM_TYPE dim2_temp, DIM_TYPE dim3_temp, DATA_TYPE* data_temp)
+{
+ $1 = &dim1_temp;
+ $2 = &dim2_temp;
+ $3 = &dim3_temp;
+ $4 = &data_temp;
+}
+%typemap(argout,
+ fragment="NumPy_Backward_Compatibility,NumPy_Array_Requirements")
+ (DIM_TYPE* DIM1, DIM_TYPE* DIM2, DIM_TYPE* DIM3, DATA_TYPE** ARGOUTVIEW_FARRAY3)
+{
+ npy_intp dims[3] = { *$1, *$2, *$3 };
+  PyObject * obj = PyArray_SimpleNewFromData(3, dims, DATA_TYPECODE, (void*)(*$4));
+ PyArrayObject * array = (PyArrayObject*) obj;
+  if (!array || !require_fortran(array)) SWIG_fail;
+ $result = SWIG_Python_AppendOutput($result,obj);
+}
+
+%enddef /* %numpy_typemaps() macro */
+/* *************************************************************** */
+
+/* Concrete instances of the %numpy_typemaps() macro: Each invocation
+ * below applies all of the typemaps above to the specified data type.
+ */
+%numpy_typemaps(signed char , NPY_BYTE , int)
+%numpy_typemaps(unsigned char , NPY_UBYTE , int)
+%numpy_typemaps(short , NPY_SHORT , int)
+%numpy_typemaps(unsigned short , NPY_USHORT , int)
+%numpy_typemaps(int , NPY_INT , int)
+%numpy_typemaps(unsigned int , NPY_UINT , int)
+%numpy_typemaps(long , NPY_LONG , int)
+%numpy_typemaps(unsigned long , NPY_ULONG , int)
+%numpy_typemaps(long long , NPY_LONGLONG , int)
+%numpy_typemaps(unsigned long long, NPY_ULONGLONG, int)
+%numpy_typemaps(float , NPY_FLOAT , int)
+%numpy_typemaps(double , NPY_DOUBLE , int)
+
+/* ***************************************************************
+ * The following macro expansion does not work, because C++ bool is 4
+ * bytes and NPY_BOOL is 1 byte
+ *
+ * %numpy_typemaps(bool, NPY_BOOL, int)
+ */
+
+/* ***************************************************************
+ * On my Mac, I get the following warning for this macro expansion:
+ * 'swig/python detected a memory leak of type 'long double *', no destructor found.'
+ *
+ * %numpy_typemaps(long double, NPY_LONGDOUBLE, int)
+ */
+
+/* ***************************************************************
+ * Swig complains about a syntax error for the following macro
+ * expansions:
+ *
+ * %numpy_typemaps(complex float, NPY_CFLOAT , int)
+ *
+ * %numpy_typemaps(complex double, NPY_CDOUBLE, int)
+ *
+ * %numpy_typemaps(complex long double, NPY_CLONGDOUBLE, int)
+ */
+
+#endif /* SWIGPYTHON */
diff --git a/bindings/python/Makefile b/bindings/python/Makefile
new file mode 100644
index 0000000..e63c124
--- /dev/null
+++ b/bindings/python/Makefile
@@ -0,0 +1,186 @@
+# Makefile for Sphinx documentation
+#
+
+# You can set these variables from the command line.
+SPHINXOPTS =
+SPHINXBUILD = sphinx-build
+PAPER =
+BUILDDIR = _build
+
+# User-friendly check for sphinx-build
+ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
+$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/)
+endif
+
+# Internal variables.
+PAPEROPT_a4 = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
+
+help:
+ @echo "Please use \`make <target>' where <target> is one of"
+ @echo " html to make standalone HTML files"
+ @echo " dirhtml to make HTML files named index.html in directories"
+ @echo " singlehtml to make a single large HTML file"
+ @echo " pickle to make pickle files"
+ @echo " json to make JSON files"
+ @echo " htmlhelp to make HTML files and a HTML help project"
+ @echo " qthelp to make HTML files and a qthelp project"
+ @echo " devhelp to make HTML files and a Devhelp project"
+ @echo " epub to make an epub"
+ @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+ @echo " latexpdf to make LaTeX files and run them through pdflatex"
+ @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
+ @echo " text to make text files"
+ @echo " man to make manual pages"
+ @echo " texinfo to make Texinfo files"
+ @echo " info to make Texinfo files and run them through makeinfo"
+ @echo " gettext to make PO message catalogs"
+ @echo " changes to make an overview of all changed/added/deprecated items"
+ @echo " xml to make Docutils-native XML files"
+ @echo " pseudoxml to make pseudoxml-XML files for display purposes"
+ @echo " linkcheck to check all external links for integrity"
+ @echo " doctest to run all doctests embedded in the documentation (if enabled)"
+
+modules.rst lilv.rst:
+ mkdir -p lilv
+ ln -s -t lilv ../lilv.py
+ sphinx-apidoc -o . lilv
+
+clean:
+ rm -rf $(BUILDDIR)/*
+ rm -f lilv/lilv.py
+ rm -rf lilv
+ rm -f lilv.rst
+ rm -f modules.rst
+
+html:
+ $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
+
+dirhtml:
+ $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+ @echo
+ @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml: modules.rst lilv.rst
+ $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+ @echo
+ @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+ $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+ @echo
+ @echo "Build finished; now you can process the pickle files."
+
+json:
+ $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+ @echo
+ @echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+ $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+ @echo
+ @echo "Build finished; now you can run HTML Help Workshop with the" \
+ ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+ $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+ @echo
+ @echo "Build finished; now you can run "qcollectiongenerator" with the" \
+ ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+ @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Lilv.qhcp"
+ @echo "To view the help file:"
+ @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Lilv.qhc"
+
+devhelp:
+ $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+ @echo
+ @echo "Build finished."
+ @echo "To view the help file:"
+ @echo "# mkdir -p $$HOME/.local/share/devhelp/Lilv"
+ @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Lilv"
+ @echo "# devhelp"
+
+epub:
+ $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+ @echo
+ @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo
+ @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+ @echo "Run \`make' in that directory to run these through (pdf)latex" \
+ "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through pdflatex..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+latexpdfja:
+ $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+ @echo "Running LaTeX files through platex and dvipdfmx..."
+ $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
+ @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+ $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+ @echo
+ @echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+ $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+ @echo
+ @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo
+ @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+ @echo "Run \`make' in that directory to run these through makeinfo" \
+ "(use \`make info' here to do that automatically)."
+
+info:
+ $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+ @echo "Running Texinfo files through makeinfo..."
+ make -C $(BUILDDIR)/texinfo info
+ @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+ $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+ @echo
+ @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+ $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+ @echo
+ @echo "The overview file is in $(BUILDDIR)/changes."
+
+linkcheck:
+ $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
+ @echo
+ @echo "Link check complete; look for any errors in the above output " \
+ "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+ $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+ @echo "Testing of doctests in the sources finished, look at the " \
+ "results in $(BUILDDIR)/doctest/output.txt."
+
+xml:
+ $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
+ @echo
+ @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
+
+pseudoxml:
+ $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
+ @echo
+ @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
diff --git a/bindings/python/conf.py b/bindings/python/conf.py
new file mode 100644
index 0000000..576919e
--- /dev/null
+++ b/bindings/python/conf.py
@@ -0,0 +1,263 @@
+# -*- coding: utf-8 -*-
+#
+# Lilv documentation build configuration file, created by
+# sphinx-quickstart on Sun Sep 4 18:25:58 2016.
+#
+# This file is execfile()d with the current directory set to its
+# containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import sys
+import os
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration ------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be
+# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
+# ones.
+extensions = [
+ 'sphinx.ext.autodoc',
+ 'sphinx.ext.ifconfig',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Lilv'
+copyright = u'2016, David Robillard'
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+version = '0.24.2'
+# The full version, including alpha/beta/rc tags.
+release = '0.24.2'
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = ['_build']
+
+# The reST default role (used for this markup: `text`) to use for all
+# documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+# If true, keep warnings as "system message" paragraphs in the built documents.
+#keep_warnings = False
+
+
+# -- Options for HTML output ----------------------------------------------
+
+# The theme to use for HTML and HTML Help pages. See the documentation for
+# a list of builtin themes.
+#html_theme = ''
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further. For a list of options available for each theme, see the
+# documentation.
+html_theme_options = { 'nosidebar': True }
+
+# Add any paths that contain custom themes here, relative to this directory.
+#html_theme_path = []
+
+# The name for this set of Sphinx documents. If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar. Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+#html_logo = None
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+#html_favicon = None
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# Add any extra paths that contain custom files (such as robots.txt or
+# .htaccess) here, relative to this directory. These files are copied
+# directly to the root of the documentation.
+#html_extra_path = []
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it. The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'Lilvdoc'
+
+
+# -- Options for LaTeX output ---------------------------------------------
+
+latex_elements = {
+# The paper size ('letterpaper' or 'a4paper').
+#'papersize': 'letterpaper',
+
+# The font size ('10pt', '11pt' or '12pt').
+#'pointsize': '10pt',
+
+# Additional stuff for the LaTeX preamble.
+#'preamble': '',
+}
+
+# Grouping the document tree into LaTeX files. List of tuples
+# (source start file, target name, title,
+# author, documentclass [howto, manual, or own class]).
+latex_documents = [
+ ('index', 'Lilv.tex', u'Lilv Documentation',
+ u'David Robillard', 'manual'),
+]
+
+# The name of an image file (relative to this directory) to place at the top of
+# the title page.
+#latex_logo = None
+
+# For "manual" documents, if this is true, then toplevel headings are parts,
+# not chapters.
+#latex_use_parts = False
+
+# If true, show page references after internal links.
+#latex_show_pagerefs = False
+
+# If true, show URL addresses after external links.
+#latex_show_urls = False
+
+# Documents to append as an appendix to all manuals.
+#latex_appendices = []
+
+# If false, no module index is generated.
+#latex_domain_indices = True
+
+
+# -- Options for manual page output ---------------------------------------
+
+# One entry per manual page. List of tuples
+# (source start file, name, description, authors, manual section).
+man_pages = [
+ ('index', 'lilv', u'Lilv Documentation',
+ [u'David Robillard'], 1)
+]
+
+# If true, show URL addresses after external links.
+#man_show_urls = False
+
+
+# -- Options for Texinfo output -------------------------------------------
+
+# Grouping the document tree into Texinfo files. List of tuples
+# (source start file, target name, title, author,
+# dir menu entry, description, category)
+texinfo_documents = [
+ ('index', 'Lilv', u'Lilv Documentation',
+ u'David Robillard', 'Lilv', 'One line description of project.',
+ 'Miscellaneous'),
+]
+
+# Documents to append as an appendix to all manuals.
+#texinfo_appendices = []
+
+# If false, no module index is generated.
+#texinfo_domain_indices = True
+
+# How to display URL addresses: 'footnote', 'no', or 'inline'.
+#texinfo_show_urls = 'footnote'
+
+# If true, do not generate a @detailmenu in the "Top" node's menu.
+#texinfo_no_detailmenu = False
diff --git a/bindings/python/index.rst b/bindings/python/index.rst
new file mode 100644
index 0000000..4616054
--- /dev/null
+++ b/bindings/python/index.rst
@@ -0,0 +1,9 @@
+Lilv Python Documentation
+=========================
+
+
+.. toctree::
+
+.. automodule:: lilv
+ :noindex:
+ :members:
diff --git a/bindings/python/lilv.py b/bindings/python/lilv.py
new file mode 100644
index 0000000..024bfe7
--- /dev/null
+++ b/bindings/python/lilv.py
@@ -0,0 +1,1775 @@
+"""Lilv Python interface"""
+
+__author__ = "David Robillard"
+__copyright__ = "Copyright 2016 David Robillard"
+__license__ = "ISC"
+__version__ = "0.22.1"
+__maintainer__ = "David Robillard"
+__email__ = "d@drobilla.net"
+__status__ = "Production"
+
+import ctypes
+import os
+import sys
+
+from ctypes import Structure, CDLL, POINTER, CFUNCTYPE
+from ctypes import c_bool, c_double, c_float, c_int, c_size_t, c_uint, c_uint32
+from ctypes import c_char, c_char_p, c_void_p
+from ctypes import byref
+
+# Load lilv library
+
+_lib = CDLL("liblilv-0.so")
+
+# Set namespaced aliases for all lilv functions
+
+class String(str):
+ # Wrapper for string parameters to pass as raw C UTF-8 strings
+ def from_param(cls, obj):
+ return obj.encode('utf-8')
+
+ from_param = classmethod(from_param)
+
+def _as_uri(obj):
+ if type(obj) in [Plugin, PluginClass, UI]:
+ return obj.get_uri()
+ else:
+ return obj
+
+free = _lib.lilv_free
+# uri_to_path = _lib.lilv_uri_to_path
+file_uri_parse = _lib.lilv_file_uri_parse
+new_uri = _lib.lilv_new_uri
+new_file_uri = _lib.lilv_new_file_uri
+new_string = _lib.lilv_new_string
+new_int = _lib.lilv_new_int
+new_float = _lib.lilv_new_float
+new_bool = _lib.lilv_new_bool
+node_free = _lib.lilv_node_free
+node_duplicate = _lib.lilv_node_duplicate
+node_equals = _lib.lilv_node_equals
+node_get_turtle_token = _lib.lilv_node_get_turtle_token
+node_is_uri = _lib.lilv_node_is_uri
+node_as_uri = _lib.lilv_node_as_uri
+node_is_blank = _lib.lilv_node_is_blank
+node_as_blank = _lib.lilv_node_as_blank
+node_is_literal = _lib.lilv_node_is_literal
+node_is_string = _lib.lilv_node_is_string
+node_as_string = _lib.lilv_node_as_string
+node_get_path = _lib.lilv_node_get_path
+node_is_float = _lib.lilv_node_is_float
+node_as_float = _lib.lilv_node_as_float
+node_is_int = _lib.lilv_node_is_int
+node_as_int = _lib.lilv_node_as_int
+node_is_bool = _lib.lilv_node_is_bool
+node_as_bool = _lib.lilv_node_as_bool
+plugin_classes_free = _lib.lilv_plugin_classes_free
+plugin_classes_size = _lib.lilv_plugin_classes_size
+plugin_classes_begin = _lib.lilv_plugin_classes_begin
+plugin_classes_get = _lib.lilv_plugin_classes_get
+plugin_classes_next = _lib.lilv_plugin_classes_next
+plugin_classes_is_end = _lib.lilv_plugin_classes_is_end
+plugin_classes_get_by_uri = _lib.lilv_plugin_classes_get_by_uri
+scale_points_free = _lib.lilv_scale_points_free
+scale_points_size = _lib.lilv_scale_points_size
+scale_points_begin = _lib.lilv_scale_points_begin
+scale_points_get = _lib.lilv_scale_points_get
+scale_points_next = _lib.lilv_scale_points_next
+scale_points_is_end = _lib.lilv_scale_points_is_end
+uis_free = _lib.lilv_uis_free
+uis_size = _lib.lilv_uis_size
+uis_begin = _lib.lilv_uis_begin
+uis_get = _lib.lilv_uis_get
+uis_next = _lib.lilv_uis_next
+uis_is_end = _lib.lilv_uis_is_end
+uis_get_by_uri = _lib.lilv_uis_get_by_uri
+nodes_free = _lib.lilv_nodes_free
+nodes_size = _lib.lilv_nodes_size
+nodes_begin = _lib.lilv_nodes_begin
+nodes_get = _lib.lilv_nodes_get
+nodes_next = _lib.lilv_nodes_next
+nodes_is_end = _lib.lilv_nodes_is_end
+nodes_get_first = _lib.lilv_nodes_get_first
+nodes_contains = _lib.lilv_nodes_contains
+nodes_merge = _lib.lilv_nodes_merge
+plugins_size = _lib.lilv_plugins_size
+plugins_begin = _lib.lilv_plugins_begin
+plugins_get = _lib.lilv_plugins_get
+plugins_next = _lib.lilv_plugins_next
+plugins_is_end = _lib.lilv_plugins_is_end
+plugins_get_by_uri = _lib.lilv_plugins_get_by_uri
+world_new = _lib.lilv_world_new
+world_set_option = _lib.lilv_world_set_option
+world_free = _lib.lilv_world_free
+world_load_all = _lib.lilv_world_load_all
+world_load_bundle = _lib.lilv_world_load_bundle
+world_load_specifications = _lib.lilv_world_load_specifications
+world_load_plugin_classes = _lib.lilv_world_load_plugin_classes
+world_unload_bundle = _lib.lilv_world_unload_bundle
+world_load_resource = _lib.lilv_world_load_resource
+world_unload_resource = _lib.lilv_world_unload_resource
+world_get_plugin_class = _lib.lilv_world_get_plugin_class
+world_get_plugin_classes = _lib.lilv_world_get_plugin_classes
+world_get_all_plugins = _lib.lilv_world_get_all_plugins
+world_find_nodes = _lib.lilv_world_find_nodes
+world_get = _lib.lilv_world_get
+world_ask = _lib.lilv_world_ask
+plugin_verify = _lib.lilv_plugin_verify
+plugin_get_uri = _lib.lilv_plugin_get_uri
+plugin_get_bundle_uri = _lib.lilv_plugin_get_bundle_uri
+plugin_get_data_uris = _lib.lilv_plugin_get_data_uris
+plugin_get_library_uri = _lib.lilv_plugin_get_library_uri
+plugin_get_name = _lib.lilv_plugin_get_name
+plugin_get_class = _lib.lilv_plugin_get_class
+plugin_get_value = _lib.lilv_plugin_get_value
+plugin_has_feature = _lib.lilv_plugin_has_feature
+plugin_get_supported_features = _lib.lilv_plugin_get_supported_features
+plugin_get_required_features = _lib.lilv_plugin_get_required_features
+plugin_get_optional_features = _lib.lilv_plugin_get_optional_features
+plugin_has_extension_data = _lib.lilv_plugin_has_extension_data
+plugin_get_extension_data = _lib.lilv_plugin_get_extension_data
+plugin_get_num_ports = _lib.lilv_plugin_get_num_ports
+plugin_get_port_ranges_float = _lib.lilv_plugin_get_port_ranges_float
+plugin_has_latency = _lib.lilv_plugin_has_latency
+plugin_get_latency_port_index = _lib.lilv_plugin_get_latency_port_index
+plugin_get_port_by_index = _lib.lilv_plugin_get_port_by_index
+plugin_get_port_by_symbol = _lib.lilv_plugin_get_port_by_symbol
+plugin_get_port_by_designation = _lib.lilv_plugin_get_port_by_designation
+plugin_get_project = _lib.lilv_plugin_get_project
+plugin_get_author_name = _lib.lilv_plugin_get_author_name
+plugin_get_author_email = _lib.lilv_plugin_get_author_email
+plugin_get_author_homepage = _lib.lilv_plugin_get_author_homepage
+plugin_is_replaced = _lib.lilv_plugin_is_replaced
+plugin_get_related = _lib.lilv_plugin_get_related
+port_get_node = _lib.lilv_port_get_node
+port_get_value = _lib.lilv_port_get_value
+port_get = _lib.lilv_port_get
+port_get_properties = _lib.lilv_port_get_properties
+port_has_property = _lib.lilv_port_has_property
+port_supports_event = _lib.lilv_port_supports_event
+port_get_index = _lib.lilv_port_get_index
+port_get_symbol = _lib.lilv_port_get_symbol
+port_get_name = _lib.lilv_port_get_name
+port_get_classes = _lib.lilv_port_get_classes
+port_is_a = _lib.lilv_port_is_a
+port_get_range = _lib.lilv_port_get_range
+port_get_scale_points = _lib.lilv_port_get_scale_points
+state_new_from_world = _lib.lilv_state_new_from_world
+state_new_from_file = _lib.lilv_state_new_from_file
+state_new_from_string = _lib.lilv_state_new_from_string
+state_new_from_instance = _lib.lilv_state_new_from_instance
+state_free = _lib.lilv_state_free
+state_equals = _lib.lilv_state_equals
+state_get_num_properties = _lib.lilv_state_get_num_properties
+state_get_plugin_uri = _lib.lilv_state_get_plugin_uri
+state_get_uri = _lib.lilv_state_get_uri
+state_get_label = _lib.lilv_state_get_label
+state_set_label = _lib.lilv_state_set_label
+state_set_metadata = _lib.lilv_state_set_metadata
+state_emit_port_values = _lib.lilv_state_emit_port_values
+state_restore = _lib.lilv_state_restore
+state_save = _lib.lilv_state_save
+state_to_string = _lib.lilv_state_to_string
+state_delete = _lib.lilv_state_delete
+scale_point_get_label = _lib.lilv_scale_point_get_label
+scale_point_get_value = _lib.lilv_scale_point_get_value
+plugin_class_get_parent_uri = _lib.lilv_plugin_class_get_parent_uri
+plugin_class_get_uri = _lib.lilv_plugin_class_get_uri
+plugin_class_get_label = _lib.lilv_plugin_class_get_label
+plugin_class_get_children = _lib.lilv_plugin_class_get_children
+plugin_instantiate = _lib.lilv_plugin_instantiate
+instance_free = _lib.lilv_instance_free
+plugin_get_uis = _lib.lilv_plugin_get_uis
+ui_get_uri = _lib.lilv_ui_get_uri
+ui_get_classes = _lib.lilv_ui_get_classes
+ui_is_a = _lib.lilv_ui_is_a
+ui_is_supported = _lib.lilv_ui_is_supported
+ui_get_bundle_uri = _lib.lilv_ui_get_bundle_uri
+ui_get_binary_uri = _lib.lilv_ui_get_binary_uri
+
+## LV2 types
+
+LV2_Handle = POINTER(None)
+LV2_URID_Map_Handle = POINTER(None)
+LV2_URID_Unmap_Handle = POINTER(None)
+LV2_URID = c_uint32
+
+class LV2_Feature(Structure):
+ __slots__ = [ 'URI', 'data' ]
+ _fields_ = [('URI', c_char_p),
+ ('data', POINTER(None))]
+
+class LV2_Descriptor(Structure):
+ __slots__ = [ 'URI',
+ 'instantiate',
+ 'connect_port',
+ 'activate',
+ 'run',
+ 'deactivate',
+ 'cleanup',
+ 'extension_data' ]
+
+LV2_Descriptor._fields_ = [
+ ('URI', c_char_p),
+ ('instantiate', CFUNCTYPE(LV2_Handle, POINTER(LV2_Descriptor),
+ c_double, c_char_p, POINTER(POINTER(LV2_Feature)))),
+ ('connect_port', CFUNCTYPE(None, LV2_Handle, c_uint32, POINTER(None))),
+ ('activate', CFUNCTYPE(None, LV2_Handle)),
+ ('run', CFUNCTYPE(None, LV2_Handle, c_uint32)),
+ ('deactivate', CFUNCTYPE(None, LV2_Handle)),
+ ('cleanup', CFUNCTYPE(None, LV2_Handle)),
+ ('extension_data', CFUNCTYPE(c_void_p, c_char_p)),
+]
+
+class LV2_URID_Map(Structure):
+ __slots__ = [ 'handle', 'map' ]
+ _fields_ = [
+ ('handle', LV2_URID_Map_Handle),
+ ('map', CFUNCTYPE(LV2_URID, LV2_URID_Map_Handle, c_char_p)),
+ ]
+
+class LV2_URID_Unmap(Structure):
+ __slots__ = [ 'handle', 'unmap' ]
+ _fields_ = [
+ ('handle', LV2_URID_Unmap_Handle),
+ ('unmap', CFUNCTYPE(c_char_p, LV2_URID_Unmap_Handle, LV2_URID)),
+ ]
+
+# Lilv types
+
+class Plugin(Structure):
+ """LV2 Plugin."""
+ def __init__(self, world, plugin):
+ self.world = world
+ self.plugin = plugin
+
+ def __eq__(self, other):
+ return self.get_uri() == other.get_uri()
+
+ def verify(self):
+ """Check if `plugin` is valid.
+
+ This is not a rigorous validator, but can be used to reject some malformed
+ plugins that could cause bugs (e.g. plugins with missing required fields).
+
+ Note that normal hosts do NOT need to use this - lilv does not
+ load invalid plugins into plugin lists. This is included for plugin
+ testing utilities, etc.
+ """
+ return plugin_verify(self.plugin)
+
+ def get_uri(self):
+ """Get the URI of `plugin`.
+
+ Any serialization that refers to plugins should refer to them by this.
+ Hosts SHOULD NOT save any filesystem paths, plugin indexes, etc. in saved
+        files; save only the URI.
+
+ The URI is a globally unique identifier for one specific plugin. Two
+ plugins with the same URI are compatible in port signature, and should
+ be guaranteed to work in a compatible and consistent way. If a plugin
+        is upgraded in an incompatible way (e.g. if it has different ports), it
+        MUST have a different URI than its predecessor.
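+
+        For example (a sketch; assumes `plugin` was obtained from
+        World.get_all_plugins(), and the URI shown is hypothetical)::
+
+            >>> str(plugin.get_uri())
+            'http://example.org/foo'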
+ """
+ return Node.wrap(node_duplicate(plugin_get_uri(self.plugin)))
+
+ def get_bundle_uri(self):
+ """Get the (resolvable) URI of the plugin's "main" bundle.
+
+ This returns the URI of the bundle where the plugin itself was found. Note
+ that the data for a plugin may be spread over many bundles, that is,
+ get_data_uris() may return URIs which are not within this bundle.
+
+ Typical hosts should not need to use this function.
+ Note this always returns a fully qualified URI. If you want a local
+ filesystem path, use lilv.file_uri_parse().
+ """
+ return Node.wrap(node_duplicate(plugin_get_bundle_uri(self.plugin)))
+
+ def get_data_uris(self):
+ """Get the (resolvable) URIs of the RDF data files that define a plugin.
+
+ Typical hosts should not need to use this function.
+ Note this always returns fully qualified URIs. If you want local
+ filesystem paths, use lilv.file_uri_parse().
+ """
+ return Nodes(plugin_get_data_uris(self.plugin))
+
+ def get_library_uri(self):
+ """Get the (resolvable) URI of the shared library for `plugin`.
+
+ Note this always returns a fully qualified URI. If you want a local
+ filesystem path, use lilv.file_uri_parse().
+ """
+ return Node.wrap(node_duplicate(plugin_get_library_uri(self.plugin)))
+
+ def get_name(self):
+ """Get the name of `plugin`.
+
+ This returns the name (doap:name) of the plugin. The name may be
+        translated according to the current locale, so this value MUST NOT be used
+ as a plugin identifier (use the URI for that).
+ """
+ return Node.wrap(plugin_get_name(self.plugin))
+
+ def get_class(self):
+ """Get the class this plugin belongs to (e.g. Filters)."""
+ return PluginClass(plugin_get_class(self.plugin))
+
+ def get_value(self, predicate):
+ """Get a value associated with the plugin in a plugin's data files.
+
+ `predicate` must be either a URI or a QName.
+
+ Returns the ?object of all triples found of the form:
+
+ plugin-uri predicate ?object
+
+        May return None if the property was not found, or if the object(s) are
+        not sensibly represented as a LilvNodes (e.g. blank nodes).
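+
+        For example, to read the plugin's doap:license values (a sketch;
+        assumes `world` is the World the plugin came from, and whether any
+        license is declared depends on the plugin)::
+
+            >>> for license in plugin.get_value(world.ns.doap.license):
+            ...     print(license)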
+ """
+ return Nodes(plugin_get_value(self.plugin, predicate.node))
+
+ def has_feature(self, feature_uri):
+ """Return whether a feature is supported by a plugin.
+
+ This will return true if the feature is an optional or required feature
+ of the plugin.
+ """
+ return plugin_has_feature(self.plugin, feature_uri.node)
+
+ def get_supported_features(self):
+ """Get the LV2 Features supported (required or optionally) by a plugin.
+
+ A feature is "supported" by a plugin if it is required OR optional.
+
+ Since required features have special rules the host must obey, this function
+ probably shouldn't be used by normal hosts. Using get_optional_features()
+ and get_required_features() separately is best in most cases.
+ """
+ return Nodes(plugin_get_supported_features(self.plugin))
+
+ def get_required_features(self):
+ """Get the LV2 Features required by a plugin.
+
+ If a feature is required by a plugin, hosts MUST NOT use the plugin if they do not
+ understand (or are unable to support) that feature.
+
+        All values returned here MUST be passed to the plugin's instantiate method
+ (along with data, if necessary, as defined by the feature specification)
+ or plugin instantiation will fail.
+ """
+ return Nodes(plugin_get_required_features(self.plugin))
+
+ def get_optional_features(self):
+ """Get the LV2 Features optionally supported by a plugin.
+
+ Hosts MAY ignore optional plugin features for whatever reasons. Plugins
+ MUST operate (at least somewhat) if they are instantiated without being
+ passed optional features.
+ """
+ return Nodes(plugin_get_optional_features(self.plugin))
+
+ def has_extension_data(self, uri):
+ """Return whether or not a plugin provides a specific extension data."""
+ return plugin_has_extension_data(self.plugin, uri.node)
+
+ def get_extension_data(self):
+ """Get a sequence of all extension data provided by a plugin.
+
+        This can be used to find which URIs Instance.get_extension_data()
+        will return a value for without instantiating the plugin.
+ """
+ return Nodes(plugin_get_extension_data(self.plugin))
+
+ def get_num_ports(self):
+ """Get the number of ports on this plugin."""
+ return plugin_get_num_ports(self.plugin)
+
+ # def get_port_ranges_float(self, min_values, max_values, def_values):
+ # """Get the port ranges (minimum, maximum and default values) for all ports.
+
+ # `min_values`, `max_values` and `def_values` must either point to an array
+ # of N floats, where N is the value returned by get_num_ports()
+ # for this plugin, or None. The elements of the array will be set to the
+    # minimum, maximum and default values of the ports on this plugin,
+ # with array index corresponding to port index. If a port doesn't have a
+ # minimum, maximum or default value, or the port's type is not float, the
+ # corresponding array element will be set to NAN.
+
+ # This is a convenience method for the common case of getting the range of
+ # all float ports on a plugin, and may be significantly faster than
+ # repeated calls to Port.get_range().
+ # """
+ # plugin_get_port_ranges_float(self.plugin, min_values, max_values, def_values)
+
+ def get_num_ports_of_class(self, *args):
+ """Get the number of ports on this plugin that are members of some class(es)."""
+ args = list(map(lambda x: x.node, args))
+ args += (None,)
+ return plugin_get_num_ports_of_class(self.plugin, *args)
+
+ def has_latency(self):
+ """Return whether or not the plugin introduces (and reports) latency.
+
+ The index of the latency port can be found with
+        get_latency_port_index() ONLY if this function returns true.
+ """
+ return plugin_has_latency(self.plugin)
+
+ def get_latency_port_index(self):
+ """Return the index of the plugin's latency port.
+
+ Returns None if the plugin has no latency port.
+
+ Any plugin that introduces unwanted latency that should be compensated for
+ (by hosts with the ability/need) MUST provide this port, which is a control
+ rate output port that reports the latency for each cycle in frames.
+ """
+ return plugin_get_latency_port_index(self.plugin) if self.has_latency() else None
+
+ def get_port(self, key):
+ """Get a port on `plugin` by index or symbol."""
+ if type(key) == int:
+ return self.get_port_by_index(key)
+ else:
+ return self.get_port_by_symbol(key)
+
+ def get_port_by_index(self, index):
+ """Get a port on `plugin` by `index`."""
+ return Port.wrap(self, plugin_get_port_by_index(self.plugin, index))
+
+ def get_port_by_symbol(self, symbol):
+ """Get a port on `plugin` by `symbol`.
+
+ Note this function is slower than get_port_by_index(),
+ especially on plugins with a very large number of ports.
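+
+        For example (a sketch; the port symbol depends on the plugin)::
+
+            >>> gain_port = plugin.get_port_by_symbol('gain')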
+ """
+ if type(symbol) == str:
+ symbol = self.world.new_string(symbol)
+ return Port.wrap(self, plugin_get_port_by_symbol(self.plugin, symbol.node))
+
+ def get_port_by_designation(self, port_class, designation):
+ """Get a port on `plugin` by its lv2:designation.
+
+ The designation of a port describes the meaning, assignment, allocation or
+ role of the port, e.g. "left channel" or "gain". If found, the port with
+        matching `port_class` and `designation` is returned, otherwise None is
+ returned. The `port_class` can be used to distinguish the input and output
+ ports for a particular designation. If `port_class` is None, any port with
+ the given designation will be returned.
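+
+        For example, to find the standard latency reporting port (a sketch;
+        assumes `world` is the World the plugin was loaded from)::
+
+            >>> latency_port = plugin.get_port_by_designation(
+            ...     world.ns.lv2.OutputPort, world.ns.lv2.latency)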
+ """
+ return Port.wrap(self,
+ plugin_get_port_by_designation(self.plugin,
+ port_class.node,
+ designation.node))
+
+ def get_project(self):
+ """Get the project the plugin is a part of.
+
+ More information about the project can be read via find_nodes(),
+ typically using properties from DOAP (e.g. doap:name).
+ """
+ return Node.wrap(plugin_get_project(self.plugin))
+
+ def get_author_name(self):
+ """Get the full name of the plugin's author.
+
+ Returns None if author name is not present.
+ """
+ return Node.wrap(plugin_get_author_name(self.plugin))
+
+ def get_author_email(self):
+ """Get the email address of the plugin's author.
+
+ Returns None if author email address is not present.
+ """
+ return Node.wrap(plugin_get_author_email(self.plugin))
+
+ def get_author_homepage(self):
+ """Get the address of the plugin author's home page.
+
+ Returns None if author homepage is not present.
+ """
+ return Node.wrap(plugin_get_author_homepage(self.plugin))
+
+ def is_replaced(self):
+ """Return true iff `plugin` has been replaced by another plugin.
+
+        The plugin will still be usable, but hosts should hide it from their
+ user interfaces to prevent users from using deprecated plugins.
+ """
+ return plugin_is_replaced(self.plugin)
+
+ def get_related(self, resource_type):
+ """Get the resources related to `plugin` with lv2:appliesTo.
+
+ Some plugin-related resources are not linked directly to the plugin with
+ rdfs:seeAlso and thus will not be automatically loaded along with the plugin
+        data (usually for performance reasons). All such resources of the given
+        `resource_type` related to `plugin` can be accessed with this function.
+
+ If `resource_type` is None, all such resources will be returned, regardless of type.
+
+ To actually load the data for each returned resource, use world.load_resource().
+ """
+ return Nodes(plugin_get_related(self.plugin, resource_type))
+
+ def get_uis(self):
+ """Get all UIs for `plugin`."""
+ return UIs(plugin_get_uis(self.plugin))
+
+class PluginClass(Structure):
+ """Plugin Class (type/category)."""
+ def __init__(self, plugin_class):
+ self.plugin_class = plugin_class
+
+ def __str__(self):
+ return self.get_uri().__str__()
+
+ def get_parent_uri(self):
+ """Get the URI of this class' superclass.
+
+ May return None if class has no parent.
+ """
+ return Node.wrap(node_duplicate(plugin_class_get_parent_uri(self.plugin_class)))
+
+ def get_uri(self):
+ """Get the URI of this plugin class."""
+ return Node.wrap(node_duplicate(plugin_class_get_uri(self.plugin_class)))
+
+ def get_label(self):
+ """Get the label of this plugin class, ie "Oscillators"."""
+ return Node.wrap(node_duplicate(plugin_class_get_label(self.plugin_class)))
+
+ def get_children(self):
+ """Get the subclasses of this plugin class."""
+ return PluginClasses(plugin_class_get_children(self.plugin_class))
+
+class Port(Structure):
+ """Port on a Plugin."""
+ @classmethod
+ def wrap(cls, plugin, port):
+ return Port(plugin, port) if plugin and port else None
+
+ def __init__(self, plugin, port):
+ self.plugin = plugin
+ self.port = port
+
+ def get_node(self):
+ """Get the RDF node of `port`.
+
+        Port nodes may be URIs or blank nodes.
+ """
+        return Node.wrap(node_duplicate(port_get_node(self.plugin.plugin, self.port)))
+
+ def get_value(self, predicate):
+ """Port analog of Plugin.get_value()."""
+ return Nodes(port_get_value(self.plugin.plugin, self.port, predicate.node))
+
+ def get(self, predicate):
+ """Get a single property value of a port.
+
+ This is equivalent to lilv_nodes_get_first(lilv_port_get_value(...)) but is
+ simpler to use in the common case of only caring about one value. The
+ caller is responsible for freeing the returned node.
+ """
+ return Node.wrap(port_get(self.plugin.plugin, self.port, predicate.node))
+
+ def get_properties(self):
+ """Return the LV2 port properties of a port."""
+ return Nodes(port_get_properties(self.plugin.plugin, self.port))
+
+ def has_property(self, property_uri):
+ """Return whether a port has a certain property."""
+ return port_has_property(self.plugin.plugin, self.port, property_uri.node)
+
+ def supports_event(self, event_type):
+ """Return whether a port supports a certain event type.
+
+ More precisely, this returns true iff the port has an atom:supports or an
+ ev:supportsEvent property with `event_type` as the value.
+ """
+ return port_supports_event(self.plugin.plugin, self.port, event_type.node)
+
+ def get_index(self):
+ """Get the index of a port.
+
+ The index is only valid for the life of the plugin and may change between
+ versions. For a stable identifier, use the symbol.
+ """
+ return port_get_index(self.plugin.plugin, self.port)
+
+ def get_symbol(self):
+ """Get the symbol of a port.
+
+ The 'symbol' is a short string, a valid C identifier.
+ """
+ return Node.wrap(node_duplicate(port_get_symbol(self.plugin.plugin, self.port)))
+
+ def get_name(self):
+ """Get the name of a port.
+
+ This is guaranteed to return the untranslated name (the doap:name in the
+ data file without a language tag).
+ """
+ return Node.wrap(port_get_name(self.plugin.plugin, self.port))
+
+ def get_classes(self):
+ """Get all the classes of a port.
+
+ This can be used to determine if a port is an input, output, audio,
+        control, MIDI, etc., though it is simpler to use is_a().
+ The returned list does not include lv2:Port, which is implied.
+ Returned value is shared and must not be destroyed by caller.
+ """
+ return Nodes(port_get_classes(self.plugin.plugin, self.port))
+
+ def is_a(self, port_class):
+ """Determine if a port is of a given class (input, output, audio, etc).
+
+ For convenience/performance/extensibility reasons, hosts are expected to
+ create a LilvNode for each port class they "care about". Well-known type
+ URI strings are defined (e.g. LILV_URI_INPUT_PORT) for convenience, but
+ this function is designed so that Lilv is usable with any port types
+ without requiring explicit support in Lilv.
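+
+        For example (a sketch; assumes `world` is the enclosing World)::
+
+            >>> if port.is_a(world.ns.lv2.AudioPort):
+            ...     print("audio port")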
+ """
+ return port_is_a(self.plugin.plugin, self.port, port_class.node)
+
+ def get_range(self):
+ """Return the default, minimum, and maximum values of a port as a tuple."""
+ pdef = POINTER(Node)()
+ pmin = POINTER(Node)()
+ pmax = POINTER(Node)()
+ port_get_range(self.plugin.plugin, self.port, byref(pdef), byref(pmin), byref(pmax))
+ return (Node(pdef.contents) if pdef else None,
+ Node(pmin.contents) if pmin else None,
+ Node(pmax.contents) if pmax else None)
+
+ def get_scale_points(self):
+ """Get the scale points (enumeration values) of a port.
+
+ This returns a collection of 'interesting' named values of a port
+ (e.g. appropriate entries for a UI selector associated with this port).
+ Returned value may be None if `port` has no scale points.
+ """
+ return ScalePoints(port_get_scale_points(self.plugin.plugin, self.port))
+
+class ScalePoint(Structure):
+ """Scale point (detent)."""
+ def __init__(self, point):
+ self.point = point
+
+ def get_label(self):
+ """Get the label of this scale point (enumeration value)."""
+ return Node.wrap(scale_point_get_label(self.point))
+
+ def get_value(self):
+ """Get the value of this scale point (enumeration value)."""
+ return Node.wrap(scale_point_get_value(self.point))
+
+class UI(Structure):
+ """Plugin UI."""
+ def __init__(self, ui):
+ self.ui = ui
+
+ def __str__(self):
+ return str(self.get_uri())
+
+ def __eq__(self, other):
+ return self.get_uri() == _as_uri(other)
+
+ def get_uri(self):
+ """Get the URI of a Plugin UI."""
+ return Node.wrap(node_duplicate(ui_get_uri(self.ui)))
+
+ def get_classes(self):
+ """Get the types (URIs of RDF classes) of a Plugin UI.
+
+ Note that in most cases is_supported() should be used, which avoids
+ the need to use this function (and type specific logic).
+ """
+ return Nodes(ui_get_classes(self.ui))
+
+ def is_a(self, class_uri):
+ """Check whether a plugin UI has a given type."""
+ return ui_is_a(self.ui, class_uri.node)
+
+ def get_bundle_uri(self):
+ """Get the URI of the UI's bundle."""
+ return Node.wrap(node_duplicate(ui_get_bundle_uri(self.ui)))
+
+ def get_binary_uri(self):
+ """Get the URI for the UI's shared library."""
+ return Node.wrap(node_duplicate(ui_get_binary_uri(self.ui)))
+
+class Node(Structure):
+ """Data node (URI, string, integer, etc.).
+
+ A Node can be converted to the corresponding Python datatype, and all nodes
+ can be converted to strings, for example::
+
+ >>> world = lilv.World()
+ >>> i = world.new_int(42)
+ >>> print(i)
+ 42
+ >>> int(i) * 2
+ 84
+ """
+ @classmethod
+ def wrap(cls, node):
+ return Node(node) if node else None
+
+ def __init__(self, node):
+ self.node = node
+
+ def __del__(self):
+ if hasattr(self, 'node'):
+ node_free(self.node)
+
+ def __eq__(self, other):
+ otype = type(other)
+ if otype in [str, int, float]:
+ return otype(self) == other
+ return node_equals(self.node, other.node)
+
+ def __ne__(self, other):
+ return not node_equals(self.node, other.node)
+
+ def __str__(self):
+ return node_as_string(self.node).decode('utf-8')
+
+ def __int__(self):
+ if not self.is_int():
+ raise ValueError('node %s is not an integer' % str(self))
+ return node_as_int(self.node)
+
+ def __float__(self):
+ if not self.is_float():
+ raise ValueError('node %s is not a float' % str(self))
+ return node_as_float(self.node)
+
+ def __bool__(self):
+ if not self.is_bool():
+ raise ValueError('node %s is not a bool' % str(self))
+ return node_as_bool(self.node)
+ __nonzero__ = __bool__
+
+ def get_turtle_token(self):
+ """Return this value as a Turtle/SPARQL token."""
+ return node_get_turtle_token(self.node).decode('utf-8')
+
+ def is_uri(self):
+ """Return whether the value is a URI (resource)."""
+ return node_is_uri(self.node)
+
+ def is_blank(self):
+ """Return whether the value is a blank node (resource with no URI)."""
+ return node_is_blank(self.node)
+
+ def is_literal(self):
+ """Return whether this value is a literal (i.e. not a URI)."""
+ return node_is_literal(self.node)
+
+ def is_string(self):
+ """Return whether this value is a string literal.
+
+ Returns true if value is a string value (and not numeric).
+ """
+ return node_is_string(self.node)
+
+ def get_path(self, hostname=None):
+ """Return the path of a file URI node.
+
+ Returns None if value is not a file URI."""
+ return node_get_path(self.node, hostname).decode('utf-8')
+
+ def is_float(self):
+ """Return whether this value is a decimal literal."""
+ return node_is_float(self.node)
+
+ def is_int(self):
+ """Return whether this value is an integer literal."""
+ return node_is_int(self.node)
+
+ def is_bool(self):
+ """Return whether this value is a boolean."""
+ return node_is_bool(self.node)
+
+class Iter(Structure):
+ """Collection iterator."""
+ def __init__(self, collection, iterator, constructor, iter_get, iter_next, iter_is_end):
+ self.collection = collection
+ self.iterator = iterator
+ self.constructor = constructor
+ self.iter_get = iter_get
+ self.iter_next = iter_next
+ self.iter_is_end = iter_is_end
+
+ def get(self):
+ """Get the current item."""
+ return self.constructor(self.iter_get(self.collection, self.iterator))
+
+ def next(self):
+ """Move to and return the next item."""
+ if self.is_end():
+ raise StopIteration
+ elem = self.get()
+ self.iterator = self.iter_next(self.collection, self.iterator)
+ return elem
+
+ def is_end(self):
+ """Return true if the end of the collection has been reached."""
+ return self.iter_is_end(self.collection, self.iterator)
+
+ __next__ = next
+
+class Collection(Structure):
+ # Base class for all lilv collection wrappers.
+ def __init__(self, collection, iter_begin, constructor, iter_get, iter_next, is_end):
+ self.collection = collection
+ self.constructor = constructor
+ self.iter_begin = iter_begin
+ self.iter_get = iter_get
+ self.iter_next = iter_next
+ self.is_end = is_end
+
+ def __iter__(self):
+ return Iter(self.collection, self.iter_begin(self.collection), self.constructor,
+ self.iter_get, self.iter_next, self.is_end)
+
+ def __getitem__(self, index):
+ if index >= len(self):
+ raise IndexError
+ pos = 0
+ for i in self:
+ if pos == index:
+ return i
+ pos += 1
+
+ def begin(self):
+ return self.__iter__()
+
+ def get(self, iterator):
+ return iterator.get()
+
+class Plugins(Collection):
+ """Collection of plugins."""
+ def __init__(self, world, collection):
+ def constructor(plugin):
+ return Plugin(world, plugin)
+
+ super(Plugins, self).__init__(collection, plugins_begin, constructor, plugins_get, plugins_next, plugins_is_end)
+ self.world = world
+
+ def __contains__(self, key):
+ return bool(self.get_by_uri(_as_uri(key)))
+
+ def __len__(self):
+ return plugins_size(self.collection)
+
+ def __getitem__(self, key):
+ if type(key) == int:
+ return super(Plugins, self).__getitem__(key)
+ return self.get_by_uri(key)
+
+ def get_by_uri(self, uri):
+ plugin = plugins_get_by_uri(self.collection, uri.node)
+ return Plugin(self.world, plugin) if plugin else None
+
+class PluginClasses(Collection):
+ """Collection of plugin classes."""
+ def __init__(self, collection):
+ super(PluginClasses, self).__init__(
+ collection, plugin_classes_begin, PluginClass,
+ plugin_classes_get, plugin_classes_next, plugin_classes_is_end)
+
+ def __contains__(self, key):
+ return bool(self.get_by_uri(_as_uri(key)))
+
+ def __len__(self):
+ return plugin_classes_size(self.collection)
+
+ def __getitem__(self, key):
+ if type(key) == int:
+ return super(PluginClasses, self).__getitem__(key)
+ return self.get_by_uri(key)
+
+ def get_by_uri(self, uri):
+ plugin_class = plugin_classes_get_by_uri(self.collection, uri.node)
+ return PluginClass(plugin_class) if plugin_class else None
+
+class ScalePoints(Collection):
+ """Collection of scale points."""
+ def __init__(self, collection):
+ super(ScalePoints, self).__init__(
+ collection, scale_points_begin, ScalePoint,
+ scale_points_get, scale_points_next, scale_points_is_end)
+
+ def __len__(self):
+ return scale_points_size(self.collection)
+
+class UIs(Collection):
+ """Collection of plugin UIs."""
+ def __init__(self, collection):
+ super(UIs, self).__init__(collection, uis_begin, UI,
+ uis_get, uis_next, uis_is_end)
+
+ def __contains__(self, uri):
+ return bool(self.get_by_uri(_as_uri(uri)))
+
+ def __len__(self):
+ return uis_size(self.collection)
+
+ def __getitem__(self, key):
+ if type(key) == int:
+ return super(UIs, self).__getitem__(key)
+ return self.get_by_uri(key)
+
+ def get_by_uri(self, uri):
+ ui = uis_get_by_uri(self.collection, uri.node)
+ return UI(ui) if ui else None
+
+class Nodes(Collection):
+ """Collection of data nodes."""
+ @classmethod
+ def constructor(ignore, node):
+ return Node(node_duplicate(node))
+
+ def __init__(self, collection):
+ super(Nodes, self).__init__(collection, nodes_begin, Nodes.constructor,
+ nodes_get, nodes_next, nodes_is_end)
+
+ def __contains__(self, value):
+ return nodes_contains(self.collection, value.node)
+
+ def __len__(self):
+ return nodes_size(self.collection)
+
+ def merge(self, b):
+ return Nodes(nodes_merge(self.collection, b.collection))
+
+class Namespace():
+ """Namespace prefix.
+
+ Use attribute syntax to easily create URIs within this namespace, for
+ example::
+
+ >>> world = lilv.World()
+ >>> ns = Namespace(world, "http://example.org/")
+ >>> print(ns.foo)
+ http://example.org/foo
+ """
+ def __init__(self, world, prefix):
+ self.world = world
+ self.prefix = prefix
+
+ def __eq__(self, other):
+ return str(self) == str(other)
+
+ def __str__(self):
+ return self.prefix
+
+ def __getattr__(self, suffix):
+ return self.world.new_uri(self.prefix + suffix)
+
+class World(Structure):
+ """Library context.
+
+ Includes a set of namespaces as the instance variable `ns`, so URIs can be constructed like::
+
+ uri = world.ns.lv2.Plugin
+
+ :ivar ns: Common LV2 namespace prefixes: atom, doap, foaf, lilv, lv2, midi, owl, rdf, rdfs, ui, xsd.
+ """
+ def __init__(self):
+ world = self
+
+ # Define Namespaces class locally so available prefixes are documented
+ class Namespaces():
+ """Set of namespaces.
+
+ Use to easily construct uris, like: ns.lv2.InputPort"""
+
+ atom = Namespace(world, 'http://lv2plug.in/ns/ext/atom#')
+ doap = Namespace(world, 'http://usefulinc.com/ns/doap#')
+ foaf = Namespace(world, 'http://xmlns.com/foaf/0.1/')
+ lilv = Namespace(world, 'http://drobilla.net/ns/lilv#')
+ lv2 = Namespace(world, 'http://lv2plug.in/ns/lv2core#')
+ midi = Namespace(world, 'http://lv2plug.in/ns/ext/midi#')
+ owl = Namespace(world, 'http://www.w3.org/2002/07/owl#')
+ rdf = Namespace(world, 'http://www.w3.org/1999/02/22-rdf-syntax-ns#')
+ rdfs = Namespace(world, 'http://www.w3.org/2000/01/rdf-schema#')
+ ui = Namespace(world, 'http://lv2plug.in/ns/extensions/ui#')
+ xsd = Namespace(world, 'http://www.w3.org/2001/XMLSchema#')
+
+ self.world = _lib.lilv_world_new()
+ self.ns = Namespaces()
+
+ def __del__(self):
+ world_free(self.world)
+
+ def set_option(self, uri, value):
+ """Set a world option.
+
+ Currently recognized options:
+ lilv.OPTION_FILTER_LANG
+ lilv.OPTION_DYN_MANIFEST
+ """
+        return world_set_option(self.world, uri, value.node)
+
+ def load_all(self):
+ """Load all installed LV2 bundles on the system.
+
+ This is the recommended way for hosts to load LV2 data. It implements the
+ established/standard best practice for discovering all LV2 data on the
+ system. The environment variable LV2_PATH may be used to control where
+ this function will look for bundles.
+
+ Hosts should use this function rather than explicitly load bundles, except
+ in special circumstances (e.g. development utilities, or hosts that ship
+ with special plugin bundles which are installed to a known location).
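+
+        A typical host start-up sequence looks like this (a minimal sketch)::
+
+            >>> world = lilv.World()
+            >>> world.load_all()
+            >>> plugins = world.get_all_plugins()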
+ """
+ world_load_all(self.world)
+
+ def load_bundle(self, bundle_uri):
+ """Load a specific bundle.
+
+ `bundle_uri` must be a fully qualified URI to the bundle directory,
+        with the trailing slash, e.g. file:///usr/lib/lv2/foo.lv2/
+
+ Normal hosts should not need this function (use load_all()).
+
+ Hosts MUST NOT attach any long-term significance to bundle paths
+ (e.g. in save files), since there are no guarantees they will remain
+ unchanged between (or even during) program invocations. Plugins (among
+ other things) MUST be identified by URIs (not paths) in save files.
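+
+        For example (a sketch; the bundle path is hypothetical)::
+
+            >>> bundle = world.new_uri('file:///usr/lib/lv2/foo.lv2/')
+            >>> world.load_bundle(bundle)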
+ """
+ world_load_bundle(self.world, bundle_uri.node)
+
+ def load_specifications(self):
+ """Load all specifications from currently loaded bundles.
+
+        This is for hosts that explicitly load specific bundles; its use is not
+ necessary when using load_all(). This function parses the
+ specifications and adds them to the model.
+ """
+ world_load_specifications(self.world)
+
+ def load_plugin_classes(self):
+ """Load all plugin classes from currently loaded specifications.
+
+ Must be called after load_specifications(). This is for hosts
+        that explicitly load specific bundles; its use is not necessary when using
+ load_all().
+ """
+ world_load_plugin_classes(self.world)
+
+ def unload_bundle(self, bundle_uri):
+ """Unload a specific bundle.
+
+ This unloads statements loaded by load_bundle(). Note that this
+ is not necessarily all information loaded from the bundle. If any resources
+ have been separately loaded with load_resource(), they must be
+ separately unloaded with unload_resource().
+ """
+ return world_unload_bundle(self.world, bundle_uri.node)
+
+ def load_resource(self, resource):
+ """Load all the data associated with the given `resource`.
+
+ The resource must be a subject (i.e. a URI or a blank node).
+ Returns the number of files parsed, or -1 on error.
+
+ All accessible data files linked to `resource` with rdfs:seeAlso will be
+ loaded into the world model.
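+
+        For example, to load the data for a preset (a sketch; the preset URI
+        is hypothetical)::
+
+            >>> preset = world.new_uri('http://example.org/presets#default')
+            >>> world.load_resource(preset)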
+ """
+ return world_load_resource(self.world, _as_uri(resource).node)
+
+ def unload_resource(self, resource):
+ """Unload all the data associated with the given `resource`.
+
+ The resource must be a subject (i.e. a URI or a blank node).
+
+ This unloads all data loaded by a previous call to
+ load_resource() with the given `resource`.
+ """
+ return world_unload_resource(self.world, _as_uri(resource).node)
+
+ def get_plugin_class(self):
+ """Get the parent of all other plugin classes, lv2:Plugin."""
+ return PluginClass(world_get_plugin_class(self.world))
+
+ def get_plugin_classes(self):
+ """Return a list of all found plugin classes."""
+ return PluginClasses(world_get_plugin_classes(self.world))
+
+ def get_all_plugins(self):
+ """Return a list of all found plugins.
+
+ The returned list contains just enough references to query
+ or instantiate plugins. The data for a particular plugin will not be
+        loaded into memory until a call to a lilv_plugin_* function results in
+ a query (at which time the data is cached with the LilvPlugin so future
+ queries are very fast).
+
+ The returned list and the plugins it contains are owned by `world`
+ and must not be freed by caller.
+ """
+ return Plugins(self, _lib.lilv_world_get_all_plugins(self.world))
+
+ def find_nodes(self, subject, predicate, obj):
+ """Find nodes matching a triple pattern.
+
+        Either `subject` or `obj` may be None (i.e. a wildcard), but not both.
+ Returns all matches for the wildcard field, or None.
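+
+        For example, to list the doap:name values of a plugin (a sketch;
+        assumes `plugin` was obtained from get_all_plugins())::
+
+            >>> names = world.find_nodes(plugin.get_uri(),
+            ...                          world.ns.doap.name, None)
+            >>> for name in names:
+            ...     print(name)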
+ """
+ return Nodes(world_find_nodes(self.world,
+ subject.node if subject is not None else None,
+ predicate.node if predicate is not None else None,
+ obj.node if obj is not None else None))
+
+ def get(self, subject, predicate, obj):
+ """Find a single node that matches a pattern.
+
+        Exactly one of `subject`, `predicate`, `obj` must be None.
+
+ Returns the first matching node, or None if no matches are found.
+ """
+ return Node.wrap(world_get(self.world,
+ subject.node if subject is not None else None,
+ predicate.node if predicate is not None else None,
+ obj.node if obj is not None else None))
+
+ def ask(self, subject, predicate, obj):
+ """Return true iff a statement matching a certain pattern exists.
+
+        This is useful for checking if a particular statement exists without having to
+ bother with collections and memory management.
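+
+        For example (a sketch; assumes `plugin` was obtained earlier)::
+
+            >>> world.ask(plugin.get_uri(), world.ns.rdf.type,
+            ...           world.ns.lv2.Plugin)
+            True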
+ """
+ return world_ask(self.world,
+ subject.node if subject is not None else None,
+ predicate.node if predicate is not None else None,
+ obj.node if obj is not None else None)
+
+ def new_uri(self, uri):
+ """Create a new URI node."""
+ return Node.wrap(_lib.lilv_new_uri(self.world, uri))
+
+ def new_file_uri(self, host, path):
+ """Create a new file URI node. The host may be None."""
+ return Node.wrap(_lib.lilv_new_file_uri(self.world, host, path))
+
+ def new_string(self, string):
+ """Create a new string node."""
+ return Node.wrap(_lib.lilv_new_string(self.world, string))
+
+ def new_int(self, val):
+ """Create a new int node."""
+ return Node.wrap(_lib.lilv_new_int(self.world, val))
+
+ def new_float(self, val):
+ """Create a new float node."""
+ return Node.wrap(_lib.lilv_new_float(self.world, val))
+
+ def new_bool(self, val):
+ """Create a new bool node."""
+ return Node.wrap(_lib.lilv_new_bool(self.world, val))
+
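+# Example (sketch): the triple-pattern query methods above treat None in any
+# of `subject`, `predicate`, or `obj` as a wildcard.  Assuming a world on
+# which load_all() has been called, and using the LILV_NS_* constants defined
+# at the end of this module, something like the following lists every
+# resource typed as an lv2:Plugin:
+#
+#   world = World()
+#   world.load_all()
+#   rdf_type   = world.new_uri(LILV_NS_RDF + 'type')
+#   lv2_Plugin = world.new_uri(LILV_NS_LV2 + 'Plugin')
+#   for node in world.find_nodes(None, rdf_type, lv2_Plugin):
+#       print(node)
+#   world.ask(None, rdf_type, lv2_Plugin)  # True iff any plugin data is loaded
+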
+class Instance(Structure):
+ """Plugin instance."""
+ __slots__ = [ 'lv2_descriptor', 'lv2_handle', 'pimpl', 'plugin', 'rate', 'instance' ]
+ _fields_ = [
+ ('lv2_descriptor', POINTER(LV2_Descriptor)),
+ ('lv2_handle', LV2_Handle),
+ ('pimpl', POINTER(None)),
+ ]
+
+ def __init__(self, plugin, rate, features=None):
+ self.plugin = plugin
+ self.rate = rate
+ self.instance = plugin_instantiate(plugin.plugin, rate, features)
+
+ def get_uri(self):
+ """Get the URI of the plugin which `instance` is an instance of.
+
+ Returned string is shared and must not be modified or deleted.
+ """
+ return self.get_descriptor().URI
+
+ def connect_port(self, port_index, data):
+ """Connect a port to a data location.
+
+ This may be called regardless of whether the plugin is activated;
+ activation and deactivation do not destroy port connections.
+ """
+ import numpy
+ if data is None:
+ self.get_descriptor().connect_port(
+ self.get_handle(),
+ port_index,
+ data)
+ elif type(data) == numpy.ndarray:
+ self.get_descriptor().connect_port(
+ self.get_handle(),
+ port_index,
+ data.ctypes.data_as(POINTER(c_float)))
+ else:
+ raise Exception("Unsupported data type")
+
+ def activate(self):
+ """Activate a plugin instance.
+
+ This resets all state information in the plugin, except for port data
+ locations (as set by connect_port()). This MUST be called
+ before calling run().
+ """
+ if self.get_descriptor().activate:
+ self.get_descriptor().activate(self.get_handle())
+
+ def run(self, sample_count):
+ """Run `instance` for `sample_count` frames.
+
+ If the hint lv2:hardRTCapable is set for this plugin, this function is
+ guaranteed not to block.
+ """
+ self.get_descriptor().run(self.get_handle(), sample_count)
+
+ def deactivate(self):
+ """Deactivate a plugin instance.
+
+ Note that to run the plugin after this you must activate it, which will
+ reset all state information (except port connections).
+ """
+ if self.get_descriptor().deactivate:
+ self.get_descriptor().deactivate(self.get_handle())
+
+ def get_extension_data(self, uri):
+ """Get extension data from the plugin instance.
+
+ The type and semantics of the data returned are specific to the particular
+ extension, though in all cases it is shared and must not be deleted.
+ """
+ if self.get_descriptor().extension_data:
+ return self.get_descriptor().extension_data(str(uri))
+
+ def get_descriptor(self):
+ """Get the LV2_Descriptor of the plugin instance.
+
+ Normally hosts should not need to access the LV2_Descriptor directly;
+ use the lilv_instance_* functions instead.
+ """
+ return self.instance[0].lv2_descriptor[0]
+
+ def get_handle(self):
+ """Get the LV2_Handle of the plugin instance.
+
+ Normally hosts should not need to access the LV2_Handle directly;
+ use the lilv_instance_* functions instead.
+ """
+ return self.instance[0].lv2_handle
+
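+# Example (sketch): typical use of Instance follows the LV2 run cycle
+# described in the docstrings above.  `plugin` and the port index here are
+# placeholders, and numpy float32 arrays are expected for port buffers:
+#
+#   import numpy
+#   instance = Instance(plugin, 48000)
+#   buf = numpy.zeros(512, numpy.float32)
+#   instance.connect_port(0, buf)   # connect every port before running
+#   instance.activate()
+#   instance.run(512)
+#   instance.deactivate()
+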
+class State(Structure):
+ """Plugin state (TODO)."""
+ pass
+
+class VariadicFunction(object):
+ # Wrapper for calling C variadic functions
+ def __init__(self, function, restype, argtypes):
+ self.function = function
+ self.function.restype = restype
+ self.argtypes = argtypes
+
+ def __call__(self, *args):
+ fixed_args = []
+ i = 0
+ for argtype in self.argtypes:
+ fixed_args.append(argtype.from_param(args[i]))
+ i += 1
+ return self.function(*fixed_args + list(args[i:]))
+
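+# Note (sketch): VariadicFunction is used below to wrap
+# lilv_plugin_get_num_ports_of_class().  Fixed arguments are converted through
+# their ctypes argtypes; any further port-class node pointers are passed
+# through unchanged, and a trailing None is assumed here as the sentinel that
+# terminates the underlying C vararg list, e.g. (names are placeholders):
+#
+#   n = plugin_get_num_ports_of_class(plugin_ptr, input_cls, audio_cls, None)
+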
+# Set return and argument types for lilv C functions
+
+free.argtypes = [POINTER(None)]
+free.restype = None
+
+# uri_to_path.argtypes = [String]
+# uri_to_path.restype = c_char_p
+
+file_uri_parse.argtypes = [String, POINTER(POINTER(c_char))]
+file_uri_parse.restype = c_char_p
+
+new_uri.argtypes = [POINTER(World), String]
+new_uri.restype = POINTER(Node)
+
+new_file_uri.argtypes = [POINTER(World), c_char_p, String]
+new_file_uri.restype = POINTER(Node)
+
+new_string.argtypes = [POINTER(World), String]
+new_string.restype = POINTER(Node)
+
+new_int.argtypes = [POINTER(World), c_int]
+new_int.restype = POINTER(Node)
+
+new_float.argtypes = [POINTER(World), c_float]
+new_float.restype = POINTER(Node)
+
+new_bool.argtypes = [POINTER(World), c_bool]
+new_bool.restype = POINTER(Node)
+
+node_free.argtypes = [POINTER(Node)]
+node_free.restype = None
+
+node_duplicate.argtypes = [POINTER(Node)]
+node_duplicate.restype = POINTER(Node)
+
+node_equals.argtypes = [POINTER(Node), POINTER(Node)]
+node_equals.restype = c_bool
+
+node_get_turtle_token.argtypes = [POINTER(Node)]
+node_get_turtle_token.restype = c_char_p
+
+node_is_uri.argtypes = [POINTER(Node)]
+node_is_uri.restype = c_bool
+
+node_as_uri.argtypes = [POINTER(Node)]
+node_as_uri.restype = c_char_p
+
+node_is_blank.argtypes = [POINTER(Node)]
+node_is_blank.restype = c_bool
+
+node_as_blank.argtypes = [POINTER(Node)]
+node_as_blank.restype = c_char_p
+
+node_is_literal.argtypes = [POINTER(Node)]
+node_is_literal.restype = c_bool
+
+node_is_string.argtypes = [POINTER(Node)]
+node_is_string.restype = c_bool
+
+node_as_string.argtypes = [POINTER(Node)]
+node_as_string.restype = c_char_p
+
+node_get_path.argtypes = [POINTER(Node), POINTER(POINTER(c_char))]
+node_get_path.restype = c_char_p
+
+node_is_float.argtypes = [POINTER(Node)]
+node_is_float.restype = c_bool
+
+node_as_float.argtypes = [POINTER(Node)]
+node_as_float.restype = c_float
+
+node_is_int.argtypes = [POINTER(Node)]
+node_is_int.restype = c_bool
+
+node_as_int.argtypes = [POINTER(Node)]
+node_as_int.restype = c_int
+
+node_is_bool.argtypes = [POINTER(Node)]
+node_is_bool.restype = c_bool
+
+node_as_bool.argtypes = [POINTER(Node)]
+node_as_bool.restype = c_bool
+
+plugin_classes_free.argtypes = [POINTER(PluginClasses)]
+plugin_classes_free.restype = None
+
+plugin_classes_size.argtypes = [POINTER(PluginClasses)]
+plugin_classes_size.restype = c_uint
+
+plugin_classes_begin.argtypes = [POINTER(PluginClasses)]
+plugin_classes_begin.restype = POINTER(Iter)
+
+plugin_classes_get.argtypes = [POINTER(PluginClasses), POINTER(Iter)]
+plugin_classes_get.restype = POINTER(PluginClass)
+
+plugin_classes_next.argtypes = [POINTER(PluginClasses), POINTER(Iter)]
+plugin_classes_next.restype = POINTER(Iter)
+
+plugin_classes_is_end.argtypes = [POINTER(PluginClasses), POINTER(Iter)]
+plugin_classes_is_end.restype = c_bool
+
+plugin_classes_get_by_uri.argtypes = [POINTER(PluginClasses), POINTER(Node)]
+plugin_classes_get_by_uri.restype = POINTER(PluginClass)
+
+scale_points_free.argtypes = [POINTER(ScalePoints)]
+scale_points_free.restype = None
+
+scale_points_size.argtypes = [POINTER(ScalePoints)]
+scale_points_size.restype = c_uint
+
+scale_points_begin.argtypes = [POINTER(ScalePoints)]
+scale_points_begin.restype = POINTER(Iter)
+
+scale_points_get.argtypes = [POINTER(ScalePoints), POINTER(Iter)]
+scale_points_get.restype = POINTER(ScalePoint)
+
+scale_points_next.argtypes = [POINTER(ScalePoints), POINTER(Iter)]
+scale_points_next.restype = POINTER(Iter)
+
+scale_points_is_end.argtypes = [POINTER(ScalePoints), POINTER(Iter)]
+scale_points_is_end.restype = c_bool
+
+uis_free.argtypes = [POINTER(UIs)]
+uis_free.restype = None
+
+uis_size.argtypes = [POINTER(UIs)]
+uis_size.restype = c_uint
+
+uis_begin.argtypes = [POINTER(UIs)]
+uis_begin.restype = POINTER(Iter)
+
+uis_get.argtypes = [POINTER(UIs), POINTER(Iter)]
+uis_get.restype = POINTER(UI)
+
+uis_next.argtypes = [POINTER(UIs), POINTER(Iter)]
+uis_next.restype = POINTER(Iter)
+
+uis_is_end.argtypes = [POINTER(UIs), POINTER(Iter)]
+uis_is_end.restype = c_bool
+
+uis_get_by_uri.argtypes = [POINTER(UIs), POINTER(Node)]
+uis_get_by_uri.restype = POINTER(UI)
+
+nodes_free.argtypes = [POINTER(Nodes)]
+nodes_free.restype = None
+
+nodes_size.argtypes = [POINTER(Nodes)]
+nodes_size.restype = c_uint
+
+nodes_begin.argtypes = [POINTER(Nodes)]
+nodes_begin.restype = POINTER(Iter)
+
+nodes_get.argtypes = [POINTER(Nodes), POINTER(Iter)]
+nodes_get.restype = POINTER(Node)
+
+nodes_next.argtypes = [POINTER(Nodes), POINTER(Iter)]
+nodes_next.restype = POINTER(Iter)
+
+nodes_is_end.argtypes = [POINTER(Nodes), POINTER(Iter)]
+nodes_is_end.restype = c_bool
+
+nodes_get_first.argtypes = [POINTER(Nodes)]
+nodes_get_first.restype = POINTER(Node)
+
+nodes_contains.argtypes = [POINTER(Nodes), POINTER(Node)]
+nodes_contains.restype = c_bool
+
+nodes_merge.argtypes = [POINTER(Nodes), POINTER(Nodes)]
+nodes_merge.restype = POINTER(Nodes)
+
+plugins_size.argtypes = [POINTER(Plugins)]
+plugins_size.restype = c_uint
+
+plugins_begin.argtypes = [POINTER(Plugins)]
+plugins_begin.restype = POINTER(Iter)
+
+plugins_get.argtypes = [POINTER(Plugins), POINTER(Iter)]
+plugins_get.restype = POINTER(Plugin)
+
+plugins_next.argtypes = [POINTER(Plugins), POINTER(Iter)]
+plugins_next.restype = POINTER(Iter)
+
+plugins_is_end.argtypes = [POINTER(Plugins), POINTER(Iter)]
+plugins_is_end.restype = c_bool
+
+plugins_get_by_uri.argtypes = [POINTER(Plugins), POINTER(Node)]
+plugins_get_by_uri.restype = POINTER(Plugin)
+
+world_new.argtypes = []
+world_new.restype = POINTER(World)
+
+world_set_option.argtypes = [POINTER(World), String, POINTER(Node)]
+world_set_option.restype = None
+
+world_free.argtypes = [POINTER(World)]
+world_free.restype = None
+
+world_load_all.argtypes = [POINTER(World)]
+world_load_all.restype = None
+
+world_load_bundle.argtypes = [POINTER(World), POINTER(Node)]
+world_load_bundle.restype = None
+
+world_load_specifications.argtypes = [POINTER(World)]
+world_load_specifications.restype = None
+
+world_load_plugin_classes.argtypes = [POINTER(World)]
+world_load_plugin_classes.restype = None
+
+world_unload_bundle.argtypes = [POINTER(World), POINTER(Node)]
+world_unload_bundle.restype = c_int
+
+world_load_resource.argtypes = [POINTER(World), POINTER(Node)]
+world_load_resource.restype = c_int
+
+world_unload_resource.argtypes = [POINTER(World), POINTER(Node)]
+world_unload_resource.restype = c_int
+
+world_get_plugin_class.argtypes = [POINTER(World)]
+world_get_plugin_class.restype = POINTER(PluginClass)
+
+world_get_plugin_classes.argtypes = [POINTER(World)]
+world_get_plugin_classes.restype = POINTER(PluginClasses)
+
+world_get_all_plugins.argtypes = [POINTER(World)]
+world_get_all_plugins.restype = POINTER(Plugins)
+
+world_find_nodes.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)]
+world_find_nodes.restype = POINTER(Nodes)
+
+world_get.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)]
+world_get.restype = POINTER(Node)
+
+world_ask.argtypes = [POINTER(World), POINTER(Node), POINTER(Node), POINTER(Node)]
+world_ask.restype = c_bool
+
+plugin_verify.argtypes = [POINTER(Plugin)]
+plugin_verify.restype = c_bool
+
+plugin_get_uri.argtypes = [POINTER(Plugin)]
+plugin_get_uri.restype = POINTER(Node)
+
+plugin_get_bundle_uri.argtypes = [POINTER(Plugin)]
+plugin_get_bundle_uri.restype = POINTER(Node)
+
+plugin_get_data_uris.argtypes = [POINTER(Plugin)]
+plugin_get_data_uris.restype = POINTER(Nodes)
+
+plugin_get_library_uri.argtypes = [POINTER(Plugin)]
+plugin_get_library_uri.restype = POINTER(Node)
+
+plugin_get_name.argtypes = [POINTER(Plugin)]
+plugin_get_name.restype = POINTER(Node)
+
+plugin_get_class.argtypes = [POINTER(Plugin)]
+plugin_get_class.restype = POINTER(PluginClass)
+
+plugin_get_value.argtypes = [POINTER(Plugin), POINTER(Node)]
+plugin_get_value.restype = POINTER(Nodes)
+
+plugin_has_feature.argtypes = [POINTER(Plugin), POINTER(Node)]
+plugin_has_feature.restype = c_bool
+
+plugin_get_supported_features.argtypes = [POINTER(Plugin)]
+plugin_get_supported_features.restype = POINTER(Nodes)
+
+plugin_get_required_features.argtypes = [POINTER(Plugin)]
+plugin_get_required_features.restype = POINTER(Nodes)
+
+plugin_get_optional_features.argtypes = [POINTER(Plugin)]
+plugin_get_optional_features.restype = POINTER(Nodes)
+
+plugin_has_extension_data.argtypes = [POINTER(Plugin), POINTER(Node)]
+plugin_has_extension_data.restype = c_bool
+
+plugin_get_extension_data.argtypes = [POINTER(Plugin)]
+plugin_get_extension_data.restype = POINTER(Nodes)
+
+plugin_get_num_ports.argtypes = [POINTER(Plugin)]
+plugin_get_num_ports.restype = c_uint32
+
+plugin_get_port_ranges_float.argtypes = [POINTER(Plugin), POINTER(c_float), POINTER(c_float), POINTER(c_float)]
+plugin_get_port_ranges_float.restype = None
+
+plugin_get_num_ports_of_class = VariadicFunction(_lib.lilv_plugin_get_num_ports_of_class,
+ c_uint32,
+ [POINTER(Plugin), POINTER(Node)])
+
+plugin_has_latency.argtypes = [POINTER(Plugin)]
+plugin_has_latency.restype = c_bool
+
+plugin_get_latency_port_index.argtypes = [POINTER(Plugin)]
+plugin_get_latency_port_index.restype = c_uint32
+
+plugin_get_port_by_index.argtypes = [POINTER(Plugin), c_uint32]
+plugin_get_port_by_index.restype = POINTER(Port)
+
+plugin_get_port_by_symbol.argtypes = [POINTER(Plugin), POINTER(Node)]
+plugin_get_port_by_symbol.restype = POINTER(Port)
+
+plugin_get_port_by_designation.argtypes = [POINTER(Plugin), POINTER(Node), POINTER(Node)]
+plugin_get_port_by_designation.restype = POINTER(Port)
+
+plugin_get_project.argtypes = [POINTER(Plugin)]
+plugin_get_project.restype = POINTER(Node)
+
+plugin_get_author_name.argtypes = [POINTER(Plugin)]
+plugin_get_author_name.restype = POINTER(Node)
+
+plugin_get_author_email.argtypes = [POINTER(Plugin)]
+plugin_get_author_email.restype = POINTER(Node)
+
+plugin_get_author_homepage.argtypes = [POINTER(Plugin)]
+plugin_get_author_homepage.restype = POINTER(Node)
+
+plugin_is_replaced.argtypes = [POINTER(Plugin)]
+plugin_is_replaced.restype = c_bool
+
+plugin_get_related.argtypes = [POINTER(Plugin), POINTER(Node)]
+plugin_get_related.restype = POINTER(Nodes)
+
+port_get_node.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_node.restype = POINTER(Node)
+
+port_get_value.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)]
+port_get_value.restype = POINTER(Nodes)
+
+port_get.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)]
+port_get.restype = POINTER(Node)
+
+port_get_properties.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_properties.restype = POINTER(Nodes)
+
+port_has_property.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)]
+port_has_property.restype = c_bool
+
+port_supports_event.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)]
+port_supports_event.restype = c_bool
+
+port_get_index.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_index.restype = c_uint32
+
+port_get_symbol.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_symbol.restype = POINTER(Node)
+
+port_get_name.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_name.restype = POINTER(Node)
+
+port_get_classes.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_classes.restype = POINTER(Nodes)
+
+port_is_a.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(Node)]
+port_is_a.restype = c_bool
+
+port_get_range.argtypes = [POINTER(Plugin), POINTER(Port), POINTER(POINTER(Node)), POINTER(POINTER(Node)), POINTER(POINTER(Node))]
+port_get_range.restype = None
+
+port_get_scale_points.argtypes = [POINTER(Plugin), POINTER(Port)]
+port_get_scale_points.restype = POINTER(ScalePoints)
+
+state_new_from_world.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(Node)]
+state_new_from_world.restype = POINTER(State)
+
+state_new_from_file.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(Node), String]
+state_new_from_file.restype = POINTER(State)
+
+state_new_from_string.argtypes = [POINTER(World), POINTER(LV2_URID_Map), String]
+state_new_from_string.restype = POINTER(State)
+
+LilvGetPortValueFunc = CFUNCTYPE(c_void_p, c_char_p, POINTER(None), POINTER(c_uint32), POINTER(c_uint32))
+
+state_new_from_instance.argtypes = [POINTER(Plugin), POINTER(Instance), POINTER(LV2_URID_Map), c_char_p, c_char_p, c_char_p, String, LilvGetPortValueFunc, POINTER(None), c_uint32, POINTER(POINTER(LV2_Feature))]
+state_new_from_instance.restype = POINTER(State)
+
+state_free.argtypes = [POINTER(State)]
+state_free.restype = None
+
+state_equals.argtypes = [POINTER(State), POINTER(State)]
+state_equals.restype = c_bool
+
+state_get_num_properties.argtypes = [POINTER(State)]
+state_get_num_properties.restype = c_uint
+
+state_get_plugin_uri.argtypes = [POINTER(State)]
+state_get_plugin_uri.restype = POINTER(Node)
+
+state_get_uri.argtypes = [POINTER(State)]
+state_get_uri.restype = POINTER(Node)
+
+state_get_label.argtypes = [POINTER(State)]
+state_get_label.restype = c_char_p
+
+state_set_label.argtypes = [POINTER(State), String]
+state_set_label.restype = None
+
+state_set_metadata.argtypes = [POINTER(State), c_uint32, POINTER(None), c_size_t, c_uint32, c_uint32]
+state_set_metadata.restype = c_int
+
+LilvSetPortValueFunc = CFUNCTYPE(None, c_char_p, POINTER(None), POINTER(None), c_uint32, c_uint32)
+state_emit_port_values.argtypes = [POINTER(State), LilvSetPortValueFunc, POINTER(None)]
+state_emit_port_values.restype = None
+
+state_restore.argtypes = [POINTER(State), POINTER(Instance), LilvSetPortValueFunc, POINTER(None), c_uint32, POINTER(POINTER(LV2_Feature))]
+state_restore.restype = None
+
+state_save.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(LV2_URID_Unmap), POINTER(State), c_char_p, c_char_p, String]
+state_save.restype = c_int
+
+state_to_string.argtypes = [POINTER(World), POINTER(LV2_URID_Map), POINTER(LV2_URID_Unmap), POINTER(State), c_char_p, String]
+state_to_string.restype = c_char_p
+
+state_delete.argtypes = [POINTER(World), POINTER(State)]
+state_delete.restype = c_int
+
+scale_point_get_label.argtypes = [POINTER(ScalePoint)]
+scale_point_get_label.restype = POINTER(Node)
+
+scale_point_get_value.argtypes = [POINTER(ScalePoint)]
+scale_point_get_value.restype = POINTER(Node)
+
+plugin_class_get_parent_uri.argtypes = [POINTER(PluginClass)]
+plugin_class_get_parent_uri.restype = POINTER(Node)
+
+plugin_class_get_uri.argtypes = [POINTER(PluginClass)]
+plugin_class_get_uri.restype = POINTER(Node)
+
+plugin_class_get_label.argtypes = [POINTER(PluginClass)]
+plugin_class_get_label.restype = POINTER(Node)
+
+plugin_class_get_children.argtypes = [POINTER(PluginClass)]
+plugin_class_get_children.restype = POINTER(PluginClasses)
+
+plugin_instantiate.argtypes = [POINTER(Plugin), c_double, POINTER(POINTER(LV2_Feature))]
+plugin_instantiate.restype = POINTER(Instance)
+
+instance_free.argtypes = [POINTER(Instance)]
+instance_free.restype = None
+
+plugin_get_uis.argtypes = [POINTER(Plugin)]
+plugin_get_uis.restype = POINTER(UIs)
+
+ui_get_uri.argtypes = [POINTER(UI)]
+ui_get_uri.restype = POINTER(Node)
+
+ui_get_classes.argtypes = [POINTER(UI)]
+ui_get_classes.restype = POINTER(Nodes)
+
+ui_is_a.argtypes = [POINTER(UI), POINTER(Node)]
+ui_is_a.restype = c_bool
+
+LilvUISupportedFunc = CFUNCTYPE(c_uint, c_char_p, c_char_p)
+
+ui_is_supported.argtypes = [POINTER(UI), LilvUISupportedFunc, POINTER(Node), POINTER(POINTER(Node))]
+ui_is_supported.restype = c_uint
+
+ui_get_bundle_uri.argtypes = [POINTER(UI)]
+ui_get_bundle_uri.restype = POINTER(Node)
+
+ui_get_binary_uri.argtypes = [POINTER(UI)]
+ui_get_binary_uri.restype = POINTER(Node)
+
+OPTION_FILTER_LANG = 'http://drobilla.net/ns/lilv#filter-lang'
+OPTION_DYN_MANIFEST = 'http://drobilla.net/ns/lilv#dyn-manifest'
+
+# Define URI constants for compatibility with old Python bindings
+
+LILV_NS_DOAP = 'http://usefulinc.com/ns/doap#'
+LILV_NS_FOAF = 'http://xmlns.com/foaf/0.1/'
+LILV_NS_LILV = 'http://drobilla.net/ns/lilv#'
+LILV_NS_LV2 = 'http://lv2plug.in/ns/lv2core#'
+LILV_NS_OWL = 'http://www.w3.org/2002/07/owl#'
+LILV_NS_RDF = 'http://www.w3.org/1999/02/22-rdf-syntax-ns#'
+LILV_NS_RDFS = 'http://www.w3.org/2000/01/rdf-schema#'
+LILV_NS_XSD = 'http://www.w3.org/2001/XMLSchema#'
+LILV_URI_ATOM_PORT = 'http://lv2plug.in/ns/ext/atom#AtomPort'
+LILV_URI_AUDIO_PORT = 'http://lv2plug.in/ns/lv2core#AudioPort'
+LILV_URI_CONTROL_PORT = 'http://lv2plug.in/ns/lv2core#ControlPort'
+LILV_URI_CV_PORT = 'http://lv2plug.in/ns/lv2core#CVPort'
+LILV_URI_EVENT_PORT = 'http://lv2plug.in/ns/ext/event#EventPort'
+LILV_URI_INPUT_PORT = 'http://lv2plug.in/ns/lv2core#InputPort'
+LILV_URI_MIDI_EVENT = 'http://lv2plug.in/ns/ext/midi#MidiEvent'
+LILV_URI_OUTPUT_PORT = 'http://lv2plug.in/ns/lv2core#OutputPort'
+LILV_URI_PORT = 'http://lv2plug.in/ns/lv2core#Port'
+LILV_OPTION_FILTER_LANG = 'http://drobilla.net/ns/lilv#filter-lang'
+LILV_OPTION_DYN_MANIFEST = 'http://drobilla.net/ns/lilv#dyn-manifest'
diff --git a/bindings/python/lv2_apply.py b/bindings/python/lv2_apply.py
new file mode 100755
index 0000000..4c7d9b4
--- /dev/null
+++ b/bindings/python/lv2_apply.py
@@ -0,0 +1,159 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+import math
+import lilv
+import sys
+import wave
+import numpy
+
+class WavFile(object):
+ """Helper class for accessing wav file data. Should work on the most common
+ formats (8 bit unsigned, 16 bit signed, 32 bit signed). Audio data is
+ converted to float32."""
+
+ # (struct format code, is_signed) for each sample width:
+ WAV_SPECS = {
+ 1: ("B", False),
+ 2: ("h", True),
+ 4: ("l", True),
+ }
+
+ def __init__(self, wav_in_path):
+ self.wav_in = wave.open(wav_in_path, 'r')
+ self.framerate = self.wav_in.getframerate()
+ self.nframes = self.wav_in.getnframes()
+ self.nchannels = self.wav_in.getnchannels()
+ self.sampwidth = self.wav_in.getsampwidth()
+ wav_spec = self.WAV_SPECS[self.sampwidth]
+ self.struct_fmt_code, self.signed = wav_spec
+ self.range = 2 ** (8*self.sampwidth)
+
+ def read(self):
+ """Read data from an open wav file. Return a list of channels, where each
+ channel is a list of floats."""
+ raw_bytes = self.wav_in.readframes(self.nframes)
+ struct_fmt = "%u%s" % (len(raw_bytes) // self.sampwidth, self.struct_fmt_code)
+ data = wave.struct.unpack(struct_fmt, raw_bytes)
+ if self.signed:
+ data = [i / float(self.range/2) for i in data]
+ else:
+ data = [(i - float(self.range/2)) / float(self.range/2) for i in data]
+
+ channels = []
+ for i in range(self.nchannels):
+ channels.append([data[j] for j in range(i, len(data), self.nchannels)])
+
+ return channels
+
+ def close(self):
+ self.wav_in.close()
+
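+# Worked example (for reference, using the conversion in WavFile.read() above):
+# a 16-bit signed file has sampwidth 2, so range is 2 ** 16 = 65536 and a raw
+# sample of 16384 maps to 16384 / 32768.0 = 0.5, while an 8-bit unsigned file
+# has range 256, so a raw sample of 192 maps to (192 - 128) / 128.0 = 0.5.
+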
+def main():
+ # Read command line arguments
+ if len(sys.argv) != 4:
+ print('USAGE: lv2_apply.py PLUGIN_URI INPUT_WAV OUTPUT_WAV')
+ sys.exit(1)
+
+ # Initialise Lilv
+ world = lilv.World()
+ ns = world.ns
+ world.load_all()
+
+ plugin_uri = sys.argv[1]
+ wav_in_path = sys.argv[2]
+ wav_out_path = sys.argv[3]
+
+ # Find plugin
+ plugin_uri_node = world.new_uri(plugin_uri)
+ plugins = world.get_all_plugins()
+ if plugin_uri_node not in plugins:
+ print("Unknown plugin `%s'" % plugin_uri)
+ sys.exit(1)
+
+ plugin = plugins[plugin_uri_node]
+ n_audio_in = plugin.get_num_ports_of_class(ns.lv2.InputPort, ns.lv2.AudioPort)
+ n_audio_out = plugin.get_num_ports_of_class(ns.lv2.OutputPort, ns.lv2.AudioPort)
+ if n_audio_out == 0:
+ print("Plugin has no audio outputs\n")
+ sys.exit(1)
+
+ # Open input file
+ try:
+ wav_in = WavFile(wav_in_path)
+ except:
+ print("Failed to open input `%s'\n" % wav_in_path)
+ sys.exit(1)
+
+ if wav_in.nchannels != n_audio_in:
+ print("Input has %d channels, but plugin has %d audio inputs\n" % (
+ wav_in.nchannels, n_audio_in))
+ sys.exit(1)
+
+ # Open output file
+ wav_out = wave.open(wav_out_path, 'w')
+ if not wav_out:
+ print("Failed to open output `%s'\n" % wav_out_path)
+ sys.exit(1)
+
+ # Set output file to same format as input (except possibly nchannels)
+ wav_out.setparams(wav_in.wav_in.getparams())
+ wav_out.setnchannels(n_audio_out)
+
+ print('%s => %s => %s @ %d Hz'
+ % (wav_in_path, plugin.get_name(), wav_out_path, wav_in.framerate))
+
+ instance = lilv.Instance(plugin, wav_in.framerate)
+
+ channels = wav_in.read()
+ wav_in.close()
+
+ # Connect all ports to buffers. NB if we fail to connect any buffer, lilv
+ # will segfault.
+ audio_input_buffers = []
+ audio_output_buffers = []
+ control_input_buffers = []
+ control_output_buffers = []
+ for index in range(plugin.get_num_ports()):
+ port = plugin.get_port_by_index(index)
+ if port.is_a(ns.lv2.InputPort):
+ if port.is_a(ns.lv2.AudioPort):
+ audio_input_buffers.append(numpy.array(channels[len(audio_input_buffers)], numpy.float32))
+ instance.connect_port(index, audio_input_buffers[-1])
+ elif port.is_a(ns.lv2.ControlPort):
+ default = float(port.get(ns.lv2.default))
+ control_input_buffers.append(numpy.array([default], numpy.float32))
+ instance.connect_port(index, control_input_buffers[-1])
+ else:
+ raise ValueError("Unhandled port type")
+ elif port.is_a(ns.lv2.OutputPort):
+ if port.is_a(ns.lv2.AudioPort):
+ audio_output_buffers.append(numpy.array([0] * wav_in.nframes, numpy.float32))
+ instance.connect_port(index, audio_output_buffers[-1])
+ elif port.is_a(ns.lv2.ControlPort):
+ control_output_buffers.append(numpy.array([0], numpy.float32))
+ instance.connect_port(index, control_output_buffers[-1])
+ else:
+ raise ValueError("Unhandled port type")
+
+ # Run the plugin:
+ instance.run(wav_in.nframes)
+
+ # Interleave output buffers:
+ data = numpy.dstack(audio_output_buffers).flatten()
+
+ # Return to original int range:
+ if wav_in.signed:
+ data = data * float(wav_in.range / 2)
+ else:
+ data = (data + 1) * float(wav_in.range/2)
+
+ # Write output file in chunks to stop memory usage getting out of hand:
+ CHUNK_SIZE = 8192
+ for chunk in numpy.array_split(data, range(CHUNK_SIZE, len(data), CHUNK_SIZE)):
+ wav_out.writeframes(wave.struct.pack("%u%s" % (len(chunk), wav_in.struct_fmt_code), *chunk.astype(int)))
+ wav_out.close()
+
+
+if __name__ == "__main__":
+ main()
diff --git a/bindings/python/lv2_list.py b/bindings/python/lv2_list.py
new file mode 100755
index 0000000..babe1b4
--- /dev/null
+++ b/bindings/python/lv2_list.py
@@ -0,0 +1,9 @@
+#!/usr/bin/env python
+
+import lilv
+
+world = lilv.World()
+world.load_all()
+
+for i in world.get_all_plugins():
+ print(i.get_uri())
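+
+# As a further sketch with the same bindings, each plugin's name and class
+# label (both lilv Node values) could be printed alongside its URI:
+#
+#   for i in world.get_all_plugins():
+#       print("%s\t%s\t%s" % (i.get_uri(), i.get_name(), i.get_class().get_label()))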
diff --git a/bindings/test/bindings_test_plugin.c b/bindings/test/bindings_test_plugin.c
new file mode 100644
index 0000000..3d6c763
--- /dev/null
+++ b/bindings/test/bindings_test_plugin.c
@@ -0,0 +1,196 @@
+/*
+ Copyright 2006-2011 David Robillard <d@drobilla.net>
+ Copyright 2006 Steve Harris <steve@plugin.org.uk>
+
+ Permission to use, copy, modify, and/or distribute this software for any
+ purpose with or without fee is hereby granted, provided that the above
+ copyright notice and this permission notice appear in all copies.
+
+ THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+*/
+
+/** Include standard C headers */
+#include <math.h>
+#include <stdlib.h>
+
+/**
+ LV2 headers are based on the URI of the specification they come from, so a
+ consistent convention can be used even for unofficial extensions. The URI
+ of the core LV2 specification is <http://lv2plug.in/ns/lv2core>; by
+ replacing `http:/` with `lv2`, any header in the specification bundle can be
+ included, in this case `lv2.h`.
+*/
+#include "lv2/lv2plug.in/ns/lv2core/lv2.h"
+
+/**
+ The URI is the identifier for a plugin, and how the host associates this
+ implementation in code with its description in data. In this plugin it is
+ only used once in the code, but defining the plugin URI at the top of the
+ file is a good convention to follow. If this URI does not match that used
+ in the data files, the host will fail to load the plugin.
+*/
+#define TEST_URI "http://example.org/lilv-bindings-test-plugin"
+
+/**
+ In code, ports are referred to by index. An enumeration of port indices
+ should be defined for readability.
+*/
+typedef enum {
+ TEST_CONTROL_IN = 0,
+ TEST_CONTROL_OUT = 1,
+ TEST_AUDIO_IN = 2,
+ TEST_AUDIO_OUT = 3
+} PortIndex;
+
+/**
+ Every plugin defines a private structure for the plugin instance. All data
+ associated with a plugin instance is stored here, and is available to
+ every instance method. In this simple plugin, only port buffers need to be
+ stored, since there is no additional instance data. */
+typedef struct {
+ float* buf;
+} Test;
+
+/**
+ The instantiate() function is called by the host to create a new plugin
+ instance. The host passes the plugin descriptor, sample rate, and bundle
+ path for plugins that need to load additional resources (e.g. waveforms).
+ The features parameter contains host-provided features defined in LV2
+ extensions, but this simple plugin does not use any.
+
+ This function is in the ``instantiation'' threading class, so no other
+ methods on this instance will be called concurrently with it.
+*/
+static LV2_Handle
+instantiate(const LV2_Descriptor* descriptor,
+ double rate,
+ const char* bundle_path,
+ const LV2_Feature* const* features)
+{
+ Test* test = (Test*)malloc(sizeof(Test));
+
+ return (LV2_Handle)test;
+}
+
+/**
+ The connect_port() method is called by the host to connect a particular port
+ to a buffer. The plugin must store the data location, but data may not be
+ accessed except in run().
+
+ This method is in the ``audio'' threading class, and is called in the same
+ context as run().
+*/
+static void
+connect_port(LV2_Handle instance,
+ uint32_t port,
+ void* data)
+{
+}
+
+/**
+ The activate() method is called by the host to initialise and prepare the
+ plugin instance for running. The plugin must reset all internal state
+ except for buffer locations set by connect_port(). Since this plugin has
+ no other internal state, this method does nothing.
+
+ This method is in the ``instantiation'' threading class, so no other
+ methods on this instance will be called concurrently with it.
+*/
+static void
+activate(LV2_Handle instance)
+{
+}
+
+/** Process a block of audio (audio thread, must be RT safe). */
+static void
+run(LV2_Handle instance, uint32_t n_samples)
+{
+}
+
+/**
+ The deactivate() method is the counterpart to activate() called by the host
+ after running the plugin. It indicates that the host will not call run()
+ again until another call to activate() and is mainly useful for more
+ advanced plugins with ``live'' characteristics such as those with auxiliary
+ processing threads. As with activate(), this plugin has no use for this
+ information so this method does nothing.
+
+ This method is in the ``instantiation'' threading class, so no other
+ methods on this instance will be called concurrently with it.
+*/
+static void
+deactivate(LV2_Handle instance)
+{
+}
+
+/**
+ Destroy a plugin instance (counterpart to instantiate()).
+
+ This method is in the ``instantiation'' threading class, so no other
+ methods on this instance will be called concurrently with it.
+*/
+static void
+cleanup(LV2_Handle instance)
+{
+ free(instance);
+}
+
+/**
+ The extension_data function returns any extension data supported by the
+ plugin. Note that this is not an instance method, but a function on the
+ plugin descriptor. It is usually used by plugins to implement additional
+ interfaces. This plugin does not have any extension data, so this function
+ returns NULL.
+
+ This method is in the ``discovery'' threading class, so no other functions
+ or methods in this plugin library will be called concurrently with it.
+*/
+static const void*
+extension_data(const char* uri)
+{
+ return NULL;
+}
+
+/**
+ Define the LV2_Descriptor for this plugin. It is best to define descriptors
+ statically to avoid leaking memory and needing non-portable shared library
+ constructors and destructors to clean up properly.
+*/
+static const LV2_Descriptor descriptor = {
+ TEST_URI,
+ instantiate,
+ connect_port,
+ activate,
+ run,
+ deactivate,
+ cleanup,
+ extension_data
+};
+
+/**
+ The lv2_descriptor() function is the entry point to the plugin library. The
+ host will load the library and call this function repeatedly with increasing
+ indices to find all the plugins defined in the library. The index is not an
+ identifier; the URI of the returned descriptor is used to determine the
+ identity of the plugin.
+
+ This method is in the ``discovery'' threading class, so no other functions
+ or methods in this plugin library will be called concurrently with it.
+*/
+LV2_SYMBOL_EXPORT
+const LV2_Descriptor*
+lv2_descriptor(uint32_t index)
+{
+ switch (index) {
+ case 0:
+ return &descriptor;
+ default:
+ return NULL;
+ }
+}
diff --git a/bindings/test/bindings_test_plugin.ttl.in b/bindings/test/bindings_test_plugin.ttl.in
new file mode 100644
index 0000000..e8323d5
--- /dev/null
+++ b/bindings/test/bindings_test_plugin.ttl.in
@@ -0,0 +1,62 @@
+# Lilv Bindings Test Plugin
+# Copyright 2011 David Robillard <d@drobilla.net>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+@prefix doap: <http://usefulinc.com/ns/doap#> .
+@prefix foaf: <http://xmlns.com/foaf/0.1/> .
+@prefix lv2: <http://lv2plug.in/ns/lv2core#> .
+@prefix rdf: <http://www.w3.org/1999/02/22-rdf-syntax-ns#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+@prefix ui: <http://lv2plug.in/ns/extensions/ui#> .
+
+<http://example.org/lilv-bindings-test-plugin>
+ a lv2:Plugin ;
+ doap:name "Lilv Bindings Test" ;
+ doap:license <http://opensource.org/licenses/isc> ;
+ lv2:optionalFeature lv2:hardRTCapable ;
+ ui:ui <http://example.org/lilv-bindings-test-plugin-ui> ;
+ lv2:port [
+ a lv2:InputPort ,
+ lv2:ControlPort ;
+ lv2:index 0 ;
+ lv2:symbol "input" ;
+ lv2:name "Input" ;
+ lv2:default 0.5 ;
+ lv2:minimum 0.0 ;
+ lv2:maximum 1.0 ;
+ lv2:scalePoint [ rdfs:label "off" ; rdf:value 0.0 ] ;
+ lv2:scalePoint [ rdfs:label "on" ; rdf:value 1.0 ] ;
+ ] , [
+ a lv2:OutputPort ,
+ lv2:ControlPort ;
+ lv2:index 1 ;
+ lv2:symbol "output" ;
+ lv2:name "Output"
+ ] , [
+ a lv2:AudioPort ,
+ lv2:InputPort ;
+ lv2:index 2 ;
+ lv2:symbol "audio_input" ;
+ lv2:name "Audio Input" ;
+ ] , [
+ a lv2:AudioPort ,
+ lv2:OutputPort ;
+ lv2:index 3 ;
+ lv2:symbol "audio_output" ;
+ lv2:name "Audio Output" ;
+ ] .
+
+<http://example.org/lilv-bindings-test-plugin-ui>
+ a ui:GtkUI ;
+ ui:binary <TODO> .
diff --git a/bindings/test/manifest.ttl.in b/bindings/test/manifest.ttl.in
new file mode 100644
index 0000000..9cc7fa8
--- /dev/null
+++ b/bindings/test/manifest.ttl.in
@@ -0,0 +1,7 @@
+@prefix lv2: <http://lv2plug.in/ns/lv2core#> .
+@prefix rdfs: <http://www.w3.org/2000/01/rdf-schema#> .
+
+<http://example.org/lilv-bindings-test-plugin>
+ a lv2:Plugin ;
+ lv2:binary <bindings_test_plugin@SHLIB_EXT@> ;
+ rdfs:seeAlso <bindings_test_plugin.ttl> .
diff --git a/bindings/test/python/test_api.py b/bindings/test/python/test_api.py
new file mode 100644
index 0000000..f594013
--- /dev/null
+++ b/bindings/test/python/test_api.py
@@ -0,0 +1,290 @@
+# Copyright 2016 David Robillard <d@drobilla.net>
+# Copyright 2013 Kaspar Emanuel <kaspar.emanuel@gmail.com>
+#
+# Permission to use, copy, modify, and/or distribute this software for any
+# purpose with or without fee is hereby granted, provided that the above
+# copyright notice and this permission notice appear in all copies.
+#
+# THIS SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+import lilv
+import unittest
+import os
+
+location = "file://" + os.getcwd() + "/bindings/bindings_test_plugin.lv2/"
+
+class NodeTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ def testNodes(self):
+ aint = self.world.new_int(1)
+ aint2 = self.world.new_int(1)
+ aint3 = self.world.new_int(3)
+ afloat = self.world.new_float(2.0)
+ atrue = self.world.new_bool(True)
+ afalse = self.world.new_bool(False)
+ auri = self.world.new_uri("http://example.org")
+ afile = self.world.new_file_uri(None, "/foo/bar")
+ astring = self.world.new_string("hello")
+ self.assertEqual(auri.get_turtle_token(), '<http://example.org>')
+ self.assertTrue(aint.is_int())
+ self.assertTrue(afloat.is_float())
+ self.assertTrue(auri.is_uri())
+ self.assertTrue(astring.is_string())
+ self.assertTrue(astring.is_literal())
+ self.assertFalse(auri.is_blank())
+ self.assertTrue(int(aint) == 1)
+ self.assertTrue(float(afloat) == 2.0)
+ self.assertTrue(bool(atrue))
+ self.assertFalse(bool(afalse))
+ self.assertEqual(afile.get_path(), "/foo/bar")
+ self.assertTrue(aint == aint2)
+ self.assertTrue(aint != aint3)
+ self.assertTrue(aint != afloat)
+ with self.assertRaises(ValueError):
+ int(atrue)
+ with self.assertRaises(ValueError):
+ float(aint)
+ with self.assertRaises(ValueError):
+ bool(astring)
+
+class UriTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.world.load_all()
+ def testInvalidURI(self):
+ self.plugin_uri = self.world.new_uri("invalid_uri")
+ self.assertIsNone(self.plugin_uri)
+ def testNonExistentURI(self):
+ self.plugin_uri = self.world.new_uri("exist:does_not")
+ self.plugin = self.world.get_all_plugins().get_by_uri(self.plugin_uri)
+ self.assertEqual(self.plugin, None)
+ def testPortTypes(self):
+ self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_INPUT_PORT))
+ def testPortTypes2(self):
+ self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_OUTPUT_PORT))
+ def testPortTypes3(self):
+ self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_AUDIO_PORT))
+ def testPortTypes4(self):
+ self.assertIsNotNone(self.world.new_uri(lilv.LILV_URI_CONTROL_PORT))
+
+class PluginClassTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ def testPluginClasses(self):
+ pclass = self.world.get_plugin_class()
+ self.assertIsNotNone(pclass)
+ self.assertIsNone(pclass.get_parent_uri())
+ self.assertIsNotNone(pclass.get_uri())
+ self.assertIsNotNone(pclass.get_label())
+ self.assertEqual(str(pclass.get_uri()), str(pclass))
+ for i in pclass.get_children():
+ self.assertIsNotNone(i)
+ self.assertIsNotNone(i.get_uri())
+ self.assertIsNotNone(i.get_label())
+
+class PluginClassesTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.world.load_all()
+ def testPluginClasses(self):
+ classes = self.world.get_plugin_classes()
+ pclass = self.world.get_plugin_class()
+ self.assertIsNotNone(classes)
+ self.assertIsNotNone(pclass)
+ self.assertTrue(pclass in classes)
+ self.assertTrue(pclass.get_uri() in classes)
+ self.assertGreater(len(classes), 1)
+ self.assertIsNotNone(classes[0])
+ self.assertIsNotNone(classes[pclass.get_uri()])
+
+class LoadTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.bundle_uri = self.world.new_uri(location)
+ self.world.load_specifications()
+ self.world.load_plugin_classes()
+ def tearDown(self):
+ del self.world
+ def testLoadUnload(self):
+ self.world.load_bundle(self.bundle_uri)
+ plugins = self.world.get_all_plugins()
+ plugin = plugins.get(plugins.begin())
+ self.world.load_resource(plugin)
+ self.world.unload_resource(plugin)
+ self.world.unload_bundle(self.bundle_uri)
+
+class PluginTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.world.set_option(lilv.OPTION_FILTER_LANG, self.world.new_bool(True))
+ self.bundle_uri = self.world.new_uri(location)
+ self.assertIsNotNone(self.bundle_uri, "Invalid URI: '" + location + "'")
+ self.world.load_bundle(self.bundle_uri)
+ self.plugins = self.world.get_all_plugins()
+ self.plugin = self.plugins.get(self.plugins.begin())
+ self.assertTrue(self.plugin.verify())
+ self.assertTrue(self.plugin in self.plugins)
+ self.assertTrue(self.plugin.get_uri() in self.plugins)
+ self.assertEqual(self.plugins[self.plugin.get_uri()], self.plugin)
+ self.assertIsNotNone(self.plugin, msg="Test plugin not found at location: '" + location + "'")
+ self.assertEqual(location, str(self.plugin.get_bundle_uri()))
+ self.plugin_uri = self.plugin.get_uri()
+ self.assertEqual(self.plugin.get_uri(), self.plugin_uri, "URI equality broken")
+ self.instance = lilv.Instance(self.plugin, 48000, None)
+ self.assertIsNotNone(self.instance)
+ self.lv2_InputPort = self.world.new_uri(lilv.LILV_URI_INPUT_PORT)
+ self.lv2_OutputPort = self.world.new_uri(lilv.LILV_URI_OUTPUT_PORT)
+ self.lv2_AudioPort = self.world.new_uri(lilv.LILV_URI_AUDIO_PORT)
+ self.lv2_ControlPort = self.world.new_uri(lilv.LILV_URI_CONTROL_PORT)
+ def testGetters(self):
+ self.assertIsNotNone(self.plugin.get_bundle_uri())
+ self.assertGreater(len(self.plugin.get_data_uris()), 0)
+ self.assertIsNotNone(self.plugin.get_library_uri())
+ self.assertTrue(self.plugin.get_name().is_string())
+ self.assertTrue(self.plugin.get_class().get_uri().is_uri())
+ self.assertEqual(len(self.plugin.get_value(self.world.ns.doap.license)), 1)
+ licenses = self.plugin.get_value(self.world.ns.doap.license)
+ features = self.plugin.get_value(self.world.ns.lv2.optionalFeature)
+ self.assertEqual(len(licenses), 1)
+ self.assertTrue(licenses[0] in licenses)
+ with self.assertRaises(IndexError):
+ self.assertIsNone(licenses[len(licenses)])
+ self.assertEqual(len(licenses) + len(features),
+ len(licenses.merge(features)))
+ self.assertEqual(licenses.get(licenses.begin()), self.world.new_uri('http://opensource.org/licenses/isc'))
+ self.assertEqual(licenses[0], licenses.get(licenses.begin()))
+ self.assertTrue(self.plugin.has_feature(self.world.ns.lv2.hardRTCapable))
+ self.assertEqual(len(self.plugin.get_supported_features()), 1)
+ self.assertEqual(len(self.plugin.get_optional_features()), 1)
+ self.assertEqual(len(self.plugin.get_required_features()), 0)
+ self.assertFalse(self.plugin.has_extension_data(self.world.new_uri('http://example.org/nope')))
+ self.assertEqual(len(self.plugin.get_extension_data()), 0)
+ self.assertEqual(len(self.plugin.get_extension_data()), 0)
+ self.assertFalse(self.plugin.has_latency())
+ self.assertIsNone(self.plugin.get_latency_port_index())
+ def testPorts(self):
+ self.assertEqual(self.plugin.get_num_ports(), 4)
+ self.assertIsNotNone(self.plugin.get_port(0))
+ self.assertIsNotNone(self.plugin.get_port(1))
+ self.assertIsNotNone(self.plugin.get_port(2))
+ self.assertIsNotNone(self.plugin.get_port(3))
+ self.assertIsNone(self.plugin.get_port_by_index(4))
+ self.assertIsNotNone(self.plugin.get_port("input"))
+ self.assertIsNotNone(self.plugin.get_port("output"))
+ self.assertIsNotNone(self.plugin.get_port("audio_input"))
+ self.assertIsNotNone(self.plugin.get_port("audio_output"))
+ self.assertIsNone(self.plugin.get_port_by_symbol("nonexistent"))
+ self.assertIsNone(self.plugin.get_port_by_designation(self.world.ns.lv2.InputPort, self.world.ns.lv2.control))
+ self.assertIsNone(self.plugin.get_project())
+ self.assertIsNone(self.plugin.get_author_name())
+ self.assertIsNone(self.plugin.get_author_email())
+ self.assertIsNone(self.plugin.get_author_homepage())
+ self.assertFalse(self.plugin.is_replaced())
+ self.assertEqual(0, len(self.plugin.get_related(self.world.new_uri("http://example.org/Type"))))
+ self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_AudioPort))
+ port = self.plugin.get_port("input")
+ self.assertTrue(port.get_node().is_blank())
+ self.assertEqual(0, port.get(self.world.ns.lv2.index))
+ self.assertEqual(1, len(port.get_value(self.world.ns.lv2.symbol)))
+ self.assertEqual(port.get_value(self.world.ns.lv2.symbol)[0], "input")
+ self.assertFalse(port.has_property(self.world.ns.lv2.latency))
+ self.assertFalse(port.supports_event(self.world.ns.midi.MidiEvent))
+ self.assertEqual(0, port.get_index())
+ self.assertEqual("input", port.get_symbol())
+ self.assertEqual("Input", port.get_name())
+ self.assertEqual([self.world.ns.lv2.ControlPort, self.world.ns.lv2.InputPort],
+ list(port.get_classes()))
+ self.assertTrue(port.is_a(self.world.ns.lv2.ControlPort))
+ self.assertFalse(port.is_a(self.world.ns.lv2.AudioPort))
+ self.assertEqual((0.5, 0.0, 1.0), port.get_range())
+ self.assertEqual(0, len(port.get_properties()))
+ def testScalePoints(self):
+ port = self.plugin.get_port("input")
+ points = port.get_scale_points()
+ self.assertEqual(points[0].get_label(), "off")
+ self.assertEqual(points[0].get_value(), 0.0)
+ self.assertEqual(points[1].get_label(), "on")
+ self.assertEqual(points[1].get_value(), 1.0)
+ def testPortCount(self):
+ self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_OutputPort, self.lv2_AudioPort))
+ self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_OutputPort, self.lv2_ControlPort))
+ self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_AudioPort))
+ self.assertEqual(1, self.plugin.get_num_ports_of_class(self.lv2_InputPort, self.lv2_ControlPort))
+
+class QueryTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.world.load_all()
+ self.bundle_uri = self.world.new_uri(location)
+ self.world.load_bundle(self.bundle_uri)
+ self.plugins = self.world.get_all_plugins()
+ self.plugin = self.plugins.get(self.plugins.begin())
+ def testNamespaces(self):
+ self.assertEqual(self.world.ns.lv2, "http://lv2plug.in/ns/lv2core#")
+ self.assertEqual(self.world.ns.lv2.Plugin, "http://lv2plug.in/ns/lv2core#Plugin")
+ def testQuery(self):
+ self.assertTrue(self.world.ask(None,
+ self.world.ns.rdf.type,
+ self.world.ns.lv2.Plugin))
+ self.assertLess(0, len(self.world.find_nodes(None,
+ self.world.ns.rdf.type,
+ self.world.ns.lv2.Plugin)))
+ self.assertEqual(self.plugin.get_uri(), self.world.get(None,
+ self.world.ns.rdf.type,
+ self.world.ns.lv2.Plugin))
+
+class InstanceTests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.bundle_uri = self.world.new_uri(location)
+ self.world.load_bundle(self.bundle_uri)
+ self.plugins = self.world.get_all_plugins()
+ self.plugin = self.plugins[0]
+ self.instance = lilv.Instance(self.plugin, 48000)
+ self.assertEqual(self.plugin.get_uri(), self.instance.get_uri())
+ self.assertIsNone(self.instance.get_extension_data(self.world.new_uri("http://example.org/ext")))
+ self.assertIsNone(self.instance.get_extension_data("http://example.org/ext"))
+ def testRun(self):
+ import numpy
+ n_samples = 100
+ buf = numpy.zeros(n_samples)
+ with self.assertRaises(Exception):
+ self.instance.connect_port(0, "hello")
+ self.instance.connect_port(0, None)
+ self.instance.connect_port(0, None)
+ self.instance.connect_port(2, buf)
+ self.instance.connect_port(3, buf)
+ self.instance.activate()
+ self.instance.run(n_samples)
+ self.instance.deactivate()
+
+class UITests(unittest.TestCase):
+ def setUp(self):
+ self.world = lilv.World()
+ self.bundle_uri = self.world.new_uri(location)
+ self.world.load_bundle(self.bundle_uri)
+ self.plugins = self.world.get_all_plugins()
+ self.plugin = self.plugins[0]
+ def testUI(self):
+ uis = self.plugin.get_uis()
+ ui_uri = self.world.new_uri('http://example.org/lilv-bindings-test-plugin-ui')
+ self.assertEqual(1, len(uis))
+ self.assertEqual(str(uis[0]), str(ui_uri))
+ self.assertEqual(uis[0], str(ui_uri))
+ self.assertEqual(uis[0].get_uri(), ui_uri)
+ self.assertEqual(uis[0].get_bundle_uri(), self.bundle_uri)
+ self.assertEqual(uis[0].get_binary_uri(), str(self.bundle_uri) + "TODO")
+ self.assertEqual(uis[uis[0].get_uri()], uis[0])
+ self.assertTrue(uis[0].is_a(self.world.ns.ui.GtkUI))
+ self.assertTrue(uis[0] in uis)
+ self.assertTrue(uis[0].get_uri() in uis)
+ self.assertEqual([self.world.ns.ui.GtkUI], list(uis[0].get_classes()))
+ for ui in uis:
+ print(ui)
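+
+# The suite above is typically driven by an external test runner; as a small
+# convenience sketch (standard library only), it can also be run directly:
+
+if __name__ == "__main__":
+ unittest.main()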