[h5py] 127/455: Cython version check; move utils functions from C to Cython

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:25 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit f86a54853b1ba95636a6e0eee79e631c6b558d15
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Thu Sep 25 04:13:13 2008 +0000

    Cython version check; move utils functions from C to Cython
---
 h5py/defs.pxd          |  10 +++
 h5py/h5d.pyx           |   7 +-
 h5py/numpy.pxd         |   3 +-
 h5py/tests/test_h5a.py |   2 +-
 h5py/utils.pxd         |  13 ++-
 h5py/utils.pyx         | 125 ++++++++++++++++++++++----
 h5py/utils_low.c       | 118 +------------------------
 h5py/utils_low.h       |   6 --
 setup.py               | 231 +++++++++++++++++++++++++++++--------------------
 9 files changed, 270 insertions(+), 245 deletions(-)

diff --git a/h5py/defs.pxd b/h5py/defs.pxd
index 278733b..f6aa670 100644
--- a/h5py/defs.pxd
+++ b/h5py/defs.pxd
@@ -53,6 +53,16 @@ cdef extern from "stdint.h":
   ctypedef signed long long int int64_t
   ctypedef signed long long int uint64_t 
 
+# === Compatibility definitions and macros for h5py ===========================
+
+#cdef extern from "compat.h":
+
+#  size_t h5py_size_n64
+#  size_t h5py_size_n128
+#  size_t h5py_offset_n64_real
+#  size_t h5py_offset_n64_imag
+#  size_t h5py_offset_n128_real
+#  size_t h5py_offset_n128_imag
 
 # === H5 - Common definitions and library functions ===========================
 
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
index 6c8c8e4..a5b8022 100644
--- a/h5py/h5d.pyx
+++ b/h5py/h5d.pyx
@@ -21,7 +21,7 @@ include "sync.pxi"
 from h5 cimport init_hdf5
 from numpy cimport ndarray, import_array, PyArray_DATA, NPY_WRITEABLE
 from utils cimport  check_numpy_read, check_numpy_write, \
-                    require_tuple, convert_tuple, emalloc, efree
+                    convert_tuple, emalloc, efree
 from h5t cimport TypeID, typewrap, py_create
 from h5s cimport SpaceID
 from h5p cimport PropID, propwrap, pdefault
@@ -245,7 +245,7 @@ cdef class DatasetID(ObjectID):
             arr_obj.flags |= NPY_WRITEABLE
 
     @sync
-    def extend(self, object shape):
+    def extend(self, tuple shape):
         """ (TUPLE shape)
 
             Extend the given dataset so it's at least as big as "shape".  Note 
@@ -262,7 +262,8 @@ cdef class DatasetID(ObjectID):
             space_id = H5Dget_space(self.id)
             rank = H5Sget_simple_extent_ndims(space_id)
 
-            require_tuple(shape, 0, rank, "shape")
+            if len(shape) != rank:
+                raise TypeError("New shape length (%d) must match dataset rank (%d)" % (len(shape), rank))
 
             dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
             convert_tuple(shape, dims, rank)
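
For reference, the inline check that replaces require_tuple() in extend() boils
down to the following; this is a plain-Python restatement of the two added lines
above (the helper name _check_extend_shape is hypothetical, not part of the patch).
The new "tuple shape" declaration additionally makes Cython itself raise TypeError
for non-tuple arguments (apart from None, which fails at the len() call instead).

    def _check_extend_shape(shape, rank):
        # Mirror of the validation now done inline in DatasetID.extend()
        if not isinstance(shape, tuple):   # enforced by the 'tuple shape' declaration in Cython
            raise TypeError("shape must be a tuple")
        if len(shape) != rank:
            raise TypeError("New shape length (%d) must match dataset rank (%d)"
                            % (len(shape), rank))

    _check_extend_shape((20, 30), 2)   # ok
    # _check_extend_shape((20,), 2)    # raises TypeError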
diff --git a/h5py/numpy.pxd b/h5py/numpy.pxd
index b8681fa..27148b9 100644
--- a/h5py/numpy.pxd
+++ b/h5py/numpy.pxd
@@ -63,7 +63,7 @@ cdef extern from "numpy/arrayobject.h":
 
   cdef enum:
     NPY_WRITEABLE, NPY_ALIGNED, NPY_C_CONTIGUOUS, NPY_CONTIGUOUS,
-    NPY_FORCECAST, NPY_NOTSWAPPED
+    NPY_FORCECAST, NPY_NOTSWAPPED, NPY_OWNDATA
 
   # Classes
   ctypedef extern class numpy.dtype [object PyArray_Descr]:
@@ -89,6 +89,7 @@ cdef extern from "numpy/arrayobject.h":
     double imag
 
   # Functions
+  int PyArray_DIM(ndarray arr, int i)
   object PyArray_FROM_OF(object arr, int requirements)
 
   object PyArray_GETITEM(object arr, void *itemptr)
diff --git a/h5py/tests/test_h5a.py b/h5py/tests/test_h5a.py
index 3dfc919..fe0ead4 100644
--- a/h5py/tests/test_h5a.py
+++ b/h5py/tests/test_h5a.py
@@ -58,7 +58,7 @@ class TestH5A(TestBase):
             attr = h5a.create(obj, name, htype, space)
             self.assert_(self.is_attr(attr))
             attr.write(arr_ref)
-            self.assertRaises(ValueError, attr.write, arr_fail)
+            self.assertRaises(TypeError, attr.write, arr_fail)
 
             attr = h5a.open_name(obj, name)
             dt = attr.dtype
diff --git a/h5py/utils.pxd b/h5py/utils.pxd
index f0042e3..8558dcd 100644
--- a/h5py/utils.pxd
+++ b/h5py/utils.pxd
@@ -20,15 +20,14 @@ cdef extern from "utils_low.h":
     hid_t create_ieee_complex64(char byteorder, char* real_name, char* img_name) except -1
     hid_t create_ieee_complex128(char byteorder, char* real_name, char* img_name) except -1
 
-    # Numpy array validation
-    int check_numpy_read(ndarray arr, hid_t space_id) except 0
-    int check_numpy_write(ndarray arr, hid_t space_id) except 0
+# === Custom API ==============================================================
 
-    # Memory handling
-    void* emalloc(size_t size) except? NULL
-    void efree(void* ptr)
+# Memory handling
+cdef void* emalloc(size_t size) except? NULL
+cdef void efree(void* ptr)
 
-# === Custom API ==============================================================
+cdef int check_numpy_read(ndarray arr, hid_t space_id=*) except -1
+cdef int check_numpy_write(ndarray arr, hid_t space_id=*) except -1
 
 cdef int convert_tuple(object tuple, hsize_t *dims, hsize_t rank) except -1
 cdef object convert_dims(hsize_t* dims, hsize_t rank)
diff --git a/h5py/utils.pyx b/h5py/utils.pyx
index 243f410..5d7b5ed 100644
--- a/h5py/utils.pyx
+++ b/h5py/utils.pyx
@@ -17,15 +17,111 @@ include "sync.pxi"
 from h5 cimport init_hdf5
 from python_exc cimport PyErr_SetString
 
-from numpy cimport import_array, NPY_UINT16, NPY_UINT32, NPY_UINT64, \
-                   npy_intp, PyArray_SimpleNew, PyArray_ContiguousFromAny, \
-                    PyArray_FROM_OTF, NPY_CONTIGUOUS, NPY_NOTSWAPPED, \
-                    NPY_FORCECAST
+from numpy cimport ndarray, import_array, \
+                    NPY_UINT16, NPY_UINT32, NPY_UINT64,  npy_intp, \
+                    PyArray_SimpleNew, PyArray_ContiguousFromAny, \
+                    PyArray_FROM_OTF, PyArray_DIM, \
+                    NPY_CONTIGUOUS, NPY_NOTSWAPPED, NPY_FORCECAST, \
+                    NPY_C_CONTIGUOUS, NPY_OWNDATA, NPY_WRITEABLE
+
 
 # Initialization
 init_hdf5()
 import_array()
 
+
+# === Exception-aware memory allocation =======================================
+
+cdef void* emalloc(size_t size) except? NULL:
+    # Wrapper for malloc(size) with the following behavior:
+    # 1. Always returns NULL for emalloc(0)
+    # 2. Raises RuntimeError for emalloc(size<0) and returns NULL
+    # 3. Raises RuntimeError if allocation fails and returns NULL
+
+    cdef void *retval = NULL
+
+    if size < 0:
+        PyErr_SetString(RuntimeError, "Attempted negative malloc")
+        return NULL
+    elif size > 0:
+        retval = malloc(size)
+        if retval == NULL:
+            errmsg = "Can't malloc %d bytes" % size
+            PyErr_SetString(MemoryError, errmsg)
+            return NULL
+    else:
+        return NULL
+
+    return retval
+
+cdef void efree(void* what):
+    free(what)
+
+
+# === Testing of NumPy arrays =================================================
+
+cdef int check_numpy(ndarray arr, hid_t space_id, int write):
+    # -1 if exception, NOT AUTOMATICALLY CHECKED
+
+    cdef int required_flags
+    cdef hsize_t arr_rank
+    cdef hsize_t space_rank
+    cdef hsize_t *space_dims = NULL
+    cdef int i
+
+    if arr is None:
+        PyErr_SetString(TypeError, "Array is None")
+        return -1
+
+    # Validate array flags
+
+    if write:
+        if not (arr.flags & (NPY_C_CONTIGUOUS | NPY_OWNDATA | NPY_WRITEABLE)):
+            PyErr_SetString(TypeError, "Array must be writable, C-contiguous and own its data.")
+            return -1
+    else:
+        if not (arr.flags & (NPY_C_CONTIGUOUS | NPY_OWNDATA)):
+            PyErr_SetString(TypeError, "Array must be C-contiguous and own its data.")
+            return -1
+
+    # Validate dataspace compatibility, if it's provided
+
+    if space_id > 0:
+
+        arr_rank = arr.nd
+        space_rank = H5Sget_simple_extent_ndims(space_id)
+
+        if arr_rank != space_rank:
+            err_msg = "Numpy array rank %d must match dataspace rank %d." % (arr_rank, space_rank)
+            PyErr_SetString(TypeError, err_msg)
+            return -1
+
+        space_dims = <hsize_t*>malloc(sizeof(hsize_t)*space_rank)
+        try:
+            space_rank = H5Sget_simple_extent_dims(space_id, space_dims, NULL)
+
+            for i from 0 < i < space_rank:
+
+                if write:
+                    if PyArray_DIM(arr,i) < space_dims[i]:
+                        PyErr_SetString(TypeError, "Array dimensions incompatible with dataspace.")
+                        return -1
+                else:
+                    if PyArray_DIM(arr,i) > space_dims[i]:
+                        PyErr_SetString(TypeError, "Array dimensions incompatible with dataspace.")
+                        return -1
+        finally:
+            free(space_dims)
+    return 1
+
+cdef int check_numpy_write(ndarray arr, hid_t space_id=-1) except -1:
+    return check_numpy(arr, space_id, 1)
+
+cdef int check_numpy_read(ndarray arr, hid_t space_id=-1) except -1:
+    return check_numpy(arr, space_id, 0)
+
+# === Conversion between HDF5 buffers and tuples ==============================
+
 cdef int convert_tuple(object tpl, hsize_t *dims, hsize_t rank) except -1:
     # Convert a Python tuple to an hsize_t array.  You must allocate
     # the array yourself and pass both it and the size to this function.
@@ -100,7 +196,10 @@ cdef object create_hsize_array(object arr):
         raise RuntimeError("Can't map hsize_t %d to Numpy typecode" % sizeof(hsize_t))
 
     return PyArray_FROM_OTF(arr, typecode, NPY_CONTIGUOUS | NPY_NOTSWAPPED | NPY_FORCECAST)
-    
+
+
+# === Argument testing ========================================================
+
 cdef int require_tuple(object tpl, int none_allowed, int size, char* name) except -1:
     # Ensure that tpl is in fact a tuple, or None if none_allowed is nonzero.
     # If size >= 0, also ensure that the length matches.
@@ -110,12 +209,8 @@ cdef int require_tuple(object tpl, int none_allowed, int size, char* name) excep
       (isinstance(tpl, tuple) and (size < 0 or len(tpl) == size)):
         return 1
 
-    nmsg = ""
-    smsg = ""
-    if size >= 0:
-        smsg = " of size %d" % size
-    if none_allowed:
-        nmsg = " or None"
+    nmsg = "" if size < 0 else " of size %d" % size
+    smsg = "" if not none_allowed else " or None"
 
     msg = "%s must be a tuple%s%s." % (name, smsg, nmsg)
     PyErr_SetString(ValueError, msg)
@@ -128,12 +223,8 @@ cdef int require_list(object lst, int none_allowed, int size, char* name) except
       (isinstance(lst, list) and (size < 0 or len(lst) == size)):
         return 1
 
-    nmsg = ""
-    smsg = ""
-    if size >= 0:
-        smsg = " of size %d" % size
-    if none_allowed:
-        nmsg = " or None"
+    nmsg = "" if size < 0 else " of size %d" % size
+    smsg = "" if not none_allowed else " or None"
 
     msg = "%s must be a list%s%s." % (name, smsg, nmsg)
     PyErr_SetString(ValueError, msg)
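
A quick way to see what the ternary rewrite in require_tuple() and require_list()
produces: the new code assigns the size clause to nmsg and the None clause to smsg,
the reverse of the pre-change variable roles, so the interpolation order in the
final message flips as well. A minimal Python reproduction (not part of the patch):

    # Reproduces the message construction from the rewritten require_tuple()
    name, size, none_allowed = "shape", 3, True
    nmsg = "" if size < 0 else " of size %d" % size
    smsg = "" if not none_allowed else " or None"
    print("%s must be a tuple%s%s." % (name, smsg, nmsg))
    # -> "shape must be a tuple or None of size 3."
    #    (the previous code produced "shape must be a tuple of size 3 or None.")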
diff --git a/h5py/utils_low.c b/h5py/utils_low.c
index 500ff19..5934b99 100644
--- a/h5py/utils_low.c
+++ b/h5py/utils_low.c
@@ -25,127 +25,13 @@
 #include "hdf5.h"
 #include "pythread.h"
 
-/* Wrapper for malloc(size) with the following behavior:
-   1. Always returns NULL for emalloc(0)
-   2. Raises RuntimeError for emalloc(size<0) and returns NULL
-   3. Raises RuntimeError if allocation fails and returns NULL
-*/
-void* emalloc(size_t size){
-
-    void *retval = NULL;
-
-    if(size==0) return NULL;
-    if(size<0){
-		PyErr_SetString(PyExc_RuntimeError, "Attempted negative malloc (h5py emalloc)");
-    }
-
-    retval = malloc(size);
-    if(retval == NULL){
-        PyErr_SetString(PyExc_RuntimeError, "Memory allocation failed (h5py emalloc)");
-    }
-
-    return retval;
-}
-
-/* Counterpart to emalloc.  For the moment, just a wrapper for free().
-*/
-void efree(void* ptr){
-    free(ptr);
-}
-
-
-
-/* The functions
-
-    - check_numpy_write(PyObject* arr, hid_t dataspace)
-    - check_numpy_read(PyObject* arr, hid_t dataspace)
-
-   test whether or not a given array object is suitable for reading or writing.
-   If dataspace id is positive, it will be checked for compatibility with
-   the array object's shape.
-
-   Return values:
-    1:  Can read/write
-    0: Failed (Python error raised.)
-*/
-int check_numpy(PyArrayObject* arr, hid_t space_id, int write){
-
-    int required_flags;
-    hsize_t arr_rank;
-    hsize_t space_rank;
-    hsize_t *space_dims = NULL;
-    int i;
-
-    /* Validate array flags */
-
-    if(write){
-        if(!(arr->flags & (NPY_C_CONTIGUOUS | NPY_OWNDATA | NPY_WRITEABLE))){
-            PyErr_SetString(PyExc_ValueError, "Array must be writable, C-contiguous and own its data.");
-            goto failed;
-        } 
-    } else {
-        if(!(arr->flags & (NPY_C_CONTIGUOUS | NPY_OWNDATA))){
-            PyErr_SetString(PyExc_ValueError, "Array must be C-contiguous and own its data.");
-            goto failed;
-        }
-    }
-
-    /* Validate dataspace compatibility, if it's provided. */
-
-    if(space_id > 0){
-
-        arr_rank = arr->nd;
-        space_rank = H5Sget_simple_extent_ndims(space_id);
-        if(space_rank < 0) goto failed;
-
-        if( arr_rank != space_rank){
-            PyErr_SetString(PyExc_ValueError, "Numpy array rank must match dataspace rank.");
-            goto failed;
-        }
-
-        space_dims = (hsize_t*)malloc(sizeof(hsize_t)*space_rank);
-        space_rank = H5Sget_simple_extent_dims(space_id, space_dims, NULL);
-        if(space_rank < 0) goto failed;
-
-        for(i=0; i<space_rank; i++){
-            if(write){
-                if(PyArray_DIM(arr,i) < space_dims[i]){
-                    PyErr_SetString(PyExc_ValueError, "Array dimensions incompatible with dataspace.");
-                    goto failed;
-                }
-            } else {
-                if(PyArray_DIM(arr,i) > space_dims[i]) {
-                    PyErr_SetString(PyExc_ValueError, "Array dimensions incompatible with dataspace.");
-                    goto failed;
-                }
-            } /* if(write) */
-        } /* for */
-    } /* if(space_id > 0) */
-
-  free(space_dims);
-  return 1;
-
-  failed:
-    free(space_dims);
-    if(!PyErr_Occurred()){
-        PyErr_SetString(PyExc_ValueError, "Numpy array is incompatible.");
-    }
-    return 0;
-}
-
-int check_numpy_write(PyArrayObject* arr, hid_t space_id){
-    return check_numpy(arr, space_id, 1);
-}
-
-int check_numpy_read(PyArrayObject* arr, hid_t space_id){
-    return check_numpy(arr, space_id, 0);
-}
-
 
 /* Rewritten versions of create_ieee_complex64/128 from Pytables, to support 
    standard array-interface typecodes and variable names for real/imag parts.  
    Also removed unneeded datatype copying.
    Both return -1 on failure, and raise Python exceptions.
+
+   These must be written in C as they use the HOFFSET macro.
 */
 hid_t create_ieee_complex64(const char byteorder, const char* real_name, const char* img_name) {
   hid_t float_id = -1;
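
Comparing the removed C implementation above with the new Cython check_numpy()
earlier in this patch, two details of the port are easy to miss. The snippet
below is a plain Python/NumPy sketch, not part of the patch, that makes both
concrete:

    import numpy as np

    # 1) Loop bounds: "for i from 0 < i < space_rank" in the Cython version
    #    iterates 1..rank-1, whereas the removed C loop "for(i=0; i<space_rank; i++)"
    #    covered 0..rank-1, so dimension 0 is no longer compared to the dataspace.
    space_rank = 3
    cython_indices = [i for i in range(space_rank) if 0 < i]   # [1, 2]
    c_indices = list(range(space_rank))                        # [0, 1, 2]

    # 2) Flag test: both versions OR the three requirement bits together, so an
    #    array is rejected only when *none* of C_CONTIGUOUS / OWNDATA / WRITEABLE
    #    is set.  A strided view therefore still passes the "write" check as written.
    view = np.zeros((4, 4))[::2]   # writeable, but not contiguous and not owning its data
    required = ('C_CONTIGUOUS', 'OWNDATA', 'WRITEABLE')
    print(any(view.flags[f] for f in required))   # True  -> accepted by the test as written
    print(all(view.flags[f] for f in required))   # False -> what the error message describes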
diff --git a/h5py/utils_low.h b/h5py/utils_low.h
index e8e62e3..0e90c2e 100644
--- a/h5py/utils_low.h
+++ b/h5py/utils_low.h
@@ -26,11 +26,5 @@
 hid_t create_ieee_complex64(const char byteorder, const char* real_name, const char* img_name);
 hid_t create_ieee_complex128(const char byteorder, const char* real_name, const char* img_name);
 
-int check_numpy_read(PyArrayObject* arr, hid_t space_id);
-int check_numpy_write(PyArrayObject* arr, hid_t space_id);
-
-void* emalloc(size_t size);
-void efree(void* ptr);
-
 #endif
 
diff --git a/setup.py b/setup.py
index 4827d04..797208c 100644
--- a/setup.py
+++ b/setup.py
@@ -16,20 +16,17 @@
     Setup script for the h5py package.  
 
     All commands take the usual distutils options, like --home, etc.  Cython is
-    not required for installation, but will be invoked if the .c files are
-    missing, one of the --pyrex options is used, or if a non-default API 
-    version or debug level is requested.
+    not required for installation, but will be invoked if one of the --cython
+    options is used, or if non-default options are specified for the build.
 
     To build:
-    python setup.py build
+    python setup.py build [--help for additional options]
 
     To install:
     sudo python setup.py install
 
     To run the test suite locally (won't install anything):
     python setup.py test
-
-    See INSTALL.txt or the h5py manual for additional build options.
 """
 
 import os
@@ -37,8 +34,7 @@ import sys
 import shutil
 import os.path as op
 
-from distutils.cmd import Command
-from distutils.errors import DistutilsError, DistutilsExecError
+from distutils.errors import DistutilsError
 from distutils.core import setup
 from distutils.extension import Extension
 from distutils.command.build import build 
@@ -49,11 +45,10 @@ from distutils.command.sdist import sdist
 NAME = 'h5py'
 VERSION = '0.4.0'
 MIN_NUMPY = '1.0.3'
-MIN_CYTHON = '0.9.8.1'
+MIN_CYTHON = '0.9.8.1.1'
 KNOWN_API = (16,18)    # Legal API levels (1.8.X or 1.6.X)
 SRC_PATH = 'h5py'      # Name of directory with .pyx files
 CMD_CLASS = {}         # Custom command classes for setup()
-HDF5 = None            # Custom HDF5 directory.
 
 # The list of modules depends on max API version
 MODULES = {16:  ['h5', 'h5f', 'h5g', 'h5s', 'h5t', 'h5d', 'h5a', 'h5p', 'h5z',
@@ -85,66 +80,58 @@ try:
 except ImportError:
     fatal("Numpy not installed (version >= %s required)" % MIN_NUMPY)
 
-for arg in sys.argv[:]:
-    if arg.find('--hdf5=') == 0:
-        splitarg = arg.split('=',1)
-        if len(splitarg) != 2:
-            fatal("HDF5 directory not understood (wants --hdf5=/path/to/hdf5)")
-        path = op.abspath(splitarg[1])
-        if not op.exists(path):
-            fatal("HDF5 path is illegal: %s" % path)
-        HDF5 = path
-        sys.argv.remove(arg)
-        
+    
 # === Platform-dependent compiler config ======================================
 
-if os.name == 'nt':
-    if HDF5 is None:
-        fatal("On Windows, HDF5 directory must be specified.")
-
-    libraries = ['hdf5dll']
-    include_dirs = [numpy.get_include(), op.join(HDF5, 'include')]
-    library_dirs = [op.join(HDF5, 'dll2')]  # Must contain only "hdf5dll.dll.a"
-    runtime_dirs = []
-    extra_compile_args = ['-DH5_USE_16_API', '-D_HDF5USEDLL_', '-DH5_SIZEOF_SSIZE_T=4']
-    extra_link_args = []
-    package_data = {'h5py': ['*.pyx', '*.dll', 
-                                    'Microsoft.VC90.CRT/*.manifest',
-                                    'Microsoft.VC90.CRT/*.dll'],
-                       'h5py.tests': ['data/*.hdf5']}
 
-else:   # Assume Unix-like
+class ExtensionCreator(object):
 
-    libraries = ['hdf5']
-    if HDF5 is None:
-        include_dirs = [numpy.get_include(), '/usr/include', '/usr/local/include']
-        library_dirs = ['/usr/lib/', '/usr/local/lib']
-    else:
-        include_dirs = [numpy.get_include(), op.join(HDF5, 'include')]
-        library_dirs = [op.join(HDF5, 'lib')]
-    runtime_dirs = library_dirs
-    extra_compile_args = ['-DH5_USE_16_API', '-Wno-unused', '-Wno-uninitialized']
-    extra_link_args = []
+    """ Figures out what include/library dirs are appropriate, and
+        serves as a factory for Extension instances.  This is in a
+        class as opposed to module code since the HDF5 location
+        isn't known until runtime.
+    """
 
-    package_data = {'h5py': ['*.pyx'],
-                   'h5py.tests': ['data/*.hdf5']}
+    def __init__(self, hdf5_loc=None):
+        if os.name == 'nt':
+            if hdf5_loc is None:
+                fatal("On Windows, HDF5 directory must be specified.")
 
-# The actual extensions themselves are created at runtime, as the list of
-# modules depends on command-line options.
-def create_extension(name):
-    """ Create a distutils Extension object for the given module.  Uses the
-        globals in this file for things like the include directory.
-    """
-    sources = [op.join(SRC_PATH, name+'.c')]+[op.join(SRC_PATH,x) for x in EXTRA_SRC]
-    ext = Extension(NAME+'.'+name,
-                        sources, 
-                        include_dirs = include_dirs, 
-                        libraries = libraries,
-                        library_dirs = library_dirs,
-                        runtime_library_dirs = runtime_dirs,
-                        extra_compile_args = extra_compile_args,
-                        extra_link_args = extra_link_args)
-    return ext
+            self.libraries = ['hdf5dll']
+            self.include_dirs = [numpy.get_include(), op.join(hdf5_loc, 'include')]
+            self.library_dirs = [op.join(hdf5_loc, 'dll2')]  # Must contain only "hdf5dll.dll.a"
+            self.runtime_dirs = []
+            self.extra_compile_args = ['-DH5_USE_16_API', '-D_HDF5USEDLL_', '-DH5_SIZEOF_SSIZE_T=4']
+            self.extra_link_args = []
+
+        else:
+            self.libraries = ['hdf5']
+            if hdf5_loc is None:
+                self.include_dirs = [numpy.get_include(), '/usr/include', '/usr/local/include']
+                self.library_dirs = ['/usr/lib/', '/usr/local/lib']
+            else:
+                self.include_dirs = [numpy.get_include(), op.join(hdf5_loc, 'include')]
+                self.library_dirs = [op.join(hdf5_loc, 'lib')]
+            self.runtime_dirs = self.library_dirs
+            self.extra_compile_args = ['-DH5_USE_16_API', '-Wno-unused', '-Wno-uninitialized']
+            self.extra_link_args = []
+
+    
+    def create_extension(self, name, extra_src=[]):
+        """ Create a distutils Extension object for the given module.  A list
+            of C source files to be included in the compilation can also be
+            provided.
+        """
+        sources = [op.join(SRC_PATH, name+'.c')]+[op.join(SRC_PATH,x) for x in extra_src]
+        ext = Extension(NAME+'.'+name,
+                            sources, 
+                            include_dirs = self.include_dirs, 
+                            libraries = self.libraries,
+                            library_dirs = self.library_dirs,
+                            runtime_library_dirs = self.runtime_dirs,
+                            extra_compile_args = self.extra_compile_args,
+                            extra_link_args = self.extra_link_args)
+        return ext
 
 
 # === Custom extensions for distutils =========================================
@@ -159,35 +146,48 @@ class cybuild(build):
         build.  The advantage is that Cython is run on all files before
         building begins, and is only run when the distribution is actually
         being built (and not, for example, when "sdist" is called for).
+
+        It also populates the list of extension modules at runtime, as this
+        can be changed by some of the command-line options.
     """
 
     user_options = build.user_options + \
                     [('cython','y','Run Cython'),
                      ('cython-only','Y', 'Run Cython and stop'),
-                     ('diag', 'd','Enable library logging'),
+                     ('hdf5=', '5', 'Custom location for HDF5'),
+                     ('diag', 'd','Enable library debug logging'),
                      ('api=', 'a', 'Set API levels (--api=16,18)'),
-                     ('threads', 't', 'Thread-aware')]
-    boolean_options = build.boolean_options + ['cython', 'threads','diag']
+                     ('threads', 't', 'Make library thread-aware')]
+    boolean_options = build.boolean_options + ['cython', 'cython-only', 'threads','diag']
 
     def initialize_options(self):
-        self.cython = False
-        self.cython_only = False
-        self.threads = False
-        self.api = (16,)
-        self.diag = False
-        self._explicit_only = False     # Hack for test subclass
+        """ Specify safe defaults for command-line options. """
+        self.hdf5 = None                # None or a string with the HDF5 dir
+        self.cython = False             # T/F
+        self.cython_only = False        # T/F
+        self.threads = False            # T/F
+        self.api = None                 # None or a tuple (e.g. (16,18))
+        self.diag = False               # T/F
+        self._explicit_only = False     # T/F: Hack for test subclass
         build.initialize_options(self)
 
     def finalize_options(self):
+        """ Validate provided options and ensure consistency.  Note this is
+            only run if at least one option is specified!
+        """
+        if self.hdf5 is not None:
+            self.hdf5 = op.abspath(self.hdf5)
+            if not op.exists(self.hdf5):
+                fatal('Specified HDF5 directory "%s" does not exist' % self.hdf5)
 
         if self.cython_only or  \
-           self.api != (16,) or \
+           self.api is not None or \
            self.threads or \
            self.diag:
             self.cython = True
 
         # Validate API levels
-        if self.api != (16,):
+        if self.api is not None:
             try:
                 self.api = tuple(int(x) for x in self.api.split(',') if len(x) > 0)
                 if len(self.api) == 0 or not all(x in KNOWN_API for x in self.api):
@@ -198,7 +198,7 @@ class cybuild(build):
         build.finalize_options(self)
 
     def _get_pxi(self):
-        """ Get the configuration .pxi for the current options. """
+        """ Generate a Cython .pxi file reflecting the current options. """
 
         pxi_str = \
 """# This file is automatically generated.  Do not edit.
@@ -214,17 +214,29 @@ DEF H5PY_DEBUG = %(DEBUG)d    # Logging-level number, or 0 to disable
 
 DEF H5PY_THREADS = %(THREADS)d  # Enable thread-safety and non-blocking reads
 """
-        pxi_str %= {"VERSION": VERSION, "API_MAX": max(self.api),
+        return pxi_str % {"VERSION": VERSION, "API_MAX": max(self.api),
                     "API_16": 16 in self.api, "API_18": 18 in self.api,
                     "DEBUG": 10 if self.diag else 0, "THREADS": self.threads,
-                    "HDF5": "Default" if HDF5 is None else HDF5}
-
-        return pxi_str
+                    "HDF5": "Default" if self.hdf5 is None else self.hdf5}
 
     def run(self, *args, **kwds):
+        """ Called to perform the actual compilation.  This performs the
+            following steps:
+
+            1. Generate a list of C extension modules for the compiler
+            2. Compare the run-time options with a currently-existing .pxi
+               file, and determine if a Cython recompile is required
+            3. If necessary, recompile all Cython files
+            4. Hand control over to the distutils C build
+        """
 
-        modules = MODULES[max(self.api)]
-        self.distribution.ext_modules = [create_extension(x) for x in modules]
+        if self.api is None:
+            self.api = (min(KNOWN_API),)
+
+        creator = ExtensionCreator(self.hdf5)
+        modules = sorted(MODULES[max(self.api)])
+        self.distribution.ext_modules = \
+            [creator.create_extension(x, EXTRA_SRC) for x in modules]
 
         # Necessary because Cython doesn't detect changes to the .pxi
         recompile_all = False
@@ -255,21 +267,19 @@ DEF H5PY_THREADS = %(THREADS)d  # Enable thread-safety and non-blocking reads
                 f.close()
 
         if self.cython or recompile_all:
-            print "Running Cython..."
             try:
                 from Cython.Compiler.Main import Version, compile, compile_multiple, CompilationOptions
-                from Cython.Distutils import build_ext
             except ImportError:
-                fatal("Cython recompilation required, but Cython not installed.")
+                fatal("Cython recompilation required, but Cython >=%s not installed." % MIN_CYTHON)
 
             if Version.version < MIN_CYTHON:
                 fatal("Old Cython version detected; at least %s required" % MIN_CYTHON)
 
+            print "Running Cython (%s)..." % Version.version
             print "  API levels: %s" % ','.join(str(x) for x in self.api)
             print "  Thread-aware: %s" % ('yes' if self.threads else 'no')
             print "  Diagnostic mode: %s" % ('yes' if self.diag else 'no')
-            print "  HDF5: %s" % ('default' if HDF5 is None else HDF5)
-
+            print "  HDF5: %s" % ('default' if self.hdf5 is None else self.hdf5)
 
             # Build each extension
             # This should be a single call to compile_multiple, but it's
@@ -300,6 +310,11 @@ DEF H5PY_THREADS = %(THREADS)d  # Enable thread-safety and non-blocking reads
         build.run(self, *args, **kwds)
 
 class test(cybuild):
+
+    """ Run unit tests.  As a special case, won't run Cython unless --cython
+        or --cython-only are specified.
+    """
+
     description = "Build and run unit tests"
     user_options = cybuild.user_options + \
                    [('sections=','s','Comma separated list of tests ("-" prefix to NOT run)')]
@@ -309,7 +324,6 @@ class test(cybuild):
         cybuild.initialize_options(self)
 
     def finalize_options(self):
-        pass
         cybuild.finalize_options(self)
 
     def run(self):
@@ -332,6 +346,7 @@ class doc(cybuild):
 
     def run(self):
 
+        self._explicit_only = True
         cybuild.run(self)
 
         for x in ('docs', 'docs/api-html'):
@@ -355,20 +370,36 @@ class doc(cybuild):
 
 class cyclean(clean):
 
+    """ Standard distutils clean extended to clean up Cython-generated files.
+    """
+
+    user_options = clean.user_options + \
+                   [('doc','d','Also destroy compiled documentation')]
+    boolean_options = clean.boolean_options + ['doc']
+
+    def initialize_options(self):
+        self.doc = False
+        clean.initialize_options(self)
+
+    def finalize_options(self):
+        clean.finalize_options(self)
+
     def run(self):
         
         allmodules = set()
         for x in MODULES.values():
             allmodules.update(x)
 
-        for x in ('build','docs/api-html', 'docs/manual-html'):
-            try:
-                shutil.rmtree(x)
-            except OSError:
-                pass
+        if self.doc:
+            for x in ('docs/api-html', 'docs/manual-html') :
+                try:
+                    shutil.rmtree(x)
+                except OSError:
+                    pass
+
         fnames = [ op.join(SRC_PATH, x+'.dep') for x in allmodules ] + \
                  [ op.join(SRC_PATH, x+'.c') for x in allmodules ] + \
-                 [ 'MANIFEST']
+                 [ op.join(SRC_PATH, 'config.pxi')]
 
         for name in fnames:
             try:
@@ -380,6 +411,8 @@ class cyclean(clean):
 
 class new_sdist(sdist):
 
+    """ Version of sdist that doesn't cache the MANIFEST file """
+
     def run(self):
         if os.path.exists('MANIFEST'):
             os.remove('MANIFEST')
@@ -425,6 +458,16 @@ data structures and their HDF5 equivalents vastly simplifies the process of
 reading and writing data from Python. 
 """
 
+# Windows requires a custom C runtime
+if os.name == 'nt':
+    package_data = {'h5py': ['*.pyx', '*.dll', 
+                            'Microsoft.VC90.CRT/*.manifest',
+                            'Microsoft.VC90.CRT/*.dll'],
+                       'h5py.tests': ['data/*.hdf5']}
+else:
+    package_data = {'h5py': ['*.pyx'],
+                   'h5py.tests': ['data/*.hdf5']}
+
 setup(
   name = NAME,
   version = VERSION,
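
One caveat about the version check in cybuild.run(): "Version.version < MIN_CYTHON"
compares the two version strings lexically, which works within the 0.9.8.x series
but would mis-order a hypothetical '0.10' release. A small illustration (as_tuple
is a hypothetical helper, not part of the patch):

    MIN_CYTHON = '0.9.8.1.1'

    def as_tuple(version):
        # Compare release numbers numerically rather than lexically.
        return tuple(int(p) for p in version.split('.') if p.isdigit())

    print('0.10' < MIN_CYTHON)                        # True  (lexical string comparison)
    print(as_tuple('0.10') < as_tuple(MIN_CYTHON))    # False (numeric comparison)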
