[h5py] 130/455: Fix broken setup.py behavior, refactor h5t, more 1.8.X additions
Ghislain Vaillant
ghisvail-guest@moszumanska.debian.org
Thu Jul 2 18:19:25 UTC 2015
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.
commit 8fae14f69a5b009c2037a00ada7ac562f321df66
Author: andrewcollette <andrew.collette@gmail.com>
Date: Mon Sep 29 23:30:41 2008 +0000
Fix broken setup.py behavior, refactor h5t, more 1.8.X additions
---
h5py/defs.pxd | 298 ++++++++++++++++++++++-----------------
h5py/h5.pxd | 2 +
h5py/h5.pyx | 16 ++-
h5py/h5l18.pyx | 66 +++++++++
h5py/h5o18.pyx | 139 +++++++++++++++++++
h5py/h5t.pxd | 4 +-
h5py/h5t.pyx | 407 +++++++++++++++++++++++++++++++-----------------------
h5py/highlevel.py | 27 +++-
setup.py | 254 +++++++++++++++++++---------------
9 files changed, 793 insertions(+), 420 deletions(-)
diff --git a/h5py/defs.pxd b/h5py/defs.pxd
index ac46836..a8fc733 100644
--- a/h5py/defs.pxd
+++ b/h5py/defs.pxd
@@ -452,164 +452,202 @@ cdef extern from "hdf5.h":
IF H5PY_18API:
- cdef extern from "hdf5.h":
+ cdef extern from "hdf5.h":
- # TODO: put both versions in h5t.pxd
- ctypedef enum H5T_cset_t:
- H5T_CSET_ERROR = -1, #
- H5T_CSET_ASCII = 0, # US ASCII
- H5T_CSET_UTF8 = 1, # UTF-8 Unicode encoding
+ # TODO: put both versions in h5t.pxd
+ ctypedef enum H5T_cset_t:
+ H5T_CSET_ERROR = -1, #
+ H5T_CSET_ASCII = 0, # US ASCII
+ H5T_CSET_UTF8 = 1, # UTF-8 Unicode encoding
- unsigned int H5L_MAX_LINK_NAME_LEN # ((uint32_t) (-1)) (4GB - 1)
+ unsigned int H5L_MAX_LINK_NAME_LEN # ((uint32_t) (-1)) (4GB - 1)
- # Link class types.
- # * Values less than 64 are reserved for the HDF5 library's internal use.
- # * Values 64 to 255 are for "user-defined" link class types; these types are
- # * defined by HDF5 but their behavior can be overridden by users.
- # * Users who want to create new classes of links should contact the HDF5
- # * development team at hdfhelp@ncsa.uiuc.edu .
- # * These values can never change because they appear in HDF5 files.
- #
- ctypedef enum H5L_type_t:
- H5L_TYPE_ERROR = (-1), # Invalid link type id
- H5L_TYPE_HARD = 0, # Hard link id
- H5L_TYPE_SOFT = 1, # Soft link id
- H5L_TYPE_EXTERNAL = 64, # External link id
- H5L_TYPE_MAX = 255 # Maximum link type id
+ # Link class types.
+ # * Values less than 64 are reserved for the HDF5 library's internal use.
+ # * Values 64 to 255 are for "user-defined" link class types; these types are
+ # * defined by HDF5 but their behavior can be overridden by users.
+ # * Users who want to create new classes of links should contact the HDF5
+ # * development team at hdfhelp@ncsa.uiuc.edu .
+ # * These values can never change because they appear in HDF5 files.
+ #
+ ctypedef enum H5L_type_t:
+ H5L_TYPE_ERROR = (-1), # Invalid link type id
+ H5L_TYPE_HARD = 0, # Hard link id
+ H5L_TYPE_SOFT = 1, # Soft link id
+ H5L_TYPE_EXTERNAL = 64, # External link id
+ H5L_TYPE_MAX = 255 # Maximum link type id
- # Information struct for link (for H5Lget_info/H5Lget_info_by_idx)
- cdef union _add_u:
- haddr_t address # Address hard link points to
- size_t val_size # Size of a soft link or UD link value
+ # Information struct for link (for H5Lget_info/H5Lget_info_by_idx)
+ cdef union _add_u:
+ haddr_t address # Address hard link points to
+ size_t val_size # Size of a soft link or UD link value
- ctypedef struct H5L_info_t:
- H5L_type_t type # Type of link
- hbool_t corder_valid # Indicate if creation order is valid
- int64_t corder # Creation order
- H5T_cset_t cset # Character set of link name
- _add_u u
+ ctypedef struct H5L_info_t:
+ H5L_type_t type # Type of link
+ hbool_t corder_valid # Indicate if creation order is valid
+ int64_t corder # Creation order
+ H5T_cset_t cset # Character set of link name
+ _add_u u
- # Prototype for H5Literate/H5Literate_by_name() operator
- ctypedef herr_t (*H5L_iterate_t) (hid_t group, char *name, H5L_info_t *info,
- void *op_data)
+ # Prototype for H5Literate/H5Literate_by_name() operator
+ ctypedef herr_t (*H5L_iterate_t) (hid_t group, char *name, H5L_info_t *info,
+ void *op_data)
- # Links API
+ # Links API
- herr_t H5Lmove(hid_t src_loc, char *src_name, hid_t dst_loc,
- char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
+ herr_t H5Lmove(hid_t src_loc, char *src_name, hid_t dst_loc,
+ char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
- herr_t H5Lcopy(hid_t src_loc, char *src_name, hid_t dst_loc,
- char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
+ herr_t H5Lcopy(hid_t src_loc, char *src_name, hid_t dst_loc,
+ char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
- herr_t H5Lcreate_hard(hid_t cur_loc, char *cur_name,
- hid_t dst_loc, char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
+ herr_t H5Lcreate_hard(hid_t cur_loc, char *cur_name,
+ hid_t dst_loc, char *dst_name, hid_t lcpl_id, hid_t lapl_id) except *
- herr_t H5Lcreate_soft(char *link_target, hid_t link_loc_id,
- char *link_name, hid_t lcpl_id, hid_t lapl_id) except *
+ herr_t H5Lcreate_soft(char *link_target, hid_t link_loc_id,
+ char *link_name, hid_t lcpl_id, hid_t lapl_id) except *
- herr_t H5Ldelete(hid_t loc_id, char *name, hid_t lapl_id) except *
+ herr_t H5Ldelete(hid_t loc_id, char *name, hid_t lapl_id) except *
- herr_t H5Ldelete_by_idx(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, hsize_t n, hid_t lapl_id) except *
+ herr_t H5Ldelete_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n, hid_t lapl_id) except *
- herr_t H5Lget_val(hid_t loc_id, char *name, void *bufout,
- size_t size, hid_t lapl_id) except *
+ herr_t H5Lget_val(hid_t loc_id, char *name, void *bufout,
+ size_t size, hid_t lapl_id) except *
- herr_t H5Lget_val_by_idx(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
- void *bufout, size_t size, hid_t lapl_id) except *
+ herr_t H5Lget_val_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
+ void *bufout, size_t size, hid_t lapl_id) except *
- htri_t H5Lexists(hid_t loc_id, char *name, hid_t lapl_id) except *
+ htri_t H5Lexists(hid_t loc_id, char *name, hid_t lapl_id) except *
- herr_t H5Lget_info(hid_t loc_id, char *name,
- H5L_info_t *linfo, hid_t lapl_id) except *
+ herr_t H5Lget_info(hid_t loc_id, char *name,
+ H5L_info_t *linfo, hid_t lapl_id) except *
- herr_t H5Lget_info_by_idx(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
- H5L_info_t *linfo, hid_t lapl_id) except *
+ herr_t H5Lget_info_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
+ H5L_info_t *linfo, hid_t lapl_id) except *
- ssize_t H5Lget_name_by_idx(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
- char *name, size_t size, hid_t lapl_id) except *
+ ssize_t H5Lget_name_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n,
+ char *name, size_t size, hid_t lapl_id) except *
- herr_t H5Literate(hid_t grp_id, H5_index_t idx_type,
- H5_iter_order_t order, hsize_t *idx, H5L_iterate_t op, void *op_data) except *
+ herr_t H5Literate(hid_t grp_id, H5_index_t idx_type,
+ H5_iter_order_t order, hsize_t *idx, H5L_iterate_t op, void *op_data) except *
- herr_t H5Literate_by_name(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, hsize_t *idx,
- H5L_iterate_t op, void *op_data, hid_t lapl_id) except *
+ herr_t H5Literate_by_name(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t *idx,
+ H5L_iterate_t op, void *op_data, hid_t lapl_id) except *
- herr_t H5Lvisit(hid_t grp_id, H5_index_t idx_type, H5_iter_order_t order,
- H5L_iterate_t op, void *op_data) except *
+ herr_t H5Lvisit(hid_t grp_id, H5_index_t idx_type, H5_iter_order_t order,
+ H5L_iterate_t op, void *op_data) except *
- herr_t H5Lvisit_by_name(hid_t loc_id, char *group_name,
- H5_index_t idx_type, H5_iter_order_t order, H5L_iterate_t op,
- void *op_data, hid_t lapl_id) except *
+ herr_t H5Lvisit_by_name(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, H5L_iterate_t op,
+ void *op_data, hid_t lapl_id) except *
# === H5O - General object operations (1.8.X only) ============================
IF H5PY_18API:
- cdef extern from "hdf5.h":
-
- ctypedef uint32_t H5O_msg_crt_idx_t
-
- ctypedef enum H5O_type_t:
- H5O_TYPE_UNKNOWN = -1, # Unknown object type
- H5O_TYPE_GROUP, # Object is a group
- H5O_TYPE_DATASET, # Object is a dataset
- H5O_TYPE_NAMED_DATATYPE, # Object is a named data type
- H5O_TYPE_NTYPES # Number of different object types (must be last!)
-
- # --- Components for the H5O_info_t struct ----------------------------------
-
- cdef struct space:
- hsize_t total # Total space for storing object header in file
- hsize_t meta # Space within header for object header metadata information
- hsize_t mesg # Space within header for actual message information
- hsize_t free # Free space within object header
-
- cdef struct mesg:
- unsigned long present # Flags to indicate presence of message type in header
- unsigned long shared # Flags to indicate message type is shared in header
-
- cdef struct hdr:
- unsigned version # Version number of header format in file
- unsigned nmesgs # Number of object header messages
- unsigned nchunks # Number of object header chunks
- unsigned flags # Object header status flags
- space space
- mesg mesg
-
- ctypedef struct H5_ih_info_t:
- hsize_t index_size, # btree and/or list
- hsize_t heap_size
-
- cdef struct meta_size:
- H5_ih_info_t obj, # v1/v2 B-tree & local/fractal heap for groups, B-tree for chunked datasets
- H5_ih_info_t attr # v2 B-tree & heap for attributes
-
- ctypedef struct H5O_info_t:
- unsigned long fileno # File number that object is located in
- haddr_t addr # Object address in file
- H5O_type_t type # Basic object type (group, dataset, etc.)
- unsigned rc # Reference count of object
- time_t atime # Access time
- time_t mtime # Modification time
- time_t ctime # Change time
- time_t btime # Birth time
- hsize_t num_attrs # # of attributes attached to object
- hdr hdr
- meta_size meta_size
-
- ctypedef herr_t (*H5O_iterate_t)(hid_t obj, char *name, H5O_info_t *info,
- void *op_data)
-
- herr_t H5Ovisit(hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order,
- H5O_iterate_t op, void *op_data) except *
-
- herr_t H5Oget_info(hid_t loc_id, H5O_info_t *oinfo) except *
-
+ cdef extern from "hdf5.h":
+
+ ctypedef uint32_t H5O_msg_crt_idx_t
+
+ ctypedef enum H5O_type_t:
+ H5O_TYPE_UNKNOWN = -1, # Unknown object type
+ H5O_TYPE_GROUP, # Object is a group
+ H5O_TYPE_DATASET, # Object is a dataset
+ H5O_TYPE_NAMED_DATATYPE, # Object is a named data type
+ H5O_TYPE_NTYPES # Number of different object types (must be last!)
+
+ unsigned int H5O_COPY_SHALLOW_HIERARCHY_FLAG # (0x0001u) Copy only immediate members
+ unsigned int H5O_COPY_EXPAND_SOFT_LINK_FLAG # (0x0002u) Expand soft links into new objects
+ unsigned int H5O_COPY_EXPAND_EXT_LINK_FLAG # (0x0004u) Expand external links into new objects
+ unsigned int H5O_COPY_EXPAND_REFERENCE_FLAG # (0x0008u) Copy objects that are pointed by references
+ unsigned int H5O_COPY_WITHOUT_ATTR_FLAG # (0x0010u) Copy object without copying attributes
+ unsigned int H5O_COPY_PRESERVE_NULL_FLAG # (0x0020u) Copy NULL messages (empty space)
+ unsigned int H5O_COPY_ALL # (0x003Fu) All object copying flags (for internal checking)
+
+ # --- Components for the H5O_info_t struct ----------------------------------
+
+ cdef struct space:
+ hsize_t total # Total space for storing object header in file
+ hsize_t meta # Space within header for object header metadata information
+ hsize_t mesg # Space within header for actual message information
+ hsize_t free # Free space within object header
+
+ cdef struct mesg:
+ unsigned long present # Flags to indicate presence of message type in header
+ unsigned long shared # Flags to indicate message type is shared in header
+
+ cdef struct hdr:
+ unsigned version # Version number of header format in file
+ unsigned nmesgs # Number of object header messages
+ unsigned nchunks # Number of object header chunks
+ unsigned flags # Object header status flags
+ space space
+ mesg mesg
+
+ ctypedef struct H5_ih_info_t:
+ hsize_t index_size, # btree and/or list
+ hsize_t heap_size
+
+ cdef struct meta_size:
+ H5_ih_info_t obj, # v1/v2 B-tree & local/fractal heap for groups, B-tree for chunked datasets
+ H5_ih_info_t attr # v2 B-tree & heap for attributes
+
+ ctypedef struct H5O_info_t:
+ unsigned long fileno # File number that object is located in
+ haddr_t addr # Object address in file
+ H5O_type_t type # Basic object type (group, dataset, etc.)
+ unsigned rc # Reference count of object
+ time_t atime # Access time
+ time_t mtime # Modification time
+ time_t ctime # Change time
+ time_t btime # Birth time
+ hsize_t num_attrs # # of attributes attached to object
+ hdr hdr
+ meta_size meta_size
+
+ # --- H5O API -------------------------------------------------------------
+
+ hid_t H5Oopen(hid_t loc_id, char *name, hid_t lapl_id) except *
+ hid_t H5Oopen_by_addr(hid_t loc_id, haddr_t addr) except *
+ hid_t H5Oopen_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n, hid_t lapl_id) except *
+
+ herr_t H5Oget_info(hid_t loc_id, H5O_info_t *oinfo) except *
+ herr_t H5Oget_info_by_name(hid_t loc_id, char *name, H5O_info_t *oinfo,
+ hid_t lapl_id) except *
+ herr_t H5Oget_info_by_idx(hid_t loc_id, char *group_name,
+ H5_index_t idx_type, H5_iter_order_t order, hsize_t n, H5O_info_t *oinfo,
+ hid_t lapl_id) except *
+
+ herr_t H5Olink(hid_t obj_id, hid_t new_loc_id, char *new_name,
+ hid_t lcpl_id, hid_t lapl_id) except *
+ herr_t H5Ocopy(hid_t src_loc_id, char *src_name, hid_t dst_loc_id,
+ char *dst_name, hid_t ocpypl_id, hid_t lcpl_id) except *
+
+ herr_t H5Oincr_refcount(hid_t object_id) except *
+ herr_t H5Odecr_refcount(hid_t object_id) except *
+
+ herr_t H5Oset_comment(hid_t obj_id, char *comment) except *
+ herr_t H5Oset_comment_by_name(hid_t loc_id, char *name,
+ char *comment, hid_t lapl_id) except *
+ ssize_t H5Oget_comment(hid_t obj_id, char *comment, size_t bufsize) except *
+ ssize_t H5Oget_comment_by_name(hid_t loc_id, char *name,
+ char *comment, size_t bufsize, hid_t lapl_id) except *
+
+ ctypedef herr_t (*H5O_iterate_t)(hid_t obj, char *name, H5O_info_t *info,
+ void *op_data)
+
+ herr_t H5Ovisit(hid_t obj_id, H5_index_t idx_type, H5_iter_order_t order,
+ H5O_iterate_t op, void *op_data) except *
+ herr_t H5Ovisit_by_name(hid_t loc_id, char *obj_name,
+ H5_index_t idx_type, H5_iter_order_t order, H5O_iterate_t op,
+ void *op_data, hid_t lapl_id) except *
+ herr_t H5Oclose(hid_t object_id) except *
# === H5P - Property list API =================================================
diff --git a/h5py/h5.pxd b/h5py/h5.pxd
index 8b8b350..6bff2c8 100644
--- a/h5py/h5.pxd
+++ b/h5py/h5.pxd
@@ -32,6 +32,8 @@ cdef class H5PYConfig:
cdef readonly object DEBUG
cdef readonly object THREADS
+cpdef H5PYConfig get_config()
+
cdef class ObjectID:
cdef object __weakref__
diff --git a/h5py/h5.pyx b/h5py/h5.pyx
index c2d60c2..b521785 100644
--- a/h5py/h5.pyx
+++ b/h5py/h5.pyx
@@ -38,6 +38,14 @@ from python_exc cimport PyErr_SetString
import atexit
import threading
+IF H5PY_18API:
+ ITER_INC = H5_ITER_INC # Increasing order
+ ITER_DEC = H5_ITER_DEC # Decreasing order
+ ITER_NATIVE = H5_ITER_NATIVE # No particular order, whatever is fastest
+
+ INDEX_NAME = H5_INDEX_NAME # Index on names
+ INDEX_CRT_ORDER = H5_INDEX_CRT_ORDER # Index on creation order
+
cdef class H5PYConfig:
"""
@@ -85,6 +93,12 @@ Complex names: %s"""
bool(self.DEBUG), self.complex_names)
return rstr
+cdef H5PYConfig cfg = H5PYConfig()
+
+cpdef H5PYConfig get_config():
+ """ Get a reference to the global library configuration object """
+ return cfg
+
# === Bootstrap diagnostics and threading, before decorator is defined ===
IF H5PY_DEBUG:
@@ -671,8 +685,6 @@ api_version = "%d.%d" % api_version_tuple
version = H5PY_VERSION
version_tuple = tuple([int(x) for x in version.split('.')])
-config = H5PYConfig()
-
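The module-level "config" attribute removed above is replaced by the new get_config() accessor. A minimal sketch of how it is meant to be used once the package is rebuilt (the attribute names come from the H5PYConfig repr and from highlevel.py below, not from new documentation):

    from h5py import h5

    cfg = h5.get_config()        # same global H5PYConfig object on every call
    print cfg.complex_names      # field names used when mapping complex numbers
    if cfg.API_18:               # True when built with --api=16,18
        from h5py import h5o, h5l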
diff --git a/h5py/h5l18.pyx b/h5py/h5l18.pyx
new file mode 100644
index 0000000..8932364
--- /dev/null
+++ b/h5py/h5l18.pyx
@@ -0,0 +1,66 @@
+#+
+#
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+#
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD (See LICENSE.txt for full license)
+#
+# $Date$
+#
+#-
+
+"""
+ API for the "H5L" family of link-related operations
+"""
+
+include "config.pxi"
+include "sync.pxi"
+
+from h5 cimport init_hdf5
+init_hdf5()
+
+cdef class LinkProxy(ObjectID):
+
+ """
+ Proxy class which provides access to the HDF5 "H5L" API.
+
+ These come attached to GroupID objects as "obj.links". Since every
+ H5L function operates on at least one group, the methods provided
+ operate on their parent group identifier. For example:
+
+ >>> g = h5g.open(fid, '/')
+ >>> g.links.exists("MyGroup")
+ True
+ >>> g.links.exists("FooBar")
+ False
+
+ Hashable: No
+ Equality: Undefined
+ """
+
+ def __cinit__(self, hid_t id_):
+ # At this point the ObjectID constructor has already been called.
+
+ # The identifier in question is the hid_t for the parent GroupID.
+ # We need to manually incref the identifier because it's now
+ # shared by both this object and the parent.
+ H5Iinc_ref(self.id)
+
+ def __richcmp__(self, object other, int how):
+ return NotImplemented
+
+ def __hash__(self):
+ raise TypeError("Link proxies are unhashable; use the parent group instead.")
+
+ @sync
+ def exists(self, char* name):
+ """ (STRING name) => BOOL
+
+ Check if a link of the specified name exists in this group.
+ """
+ return <bint>(H5Lexists(self.id, name, H5P_DEFAULT))
+
+
+
+
diff --git a/h5py/h5o18.pyx b/h5py/h5o18.pyx
new file mode 100644
index 0000000..9728190
--- /dev/null
+++ b/h5py/h5o18.pyx
@@ -0,0 +1,139 @@
+#+
+#
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+#
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD (See LICENSE.txt for full license)
+#
+# $Date$
+#
+#-
+
+include "config.pxi"
+include "sync.pxi"
+
+# Module for the new "H5O" functions introduced in HDF5 1.8.0. Not even
+# built with API compatibility level below 1.8.
+
+# Pyrex compile-time imports
+from h5 cimport init_hdf5, ObjectID
+from h5i cimport wrap_identifier
+
+# Initialization
+init_hdf5()
+
+cdef class ObjInfo:
+
+ cdef H5O_info_t infostruct
+ cdef object __weakref__
+
+ property fileno:
+ def __get__(self):
+ return self.infostruct.fileno
+ property addr:
+ def __get__(self):
+ return self.infostruct.addr
+ property type:
+ def __get__(self):
+ return <int>self.infostruct.type
+ property rc:
+ def __get__(self):
+ return self.infostruct.rc
+
+ def __copy__(self):
+ cdef ObjInfo newcopy
+ newcopy = ObjInfo()
+ newcopy.infostruct = self.infostruct
+ return newcopy
+
+@sync
+def get_info(ObjectID obj not None):
+ """ (ObjectID obj) => ObjInfo
+ """
+
+ cdef ObjInfo info
+ info = ObjInfo()
+
+ H5Oget_info(obj.id, &info.infostruct)
+ return info
+
+cdef class _Visit_Data:
+
+ cdef object func
+ cdef object exc
+ cdef object retval
+ cdef ObjInfo objinfo
+
+ def __init__(self, func):
+ self.func = func
+ self.exc = None
+ self.retval = None
+ self.objinfo = ObjInfo()
+
+cdef herr_t visit_cb(hid_t obj, char* name, H5O_info_t *info, void* data):
+
+ cdef _Visit_Data wrapper
+ wrapper = <_Visit_Data>data
+
+ wrapper.objinfo.infostruct = info[0]
+
+ try:
+ retval = wrapper.func(name, wrapper.objinfo)
+ except StopIteration:
+ return 1
+ except BaseException, e: # The exception MUST be trapped, including SystemExit
+ wrapper.exc = e
+ return 1
+
+ if retval is not None:
+ wrapper.retval = retval
+ return 1
+
+ return 0
+
+@sync
+def visit(ObjectID obj not None, object func, int idx_type=H5_INDEX_NAME,
+ int order=H5_ITER_NATIVE):
+ """ (ObjectID obj, CALLABLE func, INT idx_type=, INT order=)
+
+ Recursively iterate a function or callable object over this group's
+ contents. Your callable should match the signature:
+
+ func(name, info)
+
+ where "name" is (a) name relative to the starting group, and "info" is
+ an ObjInfo instance describing each object. Please note the same
+ ObjInfo instance is provided call to call, with its values mutated.
+ Don't store references to it; use the copy module instead.
+
+ Your callable should also conform to the following behavior:
+
+ 1. Return None for normal iteration; raise StopIteration to cancel
+ and return None from h5o.visit.
+
+ 2. Returning a value other than None cancels iteration and immediately
+ returns that value from h5o.visit.
+
+ 3. Raising any other exception aborts iteration; the exception will
+ be correctly propagated.
+ """
+ cdef _Visit_Data wrapper
+ wrapper = _Visit_Data(func)
+
+ H5Ovisit(obj.id, <H5_index_t>idx_type, <H5_iter_order_t>order, visit_cb, <void*>wrapper)
+
+ if wrapper.exc is not None:
+ raise wrapper.exc
+
+ return wrapper.retval # None or custom value
+
+
+
+
+
+
+
+
+
+
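For orientation, a minimal usage sketch of the new h5o.visit wrapper defined above, following its docstring; the file name and the h5f.open call are illustrative assumptions, not part of this commit:

    import copy
    from h5py import h5f, h5o

    fid = h5f.open("example.hdf5", h5f.ACC_RDONLY)   # hypothetical file

    names = []
    def collect(name, info):
        # The same ObjInfo instance is reused on every call, so copy it
        names.append((name, copy.copy(info)))
        # Returning None continues iteration; any other value (or raising
        # StopIteration) ends it early.

    h5o.visit(fid, collect)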
diff --git a/h5py/h5t.pxd b/h5py/h5t.pxd
index 448cc87..7d9fb7f 100644
--- a/h5py/h5t.pxd
+++ b/h5py/h5t.pxd
@@ -68,8 +68,8 @@ cdef class TypeCompoundID(TypeCompositeID):
# === C API for other modules =================================================
-cdef object typewrap(hid_t id_)
-cpdef object py_create(object dtype, object enum_vals=*)
+cdef TypeID typewrap(hid_t id_)
+cpdef TypeID py_create(object dtype, dict enum_vals=*)
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
index 2a0dc5a..b060bde 100644
--- a/h5py/h5t.pyx
+++ b/h5py/h5t.pyx
@@ -19,40 +19,40 @@
1. Translating between Numpy dtypes and HDF5 type objects
- All identifier classes have a property "dtype", returning a Numpy
- dtype which as closely as possible matches the HDF5 type.
+ All identifier classes have a property "dtype", returning a Numpy
+ dtype which as closely as possible matches the HDF5 type.
- The module function py_create is the complement to this property, and
- is the standard way to translate Numpy dtypes to HDF5 type identifiers.
- Unlike the dtype property, HDF5 datatypes returned by this function are
- guaranteed to be binary-compatible with their Numpy dtype counterparts
+ The module function py_create is the complement to this property, and
+ is the standard way to translate Numpy dtypes to HDF5 type identifiers.
+ Unlike the dtype property, HDF5 datatypes returned by this function are
+ guaranteed to be binary-compatible with their Numpy dtype counterparts
2. Complex numbers
- Complex number support has been refactored in this version of h5py.
- HDF5 has no native concept of a complex number. Numpy complex types
- are translated to two-element compound types with two floating-point
- fields. When a two-element compound type is encountered in a file with
- compatible field names, it is treated as a complex type.
+ Complex number support has been refactored in this version of h5py.
+ HDF5 has no native concept of a complex number. Numpy complex types
+ are translated to two-element compound types with two floating-point
+ fields. When a two-element compound type is encountered in a file with
+ compatible field names, it is treated as a complex type.
- The names for complex types are set as a property on the global
- configuration object, available at h5.config.
+ The names for complex types are set as a property on the global
+ configuration object, available at "h5py.config".
3. Enumerated types
- Native support for "dtype-encoded" enums has been dropped, as it proved
- to be too unwieldy. Enumerated types are read from HDF5 files as
- Numpy integers (kind i or u).
+ NumPy has no native concept of an enumerated type. Data of this type
+ will be read from the HDF5 file as integers, depending on the base
+ type of the enum.
- You can get at the fields of an enum through the standard HDF5 API
- calls, which are presented as methods of class TypeEnumID.
- Additionally, the py_create function allows you to create HDF5
- enumerated types by passing in a dictionary along with a Numpy dtype.
+ You can get at the fields of an enum through the standard HDF5 API
+ calls, which are presented as methods of class TypeEnumID.
+ Additionally, the py_create function allows you to create HDF5
+ enumerated types by passing in a dictionary along with a Numpy dtype.
4. Variable-length types
- VLEN types can be manipulated, but reading and writing data in VLEN
- format is not supported. This applies to VLEN strings as well.
+ VLEN datatype objects can be manipulated, but reading and writing data
+ in VLEN format is not supported. This applies to VLEN strings as well.
5. Datatypes can be pickled if HDF5 1.8.X is available.
"""
@@ -61,7 +61,7 @@ include "config.pxi"
include "sync.pxi"
# Pyrex compile-time imports
-from h5 cimport init_hdf5, PHIL, get_phil, H5PYConfig
+from h5 cimport init_hdf5, H5PYConfig, get_config, PHIL, get_phil
from numpy cimport dtype, ndarray
from python_string cimport PyString_FromStringAndSize
@@ -75,11 +75,12 @@ init_hdf5()
import sys
import h5
-cdef H5PYConfig cfg = h5.config
+cdef H5PYConfig cfg = get_config()
+cdef PHIL phil = get_phil()
# === Custom C API ============================================================
-cdef object typewrap(hid_t id_):
+cdef TypeID typewrap(hid_t id_):
cdef H5T_class_t cls
cls = H5Tget_class(id_)
@@ -201,7 +202,7 @@ STD_U16BE = lockid(H5T_STD_U16BE)
STD_U32BE = lockid(H5T_STD_U32BE)
STD_U64BE = lockid(H5T_STD_U64BE)
-# Native integer types by bytesize
+# Native types by bytesize
NATIVE_INT8 = lockid(H5T_NATIVE_INT8)
NATIVE_UINT8 = lockid(H5T_NATIVE_UINT8)
NATIVE_INT16 = lockid(H5T_NATIVE_INT16)
@@ -210,6 +211,8 @@ NATIVE_INT32 = lockid(H5T_NATIVE_INT32)
NATIVE_UINT32 = lockid(H5T_NATIVE_UINT32)
NATIVE_INT64 = lockid(H5T_NATIVE_INT64)
NATIVE_UINT64 = lockid(H5T_NATIVE_UINT64)
+NATIVE_FLOAT = lockid(H5T_NATIVE_FLOAT)
+NATIVE_DOUBLE = lockid(H5T_NATIVE_DOUBLE)
# Unix time types
UNIX_D32LE = lockid(H5T_UNIX_D32LE)
@@ -239,8 +242,8 @@ def create(int classtype, size_t size):
Create a new HDF5 type object. Legal class values are
COMPOUND and OPAQUE. Use enum_create for enums.
"""
- # If it's not one of these, the library SEGFAULTS. Thanks, guys.
- # Also, creating ENUM types doesn't work right.
+
+ # HDF5 versions 1.6.X segfault with anything else
if classtype != H5T_COMPOUND and classtype != H5T_OPAQUE:
raise ValueError("Class must be COMPOUND or OPAQUE.")
@@ -256,15 +259,14 @@ def open(ObjectID group not None, char* name):
@sync
def array_create(TypeID base not None, object dims_tpl):
- """ (TypeID base, TUPLE dimensions)
+ """ (TypeID base, TUPLE dimensions) => TypeArrayID
Create a new array datatype, using an HDF5 parent type and
dimensions given via a tuple of positive integers. "Unlimited"
dimensions are not allowed.
"""
cdef hsize_t rank
- cdef hsize_t *dims
- dims = NULL
+ cdef hsize_t *dims = NULL
require_tuple(dims_tpl, 0, -1, "dims_tpl")
rank = len(dims_tpl)
@@ -272,7 +274,7 @@ def array_create(TypeID base not None, object dims_tpl):
try:
convert_tuple(dims_tpl, dims, rank)
- return typewrap(H5Tarray_create(base.id, rank, dims, NULL))
+ return TypeArrayID(H5Tarray_create(base.id, rank, dims, NULL))
finally:
efree(dims)
@@ -297,15 +299,13 @@ def vlen_create(TypeID base not None):
IF H5PY_18API:
@sync
- def decode(buf):
+ def decode(char* buf):
""" (STRING buf) => TypeID
- Unserialize an HDF5 type. Bear in mind that you can also use the
- Python pickle/unpickle machinery to do this.
+ Unserialize an HDF5 type. You can also do this with the native
+ Python pickling machinery.
"""
- cdef char* buf_
- buf_ = buf
- return typewrap(H5Tdecode(<unsigned char*>buf_))
+ return typewrap(H5Tdecode(<unsigned char*>buf))
# === Base type class =========================================================
@@ -319,18 +319,19 @@ cdef class TypeID(ObjectID):
"""
def __hash__(self):
- try:
- # Try to use object header first
- return ObjectID.__hash__(self)
- except TypeError:
- # It's a transient type object
- IF H5PY_18API:
- if self._locked:
- self._hash = hash(self.encode())
- else:
- raise TypeError("Only locked or committed types can be hashed")
- ELSE:
- raise TypeError("Only committed types can be hashed")
+ if self._hash is None:
+ try:
+ # Try to use object header first
+ return ObjectID.__hash__(self)
+ except TypeError:
+ # It's a transient type object
+ IF H5PY_18API:
+ if self._locked:
+ self._hash = hash(self.encode())
+ else:
+ raise TypeError("Only locked or committed types can be hashed")
+ ELSE:
+ raise TypeError("Only committed types can be hashed")
return self._hash
@@ -469,10 +470,8 @@ cdef class TypeID(ObjectID):
native Python pickle/unpickle machinery to do this. The
returned string may contain binary values, including NULLs.
"""
- cdef size_t nalloc
- cdef char* buf
- buf = NULL
- nalloc = 0
+ cdef size_t nalloc = 0
+ cdef char* buf = NULL
H5Tencode(self.id, NULL, &nalloc)
buf = <char*>emalloc(sizeof(char)*nalloc)
@@ -491,10 +490,8 @@ cdef class TypeID(ObjectID):
return (type(self), (-1,), self.encode())
@sync
- def __setstate__(self, state):
- cdef char* buf
- buf = state
- self.id = H5Tdecode(<unsigned char*>buf)
+ def __setstate__(self, char* state):
+ self.id = H5Tdecode(<unsigned char*>state)
# === Top-level classes (inherit directly from TypeID) ========================
@@ -521,8 +518,7 @@ cdef class TypeArrayID(TypeID):
a tuple of integers.
"""
cdef hsize_t rank
- cdef hsize_t* dims
- dims = NULL
+ cdef hsize_t* dims = NULL
rank = H5Tget_array_dims(self.id, NULL, NULL)
dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
@@ -564,8 +560,7 @@ cdef class TypeOpaqueID(TypeID):
Get the tag associated with an opaque datatype.
"""
- cdef char* buf
- buf = NULL
+ cdef char* buf = NULL
try:
buf = H5Tget_tag(self.id)
@@ -599,8 +594,7 @@ cdef class TypeStringID(TypeID):
def get_cset(self):
""" () => INT character_set
- Retrieve the character set used for a string. Currently only
- CSET_ASCII is supported.
+ Retrieve the character set used for a string.
"""
return <int>H5Tget_cset(self.id)
@@ -608,8 +602,7 @@ cdef class TypeStringID(TypeID):
def set_cset(self, int cset):
""" (INT character_set)
- Set the character set used for a string. Currently only
- CSET_ASCII is supported.
+ Set the character set used for a string.
"""
H5Tset_cset(self.id, <H5T_cset_t>cset)
@@ -652,7 +645,7 @@ cdef class TypeVlenID(TypeID):
cdef class TypeTimeID(TypeID):
"""
- Unix-style time_t
+ Unix-style time_t (deprecated)
"""
pass
@@ -670,6 +663,7 @@ cdef class TypeReferenceID(TypeID):
"""
pass
+
# === Numeric classes (integers and floats) ===================================
cdef class TypeAtomicID(TypeID):
@@ -787,6 +781,7 @@ cdef class TypeIntegerID(TypeAtomicID):
return dtype( _order_map[self.get_order()] +
_sign_map[self.get_sign()] + str(self.get_size()) )
+
cdef class TypeFloatID(TypeAtomicID):
"""
@@ -1128,142 +1123,212 @@ cdef class TypeEnumID(TypeCompositeID):
tmp_type = self.get_super()
return tmp_type.py_dtype()
-# === Python extension functions ==============================================
-# Map array protocol strings to their HDF5 atomic equivalents
-# This only works for integers (signed and unsigned) and floats
-cdef dict _code_map = {
- "<i1": STD_I8LE, "<i2": STD_I16LE, "<i4": STD_I32LE, "<i8": STD_I64LE,
- ">i1": STD_I8BE, ">i2": STD_I16BE, ">i4": STD_I32BE, ">i8": STD_I64BE,
- "|i1": NATIVE_INT8, "|u1": NATIVE_UINT8,
- "<u1": STD_U8LE, "<u2": STD_U16LE, "<u4": STD_U32LE, "<u8": STD_U64LE,
- ">u1": STD_U8BE, ">u2": STD_U16BE, ">u4": STD_U32BE, ">u8": STD_U64BE,
- "<f4": IEEE_F32LE, "<f8": IEEE_F64LE, ">f4": IEEE_F32BE, ">f8": IEEE_F64BE
- }
+# === Translation from NumPy dtypes to HDF5 type objects ======================
-cpdef object py_create(object dtype_in, object enum_vals=None):
- """ (OBJECT dtype_in, DICT enum=None) => TypeID
+# The following series of native-C functions each translate a specific class
+# of NumPy dtype into an HDF5 type object. The result is guaranteed to be
+# transient and unlocked.
- Given a Numpy dtype object, generate a byte-for-byte memory-compatible
- HDF5 datatype object. The result is guaranteed to be transient and
- unlocked.
+cdef dict _float_le = {4: H5T_IEEE_F32LE, 8: H5T_IEEE_F64LE}
+cdef dict _float_be = {4: H5T_IEEE_F32BE, 8: H5T_IEEE_F64BE}
+cdef dict _float_nt = {4: H5T_NATIVE_FLOAT, 8: H5T_NATIVE_DOUBLE}
- Argument dtype_in may be a dtype object, or anything which can be
- converted to a dtype, including strings like '<i4'.
+cdef dict _int_le = {1: H5T_STD_I8LE, 2: H5T_STD_I16LE, 4: H5T_STD_I32LE, 8: H5T_STD_I64LE}
+cdef dict _int_be = {1: H5T_STD_I8BE, 2: H5T_STD_I16BE, 4: H5T_STD_I32BE, 8: H5T_STD_I64BE}
+cdef dict _int_nt = {1: H5T_NATIVE_INT8, 2: H5T_NATIVE_INT16, 4: H5T_NATIVE_INT32, 8: H5T_NATIVE_INT64}
- enum:
- A optional dictionary mapping names to integer values. If the
- type being converted is an integer (Numpy kind i/u), the resulting
- HDF5 type will be an enumeration with that base type, and the
- given values. Ignored for all other types.
- """
- global _code_map
+cdef dict _uint_le = {1: H5T_STD_U8LE, 2: H5T_STD_U16LE, 4: H5T_STD_U32LE, 8: H5T_STD_U64LE}
+cdef dict _uint_be = {1: H5T_STD_U8BE, 2: H5T_STD_U16BE, 4: H5T_STD_U32BE, 8: H5T_STD_U64BE}
+cdef dict _uint_nt = {1: H5T_NATIVE_UINT8, 2: H5T_NATIVE_UINT16, 4: H5T_NATIVE_UINT32, 8: H5T_NATIVE_UINT64}
- cdef dtype dt # The dtype we'll be converting
- cdef TypeID otype # The output TypeID
+cdef TypeFloatID _c_float(dtype dt):
+ # Floats (single and double)
+ cdef hid_t tid
- cdef hid_t tid, tid_sub
+ if dt.byteorder == c'<':
+ tid = _float_le[dt.elsize]
+ elif dt.byteorder == c'>':
+ tid = _float_be[dt.elsize]
+ else:
+ tid = _float_nt[dt.elsize]
- cdef TypeID tmp
+ return TypeFloatID(H5Tcopy(tid))
- # For compound types
- cdef dtype dt_tmp
- cdef size_t offset
+cdef TypeIntegerID _c_int(dtype dt):
+ # Integers (ints and uints)
+ cdef hid_t tid
- cdef char kind
- cdef char byteorder
- cdef int length
-
+ if dt.kind == c'i':
+ if dt.byteorder == c'<':
+ tid = _int_le[dt.elsize]
+ elif dt.byteorder == c'>':
+ tid = _int_be[dt.elsize]
+ else:
+ tid = _int_nt[dt.elsize]
+ elif dt.kind == c'u':
+ if dt.byteorder == c'<':
+ tid = _uint_le[dt.elsize]
+ elif dt.byteorder == c'>':
+ tid = _uint_be[dt.elsize]
+ else:
+ tid = _uint_nt[dt.elsize]
+ else:
+ raise TypeError('Illegal int kind "%s"' % dt.kind)
- cdef size_t c_size, c_off_r, c_off_i
+ return TypeIntegerID(H5Tcopy(tid))
- dt = dtype(dtype_in)
- otype = None
+cdef TypeEnumID _c_enum(dtype dt, dict vals):
+ # Enums
+ cdef TypeIntegerID base
+ cdef TypeEnumID out
- kind = dt.kind
- byteorder = dt.byteorder
- length = int(dt.str[2:]) # is there a better way to do this?
- names = dt.names
+ base = _c_int(dt)
+ out = TypeEnumID(H5Tenum_create(base.id))
+ for name in sorted(vals):
+ out.enum_insert(name, vals[name])
+ return out
- # Void types with field names are considered to be compound
- if kind == c'V' and names is not None:
-
- tid = H5Tcreate(H5T_COMPOUND, length)
+cdef TypeArrayID _c_array(dtype dt):
+ # Arrays
+ cdef dtype base
+ cdef TypeID type_base
+ cdef tuple shape
- for name in names:
- dt_tmp, offset = dt.fields[name]
- tmp = py_create(dt_tmp)
- H5Tinsert(tid, name, offset, tmp.id)
+ base, shape = dt.subdtype
+ type_base = py_create(base)
+ return array_create(type_base, shape)
- otype = TypeCompoundID(tid)
+cdef TypeOpaqueID _c_opaque(dtype dt):
+ # Opaque
+ return TypeOpaqueID(H5Tcreate(H5T_OPAQUE, dt.itemsize))
- # Enums may be created out of integer types
- elif (kind == c'u' or kind == c'i') and enum_vals is not None:
+cdef TypeStringID _c_string(dtype dt):
+ # Strings (fixed-length)
+ cdef hid_t tid
- otype = enum_create(_code_map[dt.str])
+ tid = H5Tcopy(H5T_C_S1)
+ H5Tset_size(tid, dt.itemsize)
+ return TypeStringID(tid)
- for key in sorted(enum_vals):
- otype.enum_insert(key, enum_vals[key])
+cdef TypeCompoundID _c_complex(dtype dt):
+ # Complex numbers (names depend on cfg)
+ global cfg
- # Integers and floats map directly to HDF5 atomic types
- elif kind == c'u' or kind == c'i'or kind == c'f':
+ cdef hid_t tid, tid_sub
+ cdef size_t size, off_r, off_i
+
+ cdef size_t length = dt.itemsize
+ cdef char byteorder = dt.byteorder
+
+ if length == 8:
+ size = h5py_size_n64
+ off_r = h5py_offset_n64_real
+ off_i = h5py_offset_n64_imag
+ if byteorder == c'<':
+ tid_sub = H5T_IEEE_F32LE
+ elif byteorder == c'>':
+ tid_sub = H5T_IEEE_F32BE
+ else:
+ tid_sub = H5T_NATIVE_FLOAT
+ elif length == 16:
+ size = h5py_size_n128
+ off_r = h5py_offset_n128_real
+ off_i = h5py_offset_n128_imag
+ if byteorder == c'<':
+ tid_sub = H5T_IEEE_F64LE
+ elif byteorder == c'>':
+ tid_sub = H5T_IEEE_F64BE
+ else:
+ tid_sub = H5T_NATIVE_DOUBLE
+ else:
+ raise TypeError("Illegal length %d for complex dtype" % length)
- otype = _code_map[dt.str].copy()
+ tid = H5Tcreate(H5T_COMPOUND, size)
+ H5Tinsert(tid, cfg._r_name, off_r, tid_sub)
+ H5Tinsert(tid, cfg._i_name, off_i, tid_sub)
- # Complex numbers are stored as HDF5 structs, with names defined at runtime
- elif kind == c'c':
+ return TypeCompoundID(tid)
- if length == 8:
- c_size = h5py_size_n64
- c_off_r = h5py_offset_n64_real
- c_off_i = h5py_offset_n64_imag
- if byteorder == c'<':
- tid_sub = H5T_IEEE_F32LE
- elif byteorder == c'>':
- tid_sub = H5T_IEEE_F32BE
- else:
- tid_sub = H5T_NATIVE_FLOAT
- elif length == 16:
- c_size = h5py_size_n128
- c_off_r = h5py_offset_n128_real
- c_off_i = h5py_offset_n128_imag
- if byteorder == c'<':
- tid_sub = H5T_IEEE_F64LE
- elif byteorder == c'>':
- tid_sub = H5T_IEEE_F64BE
- else:
- tid_sub = H5T_NATIVE_DOUBLE
- else:
- raise ValueError("Unsupported length %d for complex dtype: %s" % (length, repr(dt)))
+cdef TypeCompoundID _c_compound(dtype dt):
+ # Compound datatypes
- tid = H5Tcreate(H5T_COMPOUND, c_size)
- H5Tinsert(tid, cfg._r_name, c_off_r, tid_sub)
- H5Tinsert(tid, cfg._i_name, c_off_i, tid_sub)
+ cdef hid_t tid
+ cdef TypeID type_tmp
+ cdef dtype dt_tmp
+ cdef size_t offset
- otype = TypeCompoundID(tid)
+ cdef dict fields = dt.fields
+ cdef tuple names = dt.names
- # Opaque/array types are differentiated by the presence of a subdtype
- elif kind == c'V':
+ tid = H5Tcreate(H5T_COMPOUND, dt.itemsize)
- if dt.subdtype:
+ for name in names:
+ dt_tmp, offset = dt.fields[name]
+ type_tmp = py_create(dt_tmp)
+ H5Tinsert(tid, name, offset, type_tmp.id)
- dt_tmp, shape = dt.subdtype
- base = py_create(dt_tmp)
- otype = array_create(base, shape)
+ return TypeCompoundID(tid)
- else:
- otype = create(H5T_OPAQUE, length)
-
- # Strings are stored C-style; everything after first NULL is garbage.
- elif kind == c'S':
- otype = C_S1.copy()
- otype.set_size(length)
- else:
- raise ValueError("No conversion path for dtype: %s" % repr(dt))
+cpdef TypeID py_create(object dtype_in, dict enum_vals=None):
+ """ (OBJECT dtype_in, DICT enum_vals=None) => TypeID
+
+ Given a Numpy dtype object, generate a byte-for-byte memory-compatible
+ HDF5 datatype object. The result is guaranteed to be transient and
+ unlocked.
+
+ Argument dtype_in may be a dtype object, or anything which can be
+ converted to a dtype, including strings like '<i4'.
+
+ enum_vals:
+ An optional dictionary mapping names to integer values. If the
+ type being converted is an integer (Numpy kind i/u), the resulting
+ HDF5 type will be an enumeration with that base type, and the
+ given values. Ignored for all other types.
+ """
+ cdef dtype dt = dtype(dtype_in)
+ cdef char kind = dt.kind
+
+ phil.acquire()
+ try:
+ # Float
+ if kind == c'f':
+ return _c_float(dt)
+
+ # Integer
+ elif kind == c'u' or kind == c'i':
+
+ if enum_vals is not None:
+ return _c_enum(dt, enum_vals)
+ else:
+ return _c_int(dt)
- return otype
+ # Complex
+ elif kind == c'c':
+ return _c_complex(dt)
+
+ # Compound
+ elif kind == c'V' and dt.names is not None:
+ return _c_compound(dt)
+
+ # Array or opaque
+ elif kind == c'V':
+ if dt.subdtype is not None:
+ return _c_array(dt)
+ else:
+ return _c_opaque(dt)
+
+ # String
+ elif kind == c'S':
+ return _c_string(dt)
+
+ # Unrecognized
+ else:
+ raise TypeError("No conversion path for dtype: %s" % repr(dt))
+ finally:
+ phil.release()
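A brief sketch of the refactored py_create entry point described in the docstring above; the concrete dtypes and the enum dictionary are illustrative assumptions:

    import numpy
    from h5py import h5t

    # Plain dtypes map to transient, unlocked HDF5 type objects
    tid = h5t.py_create(numpy.dtype('<f8'))            # a TypeFloatID

    # Strings like '<i4' are accepted too; with enum_vals the result is an
    # HDF5 enumeration whose base type matches the integer dtype
    etid = h5t.py_create('<i4', {'RED': 0, 'GREEN': 1, 'BLUE': 2})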
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
index abc18b7..f3cf176 100644
--- a/h5py/highlevel.py
+++ b/h5py/highlevel.py
@@ -58,6 +58,9 @@ from utils_hl import slice_select, hbasename, strhdr, strlist, guess_chunk
from utils_hl import CoordsList
from browse import _H5Browser
+config = h5.get_config()
+if config.API_18:
+ from h5py import h5o, h5l
__all__ = ["File", "Group", "Dataset",
"Datatype", "AttributeManager", "CoordsList"]
@@ -275,7 +278,29 @@ class Group(HLObject):
if cmnt != '':
outstr += '\nComment:\n'+cmnt
return outstr
-
+
+ def visit(self, func):
+ """ Recursively iterate a function or callable object over the file,
+ calling it exactly once with each object name. Return None to
+ continue iteration, or anything else to immediately return that
+ value.
+
+ Example:
+ # List the entire contents of the file
+ >>> list_of_names = []
+ >>> grp.visit(list_of_names.append)
+
+ Only available with HDF5 1.8.X.
+ """
+ if not config.API_18:
+ raise NotImplementedError("This feature is only available with HDF5 1.8.0 and later")
+
+ with self._lock:
+ def call_proxy(name, info):
+ return func(name)
+
+ return h5o.visit(self.id, call_proxy)
+
def __str__(self):
with self._lock:
try:
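Beyond the name-collecting example in the Group.visit docstring above, returning a value from the callable stops iteration early. A small sketch, assuming "grp" is an already-open Group:

    def find_temperature(name):
        # Any non-None return value ends the visit and is returned by it
        if 'temperature' in name:
            return name

    match = grp.visit(find_temperature)    # None if no object name matched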
diff --git a/setup.py b/setup.py
index 2c6efe4..0ea984e 100644
--- a/setup.py
+++ b/setup.py
@@ -12,6 +12,8 @@
#
#-
+from __future__ import with_statement
+
"""
Setup script for the h5py package.
@@ -32,6 +34,8 @@
import os
import sys
import shutil
+import commands
+import pickle
import os.path as op
from distutils.errors import DistutilsError
@@ -135,56 +139,72 @@ class ExtensionCreator(object):
class cybuild(build):
- """ Cython-aware subclass of the distutils build command
-
- It handles the h5py-centric configuration for the build processing,
- generating the compile-time definitions file "config.pxi" and
- running Cython before handing control over to the native distutils
- build. The advantage is that Cython is run on all files before
- building begins, and is only run when the distribution is actually
- being built (and not, for example, when "sdist" is called for).
-
- It also populates the list of extension modules at runtime, as this
- can be changed by some of the command-line options.
+ """ Cython-aware builder
"""
user_options = build.user_options + \
- [('cython','y','Run Cython'),
+ [('hdf5=', '5', 'Custom location for HDF5'),
+ ('api=', 'a', 'Set API levels (--api=16,18)'),
+ ('cython','y','Run Cython'),
('cython-only','Y', 'Run Cython and stop'),
- ('hdf5=', '5', 'Custom location for HDF5'),
('diag', 'd','Enable library debug logging'),
- ('api=', 'a', 'Set API levels (--api=16,18)'),
('threads', 't', 'Make library thread-aware')]
+
boolean_options = build.boolean_options + ['cython', 'cython-only', 'threads','diag']
+
+ def get_hdf5_version(self):
+ """ Try to determine the installed HDF5 version and return a tuple
+ containing the appropriate API levels, or None if it can't be
+ determined.
+ """
+ if self.hdf5 is not None:
+ cmd = reduce(op.join, (self.hdf5, 'bin', 'h5cc'))+" -showconfig"
+ else:
+ cmd = "h5cc -showconfig"
+ output = commands.getoutput(cmd)
+ l = output.find("HDF5 Version")
+ if l > 0:
+ if output[l:l+30].find('1.8') > 0:
+ return (16,18)
+ elif output[l:l+30].find('1.6') > 0:
+ return (16,)
+ return None
+
def initialize_options(self):
- """ Specify safe defaults for command-line options. """
- self.hdf5 = None # None or a string with the HDF5 dir
- self.cython = False # T/F
- self.cython_only = False # T/F
- self.threads = False # T/F
- self.api = None # None or a tuple (e.g. (16,18))
- self.diag = False # T/F
- self._explicit_only = False # T/F: Hack for test subclass
build.initialize_options(self)
+ self._default = True
+
+ # Build options
+ self.hdf5 = None
+ self.api = None
+
+ # Cython (config) options
+ self.cython = False
+ self.cython_only = False
+ self.diag = False
+ self.threads = False
+
def finalize_options(self):
- """ Validate provided options and ensure consistency. Note this is
- only run if at least one option is specified!
- """
+
+ build.finalize_options(self)
+
if self.hdf5 is not None:
+ self._default = False
self.hdf5 = op.abspath(self.hdf5)
if not op.exists(self.hdf5):
fatal('Specified HDF5 directory "%s" does not exist' % self.hdf5)
- if self.cython_only or \
- self.api is not None or \
- self.threads or \
- self.diag:
- self.cython = True
-
- # Validate API levels
- if self.api is not None:
+ if self.api is None:
+ # Try to guess the installed HDF5 version
+ self.api = self.get_hdf5_version()
+ if self.api is None:
+ warn("Can't determine HDF5 version, assuming 1.6 (use --api= to override)")
+ self.api = (16,)
+ else:
+ # User specified the API levels
+ self._default = False
try:
self.api = tuple(int(x) for x in self.api.split(',') if len(x) > 0)
if len(self.api) == 0 or not all(x in KNOWN_API for x in self.api):
@@ -192,9 +212,38 @@ class cybuild(build):
except Exception:
fatal('Illegal option %s to --api= (legal values are %s)' % (self.api, ','.join(str(x) for x in KNOWN_API)))
- build.finalize_options(self)
+ if self.cython_only or self.diag or self.threads:
+ self._default = False
+ self.cython = True
- def _get_pxi(self):
+ def run(self):
+
+ if self._default and op.exists('buildconf.pickle'):
+ # Read extensions info from pickle file
+ print "=> Using existing build configuration"
+ with open('buildconf.pickle','r') as f:
+ modules, extensions = pickle.load(f)
+ else:
+ print "=> Creating new build configuration"
+
+ modules = MODULES[max(self.api)]
+ creator = ExtensionCreator(self.hdf5)
+ extensions = [creator.create_extension(x) for x in modules]
+ with open('buildconf.pickle','w') as f:
+ pickle.dump((modules, extensions), f)
+
+ self.distribution.ext_modules = extensions
+
+ # Rebuild the C source files if necessary
+ if self.cython:
+ self.compile_cython(modules)
+ if self.cython_only:
+ exit(0)
+
+ # Hand over control to distutils
+ build.run(self)
+
+ def get_pxi(self):
""" Generate a Cython .pxi file reflecting the current options. """
pxi_str = \
@@ -216,95 +265,73 @@ DEF H5PY_THREADS = %(THREADS)d # Enable thread-safety and non-blocking reads
"DEBUG": 10 if self.diag else 0, "THREADS": self.threads,
"HDF5": "Default" if self.hdf5 is None else self.hdf5}
- def run(self, *args, **kwds):
- """ Called to perform the actual compilation. This performs the
- following steps:
-
- 1. Generate a list of C extension modules for the compiler
- 2. Compare the run-time options with a currently-existing .pxi
- file, and determine if a Cython recompile is required
- 3. If necessary, recompile all Cython files
- 4. Hand control over to the distutils C build
+ def compile_cython(self, modules):
+ """ If needed, regenerate the C source files for the build process
"""
- if self.api is None:
- self.api = (min(KNOWN_API),)
+ try:
+ from Cython.Compiler.Main import Version, compile, compile_multiple, CompilationOptions
+ except ImportError:
+ fatal("Cython recompilation required, but Cython >=%s not installed." % MIN_CYTHON)
+
+ if Version.version < MIN_CYTHON:
+ fatal("Old Cython version detected; at least %s required" % MIN_CYTHON)
- creator = ExtensionCreator(self.hdf5)
- modules = sorted(MODULES[max(self.api)])
- self.distribution.ext_modules = \
- [creator.create_extension(x) for x in modules]
+ print "Running Cython (%s)..." % Version.version
+ print " API levels: %s" % ','.join(str(x) for x in self.api)
+ print " Thread-aware: %s" % ('yes' if self.threads else 'no')
+ print " Diagnostic mode: %s" % ('yes' if self.diag else 'no')
+ print " HDF5: %s" % ('default' if self.hdf5 is None else self.hdf5)
# Necessary because Cython doesn't detect changes to the .pxi
recompile_all = False
# Check if the config.pxi file needs to be updated for the given
# command-line options.
- if self.cython or not self._explicit_only:
- pxi_path = op.join(SRC_PATH, 'config.pxi')
- pxi = self._get_pxi()
- if not op.exists(pxi_path):
- try:
- f = open(pxi_path, 'w')
- f.write(pxi)
- f.close()
- except IOError:
- fatal('Failed write to "%s"' % pxi_path)
- recompile_all = True
- else:
- try:
- f = open(pxi_path, 'r+')
- except IOError:
- fatal("Can't read file %s" % pxi_path)
- if f.read() != pxi:
- f.close()
- f = open(pxi_path, 'w')
- f.write(pxi)
- recompile_all = True
+ pxi_path = op.join(SRC_PATH, 'config.pxi')
+ pxi = self.get_pxi()
+ if not op.exists(pxi_path):
+ try:
+ f = open(pxi_path, 'w')
+ f.write(pxi)
f.close()
-
- if self.cython or recompile_all:
+ except IOError:
+ fatal('Failed write to "%s"' % pxi_path)
+ recompile_all = True
+ else:
try:
- from Cython.Compiler.Main import Version, compile, compile_multiple, CompilationOptions
- except ImportError:
- fatal("Cython recompilation required, but Cython >=%s not installed." % MIN_CYTHON)
-
- if Version.version < MIN_CYTHON:
- fatal("Old Cython version detected; at least %s required" % MIN_CYTHON)
-
- print "Running Cython (%s)..." % Version.version
- print " API levels: %s" % ','.join(str(x) for x in self.api)
- print " Thread-aware: %s" % ('yes' if self.threads else 'no')
- print " Diagnostic mode: %s" % ('yes' if self.diag else 'no')
- print " HDF5: %s" % ('default' if self.hdf5 is None else self.hdf5)
-
- # Build each extension
- # This should be a single call to compile_multiple, but it's
- # broken in Cython 0.9.8.1.1
- if 1:
- cyopts = CompilationOptions(verbose=False)
- for module in modules:
- pyx_path = op.join(SRC_PATH,module+'.pyx')
- c_path = op.join(SRC_PATH,module+'.c')
- if not op.exists(c_path) or \
- os.stat(pyx_path).st_mtime > os.stat(c_path).st_mtime or \
- recompile_all or\
- self.force:
- print "Cythoning %s" % pyx_path
- result = compile(pyx_path, cyopts)
- if result.num_errors != 0:
- fatal("Cython error; aborting.")
- else:
- cyopts = CompilationOptions(verbose=True, timestamps=True)
- modpaths = [op.join(SRC_PATH, x+'.pyx') for x in modules]
- result = compile_multiple(modpaths, cyopts)
- if result.num_errors != 0:
- fatal("%d Cython errors; aborting" % result.num_errors)
-
- if self.cython_only:
- exit(0)
-
- build.run(self, *args, **kwds)
+ f = open(pxi_path, 'r+')
+ except IOError:
+ fatal("Can't read file %s" % pxi_path)
+ if f.read() != pxi:
+ f.close()
+ f = open(pxi_path, 'w')
+ f.write(pxi)
+ recompile_all = True
+ f.close()
+
+ # Build each extension
+ # This should be a single call to compile_multiple, but it's
+ # broken in Cython 0.9.8.1.1
+ if 1:
+ cyopts = CompilationOptions(verbose=False)
+ for module in modules:
+ pyx_path = op.join(SRC_PATH,module+'.pyx')
+ c_path = op.join(SRC_PATH,module+'.c')
+ if not op.exists(c_path) or \
+ os.stat(pyx_path).st_mtime > os.stat(c_path).st_mtime or \
+ recompile_all or\
+ self.force:
+ print "Cythoning %s" % pyx_path
+ result = compile(pyx_path, cyopts)
+ if result.num_errors != 0:
+ fatal("Cython error; aborting.")
+ else:
+ cyopts = CompilationOptions(verbose=True, timestamps=True)
+ modpaths = [op.join(SRC_PATH, x+'.pyx') for x in modules]
+ result = compile_multiple(modpaths, cyopts)
+ if result.num_errors != 0:
+ fatal("%d Cython errors; aborting" % result.num_errors)
class test(cybuild):
@@ -324,7 +351,6 @@ class test(cybuild):
cybuild.finalize_options(self)
def run(self):
- self._explicit_only = True # Ignore config.pxi disagreement unless --cython
cybuild.run(self)
oldpath = sys.path
try:
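As a rough illustration of the caching behaviour added to cybuild.run() above: a completed build stores the chosen module list and Extension objects in buildconf.pickle, and a later plain "python setup.py build" with no options reloads them instead of re-probing HDF5. A sketch of reading that cache (the printed contents are an assumption):

    import pickle

    # Written by cybuild.run() during the previous build
    with open('buildconf.pickle') as f:
        modules, extensions = pickle.load(f)

    print modules    # e.g. includes 'h5l18' and 'h5o18' when built with --api=16,18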
--