[h5py] 62/455: Misc. changes, start migrating test suite. Getting close to 0.2

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:17 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit 872aa70f078ad9337d94ffc1d245900a21fae968
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Mon Jun 30 23:08:26 2008 +0000

    Misc. changes, start migrating test suite.  Getting close to 0.2
---
 h5py/h5.pyx                |  15 ++
 h5py/h5a.pyx               |  38 ++---
 h5py/h5d.pxd               |   2 +-
 h5py/h5d.pyx               |  61 ++++---
 h5py/h5f.pxd               |   9 +-
 h5py/h5f.pyx               | 123 ++++++++------
 h5py/h5g.pyx               |  15 +-
 h5py/h5i.pyx               |   7 +-
 h5py/h5p.pxd               |   2 +
 h5py/h5p.pyx               | 132 ++++++++-------
 h5py/h5s.pyx               |   8 +-
 h5py/h5t.pyx               | 176 ++++++++-----------
 h5py/h5z.pyx               |  20 +--
 h5py/highlevel.py          | 410 ++++++++++-----------------------------------
 h5py/tests/__init__.py     |  11 +-
 h5py/tests/common.py       |  38 +++--
 h5py/tests/test_h5a.py     | 211 ++++++++---------------
 h5py/tests/test_h5d.py     |  98 ++++-------
 h5py/tests/test_h5f.py     |  51 +++---
 h5py/tests/test_h5g.py     | 144 +++++++---------
 h5py/tests/test_h5i.py     |  14 +-
 obsolete/attrs.hdf5        | Bin 26656 -> 0 bytes
 obsolete/definitions.pxd   |  62 -------
 obsolete/defs_h5common.pxd |   5 -
 obsolete/file.hdf5         | Bin 976 -> 0 bytes
 obsolete/fragments.pyx     | 138 ---------------
 obsolete/test.h5           | Bin 7768 -> 0 bytes
 obsolete/test_h5a.pyx      | 138 ---------------
 obsolete/test_h5f.pyx      |  76 ---------
 obsolete/test_h5g.pyx      | 184 --------------------
 obsolete/test_h5s.pyx      | 130 --------------
 obsolete/test_simple.hdf5  | Bin 12336 -> 0 bytes
 obsolete/transactions.py   | 279 ------------------------------
 setup.py                   |  29 +++-
 34 files changed, 647 insertions(+), 1979 deletions(-)

diff --git a/h5py/h5.pyx b/h5py/h5.pyx
index 036505f..92c7b7d 100644
--- a/h5py/h5.pyx
+++ b/h5py/h5.pyx
@@ -74,6 +74,21 @@ cdef class ObjectID:
         copy._locked = self._locked
         return copy
 
+    def __richcmp__(self, object other, int how):
+        """ Supports only == and != """
+
+        if how == 2 or how == 3:
+            
+            if not hasattr(other, 'id'):
+                return False
+            eq = isinstance(other, type(self)) and self.id == other.id
+
+            if how == 2:
+                return eq
+            return not eq
+
+        raise TypeError("Only equality comparisons are supported.")
+
     def __str__(self):
         if H5Iget_type(self.id) != H5I_BADID:
             ref = str(H5Iget_ref(self.id))
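
The new __richcmp__ above makes two identifier wrappers compare equal exactly when they are instances of the same class and wrap the same HDF5 id; any ordering comparison raises TypeError. A rough sketch of the resulting behaviour (the file name and the ACC_RDONLY flag spelling are assumptions):

    import h5py.h5f as h5f
    import h5py.h5g as h5g

    fid = h5f.open("example.hdf5", h5f.ACC_RDONLY)  # assumed file/flag
    g1 = h5g.open(fid, "/")
    g2 = h5g.open(fid, "/")

    g1 == g1     # True: same class, same id
    g1 == g2     # False: two distinct ids wrapping the same group
    g1 != fid    # True: a GroupID is never equal to a FileID
    # g1 < g2    # would raise TypeError("Only equality comparisons are supported.")
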
diff --git a/h5py/h5a.pyx b/h5py/h5a.pyx
index d8e9f35..f89de2a 100644
--- a/h5py/h5a.pyx
+++ b/h5py/h5a.pyx
@@ -131,14 +131,16 @@ cdef class AttrID(ObjectID):
         shape:  A Numpy-style shape tuple representing the dataspace
     """
     property name:
+        """ The attribute's name
+        """
         def __get__(self):
             return self.get_name()
 
     property shape:
+        """ A Numpy-style shape tuple representing the attribute dataspace.
+        """
         def __get__(self):
-            """ Retrieve the dataspace of this attribute, as a Numpy-style 
-                shape tuple.
-            """
+
             cdef SpaceID space
             space = None
             try:
@@ -149,14 +151,12 @@ cdef class AttrID(ObjectID):
                     space.close()
 
     property dtype:
+        """ A Numpy-stype dtype object representing the attribute's datatype
+        """
         def __get__(self):
-            """ Obtain the data-type of this attribute as a Numpy dtype.  Note that the
-                resulting dtype is not guaranteed to be byte-for-byte compatible with
-                the underlying HDF5 datatype, but is appropriate for use in e.g. the 
-                read() and write() functions defined in this module.
-            """
+
             cdef TypeID tid
-            tid = typewrap(H5Tget_type(self.id))
+            tid = typewrap(H5Aget_type(self.id))
             return tid.py_dtype()
 
     def close(self):
@@ -170,13 +170,13 @@ cdef class AttrID(ObjectID):
 
     def read(self, ndarray arr_obj not None):
         """ (NDARRAY arr_obj)
-            
+
             Read the attribute data into the given Numpy array.  Note that the 
             Numpy array must have the same shape as the HDF5 attribute, and a 
             conversion-compatible datatype.
 
-            The Numpy array must be writable, C-contiguous and own its data.  If
-            this is not the case, an ValueError is raised and the read fails.
+            The Numpy array must be writable, C-contiguous and own its data.
+            If this is not the case, a ValueError is raised and the read fails.
         """
         cdef TypeID mtype
         cdef hid_t space_id
@@ -187,7 +187,7 @@ cdef class AttrID(ObjectID):
             space_id = H5Aget_space(self.id)
             check_numpy_write(arr_obj, space_id)
 
-            mtype = h5t.py_translate_dtype(arr_obj.dtype)
+            mtype = h5t.py_create(arr_obj.dtype)
 
             H5Aread(self.id, mtype.id, PyArray_DATA(arr_obj))
 
@@ -200,12 +200,12 @@ cdef class AttrID(ObjectID):
     def write(self, ndarray arr_obj not None):
         """ (NDARRAY arr_obj)
 
-            Write the contents of a Numpy array too the attribute.  Note that the 
-            Numpy array must have the same shape as the HDF5 attribute, and a 
-            conversion-compatible datatype.  
+            Write the contents of a Numpy array to the attribute.  Note that
+            the Numpy array must have the same shape as the HDF5 attribute, and
+            a conversion-compatible datatype.  
 
-            The Numpy array must be C-contiguous and own its data.  If this is not
-            the case, ValueError will be raised and the write will fail.
+            The Numpy array must be C-contiguous and own its data.  If this is
+            not the case, ValueError will be raised and the write will fail.
         """
         cdef TypeID mtype
         cdef hid_t space_id
@@ -215,7 +215,7 @@ cdef class AttrID(ObjectID):
         try:
             space_id = H5Aget_space(self.id)
             check_numpy_read(arr_obj, space_id)
-            mtype = h5t.py_translate_dtype(arr_obj.dtype)
+            mtype = h5t.py_create(arr_obj.dtype)
 
             H5Awrite(self.id, mtype.id, PyArray_DATA(arr_obj))
 
diff --git a/h5py/h5d.pxd b/h5py/h5d.pxd
index 42fd690..bfa0c25 100644
--- a/h5py/h5d.pxd
+++ b/h5py/h5d.pxd
@@ -22,7 +22,7 @@ cdef class DatasetID(ObjectID):
 
 from h5t cimport class TypeID, typewrap
 from h5s cimport class SpaceID
-from h5p cimport class PropID, PropDCID, PropDXID, pdefault
+from h5p cimport class PropID, pdefault, propwrap, PropDCID, PropDXID
 from numpy cimport class ndarray
 
 
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
index 648e429..02b2528 100644
--- a/h5py/h5d.pyx
+++ b/h5py/h5d.pyx
@@ -11,15 +11,7 @@
 #-
 
 """
-    Provides access to the low-level HDF5 "H5D" dataset interface
-
-    Most H5D calls are unchanged.  Since dataset I/O is done with Numpy objects,
-    read and write calls do not require you to explicitly define a datatype;
-    the type of the given Numpy array is used instead.
-
-    The py_* family of functions in this module provide a significantly 
-    simpler interface.  They should be sufficient for nearly all dataset
-    operations from Python.
+    Provides access to the low-level HDF5 "H5D" dataset interface.
 """
 
 # Pyrex compile-time imports
@@ -70,10 +62,8 @@ def create(ObjectID loc not None, char* name, TypeID tid not None,
          PropDCID plist=None ) 
         => DatasetID
 
-        Create a new dataset under an HDF5 file or group id.  Keyword plist 
+        Create a new dataset under an HDF5 file or group.  Keyword plist 
         may be a dataset creation property list.
-
-        For a friendlier version of this function, try py_create()
     """
     cdef hid_t plist_id
     plist_id = pdefault(plist)
@@ -87,7 +77,6 @@ def open(ObjectID loc not None, char* name):
     return DatasetID(H5Dopen(loc.id, name))
 
 
-
 # === Dataset I/O =============================================================
 
 cdef class DatasetID(ObjectID):
@@ -106,6 +95,27 @@ cdef class DatasetID(ObjectID):
         rank:   Integer giving dataset rank
     """
 
+    property dtype:
+        """ Numpy-style dtype object representing the dataset type """
+        def __get__(self):
+            cdef TypeID tid
+            tid = self.get_type()
+            return tid.dtype
+
+    property shape:
+        """ Numpy-stype shape tuple representing the dataspace """
+        def __get__(self):
+            cdef SpaceID sid
+            sid = self.get_space()
+            return sid.get_simple_extent_dims()
+
+    property rank:
+        """ Integer giving the dataset rank (0 = scalar) """
+        def __get__(self):
+            cdef SpaceID sid
+            sid = self.get_space()
+            return sid.get_simple_extent_ndims()
+
     def close(self):
         """ ()
 
@@ -133,8 +143,6 @@ cdef class DatasetID(ObjectID):
             wide variety of dataspace configurations are possible, this is not
             checked.  You can easily crash Python by reading in data from too
             large a dataspace.
-            
-            For a friendlier version of this function, try py_read_slab().
         """
         cdef TypeID mtype
         cdef hid_t plist_id
@@ -142,7 +150,7 @@ cdef class DatasetID(ObjectID):
         plist_id = pdefault(plist)
 
         try:
-            mtype = h5t.py_translate_dtype(arr_obj.dtype)
+            mtype = h5t.py_create(arr_obj.dtype)
             check_numpy_write(arr_obj, -1)
 
             H5Dread(self.id, mtype.id, mspace.id, fspace.id, plist_id, PyArray_DATA(arr_obj))
@@ -153,7 +161,8 @@ cdef class DatasetID(ObjectID):
         
     def write(self, SpaceID mspace not None, SpaceID fspace not None, 
                     ndarray arr_obj not None, PropDXID plist=None):
-        """ (SpaceID mspace, SpaceID fspace, NDARRAY arr_obj, PropDXID plist=None)
+        """ (SpaceID mspace, SpaceID fspace, NDARRAY arr_obj, 
+             PropDXID plist=None)
 
             Write data from a Numpy array to an HDF5 dataset. Keyword plist may 
             be a dataset transfer property list.
@@ -161,8 +170,6 @@ cdef class DatasetID(ObjectID):
             The provided Numpy array must be C-contiguous, and own its data.  
             If this is not the case, ValueError will be raised and the read 
             will fail.
-
-            For a friendlier version of this function, try py_write_slab()
         """
         cdef TypeID mtype
         cdef hid_t plist_id
@@ -170,7 +177,7 @@ cdef class DatasetID(ObjectID):
         plist_id = pdefault(plist)
 
         try:
-            mtype = h5t.py_translate_dtype(arr_obj.dtype)
+            mtype = h5t.py_create(arr_obj.dtype)
             check_numpy_read(arr_obj, -1)
 
             H5Dwrite(self.id, mtype.id, mspace.id, fspace.id, plist_id, PyArray_DATA(arr_obj))
@@ -235,12 +242,12 @@ cdef class DatasetID(ObjectID):
         return typewrap(H5Dget_type(self.id))
 
     def get_create_plist(self):
-        """ () => PropDSCreateID
+        """ () => PropDCID
 
-            Create a new copy of the dataset creation property list used when 
-            this dataset was created.
+            Create and return a new copy of the dataset creation property list
+            used when this dataset was created.
         """
-        return PropDCID(H5Dget_create_plist(self.id))
+        return propwrap(H5Dget_create_plist(self.id))
 
     def get_offset(self):
         """ () => LONG offset
@@ -252,9 +259,9 @@ cdef class DatasetID(ObjectID):
     def get_storage_size(self):
         """ () => LONG storage_size
 
-            Determine the amount of file space required for a dataset.  Note this
-            only counts the space which has actually been allocated; it may even
-            be zero.
+            Determine the amount of file space required for a dataset.  Note 
+            this only counts the space which has actually been allocated; it 
+            may even be zero.
         """
         return H5Dget_storage_size(self.id)
 
diff --git a/h5py/h5f.pxd b/h5py/h5f.pxd
index 2d88423..29bb49c 100644
--- a/h5py/h5f.pxd
+++ b/h5py/h5f.pxd
@@ -23,8 +23,13 @@ cdef class FileID(ObjectID):
 cdef extern from "hdf5.h":
 
   # File constants
-  int H5F_ACC_TRUNC, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_EXCL
-  int H5F_ACC_DEBUG, H5F_ACC_CREAT
+  cdef enum:
+    H5F_ACC_TRUNC
+    H5F_ACC_RDONLY
+    H5F_ACC_RDWR
+    H5F_ACC_EXCL
+    H5F_ACC_DEBUG
+    H5F_ACC_CREAT
 
   # The difference between a single file and a set of mounted files
   cdef enum H5F_scope_t:
diff --git a/h5py/h5f.pyx b/h5py/h5f.pyx
index 5a150c6..7765eb9 100644
--- a/h5py/h5f.pyx
+++ b/h5py/h5f.pyx
@@ -15,7 +15,7 @@
 """
 
 # Pyrex compile-time imports
-from h5p cimport PropFCID, PropFAID, PropMID, pdefault, H5P_DEFAULT
+from h5p cimport propwrap, pdefault, PropFAID, PropFCID, PropMID
 from utils cimport emalloc, efree, pybool
 
 # Runtime imports
@@ -85,7 +85,7 @@ def is_hdf5(char* name):
     """
     return pybool(H5Fis_hdf5(name))
 
-def flush(ObjectID obj, int scope=H5F_SCOPE_LOCAL):
+def flush(ObjectID obj not None, int scope=H5F_SCOPE_LOCAL):
     """ (ObjectID obj, INT scope=SCOPE_LOCAL)
 
         Tell the HDF5 library to flush file buffers to disk.  "obj" may
@@ -133,7 +133,74 @@ def get_name(ObjectID obj not None):
     finally:
         efree(name)
 
-# === XXXX ===
+def get_obj_count(object where=OBJ_ALL, int types=H5F_OBJ_ALL):
+    """ (OBJECT where=OBJ_ALL, types=OBJ_ALL) => INT n_objs
+
+        Get the number of open objects.
+
+        where:  Either a FileID instance representing an HDF5 file, or the
+                special constant OBJ_ALL, to count objects in all files.
+
+        types:  Specify what kinds of object to include.  May be one of OBJ_*, 
+                or any bitwise combination (e.g. OBJ_FILE | OBJ_ATTR).  
+
+                The special value OBJ_ALL matches all object types, and 
+                OBJ_LOCAL will only match objects opened through a specific 
+                identifier.
+    """
+    cdef hid_t where_id
+    if typecheck(where, FileID):
+        where_id = where.id
+    elif typecheck(where, int) or typecheck(where, long):
+        where_id = where
+    else:
+        raise TypeError("Location must be a FileID or OBJ_ALL.")
+
+    return H5Fget_obj_count(where_id, types)
+
+def get_obj_ids(object where=OBJ_ALL, int types=H5F_OBJ_ALL):
+    """ (OBJECT where=OBJ_ALL, types=OBJ_ALL) => LIST open_ids
+
+        Get a list of identifier instances for open objects.
+
+        where:  Either a FileID instance representing an HDF5 file, or the
+                special constant OBJ_ALL, to list objects in all files.
+
+        types:  Specify what kinds of object to include.  May be one of OBJ_*, 
+                or any bitwise combination (e.g. OBJ_FILE | OBJ_ATTR).  
+
+                The special value OBJ_ALL matches all object types, and 
+                OBJ_LOCAL will only match objects opened through a specific 
+                identifier.
+    """
+    cdef int count
+    cdef int i
+    cdef hid_t where_id
+    cdef hid_t *obj_list
+    cdef list py_obj_list
+    obj_list = NULL
+    py_obj_list = []
+
+    if typecheck(where, FileID):
+        where_id = where.id
+    elif typecheck(where, int) or typecheck(where, long):
+        where_id = where
+    else:
+        raise TypeError("Location must be a FileID or OBJ_ALL.")
+
+    try:
+        count = H5Fget_obj_count(where_id, types)
+        obj_list = <hid_t*>emalloc(sizeof(hid_t)*count)
+
+        H5Fget_obj_ids(where_id, types, count, obj_list)
+        for i from 0<=i<count:
+            py_obj_list.append(obj_list[i])
+        return py_obj_list
+
+    finally:
+        efree(obj_list)
+
+# === FileID implementation ===================================================
 
 cdef class FileID(ObjectID):
 
@@ -177,7 +244,7 @@ cdef class FileID(ObjectID):
 
             Retrieve a copy of the property list used to create this file.
         """
-        return PropFCID(H5Fget_create_plist(self.id))
+        return propwrap(H5Fget_create_plist(self.id))
 
     def get_access_plist(self):
         """ () => PropFAID
@@ -185,58 +252,18 @@ cdef class FileID(ObjectID):
             Retrieve a copy of the property list which manages access 
             to this file.
         """
-        return PropFAID(H5Fget_access_plist(self.id))
+        return propwrap(H5Fget_access_plist(self.id))
 
     def get_freespace(self):
-        """ () => LONG free space
+        """ () => LONG freespace
 
-            Determine the amount of free space in this file.  Note that this only
-            tracks free space until the file is closed.
+            Determine the amount of free space in this file.  Note that this
+            only tracks free space until the file is closed.
         """
         return H5Fget_freespace(self.id)
 
 
-    def get_obj_count(self, int types=H5F_OBJ_ALL):
-        """ (INT types=OBJ_ALL) => INT n_objs
-
-            Get the number of open objects in the file.  The value of "types" 
-            may be one of h5f.OBJ_*, or any bitwise combination (e.g. 
-            OBJ_FILE | OBJ_ATTR).  
 
-            The special value OBJ_ALL matches all object types, and 
-            OBJ_LOCAL will only match objects opened through this specific 
-            identifier.
-        """
-        return H5Fget_obj_count(self.id, types)
-
-    def get_obj_ids(self, int types=H5F_OBJ_ALL):
-        """ (INT types=OBJ_ALL) => LIST open_ids
-
-            Get a list of identifiers for open objects in the file.  The value 
-            of "types" may be one of h5f.OBJ_*, or any bitwise combination (e.g. 
-            OBJ_FILE | OBJ_ATTR). 
-
-            The special value OBJ_ALL matches all object types, and 
-            OBJ_LOCAL will only match objects opened through this specific 
-            identifier. 
-        """
-        cdef int count
-        cdef hid_t *obj_list
-        cdef int i
-        obj_list = NULL
-
-        py_obj_list = []
-        try:
-            count = H5Fget_obj_count(self.id, types)
-            obj_list = <hid_t*>emalloc(sizeof(hid_t)*count)
-
-            H5Fget_obj_ids(self.id, types, count, obj_list)
-            for i from 0<=i<count:
-                py_obj_list.append(obj_list[i])
-            return py_obj_list
-
-        finally:
-            efree(obj_list)
 
 
 
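get_obj_count() and get_obj_ids() are now module-level functions so they can also be called with the special OBJ_ALL constant instead of a FileID. Roughly:

    import h5py.h5f as h5f

    # Every open object in every file known to the library
    n = h5f.get_obj_count(h5f.OBJ_ALL, h5f.OBJ_ALL)

    # Only files and attributes opened through one particular FileID
    ids = h5f.get_obj_ids(fid, h5f.OBJ_FILE | h5f.OBJ_ATTR)   # fid: a FileID
    # ids is a plain list of integer identifiers
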
diff --git a/h5py/h5g.pyx b/h5py/h5g.pyx
index f9d9ca4..6a8a0a8 100644
--- a/h5py/h5g.pyx
+++ b/h5py/h5g.pyx
@@ -179,7 +179,7 @@ cdef class GroupID(ObjectID):
     def get_num_objs(self):
         """ () => INT number_of_objects
 
-            Get the number of objects attached to a given group.
+            Get the number of objects directly attached to a given group.
         """
         cdef hsize_t size
         H5Gget_num_objs(self.id, &size)
@@ -227,7 +227,7 @@ cdef class GroupID(ObjectID):
         cdef herr_t retval
         retval = H5Gget_objtype_by_idx(self.id, idx)
         if retval < 0:
-            raise H5Error((0,"Invalid argument."))
+            raise H5Error((1,"Invalid argument."))
         return retval
 
     def get_objinfo(self, char* name, int follow_link=1):
@@ -260,7 +260,7 @@ cdef class GroupID(ObjectID):
     def get_linkval(self, char* name):
         """ (STRING name) => STRING link_value
 
-            Retrieve the value of the given symbolic link.
+            Retrieve the value (target name) of a symbolic link.
         """
         cdef char* value
         cdef H5G_stat_t statbuf
@@ -307,3 +307,12 @@ cdef class GroupID(ObjectID):
         finally:
             efree(cmnt)
 
+    def py_exists(self, char* name):
+
+        try:
+            self.get_objinfo(name)
+        except H5Error:
+            return False    
+        return True
+
+
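
py_exists() simply swallows the H5Error that get_objinfo() raises for a missing name, which keeps a create-if-absent pattern short. The group names and the create() signature below are assumptions:

    import h5py.h5g as h5g

    root = h5g.open(fid, "/")              # fid: an open FileID
    if not root.py_exists("results"):
        h5g.create(root, "results")        # assumed create(loc, name) call
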
diff --git a/h5py/h5i.pyx b/h5py/h5i.pyx
index 04bfe4a..c87d895 100644
--- a/h5py/h5i.pyx
+++ b/h5py/h5i.pyx
@@ -63,10 +63,10 @@ def get_name(ObjectID obj not None):
     cdef char* name
 
     namelen = <int>H5Iget_name(obj.id, NULL, 0)
-    assert namelen >= 0
     if namelen == 0:
         return None
 
+    assert namelen > 0
     name = <char*>emalloc(sizeof(char)*(namelen+1))
     try:
         H5Iget_name(obj.id, name, namelen+1)
@@ -80,7 +80,6 @@ def get_file_id(ObjectID obj not None):
 
         Obtain an identifier for the file in which this object resides.
     """
-    # TODO: does the library function correctly increment the ref count?
     return FileID(H5Iget_file_id(obj.id))
 
 def inc_ref(ObjectID obj not None):
@@ -98,10 +97,6 @@ def get_ref(ObjectID obj not None):
     """ (ObjectID obj)
 
         Retrieve the reference count for the given object.
-
-        This function is provided for debugging only.  Reference counting
-        is automatically synchronized with Python, and you can easily break
-        ObjectID instances by abusing this function.
     """
     return H5Iget_ref(obj.id)
 
diff --git a/h5py/h5p.pxd b/h5py/h5p.pxd
index 6eec6eb..a0b7c1b 100644
--- a/h5py/h5p.pxd
+++ b/h5py/h5p.pxd
@@ -54,6 +54,7 @@ cdef class PropMID(PropInstanceID):
     pass
 
 cdef hid_t pdefault(PropID pid)
+cdef object propwrap(hid_t id_in)
 
 cdef extern from "hdf5.h":
 
@@ -121,6 +122,7 @@ cdef extern from "hdf5.h":
   int    H5Pget_class(hid_t plist_id) except *
   herr_t H5Pclose(hid_t plist_id) except *
   htri_t H5Pequal( hid_t id1, hid_t id2  ) except *
+  herr_t H5Pclose_class(hid_t id) except *
 
   # File creation properties
   herr_t H5Pget_version(hid_t plist, unsigned int *super_, unsigned int* freelist, 
diff --git a/h5py/h5p.pyx b/h5py/h5p.pyx
index d75f2ce..03cadd5 100644
--- a/h5py/h5p.pyx
+++ b/h5py/h5p.pyx
@@ -23,53 +23,60 @@ from utils cimport  require_tuple, convert_dims, convert_tuple, \
 # Runtime imports
 import h5
 
-cdef object lockid(hid_t id_in):
+# === C API ===================================================================
+
+cdef hid_t pdefault(PropID pid):
+
+    if pid is None:
+        return <hid_t>H5P_DEFAULT
+    return pid.id
+
+cdef object propwrap(hid_t id_in):
+
+    clsid = H5Pget_class(id_in)
+    try:
+        if H5Pequal(clsid, H5P_FILE_CREATE):
+            pcls = PropFCID
+        elif H5Pequal(clsid, H5P_FILE_ACCESS):
+            pcls = PropFAID
+        elif H5Pequal(clsid, H5P_DATASET_CREATE):
+            pcls = PropDCID
+        elif H5Pequal(clsid, H5P_DATASET_XFER):
+            pcls = PropDXID
+        elif H5Pequal(clsid, H5P_MOUNT):
+            pcls = PropMID
+        else:
+            raise ValueError("No class found for ID %d" % id_in)
+
+        return pcls(id_in)
+    finally:
+        H5Pclose_class(clsid)
+
+cdef object lockcls(hid_t id_in):
     cdef PropClassID pid
     pid = PropClassID(id_in)
     pid._locked = 1
     return pid
 
+
 # === Public constants and data structures ====================================
 
 # Property list classes
 # These need to be locked, as the library won't let you close them.
-NO_CLASS       = lockid(H5P_NO_CLASS)
-FILE_CREATE    = lockid(H5P_FILE_CREATE)
-FILE_ACCESS    = lockid(H5P_FILE_ACCESS)
-DATASET_CREATE = lockid(H5P_DATASET_CREATE)
-DATASET_XFER   = lockid(H5P_DATASET_XFER)
-MOUNT          = lockid(H5P_MOUNT)
-
-DEFAULT = lockid(H5P_DEFAULT)  # really 0 but whatever
-
-_classmapper = { H5P_FILE_CREATE: PropFCID,
-                 H5P_FILE_ACCESS: PropFAID,
-                 H5P_DATASET_CREATE: PropDCID,
-                 H5P_DATASET_XFER: PropDXID,
-                 H5P_MOUNT: PropMID }
-
-# === C API and extension types ===============================================
-
-cdef hid_t pdefault(PropID pid):
-
-    if pid is None:
-        return <hid_t>H5P_DEFAULT
-    
-    return pid.id
-
-cdef class PropID(ObjectID):
-    
-    """ Base class for all operations which are valid on both property list 
-        instances and classes.
-    """
-    pass
+NO_CLASS       = lockcls(H5P_NO_CLASS)
+FILE_CREATE    = lockcls(H5P_FILE_CREATE)
+FILE_ACCESS    = lockcls(H5P_FILE_ACCESS)
+DATASET_CREATE = lockcls(H5P_DATASET_CREATE)
+DATASET_XFER   = lockcls(H5P_DATASET_XFER)
+MOUNT          = lockcls(H5P_MOUNT)
 
-# === Property list HDF5 classes ==============================================
+DEFAULT = None   # In the HDF5 header files this is actually 0, which is an
+                 # invalid identifier.  The new strategy for default options
+                 # is to make them all None, to better match the Python style
+                 # for keyword arguments.
 
-cdef class PropClassID(PropID):
-    pass
 
-# === Property list HDF5 instances ============================================
+# === Property list functional API ============================================
 
 def create(PropClassID cls not None):
     """ (PropClassID cls) => PropID
@@ -81,21 +88,30 @@ def create(PropClassID cls not None):
             DATASET_XFER
             MOUNT
     """
-    try:
-        type_ = _classmapper[cls.id]
-    except KeyError:
-        raise ValueError("Invalid class")
+    cdef hid_t newid
+    newid = H5Pcreate(cls.id)
+    return propwrap(newid)
+
+# === Class API ===============================================================
 
-    return type_(H5Pcreate(cls.id))
+cdef class PropID(ObjectID):
+
+    def equal(self, PropID plist not None):
+        """ (PropID plist) => BOOL
+
+            Compare this property list (or class) to another for equality.
+        """
+        return pybool(H5Pequal(self.id, plist.id))
 
 cdef class PropInstanceID(PropID):
 
     """
-        Base class for property list instance objects
+        Base class for property list instance objects.  Provides methods which
+        are common across all HDF5 property list classes.
     """
 
     def copy(self):
-        """ () => PropList new_property_list_id
+        """ () => PropList newid
 
             Create a new copy of an existing property list object.
         """
@@ -117,12 +133,6 @@ cdef class PropInstanceID(PropID):
         """
         return PropClassID(H5Pget_class(self.id))
 
-    def equal(self, PropID plist not None):
-        """ (PropID plist) => BOOL
-
-            Compare this property list to another for equality.
-        """
-        return pybool(H5Pequal(self.id, plist.id))
 
 # === File creation ===========================================================
 
@@ -180,7 +190,7 @@ cdef class PropFCID(PropInstanceID):
         H5Pset_sizes(self.id, addr, size)
 
     def get_sizes(self):
-        """ () => TUPLE sizes    [File creation]
+        """ () => TUPLE sizes
 
             Determine addressing offsets and lengths for objects in an 
             HDF5 file, in bytes.  Return value is a 2-tuple with values:
@@ -212,14 +222,14 @@ cdef class PropFCID(PropInstanceID):
         return (ik, lk)
 
     def set_istore_k(self, unsigned int ik):
-        """ (UINT ik)    [File creation]
+        """ (UINT ik)
 
             See hdf5 docs for H5Pset_istore_k.
         """
         H5Pset_istore_k(self.id, ik)
     
     def get_istore_k(self):
-        """ () => UINT ik    [File creation]
+        """ () => UINT ik
 
             See HDF5 docs for H5Pget_istore_k
         """
@@ -232,16 +242,16 @@ cdef class PropFCID(PropInstanceID):
 cdef class PropDCID(PropInstanceID):
 
     """
-        Represents a dataset creation property list
+        Represents a dataset creation property list.
     """
 
     def set_layout(self, int layout_code):
         """ (INT layout_code)
 
             Set dataset storage strategy; legal values are:
-            * h5d.COMPACT
-            * h5d.CONTIGUOUS
-            * h5d.CHUNKED
+                h5d.COMPACT
+                h5d.CONTIGUOUS
+                h5d.CHUNKED
         """
         H5Pset_layout(self.id, layout_code)
     
@@ -249,9 +259,9 @@ cdef class PropDCID(PropInstanceID):
         """ () => INT layout_code
 
             Determine the storage strategy of a dataset; legal values are:
-            * h5d.COMPACT
-            * h5d.CONTIGUOUS
-            * h5d.CHUNKED
+                h5d.COMPACT
+                h5d.CONTIGUOUS
+                h5d.CHUNKED
         """
         return <int>H5Pget_layout(self.id)
 
@@ -277,7 +287,7 @@ cdef class PropDCID(PropInstanceID):
             efree(dims)
     
     def get_chunk(self):
-        """ () => TUPLE chunk_dimensions    [Dataset creation]
+        """ () => TUPLE chunk_dimensions
 
             Obtain the dataset chunk size, as a tuple.
         """
@@ -339,12 +349,15 @@ cdef class PropDCID(PropInstanceID):
         """ (INT filter_code, UINT flags=0, TUPLE values=None)
 
             Set a filter in the pipeline.  Params are:
+
             filter_code:
                 h5z.FILTER_DEFLATE
                 h5z.FILTER_SHUFFLE
                 h5z.FILTER_FLETCHER32
                 h5z.FILTER_SZIP
+
             flags:  Bit flags (h5z.FLAG_*) setting filter properties
+
             values: TUPLE of UINTS giving auxiliary data for the filter.
         """
         cdef size_t nelements
@@ -381,6 +394,7 @@ cdef class PropDCID(PropInstanceID):
         """ (UINT filter_idx) => TUPLE filter_info
 
             Get information about a filter, identified by its index.
+
             Tuple entries are:
             0: INT filter code (h5z.FILTER_*)
             1: UINT flags (h5z.FLAG_*)
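
Instead of a static dictionary, propwrap() now asks the library for the list's class via H5Pget_class and dispatches with H5Pequal, so anything returned by create() or the various get_*_plist() calls comes back as the right Python subclass. A sketch of the visible effect:

    import h5py.h5p as h5p
    import h5py.h5d as h5d

    plist = h5p.create(h5p.DATASET_CREATE)   # propwrap() hands back a PropDCID
    plist.set_layout(h5d.CHUNKED)            # legal codes listed in the docstring
    plist.set_shuffle()                      # same call the new highlevel code uses

    # equal() now lives on the PropID base class, so it also works for classes
    print plist.get_class().equal(h5p.DATASET_CREATE)
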
diff --git a/h5py/h5s.pyx b/h5py/h5s.pyx
index 8cc0159..2deb75f 100644
--- a/h5py/h5s.pyx
+++ b/h5py/h5s.pyx
@@ -102,10 +102,15 @@ def create_simple(object dims_tpl, object max_dims_tpl=None):
         efree(dims)
         efree(max_dims)
 
+# === H5S class API ===========================================================
+
 cdef class SpaceID(ObjectID):
 
     """
         Represents a dataspace identifier.
+
+        Properties:
+        shape:  Numpy-style shape tuple with dimensions.
     """
 
     property shape:
@@ -336,7 +341,8 @@ cdef class SpaceID(ObjectID):
     def select_valid(self):
         """ () => BOOL select_valid
             
-            Determine if the current selection falls within the dataspace extent.
+            Determine if the current selection falls within
+            the dataspace extent.
         """
         return pybool(H5Sselect_valid(self.id))
 
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
index 3bb748b..e406474 100644
--- a/h5py/h5t.pyx
+++ b/h5py/h5t.pyx
@@ -12,55 +12,6 @@
 
 """
     HDF5 "H5T" data-type API
-
-    Provides access to the HDF5 data-type object interface.  Functions
-    are provided to convert HDF5 datatype object back and forth from Numpy
-    dtype objects.  Constants are also defined in this module for a variety
-    of HDF5 native types and classes. Points of interest:
-
-    1. Translation
-
-        The functions py_translate_h5t and py_translate_dtype do the heavy
-        lifting required to go between HDF5 datatype objects and Numpy dtypes.
-
-        Since the HDF5 library can represent a greater range of types than
-        Numpy, the conversion is asymmetric.  Attempting to convert an HDF5
-        type to a Numpy dtype will result in a dtype object which matches
-        as closely as possible.  In contrast, converting from a Numpy dtype
-        to an HDF5 type will always result in a precise, byte-compatible 
-        description of the Numpy data layout.
-
-    2. Complex numbers
-
-        Since HDF5 has no native complex types, and the native Numpy
-        representation is a struct with two floating-point members, complex
-        numbers are saved as HDF5 compound objects.
-
-        These compound objects have exactly two fields, with IEEE 32- or 64-
-        bit format, and default names "r" and "i".  Since other conventions
-        exist for field naming, and in fact may be essential for compatibility
-        with external tools, new names can be specified as arguments to
-        both py_translate_* functions.
-
-    3. Enumerations
-
-        There is no native Numpy or Python type for enumerations.  Since an
-        enumerated type is simply a mapping between string names and integer
-        values, I have implemented enum support through dictionaries.  
-
-        An HDF5 H5T_ENUM type is converted to the appropriate Numpy integer 
-        type (e.g. <u4, etc.), and a dictionary mapping names to values is also 
-        generated. This dictionary is attached to the dtype object via the
-        functions py_enum_attach and py_enum_recover.
-
-        The exact dtype declaration is given below; howeve, the py_enum*
-        functions should encapsulate almost all meaningful operations.
-
-        enum_dict = {'RED': 0L, 'GREEN': 1L}
-
-        dtype( ('<i4', [ ( (enum_dict, 'enum'),   '<i4' )] ) )
-                  ^             ^         ^         ^
-             (main type)  (metadata) (field name) (field type)
 """
 
 # Pyrex compile-time imports
@@ -269,8 +220,8 @@ def open(ObjectID group not None, char* name):
 def array_create(TypeID base not None, object dims_tpl):
     """ (TypeID base, TUPLE dimensions)
 
-        Create a new array datatype, of parent type <base_type_id> and
-        dimensions given via a tuple of non-negative integers.  "Unlimited" 
+        Create a new array datatype, using an HDF5 parent type and
+        dimensions given via a tuple of positive integers.  "Unlimited" 
         dimensions are not allowed.
     """
     cdef hsize_t rank
@@ -283,7 +234,7 @@ def array_create(TypeID base not None, object dims_tpl):
 
     try:
         convert_tuple(dims_tpl, dims, rank)
-        return H5Tarray_create(base.id, rank, dims, NULL)
+        return typewrap(H5Tarray_create(base.id, rank, dims, NULL))
     finally:
         efree(dims)
 
@@ -323,7 +274,7 @@ cdef class TypeID(ObjectID):
         cpy._complex_names = self._complex_names
         return cpy
 
-    property py_complex_names:
+    property complex_names:
         """ Either () or a 2-tuple (real, imag) determining how complex types
             are read/written using HDF5 compound types.
         """
@@ -571,7 +522,7 @@ cdef class TypeBitfieldID(TypeID):
 cdef class TypeAtomicID(TypeID):
 
     """
-        Represents an atomic datatype (including variable-length datatypes).
+        Represents an atomic datatype (float or integer).
     """
 
     def get_order(self):
@@ -580,7 +531,6 @@ cdef class TypeAtomicID(TypeID):
             Obtain the byte order of the datatype; one of:
              ORDER_LE
              ORDER_BE
-             ORDER_NATIVE
         """
         return <int>H5Tget_order(self.id)
 
@@ -590,7 +540,6 @@ cdef class TypeAtomicID(TypeID):
             Set the byte order of the datatype; must be one of
              ORDER_LE
              ORDER_BE
-             ORDER_NATIVE
         """
         H5Tset_order(self.id, <H5T_order_t>order)
 
@@ -626,9 +575,9 @@ cdef class TypeAtomicID(TypeID):
         """ () => (INT lsb_pad_code, INT msb_pad_code)
 
             Determine the padding type.  Possible values are:
-                PAD_ZERO
-                PAD_ONE
-                PAD_BACKGROUND
+             PAD_ZERO
+             PAD_ONE
+             PAD_BACKGROUND
         """
         cdef H5T_pad_t lsb
         cdef H5T_pad_t msb
@@ -639,9 +588,9 @@ cdef class TypeAtomicID(TypeID):
         """ (INT lsb_pad_code, INT msb_pad_code)
 
             Set the padding type.  Possible values are:
-                PAD_ZERO
-                PAD_ONE
-                PAD_BACKGROUND
+             PAD_ZERO
+             PAD_ONE
+             PAD_BACKGROUND
         """
         H5Tset_pad(self.id, <H5T_pad_t>lsb, <H5T_pad_t>msb)
 
@@ -655,7 +604,7 @@ cdef class TypeIntegerID(TypeAtomicID):
     def get_sign(self):
         """ () => INT sign
 
-            Obtain the "signedness" of the datatype; one of:
+            Get the "signedness" of the datatype; one of:
               SGN_NONE:  Unsigned
               SGN_2:     Signed 2's complement
         """
@@ -687,11 +636,11 @@ cdef class TypeFloatID(TypeAtomicID):
 
             Get information about floating-point bit fields.  See the HDF5
             docs for a better description.  Tuple has to following members:
-                0: UINT spos
-                1: UINT epos
-                2: UINT esize
-                3: UINT mpos
-                4: UINT msize
+             0: UINT spos
+             1: UINT epos
+             2: UINT esize
+             3: UINT mpos
+             4: UINT msize
         """
         cdef size_t spos, epos, esize, mpos, msize
         H5Tget_fields(self.id, &spos, &epos, &esize, &mpos, &msize)
@@ -724,9 +673,9 @@ cdef class TypeFloatID(TypeAtomicID):
         """ () => INT normalization_code
 
             Get the normalization strategy.  Legal values are:
-                NORM_IMPLIED
-                NORM_MSBSET
-                NORM_NONE
+             NORM_IMPLIED
+             NORM_MSBSET
+             NORM_NONE
         """
         return <int>H5Tget_norm(self.id)
 
@@ -734,9 +683,9 @@ cdef class TypeFloatID(TypeAtomicID):
         """ (INT normalization_code)
 
             Set the normalization strategy.  Legal values are:
-                NORM_IMPLIED
-                NORM_MSBSET
-                NORM_NONE
+             NORM_IMPLIED
+             NORM_MSBSET
+             NORM_NONE
         """
         H5Tset_norm(self.id, <H5T_norm_t>norm)
 
@@ -744,9 +693,9 @@ cdef class TypeFloatID(TypeAtomicID):
         """ () => INT pad_code
 
             Determine the internal padding strategy.  Legal values are:
-                PAD_ZERO
-                PAD_ONE
-                PAD_BACKGROUND
+             PAD_ZERO
+             PAD_ONE
+             PAD_BACKGROUND
         """
         return <int>H5Tget_inpad(self.id)
 
@@ -754,15 +703,15 @@ cdef class TypeFloatID(TypeAtomicID):
         """ (INT pad_code)
 
             Set the internal padding strategy.  Legal values are:
-                PAD_ZERO
-                PAD_ONE
-                PAD_BACKGROUND
+             PAD_ZERO
+             PAD_ONE
+             PAD_BACKGROUND
         """
         H5Tset_inpad(self.id, <H5T_pad_t>pad_code)
 
     cdef object py_dtype(self):
         # Translation function for floating-point types
-        return dtype( _order_map[self.get_order()] + "f" + 
+        return dtype( _order_map[self.get_order()] + "f" + \
                       str(self.get_size()) )
 
 
@@ -835,6 +784,8 @@ cdef class TypeCompoundID(TypeCompositeID):
             Determine the offset, in bytes, of the beginning of the specified
             member of a compound datatype.
         """
+        if member < 0:
+            raise ValueError("Member index must be non-negative.")
         return H5Tget_member_offset(self.id, member)
 
     def get_member_type(self, int member):
@@ -869,9 +820,10 @@ cdef class TypeCompoundID(TypeCompositeID):
         cdef TypeID tmp_type
         cdef list field_names
         cdef list field_types
-        nfields = self.get_nmembers()
+        cdef int nfields
         field_names = []
         field_types = []
+        nfields = self.get_nmembers()
 
         # First step: read field names and their Numpy dtypes into 
         # two separate arrays.
@@ -885,7 +837,7 @@ cdef class TypeCompoundID(TypeCompositeID):
                 tmp_type.close()
 
         # 1. Check if it should be converted to a complex number
-        if len(field_names) == 2                    and \
+        if len(field_names) == 2                     and \
             tuple(field_names) == self.complex_names and \
             field_types[0] == field_types[1]         and \
             field_types[0].kind == 'f':
@@ -991,18 +943,19 @@ cdef class TypeEnumID(TypeCompositeID):
         return val
 
     cdef object py_dtype(self):
-        # Translation function for enum types
+        # Translation function for enum types; returns the base integer dtype
         cdef TypeID tmp_type
         tmp_type = self.get_super()
         try:
             typeobj = tmp_type.py_dtype()
         finally:
             tmp_type.close()
+        return typeobj
 
 # === Python extension functions ==============================================
 
 
-def py_create(dtype dt not None, object complex_names=None):
+def py_create(dtype dt not None, object complex_names=None, enum=None):
     """ ( DTYPE dt, TUPLE complex_names=None, DICT enum=None) => TypeID
 
         Given a Numpy dtype object, generate a byte-for-byte memory-compatible
@@ -1011,10 +964,19 @@ def py_create(dtype dt not None, object complex_names=None):
 
         complex_names:
             Specifies when and how to interpret Python complex numbers as
-            HDF5 compound datatypes.  May be None or a tuple with strings 
+            HDF5 compound datatypes.  May be None or a tuple with strings
             (real name, img name).  "None" indicates the default mapping of
-            ("r", "i"). This option is also applied to subtypes of arrays 
-            and compound types.
+            ("r", "i").
+
+            This option is applied recursively to subtypes of arrays and
+            compound types.  Additionally, these names are stored in the
+            returned HDF5 type object.
+
+        enum:
+            A dictionary mapping names to integer values.  If the type being
+            converted is an integer (kind i/u), the resulting HDF5 type will
+            be an enumeration with that base type, and the given values.
+            Ignored for all other types.
     """
     cdef TypeID otype
     cdef TypeID base
@@ -1026,28 +988,16 @@ def py_create(dtype dt not None, object complex_names=None):
 
     otype = None
 
-    if complex_names is None:
-        complex_names = DEFAULT_COMPLEX_NAMES
-    else: 
-        _validate_complex(complex_names)
+    _complex_names = ('r','i')
 
     kind = dt.kind
     byteorder = dt.byteorder
     length = int(dt.str[2:])  # is there a better way to do this?
     names = dt.names
-        
-    # Check for an enum dict first
-    if enum is not None:
-        if kind != c'i' and kind != c'u':
-            raise ValueError("Enumerated types may only have integer bases.")
-    
-        otype = enum_create(_code_map[dt.str])
 
-        for key in sorted(enum):
-            otype.enum_insert(key, enum[key])
 
-    # Anything with field names is considered to be a compound type
-    elif names is not None:
+    # Void types with field names are considered to be compound
+    if kind == c'V' and names is not None:
         otype = create(H5T_COMPOUND, length)
         for name in names:
             dt_tmp, offset = dt.fields[name]
@@ -1057,12 +1007,18 @@ def py_create(dtype dt not None, object complex_names=None):
             finally:
                 tmp.close()
 
+    # Enums may be created out of integer types
+    elif (kind == c'u' or kind == c'i') and enum is not None:
+
+        otype = enum_create(_code_map[dt.str])
+
+        for key in sorted(enum):
+            otype.enum_insert(key, enum[key])
+
     # Integers and floats map directly to HDF5 atomic types
-    elif kind == c'u' or kind  == c'i' or kind == c'f': 
-        try:
-            otype =  _code_map[dt.str].copy()
-        except KeyError:
-            raise ValueError("Failed to find '%s' in atomic code map" % dt.str)
+    elif kind == c'u' or kind == c'i' or kind == c'f':
+
+        otype =  _code_map[dt.str].copy()
 
     # Complex numbers are stored as HDF5 structs, with names defined at runtime
     elif kind == c'c':
@@ -1087,9 +1043,9 @@ def py_create(dtype dt not None, object complex_names=None):
         else:
             otype = create(H5T_OPAQUE, length)
                 
-    # Strings are assumed to be stored C-style.
+    # Strings are stored C-style; everything after the first NULL is garbage.
     elif kind == c'S':
-        otype = typewrap(H5Tcopy(H5T_C_S1))
+        otype = C_S1.copy()
         otype.set_size(length)
 
     else:
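
py_create() now takes the enum dictionary as a keyword and applies it only to integer kinds; everything else follows the branches above. A few hedged examples of the mapping:

    import numpy as np
    import h5py.h5t as h5t

    h5t.py_create(np.dtype('<i4'))                    # atomic little-endian int
    h5t.py_create(np.dtype('<i4'),
                  enum={'RED': 0, 'GREEN': 1})        # HDF5 enum on an i4 base
    h5t.py_create(np.dtype('<c8'))                    # compound with "r"/"i" fields
    h5t.py_create(np.dtype('S10'))                    # fixed-length C-style string
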
diff --git a/h5py/h5z.pyx b/h5py/h5z.pyx
index 18365fb..d662da3 100644
--- a/h5py/h5z.pyx
+++ b/h5py/h5z.pyx
@@ -62,10 +62,10 @@ def filter_avail(int filter_code):
         Determine if the given filter is available to the library.
 
         The filter code should be one of:
-            FILTER_DEFLATE
-            FILTER_SHUFFLE
-            FILTER_FLETCHER32
-            FILTER_SZIP
+         FILTER_DEFLATE
+         FILTER_SHUFFLE
+         FILTER_FLETCHER32
+         FILTER_SZIP
     """
     return pybool(H5Zfilter_avail(<H5Z_filter_t>filter_code))
 
@@ -75,14 +75,14 @@ def get_filter_info(int filter_code):
         Retrieve a bitfield with information about the given filter.
 
         The filter code should be one of:
-            FILTER_DEFLATE
-            FILTER_SHUFFLE
-            FILTER_FLETCHER32
-            FILTER_SZIP
+         FILTER_DEFLATE
+         FILTER_SHUFFLE
+         FILTER_FLETCHER32
+         FILTER_SZIP
 
         Valid bitmasks for use with the returned bitfield are:
-          FILTER_CONFIG_ENCODE_ENABLED
-          FILTER_CONFIG_DECODE_ENABLED
+         FILTER_CONFIG_ENCODE_ENABLED
+         FILTER_CONFIG_DECODE_ENABLED
     """
     cdef unsigned int flags
     H5Zget_filter_info(<H5Z_filter_t>filter_id, &flags)
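
The highlevel.py rewrite below replaces the removed py_* helpers: Dataset() now requires either an existing array or an explicit shape/dtype pair, and builds the chunk/shuffle/deflate/fletcher32 options onto a dataset-creation property list itself. A sketch of the constructor usage (module path, file mode and keyword spellings are assumptions inferred from the code below):

    import numpy as np
    from h5py import highlevel

    f = highlevel.File("demo.hdf5", "r+")
    root = f["/"]

    # Pass data directly ...
    a = highlevel.Dataset(root, "a", data=np.arange(10))

    # ... or give shape and dtype together, plus optional filter keywords
    b = highlevel.Dataset(root, "b", shape=(100, 100), dtype=np.dtype('<f4'),
                          compression=6, shuffle=True)
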
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
index 4f862b1..975a9c5 100644
--- a/h5py/highlevel.py
+++ b/h5py/highlevel.py
@@ -63,51 +63,18 @@ from h5e import H5Error
 class Dataset(object):
 
     """ High-level interface to an HDF5 dataset
-
-        A Dataset object is designed to permit "Numpy-like" access to the 
-        underlying HDF5 dataset.  It supports array-style indexing, which 
-        returns Numpy ndarrays.  "Extended-recarray" slicing is also possible;
-        specify the names of fields you want along with the numerical slices.
-        The underlying array can also be written to using the indexing syntax.
-
-        HDF5 attribute access is provided through the property obj.attrs.  See
-        the AttributeManager class documentation for more information.
-
-        Read-only properties:
-        shape       Tuple containing array dimensions
-        dtype       A Numpy dtype representing the array data-type.
-
-        Writable properties:
-        cnames:     HDF5 compound names used for complex I/O.  This can be
-                    None, (), or a 2-tuple with ("realname", "imgname").
     """
 
     # --- Properties (Dataset) ------------------------------------------------
 
-    #: Numpy-style shape tuple giving dataset dimensions
-    shape = property(lambda self: h5d.py_shape(self.id))
-
-    #: Numpy dtype representing the datatype
-    dtype = property(lambda self: h5d.py_dtype(self.id))
+    shape = property(lambda self: self.id.shape,
+        doc = "Numpy-style shape tuple giving dataset dimensions")
 
-    def _set_byteorder(self, order):
-        if order is not None:
-            h5t._validate_byteorder(order)
-        self._byteorder = order
-    
-    #: Set to <, > or = to coerce I/0 to a particular byteorder, or None to use default.
-    byteorder = property(lambda self: self._byteorder, _set_byteorder)
+    dtype = property(lambda self: self.id.dtype,
+        doc = "Numpy dtype representing the datatype")
 
-    def _set_cnames(self, names):
-        if names is not None:
-            h5t._validate_complex(names)
-        self._cnames = names
-
-    #: Set to (realname, imgname) to control I/O of Python complex numbers.
-    cnames = property(lambda self: self._cnames, _set_cnames)
-
-    #: Provides access to HDF5 attributes. See AttributeManager docstring.
-    attrs = property(lambda self: self._attrs)
+    attrs = property(lambda self: self._attrs,
+        doc = "Provides access to HDF5 attributes")
 
     # --- Public interface (Dataset) ------------------------------------------
 
@@ -143,12 +110,32 @@ class Dataset(object):
                 raise ValueError('You cannot specify keywords when opening a dataset.')
             self.id = h5d.open(group.id, name)
         else:
-            self.id = h5d.py_create(group.id, name, data, shape, 
-                                    chunks, compression, shuffle, fletcher32)
+            if ((data is None) and not (shape and dtype)) or \
+               ((data is not None) and (shape or dtype)):
+                raise ValueError("Either data or both shape and dtype must be specified.")
+            
+            if data is not None:
+                shape = data.shape
+                dtype = data.dtype
+
+            plist = h5p.create(h5p.DATASET_CREATE)
+            if chunks:
+                plist.set_chunks(chunks)
+            if shuffle:
+                plist.set_shuffle()
+            if compression:
+                plist.set_deflate(compression)
+            if fletcher32:
+                plist.set_fletcher32()
+
+            space_id = h5s.create_simple(shape)
+            type_id = h5t.py_create(dtype)
+
+            self.id = h5d.create(group.id, name, type_id, space_id, plist)
+            if data is not None:
+                self.id.write(h5s.ALL, h5s.ALL, data)
 
         self._attrs = AttributeManager(self)
-        self._byteorder = None
-        self._cnames = None
 
     def __getitem__(self, args):
         """ Read a slice from the underlying HDF5 array.  Takes slices and
@@ -164,20 +151,35 @@ class Dataset(object):
         """
         start, count, stride, names = slicer(self.shape, args)
 
-        if names is not None and self.dtype.names is None:
-            raise ValueError('This dataset has no named fields.')
-        tid = 0
-        try:
-            tid = h5d.get_type(self.id)
-            dt = h5t.py_translate_h5t(tid, byteorder=self._byteorder,
-                                     compound_names=names,
-                                     complex_names=self._cnames)
-        finally:
-            if tid != 0:
-                h5t.close(tid)
+        if not (len(start) == len(count) == len(stride) == self.id.rank):
+            raise ValueError("Indices do not match dataset rank (%d)" % self.id.rank)
+
+        htype = self.id.get_type()
+        if len(names) > 0:
+            if htype.get_class() == h5t.COMPOUND:
+                mtype = h5t.create(h5t.COMPOUND)
+
+                offset = 0
+                for idx in range(htype.get_nmembers()):
+                    hname = htype.get_member_name(idx)
+                    if hname in names:
+                        subtype = htype.get_member_type(idx)
+                        mtype.insert(hname, offset, subtype)
+                        offset += subtype.get_size()
+            else:
+                raise ValueError("This dataset has no named fields.")
+        else:
+            mtype = htype
+
+        fspace = self.id.get_space()
+        fspace.select_hyperslab(start, count, stride)
+        mspace = h5s.create_simple(count)
 
-        arr = h5d.py_read_slab(self.id, start, count, stride, dtype=dt)
-        if names is not None and len(names) == 1:
+        arr = ndarray(count, mtype.dtype)
+
+        self.id.read(mspace, fspace, arr)
+
+        if len(names) == 1:
             # Match Numpy convention for recarray indexing
             return arr[names[0]]
         return arr
@@ -190,21 +192,12 @@ class Dataset(object):
         """
         val = args[-1]
         start, count, stride, names = slicer(val.shape, args[:-1])
-        if names is not None:
+        if len(names) > 0:
             raise ValueError("Field names are not allowed for write.")
 
+        self.id.
         h5d.py_write_slab(self.id, args[-1], start, stride)
 
-    def close(self):
-        """ Force the HDF5 library to close and free this object. This
-            will be called automatically when the object is garbage collected,
-            if it hasn't already.
-        """
-        h5d.close(self.id)
-
-    def __del__(self):
-        if h5i.get_type(self.id) == h5i.DATASET:
-            h5d.close(self.id)
 
     def __str__(self):
         return 'Dataset: '+str(self.shape)+'  '+repr(self.dtype)
@@ -529,259 +522,42 @@ class NamedType(object):
         if self.id is not None:
             h5t.close(self.id)
 
-
-# === Browsing and interactivity ==============================================
-
-import inspect
-import string
-import posixpath
-
-
-class _H5Browse(object):
-
-    def __init__(self):
-        self.filename = None
-        self.file_obj = None
-        self.path = None
-
-    def _loadfile(self, filename):
-        if self.file_obj is not None:
-            self.file_obj.close()
-            self.filename = None
-
-        self.file_obj = File(filename, 'r+')
-        self.filename = filename
-
-    def __call__(self, filename=None, importdict=None):
-        """ Browse a new file, or the current one.
-        """
-        if filename is not None:
-            self._loadfile(filename)
-        else:
-            if self.file_obj is None:
-                raise ValueError("Must provide filename if no file is currently open")
-
-        if importdict is None:  # hang on tight... here we go...
-            importdict = inspect.currentframe().f_back.f_globals
-
-        cmdinstance = _H5Cmd(self.file_obj, self.filename, importdict, self.path)
-        cmdinstance.browse()
-        self.path = cmdinstance.path
-
-class _H5Cmd(cmd.Cmd):
-
-    def __init__(self, file_obj, filename, importdict, groupname=None):
-        cmd.Cmd.__init__(self)
-        self.file = file_obj
-        self.filename = filename
-
-        if groupname is None:
-            groupname = '/'
-        self.group = self.file[groupname]
-        self.path = groupname
-
-        self.prompt = os.path.basename(self.filename)+' '+os.path.basename(self.path)+'> '
-
-        self.importdict = importdict
-
-    def browse(self):
-        self.cmdloop('Browsing "%s". Type "help" for commands, "exit" to exit.' % os.path.basename(self.filename))
-
-    def _safename(self, name):
-        legal = string.ascii_letters + '0123456789'
-        instring = list(name)
-        for idx, x in enumerate(instring):
-            if x not in legal:
-                instring[idx] = '_'
-        if instring[0] not in string.ascii_letters:
-            instring = ['_']+instring
-        return ''.join(instring)
-
-    def do_ls(self, line):
-
-        def padline(line, width, trunc=True):
-            slen = len(line)
-            if slen >= width:
-                if trunc:
-                    line = line[0:width-4]+'... '
-                else:
-                    line = line+' '
-            else:
-                line = line + ' '*(width-slen)
-            return line
-
-        extended = False
-        trunc = True
-        if line.strip() == '-l':
-            extended = True
-        if line.strip() == '-ll':
-            extended = True
-            trunc = False
-
-        for name in self.group:
-            outstring = name
-            type_code = h5g.get_objinfo(self.group.id, name).type
-            if type_code == h5g.GROUP:
-                outstring += "/"
-
-            if extended:
-                outstring = padline(outstring, 20, trunc)
-                codestring = str(self.group[name])
-                outstring += padline(codestring, 60, trunc)
-
-            print outstring
-
-    def do_cd(self, path):
-        """ cd <path>
-        """
-        path = posixpath.normpath(posixpath.join(self.path, path))
-        try:
-            group = Group(self.file, path)
-            self.prompt = os.path.basename(self.filename)+' '+os.path.basename(path)+'> '
-        except H5Error, e:
-            print e.message
-        self.path = path
-        self.group = group
-
-    def do_import(self, line):
-        if self.importdict is None:
-            print "Can't import variables (no import dict provided)."
-        line = line.strip()
-        objname, as_string, newname = line.partition(' as ')
-        newname = newname.strip()
-        objname = objname.strip()
-        if len(newname) == 0:
-            newname = objname
-        try:
-            self.importdict[newname] = self.group[objname]
-        except H5Error, e:
-            print e.message
-
-    def do_exit(self, line):
-        return True
-
-    def do_EOF(self, line):
-        return self.do_exit(line)
-
-    def do_pwd(self, line):
-        print self.path
-
-    def complete_import(self, text, line, begidx, endidx):
-        return [x for x in self.group if x.find(text)==0]
-
-    def complete_cd(self, text, line, begidx, endidx):
-        return [x for x in self.group if x.find(text)==0 \
-                    and h5g.get_objinfo(self.group.id,x).type == h5g.GROUP]
-
-    def help_cd(self):
-        print ""
-        print "cd <name>"
-        print "    Enter a subgroup of the current group"
-        print ""
-
-    def help_pwd(self):
-        print ""
-        print "pwd"
-        print "    Print current path"
-        print ""
-
-    def help_ls(self):
-        print ""
-        print "ls [-l] [-ll]"
-        print "    Print the contents of the current group."
-        print "    Optional long format with -l (80 columns)"
-        print "    Very long format (-ll) has no column limit."
-        print ""
-
-    def help_import(self):
-        print ""
-        print "import <name> [as <python_name>]"
-        print "    Import a member of the current group as a Python object" 
-        print "    at the interactive level, optionally under a different"
-        print "    name."
-        print ""
-
-
-
 # === Utility functions =======================================================
 
-def _open_arbitrary(group_obj, name):
-    """ Figure out the type of an object attached to an HDF5 group and return 
-        the appropriate high-level interface object.
-
-        Currently supports Group, Dataset, and NamedDatatype
-    """
-    info = h5g.get_objinfo(group_obj.id, name)
-
-    if info.type == h5g.GROUP:      # group
-        return Group(group_obj, name)
-
-    elif info.type == h5g.DATASET:  # dataset
-        return Dataset(group_obj, name)
-
-    elif info.type == h5g.DATATYPE: # named type
-        return NamedDatatype(group_obj, name)
-
-    raise NotImplementedError('Object type "%s" unsupported by the high-level interface.' % h5g.PY_TYPE[info.type])
-
 def slicer(shape, args):
-    """ Processes arguments to __getitem__ methods.  
-    
-        shape:  Dataset shape (tuple)
-        args:   Raw __getitem__ args; integers, slices or strings in any order.
-        
-        Returns 4-tuple:
-        (start, count, stride, names)
-        Start/count/stride are guaranteed not to be None.
-        Names will either be None or a list of non-zero length.
     """
+        Parse Numpy-style extended slices.  Correctly handle:
+        1. Recarray-style field strings (more than one!)
+        2. Slice objects
+        3. Ellipsis objects
+    """
+    rank = len(shape)
 
     if not isinstance(args, tuple):
         args = (args,)
+    args = list(args)
 
-    rank = len(shape)
-    
-    slices = []     # Holds both slice objects and integer indices.
-    names = []      # Field names (strings)
-
-    # Sort slice-like arguments from strings
-    for arg in args:
-        if isinstance(arg, int) or isinstance(arg, long) or isinstance(arg, slice):
-            slices.append(arg)
-        elif isinstance(arg, str):
-            names.append(arg)
-        else:
-            raise TypeError("Unsupported slice type (must be int/long/slice/str): %s" % repr(arg))
-
-    # If there are no names, this is interpreted to mean "all names."  So
-    # return None instead of an empty sequence.
-    if len(names) == 0:
-        names = None
-    else:
-        names = tuple(names)
-
-    # Check for special cases
-
-    # 1. No numeric slices == full dataspace
-    if len(slices) == 0:
-            return ((0,)*rank, shape, (1,)*rank, names)
+    slices = []
+    names = []
 
-    # 2. Single numeric slice ":" == full dataspace
-    if len(slices) == 1 and isinstance(slices[0], slice):
-        slice_ = slices[0]
-        if slice_.stop == None and slice_.step == None and slice_.stop == None:
-            return ((0,)*rank, shape, (1,)*rank, names)
-
-    # Validate slices
-    if len(slices) != rank:
-        raise ValueError("Number of numeric slices must match dataset rank (%d)" % rank)
+    # Sort arguments
+    for entry in args[:]:
+        if isinstance(entry, str):
+            names.append(entry)
+        else:
+            slices.append(entry)
 
     start = []
     count = []
     stride = []
 
-    # Parse slices to assemble hyperslab start/count/stride tuples
+    # Hack to allow Numpy-style row indexing (a lone index/slice selects the rest in full)
+    if len(slices) == 1 and slices[0] is not Ellipsis:
+        slices.append(Ellipsis)
+
+    # Expand integers and ellipsis arguments to slices
     for dim, arg in enumerate(slices):
+
         if isinstance(arg, int) or isinstance(arg, long):
             if arg < 0:
                 raise ValueError("Negative indices are not allowed.")
@@ -823,20 +599,20 @@ def slicer(shape, args):
             stride.append(st)
             count.append(cc)
 
-    return (tuple(start), tuple(count), tuple(stride), names)
-
-
-#: Command-line HDF5 file "shell": browse(name) (or browse() for last file).
-browse = _H5Browse()
-
-
-
-
-
-
-
+        elif arg == Ellipsis:
+            nslices = rank-(len(slices)-1)
+            if nslices <= 0:
+                continue
+            for x in range(nslices):
+                idx = dim+x
+                start.append(0)
+                count.append(shape[idx])
+                stride.append(1)
 
+        else:
+            raise ValueError("Bad slice type %s" % repr(arg))
 
+    return (start, count, stride, names)
 
 
 
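As a quick illustration of what the rewritten argument parser is meant to accept,
here is a rough sketch.  The shape, indices and field names are invented for
illustration only, and the exact start/count/stride values depend on the integer-
and slice-handling code elided from this hunk:

    shape = (10, 20, 30)

    # Mixed integer / slice / Ellipsis arguments; slicer as defined above.
    start, count, stride, names = slicer(shape, (2, slice(0, 10), Ellipsis))
    # names comes back as an empty list, since no field strings were given.

    # Recarray-style field strings are split out into names; the Ellipsis
    # expands to the full extent of every remaining dimension.
    start, count, stride, names = slicer(shape, (Ellipsis, 'a_name', 'b_name'))

    # A bare index uses the row-indexing shortcut above and behaves like
    # Numpy row indexing, i.e. the same as (5, Ellipsis).
    start, count, stride, names = slicer(shape, 5)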
diff --git a/h5py/tests/__init__.py b/h5py/tests/__init__.py
index f7f5501..9650cfa 100644
--- a/h5py/tests/__init__.py
+++ b/h5py/tests/__init__.py
@@ -12,14 +12,13 @@
 
 import unittest
 import sys
-import test_h5a, test_h5f, test_h5i, test_h5d, \
-        test_h5g, test_h5, test_h5s, test_h5p, test_highlevel
+import test_h5a, test_h5d, test_h5f, test_h5g, test_h5i, test_h5p
 
-from h5py import h5a, h5f, h5g, h5d, h5s, h5i, h5z, h5p, highlevel
+from h5py import h5a, h5f, h5g, h5d, h5s, h5i, h5z, h5p
 
-TEST_CASES = (test_h5a.TestH5A, test_h5f.TestH5F, test_h5g.TestH5G,
-              test_h5i.TestH5I, test_h5d.TestH5D, test_h5.TestH5,
-              test_h5s.TestH5S, test_h5p.TestH5P, test_highlevel.TestHighlevel)
+TEST_CASES = (test_h5a.TestH5A, test_h5d.TestH5D, test_h5f.TestH5F, 
+              test_h5g.TestH5G, test_h5i.TestH5I, test_h5p.TestH5P)#, test_h5.TestH5,
+              #test_h5s.TestH5S, test_highlevel.TestHighlevel)
 
 def buildsuite(cases):
 
diff --git a/h5py/tests/common.py b/h5py/tests/common.py
index 109ca1a..7669bc3 100644
--- a/h5py/tests/common.py
+++ b/h5py/tests/common.py
@@ -15,23 +15,35 @@ import os
 import shutil
 from h5py import h5f, h5p
 
-def getcopy(filename):
-    """ Create a temporary working copy of "filename". Return is a 2-tuple
-        containing (HDF5 file id, file name)
+class HCopy(object):
+
+    """
+        Use:
+
+        from __future__ import with_statement
+
+        with HCopy(filename) as fid:
+            fid.frob()
+            obj = h5g.open(fid, whatever)
+            ...
     """
-    newname = tempfile.mktemp('.hdf5')
-    shutil.copy(filename, newname)
+    def __init__(self, filename):
+        self.filename = filename
+        self.tmpname = None
 
-    plist = h5p.create(h5p.FILE_ACCESS)
-    h5p.set_fclose_degree(plist, h5f.CLOSE_STRONG)
-    fid = h5f.open(newname, h5f.ACC_RDWR)
-    h5p.close(plist)
+    def __enter__(self):
+        self.tmpname = tempfile.mktemp('.hdf5')
+        shutil.copy(self.filename, self.tmpname)
 
-    return (fid, newname)
+        plist = h5p.create(h5p.FILE_ACCESS)
+        plist.set_fclose_degree(h5f.CLOSE_STRONG)
+        self.fid = h5f.open(self.tmpname, h5f.ACC_RDWR)
+        plist.close()
+        return self.fid
 
-def deletecopy(fid, newname):
-    h5f.close(fid)
-    os.unlink(newname)
+    def __exit__(self, *args):
+        self.fid.close()
+        os.unlink(self.tmpname)
 
 def errstr(arg1, arg2, msg=''):
     """ Used to mimic assertEqual-style auto-repr, where assertEqual doesn't
diff --git a/h5py/tests/test_h5a.py b/h5py/tests/test_h5a.py
index e8beb4e..d55333d 100644
--- a/h5py/tests/test_h5a.py
+++ b/h5py/tests/test_h5a.py
@@ -9,17 +9,17 @@
 # $Date$
 # 
 #-
+from __future__ import with_statement
 
 import unittest
 from numpy import array, ndarray, dtype, all, ones
 import os
 
-import h5py
-from h5py import h5a
-from h5py import h5f, h5g, h5i, h5t, h5s
-from h5py.h5e import H5Error
+from common import HCopy, errstr
 
-from common import getcopy, deletecopy, errstr
+import h5py
+from h5py import h5, h5a, h5f, h5g, h5i, h5t, h5s
+from h5py.h5 import H5Error
 
 
 HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/attributes.hdf5')
@@ -38,72 +38,70 @@ class TestH5A(unittest.TestCase):
         self.obj = h5g.open(self.fid, OBJECTNAME)
 
     def tearDown(self):
-        h5g.close(self.obj)
-        h5f.close(self.fid)
+        self.obj.close()
+        self.fid.close()
 
-    def is_attr(self, aid):
-        return (h5i.get_type(aid) == h5i.ATTR)
+    def is_attr(self, attr):
+        return (h5i.get_type(attr) == h5i.ATTR)
 
     # === General attribute operations ========================================
 
     def test_create_write(self):
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-        for name, (value, dt, shape) in NEW_ATTRIBUTES.iteritems():
-            arr_ref = array(value, dtype=dt)
-            arr_fail = ones((15,15), dtype=dt)
-
-            sid = h5s.create(h5s.SCALAR)
-            tid = h5t.py_translate_dtype(dt)
-
-            aid = h5a.create(obj, name, tid, sid)
-            self.assert_(self.is_attr(aid))
-            h5a.write(aid, arr_ref)
-            self.assertRaises(ValueError, h5a.write, aid, arr_fail)
-            h5a.close(aid)
-
-            arr_val = h5a.py_get(obj,name)
-            self.assert_(all(arr_val == arr_ref), errstr(arr_val, arr_ref))
-            h5s.close(sid)
-        h5g.close(obj)
-        deletecopy(fid, filename)
-        
-        self.assertRaises(H5Error, h5a.create, -1, "FOOBAR", -1, -1)
-        self.assertRaises(H5Error, h5a.write, -1, arr_ref)
+
+        with HCopy(HDFNAME) as fid:
+            obj = h5g.open(fid, OBJECTNAME)
+            for name, (value, dt, shape) in NEW_ATTRIBUTES.iteritems():
+                arr_ref = array(value, dtype=dt)
+                arr_fail = ones((15,15), dtype=dt)
+
+                space = h5s.create(h5s.SCALAR)
+                htype = h5t.py_create(dt)
+
+                attr = h5a.create(obj, name, htype, space)
+                self.assert_(self.is_attr(attr))
+                attr.write(arr_ref)
+                self.assertRaises(ValueError, attr.write, arr_fail)
+                attr.close()
+
+                attr = h5a.open_name(obj, name)
+                dt = attr.dtype
+                shape = attr.shape
+                arr_val = ndarray(shape, dtype=dt)
+                attr.read(arr_val)
+                attr.close()
+                self.assert_(all(arr_val == arr_ref), errstr(arr_val, arr_ref))
+
+            obj.close()
 
     def test_open_idx(self):
         for idx, name in enumerate(ATTRIBUTES_ORDER):
-            aid = h5a.open_idx(self.obj, idx)
-            self.assert_(self.is_attr(aid), "Open: index %d" % idx)
-            h5a.close(aid)
-    
-        self.assertRaises(H5Error, h5a.open_idx, -1, 0)
+            attr = h5a.open_idx(self.obj, idx)
+            self.assert_(self.is_attr(attr), "Open: index %d" % idx)
+            attr.close()
 
     def test_open_name(self):
         for name in ATTRIBUTES:
-            aid = h5a.open_name(self.obj, name)
-            self.assert_(self.is_attr(aid), 'Open: name "%s"' % name)
-            h5a.close(aid)
-
-        self.assertRaises(H5Error, h5a.open_name, -1, "foo")
+            attr = h5a.open_name(self.obj, name)
+            self.assert_(self.is_attr(attr), 'Open: name "%s"' % name)
+            attr.close()
 
     def test_close(self):
-        aid = h5a.open_idx(self.obj, 0)
-        self.assert_(self.is_attr(aid))
-        h5a.close(aid)
-        self.assert_(not self.is_attr(aid))
-    
-        self.assertRaises(H5Error, h5a.close, -1)
+        attr = h5a.open_idx(self.obj, 0)
+        self.assert_(self.is_attr(attr))
+        attr.close()
+        self.assert_(not self.is_attr(attr))
 
     def test_delete(self):
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-        self.assert_(h5a.py_exists(obj, ATTRIBUTES_ORDER[0]))
-        h5a.delete(obj, ATTRIBUTES_ORDER[0])
-        self.assert_(not h5a.py_exists(obj, ATTRIBUTES_ORDER[0]))
-        deletecopy(fid, filename)
+        with HCopy(HDFNAME) as fid:
+            obj = h5g.open(fid, OBJECTNAME)
+
+            attr = h5a.open_name(obj, ATTRIBUTES_ORDER[0])
+            self.assert_(self.is_attr(attr))
+            attr.close()
+
+            h5a.delete(obj, ATTRIBUTES_ORDER[0])
+            self.assertRaises(H5Error, h5a.open_name, obj, ATTRIBUTES_ORDER[0])
 
-        self.assertRaises(H5Error, h5a.delete, -1, "foo")
 
     # === Attribute I/O =======================================================
 
@@ -111,64 +109,46 @@ class TestH5A(unittest.TestCase):
         for name in ATTRIBUTES:
             value, dt, shape = ATTRIBUTES[name]
 
-            aid = h5a.open_name(self.obj, name)
+            attr = h5a.open_name(self.obj, name)
             arr_holder = ndarray(shape, dtype=dt)
             arr_reference = array(value, dtype=dt)
 
-            if len(shape) != 0:
-                arr_fail = ndarray((), dtype=dt)
-                self.assertRaises(ValueError, h5a.read, aid, arr_fail)
+            self.assertEqual(attr.shape, shape)
+            self.assertEqual(attr.dtype, dt)
 
-            h5a.read(aid, arr_holder)
+            attr.read(arr_holder)
             self.assert_( all(arr_holder == arr_reference),
                 errstr(arr_reference, arr_holder, 'Attr "%s"):\n' % name, ))
 
-            h5a.close(aid)
-        
-        self.assertRaises(H5Error, h5a.read, -1, arr_holder)
+            attr.close()
 
-    # h5a.write is done by test_create_write
+    # write is done by test_create_write
 
     # === Attribute inspection ================================================
 
     def test_get_num_attrs(self):
         n = h5a.get_num_attrs(self.obj)
         self.assertEqual(n, len(ATTRIBUTES))
-        self.assertRaises(H5Error, h5a.get_num_attrs, -1)
 
     def test_get_name(self):
     
         for name in ATTRIBUTES:
-            aid = h5a.open_name(self.obj, name)
-            supposed_name = h5a.get_name(aid)
-            self.assertEqual(supposed_name, name)
-            h5a.close(aid)
-
-        self.assertRaises(H5Error, h5a.get_name, -1)
+            attr = h5a.open_name(self.obj, name)
+            self.assertEqual(attr.get_name(), name)
 
     def test_get_space(self):
 
         for name, (value, dt, shape) in ATTRIBUTES.iteritems():
-            aid = h5a.open_name(self.obj, name)
-            sid = h5a.get_space(aid)
-            shape_tpl = h5s.get_simple_extent_dims(sid)
+            attr = h5a.open_name(self.obj, name)
+            space = attr.get_space()
+            shape_tpl = space.get_simple_extent_dims()
             self.assertEqual(shape_tpl, shape)
-            h5s.close(sid)
-            h5a.close(aid)
-
-        self.assertRaises(H5Error, h5a.get_space, -1)
 
     def test_get_type(self):
 
         for name, (value, dt, shape) in ATTRIBUTES.iteritems():
-            aid = h5a.open_name(self.obj, name)
-            tid = h5a.get_type(aid)
-            supposed_dtype = h5t.py_translate_h5t(tid)
-            self.assertEqual(supposed_dtype, dt)
-            h5t.close(tid)
-            h5a.close(aid)
-
-        self.assertRaises(H5Error, h5a.get_type, -1)
+            attr = h5a.open_name(self.obj, name)
+            htype = attr.get_type()
 
     def test_iterate(self):
 
@@ -201,65 +181,6 @@ class TestH5A(unittest.TestCase):
         h5a.iterate(self.obj, iterate_two, namelist, 1)
         self.assertEqual(namelist, ATTRIBUTES_ORDER[1:3])
 
-        self.assertRaises(H5Error, h5a.iterate, -1, iterate_two, namelist)
-
-
-    # === Python extensions ===================================================
-
-    def test_py_listattrs(self):
-        self.assertEqual(h5a.py_listattrs(self.obj), ATTRIBUTES_ORDER)
-        self.assertRaises(H5Error, h5a.py_listattrs, -1)
-
-    def test_py_shape(self):
-        
-        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
-            aid = h5a.open_name(self.obj, name)
-            retshape = h5a.py_shape(aid)
-            self.assertEqual(retshape, shape) 
-            h5a.close(aid)
-        self.assertRaises(H5Error, h5a.py_shape, -1)
-
-    def test_py_dtype(self):
-
-        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
-            aid = h5a.open_name(self.obj, name)
-            self.assertEqual(h5a.py_dtype(aid),dt)
-            h5a.close(aid)
-        self.assertRaises(H5Error, h5a.py_dtype, -1)
-
-    def test_py_get(self):
-
-        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
-            arr_reference = array(value, dtype=dt)
-            arr_returned = h5a.py_get(self.obj, name)
-            self.assert_(all(arr_returned == arr_reference), 
-                errstr(arr_reference, arr_returned))
-        self.assertRaises(H5Error, h5a.py_get, -1, "foo")
-
-    def test_py_set(self):
-
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-
-        for name, (value, dt, shape) in NEW_ATTRIBUTES.iteritems():
-            arr_reference = array(value, dtype=dt)
-            h5a.py_set(obj, name, arr_reference)
-            arr_ret = h5a.py_get(obj, name)
-            self.assert_( all( arr_ret == arr_reference), errstr(arr_ret, arr_reference))
-        h5g.close(obj)
-        deletecopy(fid, filename)
-
-        self.assertRaises(H5Error, h5a.py_set, -1, "foo", arr_reference)
-
-
-    def test_py_exists(self):
-
-        for name in ATTRIBUTES:
-            self.assert_(h5a.py_exists(self.obj, name), name)
-
-        self.assert_(not h5a.py_exists(self.obj, 'SOME OTHER ATTRIBUTE') )
-            
-        # py_exists will never intentionally raise an exception
 
 
 
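The migrated attribute tests above exercise the new object-style wrappers instead
of bare integer identifiers.  A minimal reading sketch, assuming an already-open
group object "obj" and an existing attribute name "name" (both placeholders here):

    from numpy import ndarray
    from h5py import h5a

    attr = h5a.open_name(obj, name)    # attribute object, not an integer id
    arr = ndarray(attr.shape, dtype=attr.dtype)
    attr.read(arr)                     # fills the Numpy array in place
    attr.close()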
diff --git a/h5py/tests/test_h5d.py b/h5py/tests/test_h5d.py
index 9921bf8..97bf3f9 100644
--- a/h5py/tests/test_h5d.py
+++ b/h5py/tests/test_h5d.py
@@ -9,14 +9,16 @@
 # $Date$
 # 
 #-
+from __future__ import with_statement
 
 import unittest
 import os
 import numpy
+from common import HCopy
 
 import h5py
 from h5py import h5f, h5d, h5i, h5s, h5t, h5p
-from h5py.h5e import H5Error
+from h5py.h5 import H5Error
 
 HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/smpl_compound_chunked.hdf5')
 DTYPE = numpy.dtype([('a_name','>i4'),
@@ -40,95 +42,51 @@ class TestH5D(unittest.TestCase):
 
     def setUp(self):
         self.fid = h5f.open(HDFNAME, h5f.ACC_RDONLY)
-        self.did = h5d.open(self.fid, "CompoundChunked")
+        self.dset = h5d.open(self.fid, "CompoundChunked")
 
     def tearDown(self):
-        h5d.close(self.did)
-        h5f.close(self.fid)
+        self.dset.close()
+        self.fid.close()
 
     def test_open_close(self):
-        h5d.close(self.did)
-        self.assertEqual(h5i.get_type(self.did), h5i.BADID)
-        self.did = h5d.open(self.fid, "CompoundChunked")
-        self.assertEqual(h5i.get_type(self.did), h5i.DATASET)
-
-        self.assertRaises(H5Error, h5d.open, self.fid, "Something else")
-        self.assertRaises(H5Error, h5d.close, -1)
+        with HCopy(HDFNAME) as fid:
+            dset = h5d.open(fid, "CompoundChunked")
+            self.assertEqual(h5i.get_type(dset), h5i.DATASET)
+            dset.close()
+            self.assertEqual(h5i.get_type(dset), h5i.BADID)
 
     def test_read(self):
         array = numpy.ndarray(SHAPE, dtype=DTYPE)
 
-        h5d.read(self.did, h5s.ALL, h5s.ALL, array)
+        self.dset.read(h5s.ALL, h5s.ALL, array)
         for name in DTYPE.fields:
             self.assert_(numpy.all(array[name] == basearray[name]), "%s::\n%s\n!=\n%s" % (name, array[name], basearray[name]))
 
-        self.assertRaises(H5Error, h5d.read, -1, h5s.ALL, h5s.ALL, array)
-
     def test_get_space(self):
-        sid = h5d.get_space(self.did)
-        try:
-            shape = h5s.get_simple_extent_dims(sid)
-            self.assertEqual(shape, SHAPE)
-        finally:
-            h5s.close(sid)
-        self.assertRaises(H5Error, h5d.get_space, -1)
+        space = self.dset.get_space()
+        self.assertEqual(space.get_simple_extent_dims(), SHAPE)
 
     def test_get_space_status(self):
-        status = h5d.get_space_status(self.did)
-        self.assert_(status in h5d.PY_SPACE_STATUS)
-        self.assertRaises(H5Error, h5d.get_space_status, -1)
+        status = self.dset.get_space_status()
+        self.assert_(status > 0)
 
-    def test_get_offset(self):
-        # Chunked datasets have no offset.  New test dset needed.
-        self.assertRaises(H5Error, h5d.get_offset, -1)
+    # Chunked datasets have no offset.  New test dset needed.
+    #
+    #def test_get_offset(self):
+    #    pass
 
     def test_get_storage_size(self):
-        # This function can't intentionally raise an exception.
-        self.assert_(h5d.get_storage_size(self.did) >= 0)
+        self.assert_(self.dset.get_storage_size() >= 0)
 
     def test_get_type(self):
-        # We're not testing datatype conversion here; that's for test_h5t
-        tid = h5d.get_type(self.did)
-        try:
-            self.assertEqual(h5i.get_type(tid), h5i.DATATYPE)
-        finally:
-            h5t.close(tid)
-        self.assertRaises(H5Error, h5d.get_type, -1)
+        self.assertEqual(self.dset.get_type().dtype, DTYPE)
 
     def test_get_create_plist(self):
-        pid = h5d.get_create_plist(self.did)
-        try:
-            self.assertEqual(h5i.get_type(pid), h5i.GENPROP_LST)
-        finally:
-            h5p.close(pid)
-
-        self.assertRaises(H5Error, h5d.get_create_plist, -1)
-
-    def test_py_shape(self):
-        self.assertEqual(h5d.py_shape(self.did), SHAPE)
-        self.assertRaises(H5Error, h5d.py_shape, -1)
-
-    def test_py_rank(self):
-        self.assertEqual(h5d.py_rank(self.did), 1)
-        self.assertRaises(H5Error, h5d.py_rank, -1)
-
-    def test_py_dtype(self):
-        self.assertEqual(type(h5d.py_dtype(self.did)), numpy.dtype)
-        self.assertRaises(H5Error, h5d.py_dtype, -1)
-        
-        
-
-
-
-
-
-
-
-
-
-
-
-
-    
+        pid = self.dset.get_create_plist()
+        self.assertEqual(h5i.get_type(pid), h5i.GENPROP_LST)
 
+    def test_py(self):
+        self.assertEqual(self.dset.dtype, DTYPE)
+        self.assertEqual(self.dset.shape, SHAPE)
+        self.assertEqual(self.dset.rank, len(SHAPE))
 
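Like test_h5a, these dataset tests lean on the HCopy context manager added to
tests/common.py.  Roughly, the pattern looks like the sketch below (HDFNAME
stands for whichever test file is being copied; Python 2.5 needs the
with_statement future import):

    from __future__ import with_statement
    from common import HCopy
    from h5py import h5d

    with HCopy(HDFNAME) as fid:
        # fid refers to a writable scratch copy of the original file
        dset = h5d.open(fid, "CompoundChunked")
        dset.close()
    # on exit the file id is closed and the temporary copy is unlinked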
diff --git a/h5py/tests/test_h5f.py b/h5py/tests/test_h5f.py
index 89bfef3..8c9c63f 100644
--- a/h5py/tests/test_h5f.py
+++ b/h5py/tests/test_h5f.py
@@ -16,8 +16,7 @@ import os
 
 import h5py
 from h5py import h5f, h5i, h5p
-from common import getcopy, deletecopy, errstr
-from h5py.h5e import H5Error
+from h5py.h5 import H5Error
 
 HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/attributes.hdf5')
 
@@ -27,30 +26,31 @@ class TestH5F(unittest.TestCase):
         self.fid = h5f.open(HDFNAME, h5f.ACC_RDONLY)
 
     def tearDown(self):
-        h5f.close(self.fid)
+        self.fid.close()
 
     def test_open_close(self):
         fid = h5f.open(HDFNAME, h5f.ACC_RDONLY)
         self.assertEqual(h5i.get_type(fid), h5i.FILE)
-        h5f.close(fid)
+        fid.close()
         self.assertEqual(h5i.get_type(fid), h5i.BADID)
 
         self.assertRaises(H5Error, h5f.open, 'SOME OTHER NAME')
-        self.assertRaises(H5Error, h5f.close, -1)
 
     def test_create(self):
         name = tempfile.mktemp('.hdf5')
         fid = h5f.create(name)
-        self.assertEqual(h5i.get_type(fid), h5i.FILE)
-        h5f.close(fid)
-        self.assertRaises(H5Error, h5f.create, name, h5f.ACC_EXCL)
-        os.unlink(name)
+        try:
+            self.assertEqual(h5i.get_type(fid), h5i.FILE)
+            fid.close()
+            self.assertRaises(H5Error, h5f.create, name, h5f.ACC_EXCL)
+        finally:
+            try:
+                os.unlink(name)
+            except OSError:
+                pass
 
     def test_flush(self):
-        fid, fname = getcopy(HDFNAME)
-        h5f.flush(fid)
-        self.assertRaises(H5Error, h5f.flush, -1)
-        deletecopy(fid, fname)
+        h5f.flush(self.fid)
 
     def test_is_hdf5(self):
         fd, name = tempfile.mkstemp('.hdf5')
@@ -58,34 +58,29 @@ class TestH5F(unittest.TestCase):
         try:
             self.assert_(not h5f.is_hdf5(name))
         finally:
-            os.unlink(name)
+            try:
+                os.unlink(name)
+            except OSError:
+                pass
 
         self.assert_(h5f.is_hdf5(HDFNAME))
 
     def test_get_filesize(self):
-
-        self.assertEqual(h5f.get_filesize(self.fid), os.stat(HDFNAME).st_size)
-        self.assertRaises(H5Error, h5f.get_filesize, -1)
+        self.assertEqual(self.fid.get_filesize(), os.stat(HDFNAME).st_size)
 
     def test_get_create_plist(self):
-        cplist = h5f.get_create_plist(self.fid)
-        self.assert_(h5p.equal(h5p.get_class(cplist), h5p.FILE_CREATE))
-        h5p.close(cplist)
-        self.assertRaises(H5Error, h5f.get_create_plist, -1)
+        cplist = self.fid.get_create_plist()
+        self.assert_(cplist.get_class().equal(h5p.FILE_CREATE))
 
     def test_get_access_plist(self):
-        aplist = h5f.get_access_plist(self.fid)
-        self.assert_(h5p.equal(h5p.get_class(aplist), h5p.FILE_ACCESS))
-        h5p.close(aplist)
-        self.assertRaises(H5Error, h5f.get_access_plist, -1)
+        aplist = self.fid.get_access_plist()
+        self.assert_(aplist.get_class().equal(h5p.FILE_ACCESS))
 
     def test_get_freespace(self):
-        self.assert_(h5f.get_freespace(self.fid) >= 0)
-        self.assertRaises(H5Error, h5f.get_freespace, -1)
+        self.assert_(self.fid.get_freespace() >= 0)
 
     def test_get_name(self):
         self.assertEqual(h5f.get_name(self.fid), HDFNAME)
-        self.assertRaises(H5Error, h5f.get_name, -1)
 
     def test_get_obj_count(self):
         self.assert_(h5f.get_obj_count(self.fid, h5f.OBJ_ALL) >= 0)
diff --git a/h5py/tests/test_h5g.py b/h5py/tests/test_h5g.py
index 8a6e0dc..62fd9d5 100644
--- a/h5py/tests/test_h5g.py
+++ b/h5py/tests/test_h5g.py
@@ -9,6 +9,7 @@
 # $Date$
 # 
 #-
+from __future__ import with_statement
 
 import unittest
 import tempfile
@@ -16,9 +17,9 @@ import os
 
 import h5py
 from h5py import h5f, h5g, h5i
-from h5py.h5e import H5Error
+from h5py.h5 import H5Error
 
-from common import getcopy, deletecopy
+from common import HCopy
 
 HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/attributes.hdf5')
 OBJECTNAME = 'Group'
@@ -32,95 +33,83 @@ class TestH5G(unittest.TestCase):
         self.obj = h5g.open(self.fid, OBJECTNAME)
 
     def tearDown(self):
-        h5g.close(self.obj)
-        h5f.close(self.fid)
+        self.obj.close()
+        self.fid.close()
 
     def is_grp(self, item):
         return h5i.get_type(item) == h5i.GROUP
 
     def test_open_close(self):
         for name in TEST_GROUPS:
-            gid = h5g.open(self.obj, name)
-            self.assert_(self.is_grp(gid))
-            h5g.close(gid)
-            self.assert_(not self.is_grp(gid))
+            grp = h5g.open(self.obj, name)
+            self.assert_(self.is_grp(grp))
+            grp.close()
+            self.assert_(not self.is_grp(grp))
         
         self.assertRaises(H5Error, h5g.open, self.obj, 'Some other group')
-        self.assertRaises(H5Error, h5g.close, -1)
 
     def test_create(self):
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
 
-        gid = h5g.create(obj, 'New group')
-        h5g.close(gid)
-        self.assert_(h5g.py_exists(obj, 'New group'))
-        self.assertRaises(H5Error, h5g.create, obj, 'New group')
+        with HCopy(HDFNAME) as fid:
 
-        deletecopy(fid, filename)
+            obj = h5g.open(fid, OBJECTNAME)
+            grp = h5g.create(obj, 'New group')
+            grp.close()
+            self.assert_(obj.py_exists('New group'))
 
     def test_link_unlink_move_linkval(self):
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-
-        # symlink
-        h5g.link(obj, TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_SOFT)
-        self.assertEqual(h5g.get_objinfo(obj, NEW_LINK_NAME, follow_link=False).type, h5g.LINK)
-        self.assertEqual(h5g.get_linkval(obj, NEW_LINK_NAME), TEST_GROUPS[1])
-
-        deletecopy(fid, filename)
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-
-        # local link
-        h5g.link(obj, TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_HARD)
-        self.assert_( h5g.py_exists(obj, NEW_LINK_NAME) )
-
-        # test local unlink
-        h5g.unlink(obj, NEW_LINK_NAME)
-        self.assert_(not h5g.py_exists(obj, NEW_LINK_NAME))
-
-        # remote link
-        rgid = h5g.open(obj, TEST_GROUPS[0])
-        h5g.link(obj, TEST_GROUPS[0], NEW_LINK_NAME, h5g.LINK_HARD, rgid)
-        self.assert_( h5g.py_exists(rgid, NEW_LINK_NAME) )
-    
-        # remote unlink
-        h5g.unlink(rgid, NEW_LINK_NAME)
-        self.assert_( not h5g.py_exists(rgid, NEW_LINK_NAME) )
-        h5g.close(rgid)
 
-        # move
-        h5g.move(obj, TEST_GROUPS[2], NEW_LINK_NAME)
-        self.assert_(h5g.py_exists(obj, NEW_LINK_NAME))
-        self.assert_(not h5g.py_exists(obj, TEST_GROUPS[2]))
+        with HCopy(HDFNAME) as fid:
+
+            obj = h5g.open(fid, OBJECTNAME)
+
+            # symlink
+            obj.link(TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_SOFT)
+            self.assertEqual(obj.get_objinfo(NEW_LINK_NAME, follow_link=False).type, h5g.LINK)
+            self.assertEqual(obj.get_linkval(NEW_LINK_NAME), TEST_GROUPS[1])
+
+        with HCopy(HDFNAME) as fid:
 
-        self.assertRaises(H5Error, h5g.move, obj, 'Ghost group', 'blah')
-        self.assertRaises(H5Error, h5g.unlink, obj, 'Some other name')
-        self.assertRaises(H5Error, h5g.link, obj, 'Ghost group', 'blah') 
-        self.assertRaises(H5Error, h5g.get_linkval, -1, "foobar")
+            obj = h5g.open(fid, OBJECTNAME)
+
+            # local link
+            obj.link(TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_HARD)
+            self.assert_( obj.py_exists(NEW_LINK_NAME) )
+
+            # test local unlink
+            obj.unlink(NEW_LINK_NAME)
+            self.assert_(not obj.py_exists(NEW_LINK_NAME))
+
+            # remote link
+            rgrp = h5g.open(obj, TEST_GROUPS[0])
+            obj.link(TEST_GROUPS[0], NEW_LINK_NAME, h5g.LINK_HARD, rgrp)
+            self.assert_( rgrp.py_exists(NEW_LINK_NAME) )
+        
+            # remote unlink
+            rgrp.unlink(NEW_LINK_NAME)
+            self.assert_( not rgrp.py_exists(NEW_LINK_NAME) )
 
-        h5g.close(obj)
+            # move
+            obj.move( TEST_GROUPS[2], NEW_LINK_NAME)
+            self.assert_(obj.py_exists(NEW_LINK_NAME))
+            self.assert_(not obj.py_exists(TEST_GROUPS[2]))
 
-        deletecopy(fid, filename)
 
     def test_get_num_objs(self):
 
-        self.assertEqual(h5g.get_num_objs(self.obj), 3)
-        self.assertRaises(H5Error, h5g.get_num_objs, -1)
+        self.assertEqual(self.obj.get_num_objs(), 3)
+
 
     def test_objname_objtype(self):
 
         for idx, name in enumerate(TEST_GROUPS):
-            self.assertEqual(h5g.get_objname_by_idx(self.obj, idx), name)
-            self.assertEqual(h5g.get_objtype_by_idx(self.obj, idx), h5g.GROUP)
+            self.assertEqual(self.obj.get_objname_by_idx(idx), name)
+            self.assertEqual(self.obj.get_objtype_by_idx(idx), h5g.GROUP)
 
-        self.assertRaises(H5Error, h5g.get_objname_by_idx, self.obj, -1)
-        self.assertRaises(H5Error, h5g.get_objtype_by_idx, self.obj, -1)
 
     def test_get_objinfo(self):
 
-        retval = h5g.get_objinfo(self.obj, '.')
+        retval = self.obj.get_objinfo('.')
         retval.fileno
         retval.objno
         self.assertEqual(retval.nlink, 1)
@@ -128,9 +117,6 @@ class TestH5G(unittest.TestCase):
         retval.mtime
         retval.linklen
 
-        self.assertRaises(H5Error, h5g.get_objinfo, self.obj, 'Something else')
-
-
     def test_iterate(self):
 
         def iterate_all(id, name, namelist):
@@ -164,34 +150,18 @@ class TestH5G(unittest.TestCase):
 
     def test_get_set_comment(self):
 
-        fid, filename = getcopy(HDFNAME)
-        obj = h5g.open(fid, OBJECTNAME)
-
-        h5g.set_comment(obj, TEST_GROUPS[0], "This is a comment.")
-        self.assertEqual(h5g.get_comment(obj, TEST_GROUPS[0]), "This is a comment.")
+        with HCopy(HDFNAME) as fid:
 
-        self.assertRaises(H5Error, h5g.set_comment, -1, "foo", "bar")
-        self.assertRaises(H5Error, h5g.get_comment, -1, "foo")
+            obj = h5g.open(fid, OBJECTNAME)
 
-        deletecopy(fid, filename)
+            obj.set_comment(TEST_GROUPS[0], "This is a comment.")
+            self.assertEqual(obj.get_comment(TEST_GROUPS[0]), "This is a comment.")
 
-    def test_py_listnames(self):
-
-        self.assertEqual(h5g.py_listnames(self.obj), TEST_GROUPS)
-        self.assertRaises(H5Error, h5g.py_listnames, -1)
-
-    def test_py_iternames(self):
-
-        iterator = h5g.py_iternames(self.obj)
-        self.assertEqual(list(iterator), TEST_GROUPS)
-        #self.assertRaises(StopIteration, iterator.next()) bug in unittest
-        
-        self.assertRaises(H5Error, h5g.py_iternames, -1)
 
     def test_py_exists(self):
 
-        self.assert_(h5g.py_exists(self.obj, TEST_GROUPS[0]))
-        self.assert_(not h5g.py_exists(self.obj, 'Something else'))
+        self.assert_(self.obj.py_exists(TEST_GROUPS[0]))
+        self.assert_(not self.obj.py_exists('Something else'))
 
 
 
diff --git a/h5py/tests/test_h5i.py b/h5py/tests/test_h5i.py
index 8bee1f9..ab0a86f 100644
--- a/h5py/tests/test_h5i.py
+++ b/h5py/tests/test_h5i.py
@@ -14,7 +14,7 @@ import os
 
 import h5py
 from h5py import h5f, h5g, h5i, h5t
-from h5py.h5e import H5Error
+from h5py.h5 import H5Error
 
 HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/attributes.hdf5')
 OBJECTNAME = 'Group'
@@ -26,23 +26,21 @@ class TestH5I(unittest.TestCase):
         self.obj = h5g.open(self.fid, OBJECTNAME)
 
     def tearDown(self):
-        h5g.close(self.obj)
-        h5f.close(self.fid)
+        self.obj.close()
+        self.fid.close()
+
 
     def test_get_type(self):
         self.assertEqual(h5i.get_type(self.fid), h5i.FILE)
         self.assertEqual(h5i.get_type(self.obj), h5i.GROUP)
-        self.assertEqual(h5i.get_type(-1), h5i.BADID)
 
     def test_get_name(self):
         self.assertEqual(h5i.get_name(self.obj), '/Group')
         self.assertEqual(h5i.get_name(h5t.STD_I8LE), None)
-        self.assertEqual(h5i.get_name(-1), None)
 
     def test_get_file_id(self):
         nfid = h5i.get_file_id(self.obj)
         self.assertEqual(nfid, self.fid)
-        self.assertRaises(H5Error, h5i.get_file_id, -1)
 
     def test_refs(self):
         refcnt = h5i.get_ref(self.obj)
@@ -54,10 +52,6 @@ class TestH5I(unittest.TestCase):
         h5i.dec_ref(self.obj)
         self.assertEqual(h5i.get_ref(self.obj), refcnt)
 
-        self.assertRaises(H5Error, h5i.get_ref, -1)
-        self.assertRaises(H5Error, h5i.inc_ref, -1)
-        self.assertRaises(H5Error, h5i.dec_ref, -1)
-
 
 
 
diff --git a/obsolete/attrs.hdf5 b/obsolete/attrs.hdf5
deleted file mode 100644
index 3ecb615..0000000
Binary files a/obsolete/attrs.hdf5 and /dev/null differ
diff --git a/obsolete/definitions.pxd b/obsolete/definitions.pxd
deleted file mode 100644
index fb39341..0000000
--- a/obsolete/definitions.pxd
+++ /dev/null
@@ -1,62 +0,0 @@
-#  Ei!, emacs, this is -*-Python-*- mode
-########################################################################
-#
-#       License: BSD
-#       Created: June 20, 2005
-#       Author:  Francesc Altet - faltet at carabos.com
-#
-#       $Id: definitions.pyd 1018 2005-06-20 09:43:34Z faltet $
-#
-########################################################################
-
-"""Here are some definitions for sharing between extensions.
-
-"""
-
-import sys
-
-from defs_c cimport size_t, time_t
-from defs_h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
-
-# Structs and types from HDF5
-cdef extern from "hdf5.h":
-
-
-  int H5FD_LOG_LOC_WRITE, H5FD_LOG_ALL
-  int H5I_INVALID_HID
-
-  # Native types
-  # NOT MOVED
-  cdef enum:
-    H5T_C_S1
-
-
-  # The order to retrieve atomic native datatype
-  # NOT MOVED
-  cdef enum H5T_direction_t:
-    H5T_DIR_DEFAULT     = 0,    #default direction is inscendent
-    H5T_DIR_ASCEND      = 1,    #in inscendent order
-    H5T_DIR_DESCEND     = 2     #in descendent order
-
-
-
-
-
-
-  # === HDF5 API ==============================================================
-
-
-
-
-
-  
-
-
-
-
-
-
-
-
-
-
diff --git a/obsolete/defs_h5common.pxd b/obsolete/defs_h5common.pxd
deleted file mode 100644
index eff2399..0000000
--- a/obsolete/defs_h5common.pxd
+++ /dev/null
@@ -1,5 +0,0 @@
-from defs_h5 cimport hsize_t
-
-cdef hsize_t* tuple_to_dims(object dims_tpl, int rank)
-cdef object dims_to_tuple(hsize_t* dims, int rank)
-
diff --git a/obsolete/file.hdf5 b/obsolete/file.hdf5
deleted file mode 100644
index c11879d..0000000
Binary files a/obsolete/file.hdf5 and /dev/null differ
diff --git a/obsolete/fragments.pyx b/obsolete/fragments.pyx
deleted file mode 100644
index db47b54..0000000
--- a/obsolete/fragments.pyx
+++ /dev/null
@@ -1,138 +0,0 @@
-def set_fill_value(hid_t plist, object value):
-    """ (INT plist, INT type_id, ARRAY_SCALAR value)
-        For lists of class CLASS_DATASET_CREATE
-
-        Set the fill value for the dataset. <value> should be a NumPy array 
-        scalar or 0-dimensional array.  It's up to you to make sure the dtype 
-        of the scalar is compatible with the type of whatever dataset you want 
-        to use this list on.
-
-        As a special exception, providing a value of None means the fill is
-        undefined (HDF5 default is otherwise zero-fill).
-    """
-    cdef hid_t type_id
-    cdef herr_t retval
-    cdef void* data_ptr
-
-    raise NotImplementedError()
-
-    if value is None:
-        retval = H5Pset_fill_value(plist, 0, NULL)
-        if retval < 0:
-            raise PropertyError("Failed to undefine fill value on list %d" % plist)
-        return
-
-    if not PyArray_CheckScalar(value):
-        raise ValueError("Given fill value must be a Numpy array scalar or 0-dimensional array")
-
-    data_ptr = malloc(128)
-    PyArray_ScalarAsCtype(value, data_ptr)
-    type_id = h5t.py_dtype_to_h5t(value.dtype)
-
-    retval = H5Pset_fill_value(plist, type_id, data_ptr)
-    if retval < 0:
-        free(data_ptr)
-        H5Tclose(type_id)
-        raise PropertyError("Failed to set fill value on list %d to %s" % (plist, repr(value)))
-
-    free(data_ptr)
-    H5Tclose(type_id)
-
-def get_fill_value(hid_t plist, object dtype_in):
-    """ (INT plist_id, DTYPE dtype_in) => ARRAY_SCALAR value
-
-        Obtain the fill value.  Due to restrictions in the HDF5 library
-        design, you have to provide a Numpy dtype object specifying the
-        fill value type.  The function will raise an exception if this
-        type is not conversion-compatible with the fill value type recorded
-        in the list.
-    """
-    raise NotImplementedError()
-    cdef herr_t retval
-    cdef hid_t type_id
-
-def set_filter(hid_t plist, int filter_code, unsigned int flags, object data_tpl=()):
-    """ (INT plist_id, INT filter_type_code, UINT flags, TUPLE data)
-    """
-
-    cdef unsigned int *data
-    cdef size_t datalen
-    cdef int i
-    cdef herr_t retval
-
-    if !PyTuple_Check(data_tpl):
-        raise ValueError("Data for the filter must be a tuple of integers")
-
-    datalen = len(data_tpl)
-    data = <unsigned int*>malloc(sizeof(unsigned int)*datalen)
-
-    try:
-        for i from 0<=i<datalen:
-            data[i] = data_tpl[i]
-
-        retval = H5Pset_filter(plist, filter_code, flags, data_len, data)
-        if retval < 0:
-            raise PropertyError("Failed to set filter code %d on list %d; flags %d, data %s" % (filter_code, plist, flags, str(data_tpl)))
-    finally:
-        free(data) 
-    
-def all_filters_avail(hid_t plist):
-
-    cdef htri_t retval
-    retval = H5Pall_filters_avail(plist)
-    if retval < 0:
-        raise PropertyError("Failed to determine filter status on list %d" % plist)
-    return bool(retval)
-
-def get_nfilters(hid_t plist)
-
-    cdef int retval
-    retval = H5Pget_nfilters(plist)
-    if retval < 0:
-        raise PropertyError("Failed to determine number of filters in list %d" % plist)
-    return retval
-
-cdef class FilterInfo:
-
-    cdef object name
-    cdef int code
-    cdef unsigned int flags
-    cdef object data
-
-def get_filter_info(hid_t plist, unsigned int filter_no):
-
-    cdef char namearr[256]
-    cdef int namelen
-    cdef unsigned int flags
-    cdef size_t datalen
-    cdef unsigned int data[256]
-    cdef int retval
-    cdef int i
-
-    datalen = 256
-    namelen = 256
-
-    retval = <int>H5Pget_filter(plist, filter_no, &flags, &datalen, &data, namelen, &namearr)
-    if retval < 0:
-        raise PropertyError("Failed to get info for filter %d on list %d" % (filter_no, plist))
-    
-    # HDF5 docs claim the string may not be properly terminated.
-    for i from 0<=i<namelen:
-        if namearr[i] == c'\0':
-            break
-    if i == namelen:
-        namearr[namelen-1] = c'\0'
-
-    tpl = PyTuple_New(datalen)
-    for i from 0<=i<datalen:
-        tmp = data[i]
-        Py_INCREF(tmp)  # to get around pyrex issues
-        PyTuple_SetItem(tpl, i, tmp)
-
-    info = FilterInfo()
-    info.name = &namearr
-    info.code = retval
-    info.flags = flags
-    info.data = tpl
-
-    return info
diff --git a/obsolete/test.h5 b/obsolete/test.h5
deleted file mode 100644
index e386791..0000000
Binary files a/obsolete/test.h5 and /dev/null differ
diff --git a/obsolete/test_h5a.pyx b/obsolete/test_h5a.pyx
deleted file mode 100644
index 3b53697..0000000
--- a/obsolete/test_h5a.pyx
+++ /dev/null
@@ -1,138 +0,0 @@
-##### Preamble block ##########################################################
-# 
-# This file is part of the "h5py" HDF5 Interface for Python project.
-# 
-# Copyright 2008 Andrew Collette
-# http://software.alfven.org
-# License: BSD  (See file "LICENSE" for complete license, or the URL above)
-# 
-##### End preamble block ######################################################
-
-from defs_h5t cimport H5T_NATIVE_INT8
-from defs_h5i cimport H5Iget_type, H5I_ATTR, H5I_BADID
-from defs_h5a cimport H5Aclose, H5Acreate, H5Adelete, H5Awrite
-from defs_h5p cimport H5P_DEFAULT
-from defs_h5s cimport H5Screate
-
-import os
-import unittest
-import tempfile
-
-import numpy
-import h5f
-import h5g
-import h5a
-import h5s
-import h5t
-
-from errors import H5AttributeError
-
-SCL_NAME = 'SCALAR ATTRIBUTE'
-ARR_NAME = 'ARRAY ATTRIBUTE'
-TEST_NAME = 'TEST ATTRIBUTE'
-
-class TestH5A(unittest.TestCase):
-
-    def setUp(self):
-        self.fname = tempfile.mktemp(".hdf5")
-        self.fid = h5f.create(self.fname, h5f.ACC_TRUNC)
-        self.gid = h5g.create(self.fid, "GROUP")
-        sid = h5s.create(h5s.CLASS_SCALAR)
-        sid2 = h5s.create_simple((2,3))
-        self.scl_attr = H5Acreate(self.gid, SCL_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
-        self.arr_attr = H5Acreate(self.gid, ARR_NAME, H5T_NATIVE_INT8, sid2, H5P_DEFAULT)
-        h5s.close(sid2)
-        h5s.close(sid)
-
-    def tearDown(self):
-        H5Aclose(self.arr_attr)
-        H5Aclose(self.scl_attr)
-        h5g.close(self.gid)
-        h5f.close(self.fid)
-        os.unlink(self.fname)
-    
-    def testcreate(self):
-        sid = h5s.create(h5s.CLASS_SCALAR)
-        aid = h5a.create(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid)
-        self.assert_(H5Iget_type(aid) == H5I_ATTR)
-        H5Aclose(aid)
-        H5Adelete(self.gid, TEST_NAME)
-        h5s.close(sid)
-
-    def test_open_idx(self):
-        aid = h5a.open_idx(self.gid, 0)
-        self.assert_(h5a.get_name(aid) == SCL_NAME)
-        H5Aclose(aid)
-        aid = h5a.open_idx(self.gid, 1)
-        self.assert_(h5a.get_name(aid) == ARR_NAME)
-        H5Aclose(aid)
-
-        self.assertRaises(H5AttributeError, h5a.open_idx, self.gid, 2)
-
-    def test_open_name(self):
-        aid = h5a.open_name(self.gid, SCL_NAME)
-        self.assert_(H5Iget_type(aid) == H5I_ATTR)
-        H5Aclose(aid)
-
-    def test_close(self):
-        sid = H5Screate(h5s.CLASS_SCALAR)
-        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
-        h5s.close(sid)
-        self.assert_(H5Iget_type(aid) == H5I_ATTR)
-        h5a.close(aid)
-        self.assert_(H5Iget_type(aid) == H5I_BADID)
-
-    def test_delete(self):
-        cdef char foo
-        foo = 1
-
-        sid = H5Screate(h5s.CLASS_SCALAR)
-        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
-        h5s.close(sid)
-        self.assert_(H5Iget_type(aid) == H5I_ATTR)
-
-        retval = H5Awrite(aid, H5T_NATIVE_INT8, &foo)
-        assert retval >= 0
-        
-        H5Aclose(aid)
-
-        aid = h5a.open_name(self.gid, TEST_NAME)
-        h5a.close(aid)
-
-        h5a.delete(self.gid, TEST_NAME)
-        self.assertRaises(H5AttributeError, h5a.open_name, self.gid, TEST_NAME)
-
-    def test_read(self):
-
-        cdef char foo
-        foo = 42
-        sid = H5Screate(h5s.CLASS_SCALAR)
-        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
-        h5s.close(sid)
-        self.assert_(H5Iget_type(aid) == H5I_ATTR)
-
-        retval = H5Awrite(aid, H5T_NATIVE_INT8, &foo)
-        assert retval >= 0
-
-        a = numpy.ndarray((1,),dtype=h5t.py_h5t_to_dtype(H5T_NATIVE_INT8))
-        h5a.read(aid, a)
-
-        self.assert_(a[0] == 42)
-        H5Aclose(aid)
-        H5Adelete(self.gid, TEST_NAME)
-
-        
-
-        
-
-        
-
-
-
-
-
-
-
-
-
-
diff --git a/obsolete/test_h5f.pyx b/obsolete/test_h5f.pyx
deleted file mode 100644
index 8df1933..0000000
--- a/obsolete/test_h5f.pyx
+++ /dev/null
@@ -1,76 +0,0 @@
-##### Preamble block ##########################################################
-# 
-# This file is part of the "h5py" HDF5 Interface for Python project.
-# 
-# Copyright 2008 Andrew Collette
-# http://software.alfven.org
-# License: BSD  (See file "LICENSE" for complete license, or the URL above)
-# 
-##### End preamble block ######################################################
-
-from defs_h5f cimport H5Fopen, H5Fclose,\
-                      H5F_ACC_TRUNC, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_EXCL
-from defs_h5p cimport H5P_DEFAULT
-from defs_h5i cimport H5Iget_type, H5I_FILE
-                 
-import unittest
-import os
-import tempfile
-
-import h5f
-from errors import FileError
-
-"""
-    Tests functions defined in h5f.  Requires HDF5 file; default name
-    is "test_simple.hdf5".
-
-"""
-TEST_FILE = "test_simple.hdf5"
-
-class TestH5F(unittest.TestCase):
-
-    def testopen(self):
-        os.chmod(TEST_FILE, 0600)
-
-        fid = h5f.open(TEST_FILE, flags=h5f.ACC_RDWR)
-        self.assert_(H5Iget_type(fid) == H5I_FILE)
-        H5Fclose(fid)
-
-        fid = h5f.open(TEST_FILE, flags=h5f.ACC_RDONLY)
-        self.assert_(H5Iget_type(fid) == H5I_FILE)
-        H5Fclose(fid)     
-
-        os.chmod(TEST_FILE, 0400)
-        
-        fid = h5f.open(TEST_FILE, flags=H5F_ACC_RDONLY)
-        self.assert_(H5Iget_type(fid) == H5I_FILE)
-        H5Fclose(fid)     
-
-        self.assertRaises(FileError, h5f.open, TEST_FILE, flags=h5f.ACC_RDWR)
-        
-    def testclose(self):
-        os.chmod(TEST_FILE, 0600)
-        fid = H5Fopen(TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT)
-        self.assert_(H5Iget_type(fid) == H5I_FILE)
-        h5f.close(fid)
-
-    def testcreate(self):
-
-        fd, name = tempfile.mkstemp('.hdf5')
-        os.close(fd)
-
-        fid = h5f.create(name, flags=h5f.ACC_TRUNC)
-        self.assert_(H5Iget_type(fid) == H5I_FILE)
-        H5Fclose(fid)   
-
-        self.assertRaises(FileError, h5f.create, name, flags=h5f.ACC_EXCL)
-        
-        os.unlink(name)
-
-
-
-
-
-
-
-
diff --git a/obsolete/test_h5g.pyx b/obsolete/test_h5g.pyx
deleted file mode 100644
index 0dd36d4..0000000
--- a/obsolete/test_h5g.pyx
+++ /dev/null
@@ -1,184 +0,0 @@
-##### Preamble block ##########################################################
-# 
-# This file is part of the "h5py" HDF5 Interface for Python project.
-# 
-# Copyright 2008 Andrew Collette
-# http://software.alfven.org
-# License: BSD  (See file "LICENSE" for complete license, or the URL above)
-# 
-##### End preamble block ######################################################
-
-from defs_h5g cimport H5Gclose, H5Gopen, H5Gget_objinfo, H5Gunlink, \
-                      H5G_GROUP, H5G_stat_t
-from defs_h5i cimport H5Iget_type, H5I_GROUP, H5I_BADID
-
-import unittest
-import shutil
-import tempfile
-import os
-
-import h5f
-import h5g
-
-from errors import GroupError
-
-""" Depends on h5f
-"""
-
-TEST_FILE = "test_simple.hdf5"
-TEST_GROUPS = ["columns", "detector"]
-NEW_GROUP_NAME = "XXXNEWGROUPXXX"
-NEW_LINK_NAME = "linked"
-
-# Pyrex doesn't let you nest functions
-# These are used in the iterator test.
-def ifunc1(gid, name, data):
-    data.append(name)
-    return None
-
-def ifunc2(gid, name, data):
-    data.append(name)
-    return 0
-
-def ifunc3(gid, name, data):
-    data.append(name)
-    return 1
-
-def ifunc4(gid, name, data):
-    data.append(name)
-    return -1
-
-
-class TestH5G(unittest.TestCase):
-
-    def setUp(self):
-        self.fname = tempfile.mktemp(".hdf5")
-        shutil.copyfile(TEST_FILE, self.fname)
-        self.fid = h5f.open(self.fname, flags=h5f.ACC_RDWR)
-
-    def tearDown(self):
-        h5f.close(self.fid)
-        os.unlink(self.fname)
-
-    def testopen(self):
-        for name in TEST_GROUPS:
-            gid = h5g.open(self.fid, name)
-            self.assert_(H5Iget_type(gid) == H5I_GROUP)
-            H5Gclose(gid)
-
-    def testclose(self):
-        for name in TEST_GROUPS:
-            gid = H5Gopen(self.fid, name)
-            h5g.close(gid)
-            self.assert_(H5Iget_type(gid) == H5I_BADID)
-
-    def testcreate(self):
-        gid = h5g.create(self.fid, NEW_GROUP_NAME)
-        self.assert_( H5Gget_objinfo(self.fid, NEW_GROUP_NAME, 0, NULL) >= 0 )
-        H5Gclose(gid)
-        H5Gunlink(self.fid, NEW_GROUP_NAME)
-
-    def testlink(self):
-        # local link
-        h5g.link(self.fid, TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_HARD)
-        self.assert_( H5Gget_objinfo(self.fid, NEW_LINK_NAME, 0, NULL) >= 0 )
-
-        # test local unlink
-        h5g.unlink(self.fid, NEW_LINK_NAME)
-        self.assert_( H5Gget_objinfo(self.fid, NEW_LINK_NAME, 0, NULL) < 0 )
-
-        # remote link
-        rgid = H5Gopen(self.fid, TEST_GROUPS[0])
-        h5g.link(self.fid, TEST_GROUPS[0], NEW_LINK_NAME, h5g.LINK_HARD, rgid)
-        self.assert_( H5Gget_objinfo(rgid, NEW_LINK_NAME, 0, NULL) >= 0 )
-    
-        h5g.unlink(rgid, NEW_LINK_NAME)
-        self.assert_( H5Gget_objinfo(rgid, NEW_LINK_NAME, 0, NULL) < 0 )
-
-    def testmove(self):
-        tname = TEST_GROUPS[0]+'_2'
-
-        # local move
-        h5g.move(self.fid, TEST_GROUPS[0], tname)
-        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) < 0 )        
-        self.assert_( H5Gget_objinfo(self.fid, tname, 0, NULL) >= 0 )
-
-        h5g.move(self.fid, TEST_GROUPS[0]+'_2', TEST_GROUPS[0])
-        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) >= 0 )        
-        self.assert_( H5Gget_objinfo(self.fid, tname, 0, NULL) <0 )
-
-        gid = H5Gopen(self.fid, TEST_GROUPS[1])
-
-        # remote move
-        h5g.move(self.fid, TEST_GROUPS[0], TEST_GROUPS[0], gid)
-        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) < 0 )    
-        self.assert_( H5Gget_objinfo(gid, TEST_GROUPS[0], 0, NULL) >= 0 )
-
-        h5g.move(gid, TEST_GROUPS[0], TEST_GROUPS[0], self.fid)
-        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) >= 0 )    
-        self.assert_( H5Gget_objinfo(gid, TEST_GROUPS[0], 0, NULL) < 0 )
-
-        H5Gclose(gid)
-
-    def test_get_num_objs(self):
-        self.assert_(h5g.get_num_objs(self.fid) == 2)
-
-    def test_get_objname_by_idx(self):
-
-        for idx, name in enumerate(TEST_GROUPS):
-            self.assert_(h5g.get_objname_by_idx(self.fid, idx) == name)
-
-    def test_get_objtype_by_idx(self):
-
-        for idx, name in enumerate(TEST_GROUPS):
-            self.assert_(h5g.get_objtype_by_idx(self.fid, idx) == h5g.OBJ_GROUP)
-        
-    def test_get_objinfo(self):
-
-        cdef H5G_stat_t stat
-        H5Gget_objinfo(self.fid, TEST_GROUPS[0], 1, &stat)
-        qstat = h5g.get_objinfo(self.fid, TEST_GROUPS[0])
-
-        self.assert_(qstat.fileno[0] == stat.fileno[0])
-        self.assert_(qstat.fileno[1] == stat.fileno[1])
-        self.assert_(qstat.nlink == stat.nlink)
-        self.assert_(qstat.type == <int>stat.type)
-        self.assert_(qstat.mtime == stat.mtime)
-        self.assert_(qstat.linklen == stat.linklen)
-
-    def test_iterate(self):
-
-        nlist = []
-        h5g.iterate(self.fid, '.', ifunc1, nlist)
-        self.assert_(nlist == TEST_GROUPS )
-        
-        nlist = []
-        h5g.iterate(self.fid, '.', ifunc2, nlist)
-        self.assert_(nlist == TEST_GROUPS)
-
-        nlist = []
-        h5g.iterate(self.fid, '.', ifunc3, nlist)
-        self.assert_(nlist == [TEST_GROUPS[0]])
-
-        nlist = []
-        self.assertRaises(GroupError, h5g.iterate, self.fid, '.', ifunc4, nlist)
-        self.assert_(nlist == [TEST_GROUPS[0]])
-
-    def test_py_listnames(self):
-
-        thelist = h5g.py_listnames(self.fid)
-        self.assert_(thelist == TEST_GROUPS)
-
-        
-    def test_py_iternames(self):
-        iterator = h5g.py_iternames(self.fid)
-        thelist = list( iterator )
-        self.assert_(thelist == TEST_GROUPS)
-        self.assertRaises(StopIteration, iterator.next)
-        
-
-
-
-
-    
-
diff --git a/obsolete/test_h5s.pyx b/obsolete/test_h5s.pyx
deleted file mode 100644
index 85a1363..0000000
--- a/obsolete/test_h5s.pyx
+++ /dev/null
@@ -1,130 +0,0 @@
-##### Preamble block ##########################################################
-# 
-# This file is part of the "h5py" HDF5 Interface for Python project.
-# 
-# Copyright 2008 Andrew Collette
-# http://software.alfven.org
-# License: BSD  (See file "LICENSE" for complete license, or the URL above)
-# 
-##### End preamble block ######################################################
-
-from defs_c   cimport malloc, free
-from defs_h5  cimport hsize_t
-from defs_h5i cimport H5Iget_type, H5I_BADID, H5I_DATASPACE
-from defs_h5s cimport H5Sget_simple_extent_ndims, H5Sget_simple_extent_dims, \
-                      H5Sclose, H5Screate_simple, H5Sget_select_bounds, \
-                      H5Sselect_none
-
-import unittest
-import h5s
-from errors import DataspaceError
-
-cdef int NDIMS
-NDIMS = 3
-DIMS = (10,13,24)
-SELECT_START        = (1, 1, 5)
-SELECT_LEN          = (2, 3, 4)
-SELECT_STRIDE       = (1, 1, 2)
-SELECT_BBOX_START   = (1, 1, 5)
-SELECT_BBOX_END     = (2, 3, 11)
-
-class TestH5S(unittest.TestCase):
-
-    def setUp(self):
-
-        cdef hsize_t *dims
-        dims = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
-        for idx, val in enumerate(DIMS):
-            dims[idx] = val
-        self.sid = H5Screate_simple(NDIMS, dims, NULL)
-        free(dims)
-
-    def tearDown(self):
-        H5Sclose(self.sid)
-
-    def test_close(self):
-
-        self.assert_(H5Iget_type(self.sid) == H5I_DATASPACE)
-        h5s.close(self.sid)
-        self.assert_(H5Iget_type(self.sid) == H5I_BADID)
-        self.setUp()
-
-    def test_create(self):
-
-        cdef hsize_t *dims
-        sid = h5s.create_simple(DIMS)
-
-        self.assert_(H5Sget_simple_extent_ndims(sid) == NDIMS)
-        dims = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
-        H5Sget_simple_extent_dims(sid, dims, NULL)
-        for idx, val in enumerate(DIMS):
-            self.assert_( dims[idx] == val )
-        free(dims)
-        H5Sclose(sid)
-
-    def test_ndims(self):
-        self.assert_(h5s.get_simple_extent_ndims(self.sid) == NDIMS)
-        self.assertRaises(DataspaceError, h5s.get_simple_extent_ndims, -1)
-
-    def test_dims(self):
-        self.assert_(h5s.get_simple_extent_dims(self.sid) == DIMS)
-        self.assertRaises(DataspaceError, h5s.get_simple_extent_dims, -1)
-
-    def test_hyperslab(self):
-
-        cdef hsize_t *start
-        cdef hsize_t *end
-
-        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, (1,), (1,) )
-        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START, SELECT_LEN, SELECT_STRIDE[0:2] )
-        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START, SELECT_LEN[0:2], SELECT_STRIDE )
-        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START[0:2], SELECT_LEN, SELECT_STRIDE )
-
-        H5Sselect_none(self.sid)
-
-        start = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
-        end = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
-
-        h5s.select_hyperslab(self.sid, SELECT_START, SELECT_LEN, SELECT_STRIDE)
-        H5Sget_select_bounds(self.sid, start, end)
-
-        for idx in range(NDIMS):
-            self.assert_( start[idx] == SELECT_BBOX_START[idx] )
-            self.assert_( end[idx] == SELECT_BBOX_END[idx] )
-        free(start)
-        free(end)
-
-        H5Sselect_none(self.sid)
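
For reference, the bounding-box constants at the top of this file follow
directly from the hyperslab parameters: with the default block size of 1, the
last selected element in each dimension is start[i] + (count[i] - 1) * stride[i].
A quick check in plain Python (a sketch only, independent of HDF5 and not part
of the deleted test):

    start, count, stride = (1, 1, 5), (2, 3, 4), (1, 1, 2)
    bbox_end = tuple(s + (c - 1) * st for s, c, st in zip(start, count, stride))
    assert bbox_end == (2, 3, 11)   # matches SELECT_BBOX_END; SELECT_BBOX_START is just the start
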
diff --git a/obsolete/test_simple.hdf5 b/obsolete/test_simple.hdf5
deleted file mode 100644
index 4786eea..0000000
Binary files a/obsolete/test_simple.hdf5 and /dev/null differ
diff --git a/obsolete/transactions.py b/obsolete/transactions.py
deleted file mode 100644
index d5cc34a..0000000
--- a/obsolete/transactions.py
+++ /dev/null
@@ -1,279 +0,0 @@
-import time
-
-class TransactionError(StandardError):
-    """ Base class for errors in the transactions module.
-    """
-    pass
-
-class TransactionStateError(TransactionError):
-    """ Attempted an operation which doesn't make sense in the current context.
-    """
-    pass
-
-class IllegalTransactionError(TransactionError):
-    pass
-
-
-class Action(object):
-
-    """ Represents the smallest component of a transaction.
-
-        Each Action object is a simple state machine which can do three things:
-        do, undo and commit.  Each of these actions is specified by a 3-tuple
-        containing (callable, args, kwds).  Whatever the "do" callable does is
-        assumed to be an atomic operation, which is fully reversed by the 
-        "undo" operation.  The "commit" operation is optional, and can be
-        used to e.g. clean up state information used for undo.
-
-        States are
-        READY:      The action has not been performed. 
-                    Legal calls:
-                        do() => DONE
-        DONE:       The action has been performed.
-                    Legal calls:
-                        undo() => READY
-                        commit() => COMMITTED
-        COMMITTED:  The action has been performed and cannot be undone.
-                    No legal calls.
-
-        Performing an illegal call (in the wrong state) will raise a
-        TransactionStateError.
-    """
-
-    READY = 0
-    DONE = 1
-    COMMITTED = 2
-
-    def __init__(self, name, do, undo, commit=None):
-        """ Create a new action object, from a name and a do/undo pair.
-
-            "Do" and "undo" are required and must be 3-tuples containing
-            (callable, args_tuple, keywords_dictionary).  "commit" is optional.
-
-            do:     Perform some kind of atomic operation
-            undo:   Completely reverse the effects of the "do" operation.
-            commit: Clean up any temporary data created by the "do" operation.
-
-            Any return value from the callable is ignored.  It must not raise
-            an exception.
-
-        """
-        self.name = name
-        self._validate(do, 'do')
-        self._validate(undo, 'undo')
-        if commit is not None:
-            self._validate(commit, 'commit')
-        self._do = do
-        self._undo = undo
-        self._commit = commit
-        self.state = Action.READY
-
-    def _validate(self, item, name):
-        if isinstance(item, tuple)      and \
-          len(item) == 3                and \
-          hasattr(item[0],'__call__')   and \
-          isinstance(item[1], tuple)    and \
-          isinstance(item[2], dict):
-            return
-        raise ValueError('"%s" must be a 3-tuple (callable, args, kwds); got %s' % (name, str(item)) )
-
-    def _call(self, tpl):
-        if tpl is not None:
-            f, a, k = tpl
-            f(*a, **k)
-
-    def do(self):
-        if self.state != Action.READY:
-            raise TransactionStateError('Action "%s" is not in the ready state (got %d)' % (self.name, self.state))
-        self._call(self._do)
-        self.state = Action.DONE
-
-    def undo(self):
-        if self.state != Action.DONE:
-            raise TransactionStateError('Action "%s" is not in the "done" state for undo (got %d)' % (self.name, self.state))
-        self._call(self._undo)
-        self.state = Action.READY
-
-    def commit(self):
-        if self.state != Action.DONE:
-            raise TransactionStateError('Action "%s" is not in the "done" state for commit (got %d)' % (self.name, self.state))
-        self._call(self._commit)
-        self.state = Action.COMMITTED
-
-
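The state machine described in the docstring above is easiest to read with a
concrete driver in mind.  A minimal sketch of the READY -> DONE -> COMMITTED
walk (hypothetical code, not part of this file; set_x/unset_x are stand-in
callables):

    state = {'x': 1}

    def set_x(value):
        state['x'] = value          # the atomic "do" step

    def unset_x(old):
        state['x'] = old            # fully reverses the "do" step

    act = Action('set x', do=(set_x, (2,), {}), undo=(unset_x, (1,), {}))
    act.do()        # READY -> DONE;  state['x'] == 2
    act.undo()      # DONE  -> READY; state['x'] == 1
    act.do()
    act.commit()    # DONE  -> COMMITTED; calling undo() now raises TransactionStateError
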
-class TransactionManager(object):
-
-    """ Provides locking and transaction support, via a stack of Action objects.
- 
-        Objects of this class are designed to manage access to a set of
-        resources, manipulated by calling functions or bound methods.  It
-        implements two conceptually different interfaces:
-
-        1. Locking
-        Since a single transaction manager can process commands affecting a
-        variety of objects, you can "lock" an object by providing an identifying
-        token (which could even be the object itself) via the function lock().
-        Attempting to lock an object twice will raise ValueError.
-
-        2. Transactions
-        Transactions are implemented via a stack of Action objects.  Add actions
-        to the manager via do().  Each action is immediately performed when
-        this method is called.  At any time you can call commit(), to purge the
-        stack, or rollback() to reverse all changes up to the last commit().
-        Additionally, by calling undo() and redo(), you can manually walk the
-        object through the entire stack of transaction states.
-
-    """
-
-    # Pointer states:
-    # None:     uninitialized
-    # 0:        off the bottom of the stack
-    # <double>: index of an action on the stack
-
-    def __init__(self, max_size=None):
-        """ Create a new transaction manager.  The optional max_size keyword
-            indicates the maximum allowed stack size.  When this limit is 
-            reached, the oldest action is committed and discarded when a new
-            action is added.
-        """
-        self.stack = {}
-        self.locks = set()
-        self.ptr = None
-        if max_size is not None and max_size < 1:
-            raise ValueError("Stack size must be at least 1 (got %d)" % max_size)
-        self.max_size = max_size
-
-    # --- Locking code --------------------------------------------------------
-
-    def is_locked(self, item):
-        """ Determine if this resource is currently locked.
-        """
-        return item in self.locks
-
-    def lock(self, item):
-        """ Lock a resource.  Raises ValueError if it's already locked.
-        """
-        if item in self.locks:
-            raise ValueError('%s is already locked for transactions.' % str(item))
-        self.locks.add(item)
-
-    def unlock(self, item):
-        """ Release a resource.  Raises ValueError if it's already locked.
-        """
-        if not item in self.locks:
-            raise ValueError("%s is not locked for transactions." % item)
-        self.locks.remove(item)
-
-    # --- Transactions code ---------------------------------------------------
-
-    def commit(self):
-        """ Commit every action which is in the "done" state, and reset
-            the stack.
-        """ 
-        for t in sorted(self.stack):
-            action = self.stack[t]
-            if action.state == Action.DONE:
-                action.commit()
-        self.stack = {}
-        self.ptr = None
-    
-    def rollback(self):
-        """ Undo every action which is in the "done" state, and reset
-            the stack.
-        """
-        for t in sorted(self.stack, reverse=True):
-            action = self.stack[t]
-            if action.state == Action.DONE:
-                action.undo()
-        self.stack = {}
-        self.ptr = None
-        
-    def can_redo(self):
-        """ Determine if the stack/pointer system is capable of redoing the
-            next action on the stack.  Fails if the pointer is None or at
-            the top of the stack.
-        """
-        if self.ptr is None:
-            return False
-        assert self.ptr in self.stack or self.ptr == 0
-
-        if self.ptr == max(self.stack):
-            return False
-        return True
-
-    def can_undo(self):
-        """ Determine if the stack/pointer system is capable of undoing the
-            action currently pointed at.  Fails if the pointer is None, or
-            off the bottom of the stack.
-        """
-        if self.ptr is None or self.ptr == 0:
-            return False
-        assert self.ptr in self.stack
-
-        return True
-
-    def do(self, action):
-        """ Perform the given action and add it to the stack.  Implicitly
-            discards any newer actions on the stack, and sets the pointer
-            to the new item.  If the resulting stack size is greater than
-            max_size, commit and discard the oldest action.
-
-            The action's do() method is called before any modification is 
-            made to the stack; if it raises an exception this won't trash
-            the object.
-        """
-        action.do()
-
-        assert len(self.stack) == 0 or self.ptr in self.stack
-
-        for t in sorted(self.stack):
-            if t > self.ptr:
-                assert self.stack[t].state == Action.READY
-                del self.stack[t]
-
-        key = time.time()
-        self.stack[key] = action
-        self.ptr = key
-
-        if self.max_size is not None and len(self.stack) > self.max_size:
-            key = min(self.stack)
-            self.stack[key].commit()
-            del self.stack[key]
-
-    def undo(self):
-        """ Undo the action targeted by the current pointer, and move the
-            pointer down one level on the stack.  Does nothing if the pointer 
-            system isn't ready for an Undo.
-        """
-        if self.can_undo():
-            self.stack[self.ptr].undo()
-            keys = sorted(self.stack)
-            idx = keys.index(self.ptr)
-            if idx == 0:
-                self.ptr = 0
-            else:
-                self.ptr = keys[idx-1]
-
-    def redo(self):
-        """ Increment the pointer and redo the resulting action.  Does nothing
-            if the pointer system isn't ready for a Redo.
-        """
-        if self.can_redo():
-            keys = sorted(self.stack)
-            if self.ptr == 0:
-                self.ptr = min(keys)
-            else:
-                idx = keys.index(self.ptr)
-                self.ptr = keys[idx+1]
-            self.stack[self.ptr].do()
-
diff --git a/setup.py b/setup.py
index edc6a14..7de4235 100644
--- a/setup.py
+++ b/setup.py
@@ -35,8 +35,9 @@
         --readme:   Compile the RST readme file into an HTML fragment
 
     Universal options:
-        --pyrex         Have Pyrex recompile the *.pyx files
-        --pyrex-only    Have Pyrex recompile the *.pyx files, and stop.
+        --pyrex         Have Pyrex recompile changed *.pyx files
+        --pyrex-only    Have Pyrex recompile changed *.pyx files, and stop.
+        --pyrex-force   Recompile all *.pyx files, regardless of timestamps
 """
 
 # === Global constants ========================================================
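
Typical invocations with the new flag would look like this (illustrative only;
the flags are consumed before distutils sees the command line):

    python setup.py build --pyrex          # recompile only changed .pyx files
    python setup.py build --pyrex-force    # recompile everything, ignoring timestamps
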
@@ -75,6 +76,8 @@ def warn(instring):
 
 ENABLE_PYREX = False
 PYREX_ONLY = False
+PYREX_FORCE = False
+DEV_MODE = False
 for arg in sys.argv[:]:
     if arg == '--pyrex':
         ENABLE_PYREX = True
@@ -83,6 +86,13 @@ for arg in sys.argv[:]:
         ENABLE_PYREX = True
         PYREX_ONLY = True
         sys.argv.remove(arg)
+    if arg == '--pyrex-force':
+        ENABLE_PYREX = True
+        PYREX_FORCE = True
+        sys.argv.remove(arg)
+
+if "dev" in sys.argv:
+    DEV_MODE = True
 
 if 'sdist' in sys.argv and os.path.exists('MANIFEST'):
     warn("Cleaning up stale MANIFEST file")
@@ -152,9 +162,15 @@ if ENABLE_PYREX or not all([os.path.exists(x+'.c') for x in pyrex_sources]):
 
         if Version.version >= MIN_PYREX:
             from Pyrex.Compiler.Main import compile_multiple, CompilationOptions
-            results = compile_multiple( [x+'.pyx' for x in pyrex_sources], CompilationOptions(verbose=True))
+
+            opts = CompilationOptions(verbose=True, timestamps=(not PYREX_FORCE))
+            results = compile_multiple( [x+'.pyx' for x in pyrex_sources], opts)
+
             if results.num_errors != 0:
-                fatal("%d Pyrex compilation errors encountered; aborting." % results.num_errors)
+                if DEV_MODE:
+                    warn("%d Pyrex compilation errors encountered." % results.num_errors)
+                else:
+                    fatal("%d Pyrex compilation errors encountered; aborting." % results.num_errors)
             if PYREX_ONLY:
                 exit(0)
         else:
@@ -228,7 +244,10 @@ class dev(Command):
                      [ 'MANIFEST', os.path.join(pyx_src_path, 'version.py')]
 
             for name in fnames:
-                os.remove(name)
+                try:
+                    os.remove(name)
+                except OSError:
+                    pass
 
         if self.doc:
             buildobj = self.distribution.get_command_obj('build')
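
The try/except added around os.remove() is the standard "remove if present"
idiom; the same thing written as a small helper (a sketch, not part of the
patch):

    import os

    def remove_quietly(path):
        """Delete path, ignoring the error if it does not exist."""
        try:
            os.remove(path)
        except OSError:
            pass
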

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git


