[h5py] 52/455: Begin yet another massive API change, this time to OO identifiers. For the moment, totally broken.

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:16 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit 084acb1ee4045c1f33039a46429eb70d7fa8659d
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Mon Jun 16 23:49:07 2008 +0000

    Begin yet another massive API change, this time to OO identifiers.  For the moment, totally broken.
---
 h5py/h5.pxd  |  33 +--
 h5py/h5.pyx  |  32 ++-
 h5py/h5a.pxd |   5 +-
 h5py/h5a.pyx | 406 ++++++++++++++--------------------
 h5py/h5d.pxd |   4 +
 h5py/h5d.pyx | 697 ++++++++++++++++-------------------------------------------
 h5py/h5f.pxd |   4 +
 h5py/h5f.pyx | 249 +++++++++++----------
 h5py/h5g.pxd |   4 +
 h5py/h5g.pyx | 475 ++++++++++++++++------------------------
 h5py/h5i.pyx |  58 +++--
 h5py/h5p.pxd |  25 +++
 h5py/h5p.pyx | 459 ++++++++++++++++++++-------------------
 h5py/h5r.pyx |  30 ++-
 h5py/h5s.pxd |   4 +
 h5py/h5s.pyx | 675 ++++++++++++++++++++++++++++-----------------------------
 h5py/h5t.pxd |   4 +
 h5py/h5t.pyx | 550 +++++++++++++++++++++++-----------------------
 18 files changed, 1632 insertions(+), 2082 deletions(-)

diff --git a/h5py/h5.pxd b/h5py/h5.pxd
index da6fd40..2477896 100644
--- a/h5py/h5.pxd
+++ b/h5py/h5.pxd
@@ -48,37 +48,8 @@ cdef class ObjectID:
     """ Base wrapper class for HDF5 object identifiers """
     cdef readonly hid_t id
 
-cdef class FileID(ObjectID):
-    """ File identifier """
-
-cdef class GroupID(ObjectID):
-    """ Group identifier """
-    pass
-
-cdef class SpaceID(ObjectID):
-    """ Dataspace identifier """
-    pass
-
-cdef class DatasetID(ObjectID):
-    """ Dataset identifier """
-    pass
-
-cdef class TypeID(ObjectID):
-    """ Datatype identifier """
-    pass
-
-cdef class AttrID(ObjectID):
-    """ Attribute identifier """
-    pass
-
-cdef class PropID(ObjectID):
-    """ Property list (class or instance) identifier """
-    pass
-
-
-
-
-
+cdef class LockableID(ObjectID):
+    cdef readonly int _locked
 
 
 
diff --git a/h5py/h5.pyx b/h5py/h5.pyx
index 8d20081..14cbd4d 100644
--- a/h5py/h5.pyx
+++ b/h5py/h5.pyx
@@ -94,28 +94,26 @@ cdef class ObjectID:
 
         return "%d [%s] %s" % (self.id, ref, self.__class__.__name__)
 
-cdef class FileID(ObjectID):
-
-    def close(self):
-        # todo: recursive close of all open ids
-        if H5Iget_type(self.id) != H5I_BADID:
-            H5Fclose(self.id)
-
-cdef class TypeID(ObjectID):
+cdef class LockableID(ObjectID):
 
     def __dealloc__(self):
-        """ For type objects, we have to intercept the damn "can't close
-            an immutable type" BS.  Conveniently, there's no way to check
-            if a type is immutable.
-        """
-        if H5Iget_type(self.id) != H5I_BADID:
-            try:
-                H5Idec_ref(self.id)
-            except EnvironmentError:
-                pass
+        if not self._locked and H5Iget_type(self.id) != H5I_BADID:
+            H5Idec_ref(self.id)
 
+    def __copy__(self):
+        return type(self)(self.id)
 
+    def __str__(self):
+        if H5Iget_type(self.id) != H5I_BADID:
+            ref = str(H5Iget_ref(self.id))
+        else:
+            ref = "INVALID"
+        if self._locked:
+            lstr = "locked"
+        else:
+            lstr = "unlocked"
 
+        return "%d [%s] (%s) %s" % (self.id, ref, lstr, self.__class__.__name__)
 
 
 
diff --git a/h5py/h5a.pxd b/h5py/h5a.pxd
index 59168a8..20961e8 100644
--- a/h5py/h5a.pxd
+++ b/h5py/h5a.pxd
@@ -15,6 +15,10 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class AttrID(ObjectID):
+    pass
 
 cdef extern from "hdf5.h":
 
@@ -41,4 +45,3 @@ cdef extern from "hdf5.h":
 
 
 
-
diff --git a/h5py/h5a.pyx b/h5py/h5a.pyx
index d69ed32..fe79636 100644
--- a/h5py/h5a.pyx
+++ b/h5py/h5a.pyx
@@ -16,8 +16,8 @@
 
 # Pyrex compile-time imports
 from h5p cimport H5P_DEFAULT
-from h5t cimport PY_H5Tclose
-from h5s cimport H5Sclose
+from h5t cimport TypeID, PY_H5Tclose
+from h5s cimport SpaceID, H5Sclose
 
 from numpy cimport import_array, ndarray, PyArray_DATA
 from utils cimport  check_numpy_read, check_numpy_write, \
@@ -32,17 +32,19 @@ import_array()
 
 # === General attribute operations ============================================
 
-def create(hid_t loc_id, char* name, hid_t type_id, hid_t space_id):
-    """ (INT loc_id, STRING name, INT type_id, INT space_id) => INT attr_id
+def create(ObjectID loc_id not None, char* name, TypeID type_id not None, 
+            SpaceID space_id not None):
+    """ (ObjectID loc_id, STRING name, TypeID type_id, SpaceID space_id) 
+        => INT attr_id
 
         Create a new attribute attached to a parent object, specifiying an 
         HDF5 datatype and dataspace.  For a friendlier version of this function
         try py_create().
     """
-    return H5Acreate(loc_id, name, type_id, space_id, H5P_DEFAULT)
+    return AttrID(H5Acreate(loc_id.id, name, type_id.id, space_id.id, H5P_DEFAULT))
 
-def open_idx(hid_t loc_id, int idx):
-    """ (INT loc_id, UINT idx) => INT attr_id
+def open_idx(ObjectID loc_id not None, int idx):
+    """ (ObjectID loc_id, UINT idx) => INT attr_id
 
         Open an exisiting attribute on an object, by zero-based index.
     """
@@ -51,131 +53,35 @@ def open_idx(hid_t loc_id, int idx):
     # HDF5 library.
     if idx < 0:
         raise ValueError("Index must be a non-negative integer.")
-    return H5Aopen_idx(loc_id, idx)
+    return AttrID(H5Aopen_idx(loc_id.id, idx))
 
-def open_name(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name) => INT attr_id
+def open_name(ObjectID loc_id not None, char* name):
+    """ (ObjectID loc_id, STRING name) => INT attr_id
 
         Open an existing attribute on an object, by name.
     """
-    return H5Aopen_name(loc_id, name)
+    return AttrID(H5Aopen_name(loc_id.id, name))
 
-def close(hid_t attr_id):
-    """ (INT attr_id)
+def close(AttrID attr_id not None):
+    """ (AttrID attr_id)
 
         Close this attribute and release resources.
     """
-    H5Aclose(attr_id)
+    H5Aclose(attr_id.id)
 
-def delete(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name)
+def delete(ObjectID loc_id not None, char* name):
+    """ (ObjectID loc_id, STRING name)
 
         Remove an attribute from an object.
     """
-    H5Adelete(loc_id, name)
+    H5Adelete(loc_id.id, name)
 
-# === Attribute I/O ===========================================================
-
-def read(hid_t attr_id, ndarray arr_obj):
-    """ (INT attr_id, NDARRAY arr_obj)
-        
-        Read the attribute data into the given Numpy array.  Note that the 
-        Numpy array must have the same shape as the HDF5 attribute, and a 
-        conversion-compatible datatype.
-
-        The Numpy array must be writable, C-contiguous and own its data.  If
-        this is not the case, an ValueError is raised and the read fails.
-    """
-    cdef hid_t mtype_id
-    cdef hid_t space_id
-    mtype_id = 0
-    space_id = 0
-
-    try:
-        mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
-        space_id = H5Aget_space(attr_id)
-        check_numpy_write(arr_obj, space_id)
-
-        H5Aread(attr_id, mtype_id, PyArray_DATA(arr_obj))
-
-    finally:
-        if mtype_id:
-            PY_H5Tclose(mtype_id)
-        if space_id:
-            H5Sclose(space_id)
-
-def write(hid_t attr_id, ndarray arr_obj):
-    """ (INT attr_id, NDARRAY arr_obj)
-
-        Write the contents of a Numpy array too the attribute.  Note that the 
-        Numpy array must have the same shape as the HDF5 attribute, and a 
-        conversion-compatible datatype.  
-
-        The Numpy array must be C-contiguous and own its data.  If this is not
-        the case, ValueError will be raised and the write will fail.
-    """
-    
-    cdef hid_t mtype_id
-    cdef hid_t space_id
-    mtype_id = 0
-    space_id = 0
-
-    try:
-        mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
-        space_id = H5Aget_space(attr_id)
-        check_numpy_read(arr_obj, space_id)
-
-        H5Awrite(attr_id, mtype_id, PyArray_DATA(arr_obj))
-
-    finally:
-        if mtype_id:
-            PY_H5Tclose(mtype_id)
-        if space_id:
-            H5Sclose(space_id)
-
-# === Attribute inspection ====================================================
-
-def get_num_attrs(hid_t loc_id):
-    """ (INT loc_id) => INT number_of_attributes
+def get_num_attrs(ObjectID loc_id not None):
+    """ (ObjectID loc_id) => INT number_of_attributes
 
         Determine the number of attributes attached to an HDF5 object.
     """
-    return H5Aget_num_attrs(loc_id)
-
-def get_name(hid_t attr_id):
-    """ (INT attr_id) => STRING name
-
-        Determine the name of an attribute, given its identifier.
-    """
-    cdef int blen
-    cdef char* buf
-    buf = NULL
-
-    try:
-        blen = H5Aget_name(attr_id, 0, NULL)
-        assert blen >= 0
-        buf = <char*>emalloc(sizeof(char)*blen+1)
-        blen = H5Aget_name(attr_id, blen+1, buf)
-        strout = buf
-    finally:
-        efree(buf)
-
-    return strout
-
-def get_space(hid_t attr_id):
-    """ (INT attr_id) => INT space_id
-
-        Create and return a copy of the attribute's dataspace.
-    """
-    return H5Aget_space(attr_id)
-
-def get_type(hid_t attr_id):
-    """ (INT attr_id) => INT type_id
-
-        Create and return a copy of the attribute's datatype.
-    """
-    return H5Aget_type(attr_id)
-
+    return H5Aget_num_attrs(loc_id.id)
 
 cdef herr_t iter_cb(hid_t loc_id, char *attr_name, object int_tpl):
 
@@ -193,9 +99,8 @@ cdef herr_t iter_cb(hid_t loc_id, char *attr_name, object int_tpl):
 
     return 0
 
-
-def iterate(hid_t loc_id, object func, object data=None, int startidx=0):
-    """ (INT loc_id, FUNCTION func, OBJECT data=None, UINT startidx=0)
+def iterate(ObjectID loc_id not None, object func, object data=None, int startidx=0):
+    """ (ObjectID loc_id, FUNCTION func, OBJECT data=None, UINT startidx=0)
         => INT last_attribute_index
 
         Iterate an arbitrary Python function over the attributes attached
@@ -218,140 +123,161 @@ def iterate(hid_t loc_id, object func, object data=None, int startidx=0):
 
     int_tpl = (func, data,[])
 
-    retval = H5Aiterate(loc_id, &i, <H5A_operator_t>iter_cb, int_tpl)
+    retval = H5Aiterate(loc_id.id, &i, <H5A_operator_t>iter_cb, int_tpl)
 
     if retval < 0:
         if len(int_tpl[2]) != 0:
             raise int_tpl[2][0]
 
-# === Python extensions =======================================================
+# === Attribute class & methods ===============================================
 
-# Pyrex doesn't allow lambdas
-def _name_cb(hid_t loc_id, char* name, data):
-    data.append(name)
+cdef class AttrID(ObjectID):
 
-def py_listattrs(hid_t loc_id):
-    """ (INT loc_id) => LIST attribute_list
-
-        Create a Python list of attribute names attached to this object.
-    """
-    nlist = []
-    iterate(loc_id, _name_cb, nlist)
-    return nlist
-    
-def py_create(hid_t loc_id, char* name, object dtype_in, object shape):
-    """ (INT loc_id, STRING name, DTYPE dtype_in, TUPLE shape)
-
-        Create an attribute from a Numpy dtype and a shape tuple.  To
-        create a scalar attribute, provide an empty tuple. If you're creating
-        an attribute from an existing array or scalar, consider using py_set().
     """
-    cdef hid_t sid
-    cdef hid_t type_id
-    cdef hid_t aid
-    sid = 0
-    type_id = 0
-
-    try:
-        sid = h5s.create_simple(shape)
-        type_id = h5t.py_translate_dtype(dtype_in)
-
-        return create(loc_id, name, type_id, sid)
-
-    finally:
-        if sid:
-            H5Sclose(sid)
-        if type_id:
-            PY_H5Tclose(type_id)
+        Logical representation of an HDF5 attribute identifier.
 
+        Objects of this class can be used in any HDF5 function call
+        which expects an attribute identifier.  Additionally, all H5A*
+        functions which always take an attribute instance as the first
+        argument are presented as methods of this class.  
 
-def py_shape(hid_t attr_id):
-    """ (INT attr_id) => TUPLE shape
+        Properties:
 
-        Retrieve the dataspace of this attribute, as a Numpy-style shape tuple.
+        name:   The attribute's name
+        dtype:  A Numpy dtype representing this attribute's type
+        shape:  A Numpy-style shape tuple representing the dataspace
     """
-    cdef hid_t sid
-    sid = 0
-    
-    try:
-        sid = H5Aget_space(attr_id)
-        return h5s.get_simple_extent_dims(sid)
-
-    finally:
-        if sid:
-            H5Sclose(sid)
-
-def py_dtype(hid_t attr_id):
-    """ (INT attr_id) => DTYPE numpy_dtype
-
-        Obtain the data-type of this attribute as a Numpy dtype.  Note that the
-        resulting dtype is not guaranteed to be byte-for-byte compatible with
-        the underlying HDF5 datatype, but is appropriate for use in e.g. the 
-        read() and write() functions defined in this module.
-    """
-    cdef hid_t type_id
-    type_id = 0
-    
-    try:
-        type_id = H5Aget_type(attr_id)
-        return h5t.py_translate_h5t(type_id)
-    finally:
-        if type_id:
-            PY_H5Tclose(type_id)
-
-def py_get(hid_t parent_id, char* name):
-    """ (INT parent_id, STRING name)
-
-        Read an attribute and return the contents as a Numpy ndarray.
-        A 0-dimensional array is returned in the case of a scalar attribute.
-    """
-    cdef hid_t attr_id
-    attr_id = H5Aopen_name(parent_id, name)
-    try:
-        space = py_shape(attr_id)
-        dtype = py_dtype(attr_id)
-
-        arr = ndarray(space, dtype=dtype)
-        read(attr_id, arr)
-        return arr
-
-    finally:
-        H5Aclose(attr_id)
-
-def py_set(hid_t parent_id, char* name, ndarray arr):
-    """ (INT parent_id, STRING name, NDARRAY arr)
-
-        Create an attribute and initialize its type, space, and contents to
-        a Numpy ndarray.  Note that this function does not return an
-        identifier; the attribute is created and then closed.  Fails if an 
-        attribute of the same name already exists.
-    """
-    cdef hid_t attr_id
-    attr_id = py_create(parent_id, name, arr.dtype, arr.shape)
-    try:
-        write(attr_id, arr)
-    except:
-        H5Aclose(attr_id)
-        H5Adelete(parent_id, name)
-        raise
-
-    H5Aclose(attr_id)
-
-def py_exists(hid_t parent_id, char* name):
-    """ (INT parent_id, STRING name) => BOOL exists
-
-        Determine if the specified attribute exists.  Useful before calling
-        py_set().
-    """
-    cdef hid_t attr_id
-    
-    try:
-        attr_id = H5Aopen_name(parent_id, name)
-    except:
-        return False
+    property name:
+        def __get__(self):
+            return self.get_name()
+
+    property shape:
+        def __get__(self):
+            """ Retrieve the dataspace of this attribute, as a Numpy-style 
+                shape tuple.
+            """
+            cdef hid_t sid
+            sid = 0
+            try:
+                sid = H5Aget_space(self.id)
+                return h5s.get_simple_extent_dims(sid)
+            finally:
+                if sid:
+                    H5Sclose(sid)
+
+    property dtype:
+        def __get__(self):
+            """ Obtain the data-type of this attribute as a Numpy dtype.  Note that the
+                resulting dtype is not guaranteed to be byte-for-byte compatible with
+                the underlying HDF5 datatype, but is appropriate for use in e.g. the 
+                read() and write() functions defined in this module.
+            """
+            cdef hid_t type_id
+            type_id = 0
+            
+            try:
+                type_id = H5Aget_type(self.id)
+                return h5t.py_translate_h5t(type_id)
+            finally:
+                if type_id:
+                    PY_H5Tclose(type_id)
+
+    def read(self, ndarray arr_obj not None):
+        """ (NDARRAY arr_obj)
+            
+            Read the attribute data into the given Numpy array.  Note that the 
+            Numpy array must have the same shape as the HDF5 attribute, and a 
+            conversion-compatible datatype.
+
+            The Numpy array must be writable, C-contiguous and own its data.  If
+            this is not the case, a ValueError is raised and the read fails.
+        """
+        cdef hid_t attr_id
+        cdef hid_t mtype_id
+        cdef hid_t space_id
+        attr_id = self.id
+        mtype_id = 0
+        space_id = 0
+
+        try:
+            mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
+            space_id = H5Aget_space(attr_id)
+            check_numpy_write(arr_obj, space_id)
+
+            H5Aread(attr_id, mtype_id, PyArray_DATA(arr_obj))
+
+        finally:
+            if mtype_id:
+                PY_H5Tclose(mtype_id)
+            if space_id:
+                H5Sclose(space_id)
+
+    def write(self, ndarray arr_obj not None):
+        """ (NDARRAY arr_obj)
+
+            Write the contents of a Numpy array to the attribute.  Note that the 
+            Numpy array must have the same shape as the HDF5 attribute, and a 
+            conversion-compatible datatype.  
+
+            The Numpy array must be C-contiguous and own its data.  If this is not
+            the case, ValueError will be raised and the write will fail.
+        """
+        cdef hid_t attr_id
+        cdef hid_t mtype_id
+        cdef hid_t space_id
+        attr_id = self.id
+        mtype_id = 0
+        space_id = 0
+
+        try:
+            mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
+            space_id = H5Aget_space(attr_id)
+            check_numpy_read(arr_obj, space_id)
+
+            H5Awrite(attr_id, mtype_id, PyArray_DATA(arr_obj))
+
+        finally:
+            if mtype_id:
+                PY_H5Tclose(mtype_id)
+            if space_id:
+                H5Sclose(space_id)
+
+    def get_name(self):
+        """ () => STRING name
+
+            Determine the name of an attribute, given its identifier.
+        """
+        cdef hid_t attr_id
+        cdef int blen
+        cdef char* buf
+        attr_id = self.id
+        buf = NULL
+
+        try:
+            blen = H5Aget_name(attr_id, 0, NULL)
+            assert blen >= 0
+            buf = <char*>emalloc(sizeof(char)*blen+1)
+            blen = H5Aget_name(attr_id, blen+1, buf)
+            strout = buf
+        finally:
+            efree(buf)
+
+        return strout
+
+    def get_space(self):
+        """ () => INT space_id
+
+            Create and return a copy of the attribute's dataspace.
+        """
+        return SpaceID(H5Aget_space(self.id))
+
+    def get_type(self):
+        """ () => INT type_id
+
+            Create and return a copy of the attribute's datatype.
+        """
+        return TypeID(H5Aget_type(self.id))
 
-    H5Aclose(attr_id)
-    return True
 
 
 
diff --git a/h5py/h5d.pxd b/h5py/h5d.pxd
index b0cd6aa..55e9db6 100644
--- a/h5py/h5d.pxd
+++ b/h5py/h5d.pxd
@@ -15,6 +15,10 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class DatasetID(ObjectID):
+    pass
 
 cdef extern from "hdf5.h":
 
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
index 8875d53..aeae3ed 100644
--- a/h5py/h5d.pyx
+++ b/h5py/h5d.pyx
@@ -23,6 +23,9 @@
 """
 
 # Pyrex compile-time imports
+from h5t cimport TypeID
+from h5s cimport SpaceID
+from h5p cimport PropID, pdefault
 from h5s cimport H5S_ALL, H5S_UNLIMITED, H5S_SCALAR, H5S_SIMPLE, \
                     H5Sget_simple_extent_type, H5Sclose, H5Sselect_all, \
                     H5Sget_simple_extent_ndims, H5Sget_select_npoints
@@ -66,541 +69,205 @@ FILL_VALUE_USER_DEFINED = H5D_FILL_VALUE_USER_DEFINED
 
 # === Basic dataset operations ================================================
 
-def create(int loc_id, char* name, hid_t type_id, hid_t space_id, hid_t plist=H5P_DEFAULT):
-    """ ( INT loc_id, STRING name, INT type_id, INT space_id,
-          INT plist=H5P_DEFAULT ) 
-        => INT dataset_id
+def create(ObjectID loc_id not None, char* name, TypeID type_id not None, 
+            SpaceID space_id not None, PropID plist=None):
+    """ (ObjectID loc_id, STRING name, TypeID type_id, SpaceID space_id,
+          PropID plist=None ) 
+        => DatasetID
 
         Create a new dataset under an HDF5 file or group id.  Keyword plist 
-        should be a dataset creation property list.
+        may be a dataset creation property list.
 
         For a friendlier version of this function, try py_create()
     """
-    return H5Dcreate(loc_id, name, type_id, space_id, plist)
+    cdef hid_t plist_id
+    plist_id = pdefault(plist)
+    return DatasetID(H5Dcreate(loc_id.id, name, type_id.id, space_id.id, plist_id))
 
-def open(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name) => INT dataset_id
+def open(ObjectID loc_id not None, char* name):
+    """ (ObjectID loc_id, STRING name) => DatasetID
 
         Open an existing dataset attached to a group or file object, by name.
     """
-    return H5Dopen(loc_id, name)
+    return DatasetID(H5Dopen(loc_id.id, name))
 
-def close(hid_t dset_id):
-    """ (INT dset_id)
+def close(DatasetID dset_id not None):
+    """ (DatasetID dset_id)
     """
-    H5Dclose(dset_id)
+    H5Dclose(dset_id.id)
 
 # === Dataset I/O =============================================================
 
-def read(hid_t dset_id, hid_t mspace_id, hid_t fspace_id, ndarray arr_obj, 
-                                                    hid_t plist=H5P_DEFAULT):
-    """ ( INT dset_id, INT mspace_id, INT fspace_id, NDARRAY arr_obj, 
-          INT plist=H5P_DEFAULT)
+cdef class DatasetID(ObjectID):
 
-        Read data from an HDF5 dataset into a Numpy array.  For maximum 
-        flexibility, you can specify dataspaces for the file and the Numpy
-        object. Keyword plist may be a dataset transfer property list.
-
-        The provided Numpy array must be writable, C-contiguous, and own
-        its data.  If this is not the case, ValueError will be raised and the 
-        read will fail.
-
-        It is your responsibility to ensure that the memory dataspace
-        provided is compatible with the shape of the Numpy array.  Since a
-        wide variety of dataspace configurations are possible, this is not
-        checked.  You can easily crash Python by reading in data from too
-        large a dataspace.
-        
-        For a friendlier version of this function, try py_read_slab().
-    """
-    cdef hid_t mtype_id
-    mtype_id = 0
-
-    try:
-        mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
-        check_numpy_write(arr_obj, -1)
-
-        H5Dread(dset_id, mtype_id, mspace_id, fspace_id, plist, PyArray_DATA(arr_obj))
-
-    finally:
-        if mtype_id:
-            PY_H5Tclose(mtype_id)
-        
-def write(hid_t dset_id, hid_t mspace_id, hid_t fspace_id, ndarray arr_obj, 
-                                                    hid_t plist=H5P_DEFAULT):
-    """ ( INT dset_id, INT mspace_id, INT fspace_id, NDARRAY arr_obj, 
-          INT plist=H5P_DEFAULT )
-
-        Write data from a Numpy array to an HDF5 dataset. Keyword plist may be 
-        a dataset transfer property list.
-
-        The provided Numpy array must be C-contiguous, and own its data.  If 
-        this is not the case, ValueError will be raised and the read will fail.
-
-        For a friendlier version of this function, try py_write_slab()
-    """
-    cdef hid_t mtype_id
-    mtype_id = 0
-
-    try:
-        mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
-        check_numpy_read(arr_obj, -1)
-
-        H5Dwrite(dset_id, mtype_id, mspace_id, fspace_id, plist, PyArray_DATA(arr_obj))
-
-    finally:
-        if mtype_id:
-            PY_H5Tclose(mtype_id)
-
-def extend(hid_t dset_id, object shape):
-    """ (INT dset_id, TUPLE shape)
-
-        Extend the given dataset so it's at least as big as "shape".  Note that
-        a dataset may only be extended up to the maximum dimensions of its
-        dataspace, which are fixed when the dataset is created.
-    """
-    cdef hsize_t* dims
-    cdef int rank
-    cdef hid_t space_id
-    space_id = 0
-    dims = NULL
-
-    try:
-        space_id = H5Dget_space(dset_id)
-        rank = H5Sget_simple_extent_ndims(space_id)
-
-        require_tuple(shape, 0, rank, "shape")
-        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-        convert_tuple(shape, dims, rank)
-        H5Dextend(dset_id, dims)
-
-    finally:
-        efree(dims)
-        if space_id:
-            H5Sclose(space_id)
-
-# === Dataset inspection ======================================================
-
-def get_space(hid_t dset_id):
-    """ (INT dset_id) => INT space_id
-
-        Create and return a new copy of the dataspace for this dataset.  
-        You're responsible for closing it.
-    """
-    return H5Dget_space(dset_id)
-
-def get_space_status(hid_t dset_id):
-    """ (INT dset_id) => INT space_status_code
-
-        Determine if space has been allocated for a dataset.  
-        Return value is one of:
-            SPACE_STATUS_NOT_ALLOCATED
-            SPACE_STATUS_PART_ALLOCATED
-            SPACE_STATUS_ALLOCATED 
     """
-    cdef H5D_space_status_t status
-    H5Dget_space_status(dset_id, &status)
-    return <int>status
-
-def get_type(hid_t dset_id):
-    """ (INT dset_id) => INT type_id
-
-        Create and return a new copy of the datatype for this dataset.
-        You're responsible for closing it.
-    """
-    return H5Dget_type(dset_id)
-
-def get_create_plist(hid_t dset_id):
-    """ (INT dset_id) => INT property_list_id
-
-        Create a new copy of the dataset creation property list used when this
-        dataset was created.  You're responsible for closing it.
-    """
-    return H5Dget_create_plist(dset_id)
-
-def get_offset(hid_t dset_id):
-    """ (INT dset_id) => LONG offset
-
-        Get the offset of this dataset in the file, in bytes.
-    """
-    return H5Dget_offset(dset_id)
-
-def get_storage_size(hid_t dset_id):
-    """ (INT dset_id) => LONG storage_size
-
-        Determine the amount of file space required for a dataset.  Note this
-        only counts the space which has actually been allocated; it may even
-        be zero.
-    """
-    return H5Dget_storage_size(dset_id)
-
-# === Python extensions =======================================================
-
-def py_create(hid_t parent_id, char* name, object data=None, object dtype=None,
-              object shape=None, object chunks=None, object compression=None,
-              object shuffle=False, object fletcher32=False):
-    """ ( INT parent_id, STRING name, NDARRAY data=None, DTYPE dtype=None,
-          TUPLE shape=None, TUPLE chunks=None, PY_INT compression=None,
-          BOOL shuffle=False, BOOL fletcher32=False )
-        => INT dataset_id
-
-        Create an HDF5 dataset from Python.  You must supply *either* a Numpy
-        array, in which case the dataset will be initialized to its type,
-        shape, and contents, *or* both a tuple giving the dimensions and a 
-        Numpy dtype object.
-
-        This function also works for scalar arrays; providing a "shape" tuple 
-        of () or a 0-dimensional array for "data" will result in a scalar 
-        (h5s.SCALAR) dataspace for the new dataset, rather than a slab
-        (h5s.SIMPLE).
-
-        Additional options (* is default):
-        chunks          A tuple containing chunk sizes, or *None
-        compression     Enable DEFLATE compression at this level (0-9) or *None
-        shuffle         Enable/*disable shuffle filter
-        fletcher32      Enable/*disable Fletcher32 error detection
-    """
-    cdef hid_t dset_id
-    cdef hid_t type_id
-    cdef hid_t space_id
-    cdef hid_t plist
-    space_id = 0
-    type_id = 0
-    dset_id = 0
-    plist = 0
-
-    if (data is None and not (dtype and shape)) or (data is not None and (dtype or shape)):
-        raise ValueError("*Either* a Numpy array *or* both a dtype and shape must be provided.")
-
-    if data is not None:
-        shape = data.shape
-        dtype = data.dtype
-
-    try:
-        if len(shape) == 0:
-            space_id = h5s.create(H5S_SCALAR)  # let's be explicit
-        else:
-            space_id = h5s.create_simple(shape)
-
-        type_id = h5t.py_translate_dtype(dtype)
-    
-        if( chunks or compression or shuffle or fletcher32):
-            plist = h5p.create(H5P_DATASET_CREATE)
-            if chunks:
-                h5p.set_chunk(plist, chunks)    # required for compression
-            if shuffle:
-                h5p.set_shuffle(plist)          # must immediately precede compression
-            if compression:
-                h5p.set_deflate(plist, compression)
-            if fletcher32:
-                h5p.set_fletcher32(plist)
-        else:
-            plist = H5P_DEFAULT
-
-        dset_id = create(parent_id, name, type_id, space_id, plist)
-
-        if data is not None:
-            write(dset_id, H5S_ALL, H5S_ALL, data)
-
-    finally:
-        if space_id:
-            H5Sclose(space_id)
-        if type_id:
-            PY_H5Tclose(type_id)
-        if plist:
-            H5Pclose(plist)
-
-    return dset_id
-
-def py_read_slab(hid_t ds_id, object start, object count, 
-                 object stride=None, dtype=None):
-    """ (INT ds_id, TUPLE start, TUPLE count, TUPLE stride=None, 
-         DTYPE dtype=None)
-        => NDARRAY numpy_array_out
-
-        Read a hyperslab from an existing HDF5 dataset, and return it as a
-        Numpy array. Dimensions are specified by:
-
-        start:  Tuple of integers indicating the start of the selection.
-
-        count:  Tuple of integers indicating how many elements to read.
-
-        stride: Pitch of the selection.  Data points at <start> are always
-                selected.  If None (default), the HDF5 library default of "1" 
-                will be used for all axes.
-
-        If a Numpy dtype object is passed in through "dtype", it will be used
-        as the type object for the returned array, and the library will attempt
-        to convert between datatypes during the read operation.  If no
-        automatic conversion path exists, an exception will be raised.
-
-        As is customary when slicing into Numpy array objects, no dimensions 
-        with length 1 are present in the returned array.  Additionally, if the
-        HDF5 dataset has a scalar dataspace, then only None or empty tuples are
-        allowed for start, count and stride, and the returned array will be
-        0-dimensional (arr.shape == ()).
-    """
-    cdef hid_t mem_space
-    cdef hid_t file_space
-    cdef hid_t type_id
-    cdef int rank
-    cdef int i
-
-    mem_space  = 0
-    file_space = 0
-    type_id    = 0
-
-    try:
-        # Obtain the Numpy dtype of the array
-        if dtype is None:
-            type_id = H5Dget_type(ds_id)
-            dtype = h5t.py_translate_h5t(type_id)
-
-        file_space = H5Dget_space(ds_id)
-        space_type = H5Sget_simple_extent_type(file_space)
-        
-        if space_type == H5S_SCALAR:
-
-            # This probably indicates a logic error in the caller's program;
-            # don't just ignore it.
-            for item in (start, count, stride):
-                if item is not None and item != ():
-                    raise ValueError("For a scalar dataset, start/count/stride must be None or ().")
-
-            arr = ndarray( (), dtype=dtype)
-            read(ds_id, H5S_ALL, H5S_ALL, arr)
-
-        elif space_type == H5S_SIMPLE:
-
-            # Attempt hyperslab selection on the dataset file space. 
-            # The selection function performs validation of start/count/stride.
-            h5s.select_hyperslab(file_space, start, count, stride)
-
-            # Initialize Numpy array; no singlet dimensions allowed.
-            npy_count = []
-            for i from 0<=i<len(count):
-                if count[i] != 0 and count[i] != 1:
-                    npy_count.append(count[i])
-            npy_count = tuple(npy_count)
-            arr = ndarray(npy_count, dtype=dtype)
-
-            mem_space = h5s.create_simple(npy_count)
-            read(ds_id, mem_space, file_space, arr)
-
-        else:
-            raise NotImplementedError("Dataspace type %d is unsupported" % space_type)
-
-    finally:
-        if mem_space:
-            H5Sclose(mem_space)
-        if file_space:
-            H5Sclose(file_space)
-        if type_id:
-            PY_H5Tclose(type_id)
-
-    return arr
-
-def py_write_slab(hid_t ds_id, ndarray arr, object start, object stride=None):
-    """ (INT ds_id, NDARRAY arr_obj, TUPLE start, TUPLE stride=None)
-
-        Write the entire contents of a Numpy array into an HDF5 dataset.
-        The size of the given array must fit within the dataspace of the
-        HDF5 dataset.
-
-        start:  Tuple of integers giving offset for write.
+        Represents an HDF5 dataset identifier.
 
-        stride: Pitch of write in dataset.  The elements of "start" are always
-                selected.  If None, the HDF5 library default value "1" will be 
-                used for all dimensions.
+        Objects of this class may be used in any HDF5 function which expects
+        a dataset identifier.  Also, all H5D* functions which take a dataset
+        instance as their first argument are presented as methods of this
+        class.
 
-        The underlying function depends on write access to the data area of the
-        Numpy array.  See the caveats in h5d.write.
-
-        Please note that this function does absolutely no array broadcasting;
-        if you want to write a (2,3) array to an (N,2,3) or (2,3,N) dataset,
-        you'll have to do it yourself from Numpy.
+        Properties:
+        dtype:  Numpy dtype representing the dataset type
+        shape:  Numpy-style shape tuple representing the dataspace
+        rank:   Integer giving dataset rank
     """
-    cdef hid_t mem_space
-    cdef hid_t file_space
-    cdef int rank
-
-    mem_space  = 0
-    file_space = 0
-
-    count = arr_obj.shape
-
-    try:
-        file_space = H5Dget_space(ds_id)
-        space_type = H5Sget_simple_extent_type(file_space)
+    property dtype:
+        def __get__(self):
+            pass
+
+    property shape:
+        def __get__(self):
+            pass
+
+    property rank:
+        def __get__(self):
+            pass
+
+    def read(self, SpaceID mspace_id not None, SpaceID fspace_id not None, 
+                   ndarray arr_obj not None, PropID plist=None):
+        """ (SpaceID mspace_id, SpaceID fspace_id, NDARRAY arr_obj, 
+             PropID plist=None)
+
+            Read data from an HDF5 dataset into a Numpy array.  For maximum 
+            flexibility, you can specify dataspaces for the file and the Numpy
+            object. Keyword plist may be a dataset transfer property list.
+
+            The provided Numpy array must be writable, C-contiguous, and own
+            its data.  If this is not the case, ValueError will be raised and 
+            the read will fail.
+
+            It is your responsibility to ensure that the memory dataspace
+            provided is compatible with the shape of the Numpy array.  Since a
+            wide variety of dataspace configurations are possible, this is not
+            checked.  You can easily crash Python by reading in data from too
+            large a dataspace.
+            
+            NOTE(review): py_read_slab() is removed by this commit; docstring reference is stale.
+        """
+        cdef hid_t mtype_id
+        cdef hid_t plist_id
+        plist_id = pdefault(plist)
+        mtype_id = 0
+
+        try:
+            mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
+            check_numpy_write(arr_obj, -1)
+
+            H5Dread(self.id, mtype_id, mspace_id.id, fspace_id.id, plist_id, PyArray_DATA(arr_obj))
+
+        finally:
+            if mtype_id:
+                PY_H5Tclose(mtype_id)
         
-        if space_type == H5S_SCALAR:
-
-            for item in (start, count, stride):
-                if item is not None and item != ():
-                    raise ValueError("For a scalar dataset, start/count/stride must be None or ().")
-            write(ds_id, H5S_ALL, H5S_ALL, arr)
-
-        elif space_type == H5S_SIMPLE:
-
-            # Attempt hyperslab selection on the dataset file space. 
-            # The selection function performs validation of start/count/stride.
-            h5s.select_hyperslab(file_space, start, count, stride)
-            mem_space = h5s.create_simple(count)
-
-            write(ds_id, mem_space, file_space, arr)
-
-        else:
-            raise ValueError("Dataspace type %d is unsupported" % space_type)
-
-    finally:
-        if mem_space:
-            H5Sclose(mem_space)
-        if file_space:
-            H5Sclose(file_space)
-
-def py_shape(hid_t dset_id):
-    """ (INT dset_id) => TUPLE shape
-
-        Obtain the dataspace of an HDF5 dataset, as a tuple.
-    """
-    cdef int space_id
-    space_id = 0
-
-    try:
-        space_id = H5Dget_space(dset_id)
-        shape = h5s.get_simple_extent_dims(space_id)
-        return shape
-    finally:
-        if space_id:
-            H5Sclose(space_id)
-
-def py_rank(hid_t dset_id):
-    """ (INT dset_id) => INT rank
-
-        Obtain the rank of an HDF5 dataset.
-    """
-    cdef int space_id
-    space_id = 0
-
-    try:
-        space_id = H5Dget_space(dset_id)
-        return H5Sget_simple_extent_ndims(space_id)
-    finally:
-        if space_id:
-            H5Sclose(space_id)
-
-def py_dtype(hid_t dset_id):
-    """ (INT dset_id) => DTYPE numpy_dtype
-
-        Get the datatype of an HDF5 dataset, converted to a Numpy dtype.
-    """
-    cdef hid_t type_id
-    type_id = 0
-
-    try:
-        type_id = H5Dget_type(dset_id)
-        return h5t.py_translate_h5t(type_id)
-    finally:
-        if type_id:
-            PY_H5Tclose(type_id)
-
-def py_patch(hid_t ds_source, hid_t ds_sink, hid_t transfer_space):
-    """ (INT ds_source, INT ds_sink, INT transfer_space)
-
-        Transfer selected elements from one dataset to another.  The transfer
-        selection must be compatible with both the source and sink datasets, or
-        an exception will be raised. 
-
-        This function will allocate a memory buffer large enough to hold the
-        entire selection at once.  Looping and memory limitation constraints 
-        are the caller's responsibility.
-    """
-    cdef hid_t source_space 
-    cdef hid_t sink_space
-    cdef hid_t mem_space
-    cdef hid_t source_type
-    cdef void* xfer_buf
-
-    cdef hssize_t npoints
-    cdef size_t type_size
-
-    source_space = 0    
-    sink_space = 0
-    mem_space = 0
-    source_type = 0
-    xfer_buf = NULL
-
-    try:
-        source_space = H5Dget_space(ds_source)
-        sink_space = H5Dget_space(sink)
-        source_type = H5Dget_type(source)
-
-        npoints = H5Sget_select_npoints(space_id)
-        type_size = H5Tget_size(source_type)
-
-        mem_space = h5s.create_simple((npoints,))
-        H5Sselect_all(mem_space)
-
-        # This assumes that reading into a contiguous buffer and then writing
-        # out again to the same selection preserves the arrangement of data
-        # elements.  I think this is a reasonable assumption.
-
-        xfer_buf = emalloc(npoints*type_size)
-
-        # Let the HDF5 library do dataspace validation; the worst that can
-        # happen is that the write will fail after taking a while to read.
-
-        H5Dread(ds_source, source_type, mem_space, transfer_space, H5P_DEFAULT, xfer_buf)
-        H5Dwrite(ds_sink, source_type, mem_space, transfer_space, H5P_DEFAULT, xfer_buf)
-
-    finally:
-        efree(xfer_buf)
-        if source_space:
-            H5Sclose(source_space)
-        if sink_space:
-            H5Sclose(sink_space)
-        if mem_space:
-            H5Sclose(mem_space)
-        if source_type:
-            PY_H5Tclose(source_type)
-
-
-PY_LAYOUT = DDict({ H5D_COMPACT: 'COMPACT LAYOUT', 
-               H5D_CONTIGUOUS: 'CONTIGUOUS LAYOUT',
-               H5D_CHUNKED: 'CHUNKED LAYOUT'})
-PY_ALLOC_TIME = DDict({ H5D_ALLOC_TIME_DEFAULT: 'DEFAULT ALLOC TIME', 
-                        H5D_ALLOC_TIME_LATE:'LATE ALLOC TIME',
-                        H5D_ALLOC_TIME_EARLY: 'EARLY ALLOC TIME', 
-                        H5D_ALLOC_TIME_INCR: 'INCR ALLOC TIME' })
-PY_SPACE_STATUS = DDict({ H5D_SPACE_STATUS_NOT_ALLOCATED: 'SPACE NOT ALLOCATED', 
-                    H5D_SPACE_STATUS_PART_ALLOCATED: 'SPACE PARTIALLY ALLOCATED',
-                    H5D_SPACE_STATUS_ALLOCATED: 'SPACE FULLY ALLOCATED'})
-PY_FILL_TIME = DDict({ H5D_FILL_TIME_ALLOC: 'FILL AT ALLOCATION TIME',
-                        H5D_FILL_TIME_NEVER: 'NEVER FILL',
-                        H5D_FILL_TIME_IFSET: 'FILL IF SET'})
-PY_FILL_VALUE = DDict({H5D_FILL_VALUE_UNDEFINED: 'UNDEFINED FILL VALUE',
-                        H5D_FILL_VALUE_DEFAULT: 'DEFAULT FILL VALUE',
-                        H5D_FILL_VALUE_USER_DEFINED: 'USER-DEFINED FILL VALUE'})
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    def write(self, SpaceID mspace_id not None, SpaceID fspace_id not None, 
+                    ndarray arr_obj not None, PropID plist=None):
+        """ (SpaceID mspace_id, SpaceID fspace_id, NDARRAY arr_obj, 
+             PropID plist=None)
+
+            Write data from a Numpy array to an HDF5 dataset. Keyword plist may be 
+            a dataset transfer property list.
+
+            The provided Numpy array must be C-contiguous, and own its data.  If 
+            this is not the case, ValueError will be raised and the read will fail.
+
+            NOTE(review): py_write_slab() is removed by this commit; docstring reference is stale.
+        """
+        cdef hid_t mtype_id
+        cdef hid_t plist_id
+        plist_id = pdefault(plist)
+        mtype_id = 0
+
+        try:
+            mtype_id = h5t.py_translate_dtype(arr_obj.dtype)
+            check_numpy_read(arr_obj, -1)
+
+            H5Dwrite(self.id, mtype_id, mspace_id.id, fspace_id.id, plist_id, PyArray_DATA(arr_obj))
+
+        finally:
+            if mtype_id:
+                PY_H5Tclose(mtype_id)
+
+    def extend(self, object shape):
+        """ (TUPLE shape)
+
+            Extend the given dataset so it's at least as big as "shape".  Note 
+            that a dataset may only be extended up to the maximum dimensions of 
+            its dataspace, which are fixed when the dataset is created.
+        """
+        cdef hid_t dset_id
+        cdef hsize_t* dims
+        cdef int rank
+        cdef hid_t space_id
+        dset_id = self.id
+        space_id = 0
+        dims = NULL
+
+        try:
+            space_id = H5Dget_space(dset_id)
+            rank = H5Sget_simple_extent_ndims(space_id)
+
+            require_tuple(shape, 0, rank, "shape")
+            dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+            convert_tuple(shape, dims, rank)
+            H5Dextend(dset_id, dims)
+
+        finally:
+            efree(dims)
+            if space_id:
+                H5Sclose(space_id)
+
+    def get_space(self):
+        """ () => SpaceID space_id
+
+            Create and return a new copy of the dataspace for this dataset.
+        """
+        return SpaceID(H5Dget_space(self.id))
+
+    def get_space_status(self):
+        """ () => INT space_status_code
+
+            Determine if space has been allocated for a dataset.  
+            Return value is one of:
+                SPACE_STATUS_NOT_ALLOCATED
+                SPACE_STATUS_PART_ALLOCATED
+                SPACE_STATUS_ALLOCATED 
+        """
+        cdef H5D_space_status_t status
+        H5Dget_space_status(self.id, &status)
+        return <int>status
+
+    def get_type(self):
+        """ () => TypeID
+
+            Create and return a new copy of the datatype for this dataset.
+        """
+        return TypeID(H5Dget_type(self.id))
+
+    def get_create_plist(self):
+        """ () => PropDSCreateID
+
+            Create a new copy of the dataset creation property list used when 
+            this dataset was created.
+        """
+        return PropDSCreateID(H5Dget_create_plist(self.id))
+
+    def get_offset(self):
+        """ () => LONG offset
+
+            Get the offset of this dataset in the file, in bytes.
+        """
+        return H5Dget_offset(self.id)
+
+    def get_storage_size(self):
+        """ () => LONG storage_size
+
+            Determine the amount of file space required for a dataset.  Note this
+            only counts the space which has actually been allocated; it may even
+            be zero.
+        """
+        return H5Dget_storage_size(self.id)
 
 
 
diff --git a/h5py/h5f.pxd b/h5py/h5f.pxd
index 4165d48..9e3606d 100644
--- a/h5py/h5f.pxd
+++ b/h5py/h5f.pxd
@@ -15,6 +15,10 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class FileID(ObjectID):
+    pass
 
 cdef extern from "hdf5.h":
 
diff --git a/h5py/h5f.pyx b/h5py/h5f.pyx
index b9ba0bd..85d9740 100644
--- a/h5py/h5f.pyx
+++ b/h5py/h5f.pyx
@@ -15,7 +15,7 @@
 """
 
 # Pyrex compile-time imports
-from h5p cimport H5P_DEFAULT
+from h5p cimport PropID, pdefault, H5P_DEFAULT
 from utils cimport emalloc, efree, pybool
 
 # Runtime imports
@@ -47,47 +47,40 @@ OBJ_LOCAL   = H5F_OBJ_LOCAL
 
 # === File operations =========================================================
 
-def open(char* name, unsigned int flags=H5F_ACC_RDWR, hid_t access_id=H5P_DEFAULT):
-    """ (STRING name, UINT flags=ACC_RDWR, INT access_id=H5P_DEFAULT)
-        => INT file_id
+def open(char* name, unsigned int flags=H5F_ACC_RDWR, PropID accesslist=None):
+    """ (STRING name, UINT flags=ACC_RDWR, PropID accesslist=None)
+        => FileID
 
         Open an existing HDF5 file.  Keyword "flags" may be ACC_RWDR or
-        ACC_RDONLY.  Keyword "access_id" may be a file access property list.
+        ACC_RDONLY.
     """
-    return H5Fopen(name, flags, access_id)
+    cdef hid_t plist_id
+    plist_id = pdefault(accesslist)
+    return FileID(H5Fopen(name, flags, plist_id))
 
-def close(hid_t file_id):
-    """ (INT file_id)
+def close(FileID file_id):
+    """ (FileID file_id)
     """
-    H5Fclose(file_id)
+    H5Fclose(file_id.id)
 
-def create(char* name, int flags=H5F_ACC_TRUNC, hid_t create_id=H5P_DEFAULT, 
-                                                hid_t access_id=H5P_DEFAULT):
-    """ (STRING name, INT flags=ACC_TRUNC, INT create_id=H5P_DEFAULT,
-            INT access_id=H5P_DEFAULT)
-        => INT file_id
+def create(char* name, int flags=H5F_ACC_TRUNC, PropID createlist=None,
+                                                PropID accesslist=None):
+    """ (STRING name, INT flags=ACC_TRUNC, PropID createlist=None,
+                                           PropID accesslist=None)
+        => FileID
 
         Create a new HDF5 file.  Keyword "flags" may be either:
             ACC_TRUNC:  Truncate an existing file, discarding its data
             ACC_EXCL:   Fail if a conflicting file exists
 
         To keep the behavior in line with that of Python's built-in functions,
-        the default is ACC_TRUNC.  Be careful! Keywords create_id and 
-        access_id  may be file creation and access property lists, 
-        respectively.
+        the default is ACC_TRUNC.  Be careful!
     """
-    return H5Fcreate(name, flags, create_id, access_id)
-
-def flush(hid_t file_id, int scope=H5F_SCOPE_LOCAL):
-    """ (INT file_id, INT scope=SCOPE_LOCAL)
-
-        Tell the HDF5 library to flush file buffers to disk.  file_id may
-        be the file identifier, or the identifier of any object residing in
-        the file.  Keyword "scope" may be:
-            SCOPE_LOCAL:    Flush only the given file
-            SCOPE_GLOBAL:   Flush the entire virtual file
-    """
-    H5Fflush(file_id, <H5F_scope_t>scope)
+    cdef hid_t create_id
+    cdef hid_t access_id
+    create_id = pdefault(createlist)
+    access_id = pdefault(accesslist)
+    return FileID(H5Fcreate(name, flags, create_id, access_id))
 
 def is_hdf5(char* name):
     """ (STRING name) => BOOL is_hdf5
@@ -97,63 +90,25 @@ def is_hdf5(char* name):
     """
     return pybool(H5Fis_hdf5(name))
 
-def reopen(hid_t file_id):
-    """ (INT file_id) => INT new_file_id
-
-        Retrieve another identifier for a file (which must still be open).
-    """
-    return H5Freopen(file_id)
-
-def mount(hid_t loc_id, char* name, hid_t file_id, hid_t plist_id=H5P_DEFAULT):
-    """ (INT loc_id, STRING name, INT file_id, INT plist_id=H5P_DEFAULT)
+def mount(ObjectID loc_id not None, char* name, FileID file_id not None, 
+          PropID mountlist=None):
+    """ (ObjectID loc_id, STRING name, FileID file_id, PropID mountlist=None)
     
-        Mount an open file as "name" under group loc_id.  If present, plist_id 
+        Mount an open file as "name" under group loc_id.  If present, mountlist
         is a mount property list.
     """
-    H5Fmount(loc_id, name, file_id, plist_id)
+    cdef hid_t plist_id
+    plist_id = pdefault(mountlist)
+    H5Fmount(loc_id.id, name, file_id.id, plist_id)
     
-def unmount(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name)
+def unmount(ObjectID loc_id not None, char* name):
+    """ (ObjectID loc_id, STRING name)
 
         Unmount a file, mounted as "name" under group loc_id.
     """
-    H5Funmount(loc_id, name)
-
-# === File inspection =========================================================
-
-def get_filesize(hid_t file_id):
-    """ (INT file_id) => LONG size
-
-        Determine the total size (in bytes) of the HDF5 file, 
-        including any user block.
-    """
-    cdef hsize_t size
-    H5Fget_filesize(file_id, &size)
-    return size
-
-def get_create_plist(hid_t file_id):
-    """ (INT file_id) => INT plist_id
-
-        Retrieve a copy of the property list used to create this file.
-    """
-    return H5Fget_create_plist(file_id)
-
-def get_access_plist(hid_t file_id):
-    """ (INT file_id) => INT plist_id
-
-        Retrieve a copy of the property list which manages access to this file.
-    """
-    return H5Fget_access_plist(file_id)
+    H5Funmount(loc_id.id, name)
 
-def get_freespace(hid_t file_id):
-    """ (INT file_id) => LONG free space
-
-        Determine the amount of free space in this file.  Note that this only
-        tracks free space until the file is closed.
-    """
-    return H5Fget_freespace(file_id)
-
-def get_name(hid_t obj_id):
+def get_name(ObjectID obj_id not None):
     """ (INT obj_id) => STRING file_name
         
         Determine the name of the file in which the specified object resides.
@@ -162,72 +117,116 @@ def get_name(hid_t obj_id):
     cdef char* name
     name = NULL
 
-    size = H5Fget_name(obj_id, NULL, 0)
+    size = H5Fget_name(obj_id.id, NULL, 0)
     assert size >= 0
     name = <char*>emalloc(sizeof(char)*(size+1))
     try:    
-        H5Fget_name(obj_id, name, size+1)
+        H5Fget_name(obj_id.id, name, size+1)
         pname = name
         return pname
     finally:
         efree(name)
-    
-def get_obj_count(hid_t file_id, int types):
-    """ (INT file_id, INT types) => INT n_objs
-
-        Get the number of open objects in the file.  The value of "types" may
-        be one of h5f.OBJ_*, or any bitwise combination (e.g. 
-        OBJ_FILE | OBJ_ATTR).  The special value OBJ_ALL matches all object
-        types, and OBJ_LOCAL will only match objects opened through this
-        specific identifier.
-    """
-    return H5Fget_obj_count(file_id, types)
 
-def get_obj_ids(hid_t file_id, int types):
-    """ (INT file_id, INT types) => LIST open_ids
+# === XXXX ===
+
+cdef class FileID(ObjectID):
 
-        Get a list of identifiers for open objects in the file.  The value of 
-        "types" may be one of h5f.OBJ_*, or any bitwise combination (e.g. 
-        OBJ_FILE | OBJ_ATTR).  The special value OBJ_ALL matches all object
-        types, and OBJ_LOCAL will only match objects opened through this
-        specific identifier.
+    """ 
+        Represents an HDF5 file identifier.
     """
-    cdef int count
-    cdef hid_t *obj_list
-    cdef int i
-    obj_list = NULL
 
-    py_obj_list = []
-    try:
-        count = H5Fget_obj_count(file_id, types)
-        obj_list = <hid_t*>emalloc(sizeof(hid_t)*count)
+    def flush(self, int scope=H5F_SCOPE_LOCAL):
+        """ (INT scope=SCOPE_LOCAL)
 
-        H5Fget_obj_ids(file_id, types, count, obj_list)
-        for i from 0<=i<count:
-            py_obj_list.append(obj_list[i])
-        return py_obj_list
+            Tell the HDF5 library to flush file buffers to disk.  file_id may
+            be the file identifier, or the identifier of any object residing in
+            the file.  Keyword "scope" may be:
+                SCOPE_LOCAL:    Flush only the given file
+                SCOPE_GLOBAL:   Flush the entire virtual file
+        """
+        H5Fflush(self.id, <H5F_scope_t>scope)
 
-    finally:
-        efree(obj_list)
 
+    def reopen(self):
+        """ () => INT new_file_id
+
+            Retrieve another identifier for a file (which must still be open).
+        """
+        return FileID(H5Freopen(self.id))
 
-# === Python extensions =======================================================
 
-PY_SCOPE = DDict({  H5F_SCOPE_LOCAL: 'LOCAL SCOPE', 
-                    H5F_SCOPE_GLOBAL: 'GLOBAL SCOPE' })
-PY_CLOSE = DDict({ H5F_CLOSE_WEAK: 'CLOSE WEAK', 
-                    H5F_CLOSE_SEMI: 'CLOSE SEMI', 
-                    H5F_CLOSE_STRONG: 'CLOSE STRONG', 
-                    H5F_CLOSE_DEFAULT: 'DEFAULT CLOSE STRENGTH' })
-PY_OBJ = DDict({ H5F_OBJ_FILE: 'FILE', H5F_OBJ_DATASET: 'DATASET',
-                H5F_OBJ_GROUP: 'GROUP', H5F_OBJ_DATATYPE: 'DATATYPE',
-                H5F_OBJ_ATTR: 'ATTRIBUTE', H5F_OBJ_ALL: 'ALL', 
-                H5F_OBJ_LOCAL: 'LOCAL' })
-PY_ACC = DDict({ H5F_ACC_TRUNC: 'TRUNCATE', H5F_ACC_EXCL: 'EXCLUSIVE ACCESS',
-                 H5F_ACC_RDWR: 'READ-WRITE', H5F_ACC_RDONLY: 'READ-ONLY' })
+    def get_filesize(self):
+        """ () => LONG size
 
+            Determine the total size (in bytes) of the HDF5 file, 
+            including any user block.
+        """
+        cdef hsize_t size
+        H5Fget_filesize(self.id, &size)
+        return size
 
+    def get_create_plist(self):
+        """ () => PropFCID
 
+            Retrieve a copy of the property list used to create this file.
+        """
+        return PropFCID(H5Fget_create_plist(self.id))
+
+    def get_access_plist(self):
+        """ () => PropFAID
+
+            Retrieve a copy of the property list which manages access 
+            to this file.
+        """
+        return PropFAID(H5Fget_access_plist(self.id))
+
+    def get_freespace(self):
+        """ () => LONG free space
+
+            Determine the amount of free space in this file.  Note that this only
+            tracks free space until the file is closed.
+        """
+        return H5Fget_freespace(self.id)
+
+
+    
+    def get_obj_count(self, int types=H5F_OBJ_ALL):
+        """ (INT types=OBJ_ALL) => INT n_objs
+
+            Get the number of open objects in the file.  The value of "types" 
+            may be one of h5f.OBJ_*, or any bitwise combination (e.g. 
+            OBJ_FILE | OBJ_ATTR).  The special value OBJ_ALL matches all object
+            types, and OBJ_LOCAL will only match objects opened through this
+            specific identifier.
+        """
+        return H5Fget_obj_count(self.id, types)
+
+    def get_obj_ids(self, int types=H5F_OBJ_ALL):
+        """ (INT types=OBJ_ALL) => LIST open_ids
+
+            Get a list of identifiers for open objects in the file.  The value of 
+            "types" may be one of h5f.OBJ_*, or any bitwise combination (e.g. 
+            OBJ_FILE | OBJ_ATTR).  The special value OBJ_ALL matches all object
+            types, and OBJ_LOCAL will only match objects opened through this
+            specific identifier.
+        """
+        cdef int count
+        cdef hid_t *obj_list
+        cdef int i
+        obj_list = NULL
+
+        py_obj_list = []
+        try:
+            count = H5Fget_obj_count(self.id, types)
+            obj_list = <hid_t*>emalloc(sizeof(hid_t)*count)
+
+            H5Fget_obj_ids(self.id, types, count, obj_list)
+            for i from 0<=i<count:
+                py_obj_list.append(obj_list[i])
+            return py_obj_list
+
+        finally:
+            efree(obj_list)
 
 
 
diff --git a/h5py/h5g.pxd b/h5py/h5g.pxd
index 96d76ff..dd736a2 100644
--- a/h5py/h5g.pxd
+++ b/h5py/h5g.pxd
@@ -15,6 +15,10 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class GroupID(ObjectID):
+    pass
 
 cdef extern from "hdf5.h":
 
diff --git a/h5py/h5g.pyx b/h5py/h5g.pyx
index 5885cb4..ff3f745 100644
--- a/h5py/h5g.pyx
+++ b/h5py/h5g.pyx
@@ -59,145 +59,26 @@ cdef class GroupStat:
 
 # === Basic group management ==================================================
 
-def open(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name)
+def open(ObjectID loc_id not None, char* name):
+    """ (ObjectID loc_id, STRING name)
 
         Open an existing HDF5 group, attached to some other group.
     """
-    return H5Gopen(loc_id, name)
+    return GroupID(H5Gopen(loc_id.id, name))
 
-def close(hid_t group_id):
-    """ (INT group_id)
+def close(GroupID group_id not None):
+    """ (GroupID group_id)
     """
-    H5Gclose(group_id)
+    H5Gclose(group_id.id)
 
-def create(hid_t loc_id, char* name, int size_hint=-1):
-    """ (INT loc_id, STRING name, INT size_hint=-1)
+def create(ObjectID loc_id not None, char* name, int size_hint=-1):
+    """ (ObjectID loc_id, STRING name, INT size_hint=-1)
 
         Create a new group, under a given parent group.  If given, size_hint
         is an estimate of the space to reserve (in bytes) for group member
         names.
     """
-    return H5Gcreate(loc_id, name, size_hint)
-
-# === Group member management =================================================
-
-def link(hid_t loc_id, char* current_name, char* new_name, int link_type=H5G_LINK_HARD, hid_t remote_id=-1):
-    """ ( INT loc_id, STRING current_name, STRING new_name, 
-          INT link_type=LINK_HARD, INT remote_id=-1) 
-
-        Create a new hard or soft link.  loc_id and current_name identify
-        the link target (object the link will point to).  The new link is
-        identified by new_name and (optionally) another group id "remote_id".
-
-        Link types are:
-            LINK_HARD:  Hard link to existing object (default)
-            LINK_SOFT:  Symbolic link; link target need not exist.
-    """
-    if remote_id < 0:
-        remote_id = loc_id
-
-    H5Glink2(loc_id, current_name, <H5G_link_t>link_type, remote_id, new_name)
-
-def unlink(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name)
-
-        Remove a link to an object from the given group.
-    """
-    H5Gunlink(loc_id, name)
-
-def move(hid_t loc_id, char* current_name, char* new_name, hid_t remote_id=-1):
-    """ (INT loc_id, STRING current_name, STRING new_name, INT remote_id=-1)
-
-        Relink an object.  loc_id and current_name identify the object.
-        new_name and (optionally) another group id "remote_id" determine
-        where it should be moved.
-    """
-    if remote_id < 0:
-        remote_id = loc_id
-    H5Gmove2(loc_id, current_name, remote_id, new_name)
-
-
-# === Member inspection =======================================================
-
-def get_num_objs(hid_t loc_id):
-    """ (INT loc_id) => INT number_of_objects
-
-        Get the number of objects attached to a given group.
-    """
-    cdef hsize_t size
-    H5Gget_num_objs(loc_id, &size)
-    return size
-
-def get_objname_by_idx(hid_t loc_id, hsize_t idx):
-    """ (INT loc_id, INT idx) => STRING object_name
-
-        Get the name of a group member given its zero-based index.
-
-        Due to a limitation of the HDF5 library, the generic exception
-        H5Error (errno 1) is raised if the idx parameter is out-of-range.
-    """
-    cdef int size
-    cdef char* buf
-    buf = NULL
-
-    # This function does not properly raise an exception
-    size = H5Gget_objname_by_idx(loc_id, idx, NULL, 0)
-    if size < 0:
-        raise H5Error((1,"Invalid argument"))
-
-    buf = <char*>emalloc(sizeof(char)*(size+1))
-    try:
-        H5Gget_objname_by_idx(loc_id, idx, buf, size+1)
-        pystring = buf
-        return pystring
-    finally:
-        efree(buf)
-
-def get_objtype_by_idx(hid_t loc_id, hsize_t idx):
-    """ (INT loc_id, INT idx) => INT object_type_code
-
-        Get the type of an object attached to a group, given its zero-based
-        index.  Possible return values are:
-            - LINK
-            - GROUP
-            - DATASET
-            - DATATYPE
-
-        Due to a limitation of the HDF5 library, the generic exception
-        H5Error (errno 1) is raised if the idx parameter is out-of-range.
-    """
-    # This function does not properly raise an exception
-    cdef herr_t retval
-    retval = H5Gget_objtype_by_idx(loc_id, idx)
-    if retval < 0:
-        raise H5Error((0,"Invalid argument."))
-    return retval
-
-def get_objinfo(hid_t loc_id, char* name, int follow_link=1):
-    """ (INT loc_id, STRING name, BOOL follow_link=True)
-        => GroupStat object
-
-        Obtain information about an arbitrary object attached to a group. The
-        return value is a GroupStat object; see that class's docstring
-        for a description of its attributes.  If follow_link is True (default)
-        and the object is a symbolic link, the information returned describes 
-        its target.  Otherwise the information describes the link itself.
-    """
-    cdef H5G_stat_t stat
-    cdef GroupStat statobj
-
-    H5Gget_objinfo(loc_id, name, follow_link, &stat)
-
-    statobj = GroupStat()
-    statobj.fileno = (stat.fileno[0], stat.fileno[1])
-    statobj.objno = (stat.objno[0], stat.objno[1])
-    statobj.nlink = stat.nlink
-    statobj.type = stat.type
-    statobj.mtime = stat.mtime
-    statobj.linklen = stat.linklen
-
-    return statobj
+    return GroupID(H5Gcreate(loc_id.id, name, size_hint))
 
 cdef herr_t iter_cb_helper(hid_t gid, char *name, object int_tpl) except -1:
     # Callback function for H5Giterate
@@ -214,8 +95,9 @@ cdef herr_t iter_cb_helper(hid_t gid, char *name, object int_tpl) except -1:
 
     return 0
 
-def iterate(hid_t loc_id, char* name, object func, object data=None, int startidx=0):
-    """ (INT loc_id, STRING name, FUNCTION func, OBJECT data=None, 
+def iterate(GroupID loc_id not None, char* name, object func, object data=None, 
+            int startidx=0):
+    """ (GroupID loc_id, STRING name, FUNCTION func, OBJECT data=None, 
             UINT startidx=0) => INT last_index_processed
 
         Iterate an arbitrary Python function over a group.  Note that the
@@ -240,164 +122,189 @@ def iterate(hid_t loc_id, char* name, object func, object data=None, int startid
     i = startidx
     int_tpl = (func, data)
 
-    H5Giterate(loc_id, name, &i, <H5G_iterate_t>iter_cb_helper, int_tpl)
-
-def get_linkval(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name) => STRING link_value
-
-        Retrieve the value of the given symbolic link.
-    """
-    cdef char* value
-    cdef H5G_stat_t statbuf
-    value = NULL
-
-    H5Gget_objinfo(loc_id, name, 0, &statbuf)
-
-    if statbuf.type != H5G_LINK:
-        raise ValueError('"%s" is not a symbolic link.' % name)
-
-    value = <char*>emalloc(sizeof(char)*(statbuf.linklen+1))
-    try:
-        H5Gget_linkval(loc_id, name, statbuf.linklen+1, value)
-        pyvalue = value
-        return pyvalue
-
-    finally:
-        efree(value)
-
-
-def set_comment(hid_t loc_id, char* name, char* comment):
-    """ (INT loc_id, STRING name, STRING comment)
-
-        Set the comment on a group member.
-    """
-    H5Gset_comment(loc_id, name, comment)
-
-
-def get_comment(hid_t loc_id, char* name):
-    """ (INT loc_id, STRING name) => STRING comment
-
-        Retrieve the comment for a group member.
-    """
-    cdef int cmnt_len
-    cdef char* cmnt
-    cmnt = NULL
-
-    cmnt_len = H5Gget_comment(loc_id, name, 0, NULL)
-    assert cmnt_len >= 0
-
-    cmnt = <char*>emalloc(sizeof(char)*(cmnt_len+1))
-    try:
-        H5Gget_comment(loc_id, name, cmnt_len+1, cmnt)
-        py_cmnt = cmnt
-        return py_cmnt
-    finally:
-        efree(cmnt)
-
-# === Custom extensions =======================================================
-
-def py_listnames(hid_t group_id):
-    """ (INT group_id) => LIST names_list
-
-        Create a Python list of the object names directly attached to a group.
-    """
-    cdef int nitems
-    cdef int i
-
-    namelist = []
-    nitems = get_num_objs(group_id)
+    H5Giterate(loc_id.id, name, &i, <H5G_iterate_t>iter_cb_helper, int_tpl)
 
-    for i from 0 <= i < nitems:
-        namelist.append(get_objname_by_idx(group_id, i))
-
-    return namelist
+# === Group member management =================================================
 
-cdef class _GroupIterator:
+cdef class GroupID(ObjectID):
 
-    """ Iterator object which yields names of group members.
-        These objects are created by py_iternames; don't create them yourself.
     """
-
-    cdef hid_t gid
-    cdef int idx
-    cdef int nitems
-
-    def __init__(self, int gid):
-        self.gid = gid
-        self.idx = 0
-        self.nitems = get_num_objs(gid)
-
-    def __next__(self):
-        cdef hsize_t nobjs
-        nobjs = -1
-        H5Gget_num_objs(self.gid, &nobjs)
-        if nobjs != self.nitems:
-            raise RuntimeError("Group length changed during iteration")
-        if self.idx >= self.nitems:
-            raise StopIteration()
-        name = get_objname_by_idx(self.gid, self.idx)
-        self.idx  = self.idx + 1
-        return name
-
-    def __iter__(self):
-        return self
-
-def py_iternames(hid_t group_id):
-    """ (INT group_id) => ITERATOR names_iterator
-
-        Create an iterator object which yields names attached to the current
-        group.  Mutating group members is OK, but do *NOT* change the group 
-        membership while iterating over it.
+        Represents an HDF5 group identifier
     """
-    return _GroupIterator(group_id)
-
-def py_exists(hid_t group_id, char* name, int follow_link=1):
-    """ (INT group_id, STRING name, BOOL follow_link=True) => BOOL exists
-
-        Determine if a named member exists in the given group.  If follow_link
-        is True (default), symbolic links will be dereferenced. Note this
-        function will not raise an exception if group_id is invalid.
-    """
-    try:
-        H5Gget_objinfo(group_id, name, follow_link, NULL)
-    except H5Error:
-        return False
-    return True
-
-PY_TYPE = DDict({H5G_UNKNOWN: "UNKNOWN OBJ TYPE", 
-            H5G_LINK: "LINK", H5G_GROUP: "GROUP",
-            H5G_DATASET: "DATASET", H5G_TYPE: "DATATYPE" })
-PY_LINK = DDict({H5G_LINK_ERROR: "ERROR", H5G_LINK_HARD: "HARDLINK", 
-                H5G_LINK_SOFT: "SOFTLINK" })
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    
 
+    def link(self, char* current_name, char* new_name, 
+             int link_type=H5G_LINK_HARD, GroupID remote=None):
+        """ ( STRING current_name, STRING new_name, 
+              INT link_type=LINK_HARD, GroupID remote=None)
+
+            Create a new hard or soft link.  current_name identifies
+            the link target (object the link will point to).  The new link is
+            identified by new_name and (optionally) another group "remote".
+
+            Link types are:
+                LINK_HARD:  Hard link to existing object (default)
+                LINK_SOFT:  Symbolic link; link target need not exist.
+        """
+        cdef hid_t remote_id
+        if remote is None:
+            remote_id = self.id
+        else:
+            remote_id = remote.id
+
+        H5Glink2(self.id, current_name, <H5G_link_t>link_type, remote_id, new_name)
+
+    def unlink(self, char* name):
+        """ (STRING name)
+
+            Remove a link to an object from this group
+        """
+        H5Gunlink(self.id, name)
+
+    def move(self, char* current_name, char* new_name, GroupID remote=None):
+        """ (STRING current_name, STRING new_name, 
+                GroupID remote=None)
+
+            Relink an object.  current_name identifies the object.
+            new_name and (optionally) another group "remote" determine
+            where it should be moved.
+        """
+        cdef hid_t remote_id
+        if remote is None:
+            remote_id = self.id
+        else:
+            remote_id = remote.id
+
+        H5Gmove2(self.id, current_name, remote_id, new_name)
+
+    def get_num_objs(self):
+        """ () => INT number_of_objects
+
+            Get the number of objects attached to a given group.
+        """
+        cdef hsize_t size
+        H5Gget_num_objs(self.id, &size)
+        return size
+
+    def get_objname_by_idx(self, hsize_t idx):
+        """ (INT idx) => STRING object_name
+
+            Get the name of a group member given its zero-based index.
+
+            Due to a limitation of the HDF5 library, the generic exception
+            H5Error (errno 1) is raised if the idx parameter is out-of-range.
+        """
+        cdef int size
+        cdef char* buf
+        buf = NULL
+
+        # This function does not properly raise an exception
+        size = H5Gget_objname_by_idx(self.id, idx, NULL, 0)
+        if size < 0:
+            raise H5Error((1,"Invalid argument"))
+
+        buf = <char*>emalloc(sizeof(char)*(size+1))
+        try:
+            H5Gget_objname_by_idx(self.id, idx, buf, size+1)
+            pystring = buf
+            return pystring
+        finally:
+            efree(buf)
+
+    def get_objtype_by_idx(self, hsize_t idx):
+        """ (INT idx) => INT object_type_code
+
+            Get the type of an object attached to a group, given its zero-based
+            index.  Possible return values are:
+                - LINK
+                - GROUP
+                - DATASET
+                - DATATYPE
+
+            Due to a limitation of the HDF5 library, the generic exception
+            H5Error (errno 1) is raised if the idx parameter is out-of-range.
+        """
+        # This function does not properly raise an exception
+        cdef herr_t retval
+        retval = H5Gget_objtype_by_idx(self.id, idx)
+        if retval < 0:
+            raise H5Error((1,"Invalid argument."))
+        return retval
+
+    def get_objinfo(self, char* name, int follow_link=1):
+        """ (STRING name, BOOL follow_link=True)
+            => GroupStat object
+
+            Obtain information about an arbitrary object attached to a group. The
+            return value is a GroupStat object; see that class's docstring
+            for a description of its attributes.  If follow_link is True (default)
+            and the object is a symbolic link, the information returned describes 
+            its target.  Otherwise the information describes the link itself.
+        """
+        cdef H5G_stat_t stat
+        cdef GroupStat statobj
+
+        H5Gget_objinfo(self.id, name, follow_link, &stat)
+
+        statobj = GroupStat()
+        statobj.fileno = (stat.fileno[0], stat.fileno[1])
+        statobj.objno = (stat.objno[0], stat.objno[1])
+        statobj.nlink = stat.nlink
+        statobj.type = stat.type
+        statobj.mtime = stat.mtime
+        statobj.linklen = stat.linklen
+
+        return statobj
+
+
+    def get_linkval(self, char* name):
+        """ (STRING name) => STRING link_value
+
+            Retrieve the value of the given symbolic link.
+        """
+        cdef char* value
+        cdef H5G_stat_t statbuf
+        value = NULL
+
+        H5Gget_objinfo(self.id, name, 0, &statbuf)
+
+        if statbuf.type != H5G_LINK:
+            raise ValueError('"%s" is not a symbolic link.' % name)
+
+        value = <char*>emalloc(sizeof(char)*(statbuf.linklen+1))
+        try:
+            H5Gget_linkval(self.id, name, statbuf.linklen+1, value)
+            pyvalue = value
+            return pyvalue
+
+        finally:
+            efree(value)
+
+
+    def set_comment(self, char* name, char* comment):
+        """ (STRING name, STRING comment)
+
+            Set the comment on a group member.
+        """
+        H5Gset_comment(self.id, name, comment)
+
+
+    def get_comment(self, char* name):
+        """ (STRING name) => STRING comment
+
+            Retrieve the comment for a group member.
+        """
+        cdef int cmnt_len
+        cdef char* cmnt
+        cmnt = NULL
+
+        cmnt_len = H5Gget_comment(self.id, name, 0, NULL)
+        assert cmnt_len >= 0
+
+        cmnt = <char*>emalloc(sizeof(char)*(cmnt_len+1))
+        try:
+            H5Gget_comment(self.id, name, cmnt_len+1, cmnt)
+            py_cmnt = cmnt
+            return py_cmnt
+        finally:
+            efree(cmnt)
 
diff --git a/h5py/h5i.pyx b/h5py/h5i.pyx
index 296ea4b..854d006 100644
--- a/h5py/h5i.pyx
+++ b/h5py/h5i.pyx
@@ -15,6 +15,8 @@
 """
 
 # Pyrex compile-time imports
+from h5 cimport ObjectID
+from h5f cimport FileID
 from utils cimport emalloc, efree
 
 # Runtime imports
@@ -41,17 +43,17 @@ PY_TYPE = DDict({ H5I_BADID: 'BAD ID', H5I_FILE: 'FILE', H5I_GROUP: 'GROUP',
 
 # === Identifier API ==========================================================
 
-def get_type(hid_t obj_id):
-    """ (INT obj_id) => INT type_code
+def get_type(ObjectID obj not None):
+    """ (ObjectID obj) => INT type_code
 
-        Determine the type of an arbitrary HDF5 object.  The return value is
-        always one of the type constants defined in this module; if the ID is 
-        invalid, BADID is returned.
+        Determine the HDF5 typecode of an arbitrary HDF5 object.  The return 
+        value is always one of the type constants defined in this module; if 
+        the ID is invalid, BADID is returned.
     """
-    return <int>H5Iget_type(obj_id)
+    return <int>H5Iget_type(obj.id)
 
-def get_name(hid_t obj_id):
-    """ (INT obj_id) => STRING name or None
+def get_name(ObjectID obj not None):
+    """ (ObjectID obj) => STRING name or None
 
         Determine (a) name of an HDF5 object.  Because an object has as many
         names as there are hard links to it, this may not be unique.  If
@@ -61,47 +63,59 @@ def get_name(hid_t obj_id):
     cdef int namelen
     cdef char* name
 
-    namelen = <int>H5Iget_name(obj_id, NULL, 0)
+    namelen = <int>H5Iget_name(obj.id, NULL, 0)
     assert namelen >= 0
     if namelen == 0:
         return None
 
     name = <char*>emalloc(sizeof(char)*(namelen+1))
     try:
-        H5Iget_name(obj_id, name, namelen+1)
+        H5Iget_name(obj.id, name, namelen+1)
         pystring = name
         return pystring
     finally:
         efree(name)
 
-def get_file_id(hid_t obj_id):
-    """ (INT obj_id) => INT file_id
+def get_file_id(ObjectID obj not None):
+    """ (ObjectID obj) => FileID
 
         Obtain an identifier for the file in which this object resides,
         re-opening the file if necessary.
     """
-    return H5Iget_file_id(obj_id)
+    return FileID(H5Iget_file_id(obj.id))
 
-def inc_ref(hid_t obj_id):
-    """ (INT obj_id)
+def inc_ref(ObjectID obj not None):
+    """ (ObjectID obj)
 
         Increment the reference count for the given object.
+
+        This function is provided for debugging only.  Reference counting
+        is automatically synchronized with Python, and you can easily break
+        ObjectID instances by abusing this function.
     """
-    H5Iinc_ref(obj_id)
+    H5Iinc_ref(obj.id)
 
-def get_ref(hid_t obj_id):
-    """ (INT obj_id)
+def get_ref(ObjectID obj not None):
+    """ (ObjectID obj)
 
         Retrieve the reference count for the given object.
+
+        This function is provided for debugging only.  Reference counting
+        is automatically synchronized with Python, and you can easily break
+        ObjectID instances by abusing this function.
     """
-    return H5Iget_ref(obj_id)
+    return H5Iget_ref(obj.id)
 
-def dec_ref(hid_t obj_id):
-    """ (INT obj_id)
+def dec_ref(ObjectID obj not None):
+    """ (ObjectID obj)
 
         Decrement the reference count for the given object.
+
+        This function is provided for debugging only.  Reference counting
+        is automatically synchronized with Python, and you can easily break
+        ObjectID instances by abusing this function.
     """
-    H5Idec_ref(obj_id)
+    H5Idec_ref(obj.id)
 
 
 
diff --git a/h5py/h5p.pxd b/h5py/h5p.pxd
index 2ff5ea7..e6d6712 100644
--- a/h5py/h5p.pxd
+++ b/h5py/h5p.pxd
@@ -15,6 +15,31 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class PropID(ObjectID):
+    pass
+
+cdef class PropClassID(PropID):
+    pass
+
+cdef class PropInstanceID(PropID):
+    pass
+
+cdef class PropDCID(PropInstanceID):
+    pass
+
+cdef class PropDXID(PropInstanceID):
+    pass
+
+cdef class PropFCID(PropInstanceID):
+    pass
+
+cdef class PropFAID(PropInstanceID):
+    pass
+
+cdef class PropMID(PropInstanceID):
+    pass
 
 from h5d cimport H5D_layout_t, H5D_fill_value_t, H5D_fill_time_t, H5D_alloc_time_t
 from h5z cimport H5Z_filter_t, H5Z_EDC_t
diff --git a/h5py/h5p.pyx b/h5py/h5p.pyx
index 33bf204..2faf9fa 100644
--- a/h5py/h5p.pyx
+++ b/h5py/h5p.pyx
@@ -30,19 +30,25 @@ from h5 import DDict
 # === Public constants and data structures ====================================
 
 # Property list classes
-NO_CLASS       = H5P_NO_CLASS
-FILE_CREATE    = H5P_FILE_CREATE
-FILE_ACCESS    = H5P_FILE_ACCESS
-DATASET_CREATE = H5P_DATASET_CREATE
-DATASET_XFER   = H5P_DATASET_XFER
-MOUNT          = H5P_MOUNT
+NO_CLASS       = PropClassID(H5P_NO_CLASS)
+FILE_CREATE    = PropClassID(H5P_FILE_CREATE)
+FILE_ACCESS    = PropClassID(H5P_FILE_ACCESS)
+DATASET_CREATE = PropClassID(H5P_DATASET_CREATE)
+DATASET_XFER   = PropClassID(H5P_DATASET_XFER)
+MOUNT          = PropClassID(H5P_MOUNT)
 
-DEFAULT = H5P_DEFAULT
+DEFAULT = PropID(H5P_DEFAULT)
+
+_classmapper = { H5P_FILE_CREATE: PropFCID,
+                 H5P_FILE_ACCESS: PropFAID,
+                 H5P_DATASET_CREATE: PropDCID,
+                 H5P_DATASET_XFER: PropDXID,
+                 H5P_MOUNT: PropMID }
 
 # === Generic property list operations ========================================
 
-def create(hid_t cls_id):
-    """ (INT cls_id) => INT property_list_id
+def create(PropClassID cls not None):
+    """ (PropClassID cls) => PropID
     
         Create a new property list as an instance of a class; classes are:
             FILE_CREATE
@@ -51,272 +57,287 @@ def create(hid_t cls_id):
             DATASET_XFER
             MOUNT
     """
-    return H5Pcreate(cls_id)
-
-def copy(hid_t plist):
-    """ (INT plist) => INT new_property_list_id
-
-        Create a new copy of an existing property list object.
-    """
-    return H5Pcopy(plist)
+    try:
+        type_ = _classmapper[cls.id]
+    except KeyError:
+        raise ValueError("Invalid class")
 
-def close(hid_t plist):
-    """ (INT plist)
-    """
-    H5Pclose(plist)
+    return type_(H5Pcreate(cls.id))
 
-def get_class(hid_t plist):
-    """ (INT plist) => INT class_code
+cdef class PropClassID(PropID):
 
-        Determine the class of a property list object.
     """
-    return H5Pget_class(plist)
-
-def equal(hid_t plist1, hid_t plist2):
-    """ (INT plist1, INT plist2) => BOOL lists_are_equal
-
-        Compare two existing property lists or classes for equality.
+        Represents property list class objects.
+        These are not automatically closed.
+        This is a hack until custom classes can be implemented.
     """
-    return pybool(H5Pequal(plist1, plist2))
-
-# === File creation ===========================================================
 
-def get_version(hid_t plist):
-    """ (INT plist) => TUPLE version_info   [File creation]
+    def __dealloc__(self):
+        pass
 
-        Determine version information of various file attributes. Elements are:
+cdef class PropInstanceID(PropID):
 
-        0:  UINT Super block version number
-        1:  UINT Freelist version number
-        2:  UINT Symbol table version number
-        3:  UINT Shared object header version number
     """
-    cdef herr_t retval
-    cdef unsigned int super_
-    cdef unsigned int freelist
-    cdef unsigned int stab
-    cdef unsigned int shhdr
-
-    H5Pget_version(plist, &super_, &freelist, &stab, &shhdr)
-
-    return (super_, freelist, stab, shhdr)
-
-def set_userblock(hid_t plist, hsize_t size):
-    """ (INT plist, INT/LONG size)    [File creation]
-
-        Set the file user block size, in bytes.  
-        Must be a power of 2, and at least 512.
+        Base class for property list objects
     """
-    H5Pset_userblock(plist, size)
-
-def get_userblock(hid_t plist):
-    """ (INT plist) => LONG size    [File creation]
 
-        Determine the user block size, in bytes.
-    """
-    cdef hsize_t size
-    H5Pget_userblock(plist, &size)
-    return size
+    def copy(self):
+        """ () => PropInstanceID new_property_list
 
-def set_sizes(hid_t plist, size_t addr, size_t size):
-    """ (INT plist, INT addr, INT size)    [File creation]
+            Create a new copy of an existing property list object.
+        """
+        return type(self)(H5Pcopy(self.id))
 
-        Set the addressing offsets and lengths for objects 
-        in an HDF5 file, in bytes.
-    """
-    H5Pset_sizes(plist, addr, size)
+    def close(self):
+        H5Pclose(self.id)
 
-def get_sizes(hid_t plist):
-    """ (INT plist) => TUPLE sizes    [File creation]
+    def get_class(self):
+        """ () => PropClassID
 
-        Determine addressing offsets and lengths for objects in an 
-        HDF5 file, in bytes.  Return value is a 2-tuple with values:
+            Determine the class of a property list object.
+        """
+        return PropClassID(H5Pget_class(self.id))
 
-        0:  UINT Address offsets
-        1:  UINT Lengths
-    """
-    cdef size_t addr
-    cdef size_t size
-    H5Pget_sizes(plist, &addr, &size)
-    return (addr, size)
+    def equal(self, PropID plist not None):
+        """ (PropID plist) => BOOL
 
-def set_sym_k(hid_t plist, unsigned int ik, unsigned int lk):
-    """ (INT plist, INT ik, INT lk)    [File creation]
+            Compare this property list to another for equality.
+        """
+        return pybool(H5Pequal(self.id, plist.id))
 
-        Symbol table node settings.  See the HDF5 docs for H5Pset_sym_k.
-    """
-    H5Pset_sym_k(plist, ik, lk)
+# === File creation ===========================================================
 
-def get_sym_k(hid_t plist):
-    """ (INT plist) => TUPLE settings    [File creation]
+cdef class PropFCID(PropInstanceID):
 
-        Determine symbol table node settings.  See the HDF5 docs for
-        H5Pget_sym_k.  Return is a 2-tuple (ik, lk).
     """
-    cdef unsigned int ik
-    cdef unsigned int lk
-    H5Pget_sym_k(plist, &ik, &lk)
-    return (ik, lk)
-
-def set_istore_k(hid_t plist, unsigned int ik):
-    """ (INT plist, UINT ik)    [File creation]
-
-        See hdf5 docs for H5Pset_istore_k.
+        Represents a file creation property list
     """
-    H5Pset_istore_k(plist, ik)
+
+    def get_version(self):
+        """ () => TUPLE version_info
+
+            Determine version information of various file attributes. 
+            Elements are:
+
+            0:  UINT Super block version number
+            1:  UINT Freelist version number
+            2:  UINT Symbol table version number
+            3:  UINT Shared object header version number
+        """
+        cdef herr_t retval
+        cdef unsigned int super_
+        cdef unsigned int freelist
+        cdef unsigned int stab
+        cdef unsigned int shhdr
+
+        H5Pget_version(self.id, &super_, &freelist, &stab, &shhdr)
+
+        return (super_, freelist, stab, shhdr)
+
+    def set_userblock(self, hsize_t size):
+        """ (INT/LONG size)
+
+            Set the file user block size, in bytes.  
+            Must be a power of 2, and at least 512.
+        """
+        H5Pset_userblock(self.id, size)
+
+    def get_userblock(self):
+        """ () => LONG size
+
+            Determine the user block size, in bytes.
+        """
+        cdef hsize_t size
+        H5Pget_userblock(self.id, &size)
+        return size
+
+    def set_sizes(self, size_t addr, size_t size):
+        """ (INT addr, INT size)
+
+            Set the addressing offsets and lengths for objects 
+            in an HDF5 file, in bytes.
+        """
+        H5Pset_sizes(self.id, addr, size)
+
+    def get_sizes(self):
+        """ () => TUPLE sizes    [File creation]
+
+            Determine addressing offsets and lengths for objects in an 
+            HDF5 file, in bytes.  Return value is a 2-tuple with values:
+
+            0:  UINT Address offsets
+            1:  UINT Lengths
+        """
+        cdef size_t addr
+        cdef size_t size
+        H5Pget_sizes(self.id, &addr, &size)
+        return (addr, size)
+
+    def set_sym_k(self, unsigned int ik, unsigned int lk):
+        """ (INT ik, INT lk)
+
+            Symbol table node settings.  See the HDF5 docs for H5Pset_sym_k.
+        """
+        H5Pset_sym_k(self.id, ik, lk)
+
+    def get_sym_k(self):
+        """ () => TUPLE settings
+
+            Determine symbol table node settings.  See the HDF5 docs for
+            H5Pget_sym_k.  Return is a 2-tuple (ik, lk).
+        """
+        cdef unsigned int ik
+        cdef unsigned int lk
+        H5Pget_sym_k(self.id, &ik, &lk)
+        return (ik, lk)
+
+    def set_istore_k(self, unsigned int ik):
+        """ (UINT ik)    [File creation]
+
+            See hdf5 docs for H5Pset_istore_k.
+        """
+        H5Pset_istore_k(self.id, ik)
     
-def get_istore_k(hid_t plist):
-    """ (INT plist) => UINT ik    [File creation]
+    def get_istore_k(self):
+        """ () => UINT ik    [File creation]
 
-        See HDF5 docs for H5Pget_istore_k
-    """
-    cdef unsigned int ik
-    H5Pget_istore_k(plist, &ik)
-    return ik
+            See HDF5 docs for H5Pget_istore_k
+        """
+        cdef unsigned int ik
+        H5Pget_istore_k(self.id, &ik)
+        return ik
 
 # === Dataset creation properties =============================================
 
-def set_layout(hid_t plist, int layout_code):
-    """ (INT plist, INT layout_code)    [Dataset creation]
-
-        Set dataset storage strategy; legal values are:
-        * h5d.COMPACT
-        * h5d.CONTIGUOUS
-        * h5d.CHUNKED
-    """
-    H5Pset_layout(plist, <H5D_layout_t>layout_code)
-    
-def get_layout(hid_t plist):
-    """ (INT plist) => INT layout_code   [Dataset creation]
+cdef class PropDCID(PropInstanceID):
 
-        Determine the storage strategy of a dataset; legal values are:
-        * h5d.COMPACT
-        * h5d.CONTIGUOUS
-        * h5d.CHUNKED
     """
-    return <int>H5Pget_layout(plist)
-
-def set_chunk(hid_t plist, object chunksize):
-    """ (INT plist_id, TUPLE chunksize)    [Dataset creation]
-
-        Set the dataset chunk size.  It's up to you to provide 
-        values which are compatible with your dataset.
+        Represents a dataset creation property list
     """
-    cdef herr_t retval
-    cdef int rank
-    cdef hsize_t* dims
-    dims = NULL
 
-    require_tuple(chunksize, 0, -1, "chunksize")
-    rank = len(chunksize)
+    def set_layout(self, int layout_code):
+        """ (INT layout_code)    [Dataset creation]
 
-    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-    try:
-        convert_tuple(chunksize, dims, rank)
-        H5Pset_chunk(plist, rank, dims)
-    finally:
-        efree(dims)
+            Set dataset storage strategy; legal values are:
+            * h5d.COMPACT
+            * h5d.CONTIGUOUS
+            * h5d.CHUNKED
+        """
+        H5Pset_layout(self.id, <H5D_layout_t>layout_code)
+    
+    def get_layout(self):
+        """ () => INT layout_code   [Dataset creation]
+
+            Determine the storage strategy of a dataset; legal values are:
+            * h5d.COMPACT
+            * h5d.CONTIGUOUS
+            * h5d.CHUNKED
+        """
+        return <int>H5Pget_layout(self.id)
+
+    def set_chunk(self, object chunksize):
+        """ (INT plist_id, TUPLE chunksize)    [Dataset creation]
+
+            Set the dataset chunk size.  It's up to you to provide 
+            values which are compatible with your dataset.
+        """
+        cdef herr_t retval
+        cdef int rank
+        cdef hsize_t* dims
+        dims = NULL
+
+        require_tuple(chunksize, 0, -1, "chunksize")
+        rank = len(chunksize)
+
+        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        try:
+            convert_tuple(chunksize, dims, rank)
+            H5Pset_chunk(self.id, rank, dims)
+        finally:
+            efree(dims)
     
-def get_chunk(hid_t plist):
-    """ (INT plist_id) => TUPLE chunk_dimensions    [Dataset creation]
+    def get_chunk(self):
+        """ () => TUPLE chunk_dimensions    [Dataset creation]
 
-        Obtain the dataset chunk size, as a tuple.
-    """
-    cdef int rank
-    cdef hsize_t *dims
+            Obtain the dataset chunk size, as a tuple.
+        """
+        cdef int rank
+        cdef hsize_t *dims
 
-    rank = H5Pget_chunk(plist, 0, NULL)
-    assert rank >= 0
-    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        rank = H5Pget_chunk(self.id, 0, NULL)
+        assert rank >= 0
+        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
 
-    try:
-        H5Pget_chunk(plist, rank, dims)
-        tpl = convert_dims(dims, rank)
-        return tpl
-    finally:
-        efree(dims)
+        try:
+            H5Pget_chunk(self.id, rank, dims)
+            tpl = convert_dims(dims, rank)
+            return tpl
+        finally:
+            efree(dims)
 
 # === Filter functions ========================================================
 
-def set_deflate(hid_t plist, unsigned int level=5):
-    """ (INT plist_id, UINT level=5)    [Dataset creation]
+    def set_deflate(self, unsigned int level=5):
+        """ (UINT level=5)    [Dataset creation]
 
-        Enable DEFLATE (gzip) compression, at the given level.
-        Valid levels are 0-9, default is 5.
-    """
-    H5Pset_deflate(plist, level)
+            Enable DEFLATE (gzip) compression, at the given level.
+            Valid levels are 0-9, default is 5.
+        """
+        H5Pset_deflate(self.id, level)
     
-def set_fletcher32(hid_t plist):
-    """ (INT plist_id)    [Dataset creation]
+    def set_fletcher32(self):
+        """ ()    [Dataset creation]
 
-        Enable Fletcher32 error correction on an existing list.
-    """
-    H5Pset_fletcher32(plist)
+            Enable Fletcher32 error correction on an existing list.
+        """
+        H5Pset_fletcher32(self.id)
 
-def set_shuffle(hid_t plist):
-    """ (INT plist_id)    [Dataset creation]
+    def set_shuffle(self):
+        """ ()    [Dataset creation]
 
-        Enable to use of the shuffle filter.  Use this immediately before the
-        DEFLATE filter to increase the compression ratio.
-    """
-    H5Pset_shuffle(plist)
+            Enable use of the shuffle filter.  Use this immediately before the
+            DEFLATE filter to increase the compression ratio.
+        """
+        H5Pset_shuffle(self.id)
 
-def set_szip(hid_t plist, unsigned int options, unsigned int pixels_per_block):
-    """ (INT plist, UINT options, UINT pixels_per_block)   [Dataset creation]
+    def set_szip(self, unsigned int options, unsigned int pixels_per_block):
+        """ (UINT options, UINT pixels_per_block)   [Dataset creation]
 
-        Enable SZIP compression.  See the HDF5 docs for argument meanings, and
-        general restrictions on use of the SZIP format.
-    """
-    H5Pset_szip(plist, options, pixels_per_block)
+            Enable SZIP compression.  See the HDF5 docs for argument meanings, and
+            general restrictions on use of the SZIP format.
+        """
+        H5Pset_szip(self.id, options, pixels_per_block)
 
-def remove_filter(hid_t plist, int filter_class):
-    """ (INT plist, INT filter_class)    [Dataset creation]
+    def remove_filter(self, int filter_class):
+        """ (INT filter_class)    [Dataset creation]
 
-        Remove a filter from the pipeline.  The class code is one of 
-        h5z.FILTER_*.
-    """
-    H5Premove_filter(plist, <H5Z_filter_t>filter_class)
+            Remove a filter from the pipeline.  The class code is one of 
+            h5z.FILTER_*.
+        """
+        H5Premove_filter(self.id, <H5Z_filter_t>filter_class)
 
 # === File access =============================================================
 
-def set_fclose_degree(hid_t fapl_id, int close_degree):
-    """ (INT fapl_id, INT close_degree)
+cdef class PropFAID(PropInstanceID):
 
-        Set the file-close degree, which determines the library behavior when
-        a file is closed when objects are still open.  See the HDF5 docs for 
-        a full explanation.  Legal values:
-
-        * h5f.CLOSE_WEAK
-        * h5f.CLOSE_SEMI
-        * h5f.CLOSE_STRONG
-        * h5f.CLOSE_DEFAULT
     """
-    H5Pset_fclose_degree(fapl_id, <H5F_close_degree_t>close_degree)
-    
+        Represents a file access property list
+    """
 
-# === Python extensions =======================================================
+    def set_fclose_degree(self, int close_degree):
+        """ (INT close_degree)
 
-def py_has_filter(hid_t plist, int filter_class):
-    """ (INT plist_id, INT filter_class_code) 
-        => BOOL has_filter    [Dataset creation]
-        
-        Determine if a property list has the given filter.
-    """
-    cdef herr_t retval
-    cdef unsigned int flags
-    cdef size_t dmp
-    dmp = 0
-    try:
-        H5Pget_filter_by_id(plist, filter_class, &flags, &dmp, NULL, 0, NULL)
-    except:
-        return False
-    return True
-    
-PY_CLASS = DDict({ H5P_NO_CLASS: 'ERROR', H5P_FILE_CREATE: 'FILE CREATION',
-            H5P_FILE_ACCESS: 'FILE ACCESS', H5P_DATASET_CREATE: 'DATASET CREATION',
-            H5P_DATASET_XFER: 'DATASET TRANSFER', H5P_DEFAULT: 'DEFAULT'})
+            Set the file-close degree, which determines the library behavior when
+            a file is closed when objects are still open.  See the HDF5 docs for 
+            a full explanation.  Legal values:
+
+            * h5f.CLOSE_WEAK
+            * h5f.CLOSE_SEMI
+            * h5f.CLOSE_STRONG
+            * h5f.CLOSE_DEFAULT
+        """
+        H5Pset_fclose_degree(self.id, <H5F_close_degree_t>close_degree)
 
-    
 
     
 
diff --git a/h5py/h5r.pyx b/h5py/h5r.pyx
index ad88eab..93608fb 100644
--- a/h5py/h5r.pyx
+++ b/h5py/h5r.pyx
@@ -15,6 +15,7 @@
 """
 
 # Pyrex compile-time imports
+from h5 cimport ObjectID
 from h5g cimport H5G_obj_t
 
 # Runtime imports
@@ -55,8 +56,8 @@ cdef class Reference:
 
 # === Reference API ===========================================================
 
-def create(hid_t loc_id, char* name, int ref_type, hid_t space_id=-1):
-    """ (INT loc_id, STRING name, INT ref_type, INT space_id=0)
+def create(ObjectID loc_id not None, char* name, int ref_type, SpaceID space=None):
+    """ (ObjectID loc_id, STRING name, INT ref_type, SpaceID space=None)
         => ReferenceObject ref
 
         Create a new reference. The value of ref_type detemines the kind
@@ -70,26 +71,31 @@ def create(hid_t loc_id, char* name, int ref_type, hid_t space_id=-1):
                     name identify the dataset; the selection on space_id
                     identifies the region.
     """
+    cdef hid_t space_id
     cdef Reference ref
     ref = Reference()
+    if space is None:
+        space_id = -1
+    else:
+        space_id = space.id
 
-    H5Rcreate(&ref.ref, loc_id, name, <H5R_type_t>ref_type, space_id)
+    H5Rcreate(&ref.ref, loc_id.id, name, <H5R_type_t>ref_type, space_id)
     ref.typecode = ref_type
 
     return ref
 
-def dereference(hid_t file_id, Reference ref):
-    """ (INT file_id, ReferenceObject ref) => INT obj_id
+def dereference(ObjectID file_id not None, Reference ref):
+    """ (ObjectID file_id, ReferenceObject ref) => INT obj_id
 
         Open the object pointed to by "ref" and return its identifier.
         The containing file must be provided via file_id, which can be
         a file identifier or an identifier for any object which lives
         in the file.
     """
-    return H5Rdereference(file_id, <H5R_type_t>ref.typecode, &ref.ref)
+    return H5Rdereference(file_id.id, <H5R_type_t>ref.typecode, &ref.ref)
 
-def get_region(hid_t dataset_id, Reference ref):
-    """ (INT dataset_id, Reference ref) => INT dataspace_id
+def get_region(ObjectID dataset_id not None, Reference ref):
+    """ (ObjectID dataset_id, Reference ref) => INT dataspace_id
 
         Retrieve the dataspace selection pointed to by a reference.
         Returns a copy of the dataset's dataspace, with the appropriate
@@ -97,10 +103,10 @@ def get_region(hid_t dataset_id, Reference ref):
 
         The given reference object must be of type DATASET_REGION.
     """
-    return H5Rget_region(dataset_id, <H5R_type_t>ref.typecode, &ref.ref)
+    return H5Rget_region(dataset_id.id, <H5R_type_t>ref.typecode, &ref.ref)
 
-def get_obj_type(hid_t ds_id, Reference ref):
-    """ (INT ds_id, Reference ref) => INT obj_code
+def get_obj_type(ObjectID ds_id not None, Reference ref):
+    """ (ObjectID ds_id, Reference ref) => INT obj_code
 
         Determine what type of object an object reference points to.  The
         reference may be either type OBJECT or DATASET_REGION.  For 
@@ -114,7 +120,7 @@ def get_obj_type(hid_t ds_id, Reference ref):
         h5g.DATASET     Dataset
         h5g.TYPE        Named datatype
     """
-    return <int>H5Rget_obj_type(ds_id, <H5R_type_t>ref.typecode, &ref.ref)
+    return <int>H5Rget_obj_type(ds_id.id, <H5R_type_t>ref.typecode, &ref.ref)
 
 
 
diff --git a/h5py/h5s.pxd b/h5py/h5s.pxd
index 4636e47..dbb2f5d 100644
--- a/h5py/h5s.pxd
+++ b/h5py/h5s.pxd
@@ -19,6 +19,10 @@
 # root directory.
 
 include "std_defs.pxi"
+from h5 cimport ObjectID
+
+cdef class SpaceID(ObjectID):
+    pass
 
 cdef extern from "hdf5.h":
 
diff --git a/h5py/h5s.pyx b/h5py/h5s.pyx
index a5cb8d6..6f4ad93 100644
--- a/h5py/h5s.pyx
+++ b/h5py/h5s.pyx
@@ -53,27 +53,13 @@ SEL_ALL         = H5S_SEL_ALL
 
 # === Basic dataspace operations ==============================================
 
-def close(hid_t space_id):
-    """ (INT space_id)
-    """
-    H5Sclose(space_id)
-
 def create(int class_code):
-    """ (INT class_code) => INT new_space_id
+    """ (INT class_code) => SpaceID
 
         Create a new HDF5 dataspace object, of the given class.  
         Legal values are SCALAR and SIMPLE.
     """
-    return H5Screate(<H5S_class_t>class_code)
-
-def copy(hid_t space_id):
-    """ (INT space_id) => INT new_space_id
-
-        Create a new copy of an existing dataspace.
-    """
-    return H5Scopy(space_id)
-
-# === Simple dataspaces =======================================================
+    return SpaceID(H5Screate(<H5S_class_t>class_code))
 
 def create_simple(object dims_tpl, object max_dims_tpl=None):
     """ (TUPLE dims_tpl, TUPLE max_dims_tpl) => INT new_space_id
@@ -103,423 +89,422 @@ def create_simple(object dims_tpl, object max_dims_tpl=None):
             max_dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
             convert_tuple(max_dims_tpl, max_dims, rank)
 
-        return H5Screate_simple(rank, dims, max_dims)
+        return SpaceID(H5Screate_simple(rank, dims, max_dims))
 
     finally:
         efree(dims)
         efree(max_dims)
 
-def is_simple(hid_t space_id):
-    """ (INT space_id) => BOOL is_simple
+cdef class SpaceID(ObjectID):
 
-        Determine if an existing dataspace is "simple" (including scalar
-        dataspaces). Currently all HDF5 dataspaces are simple.
     """
-    return pybool(H5Sis_simple(space_id))
-
-def offset_simple(hid_t space_id, object offset=None):
-    """ (INT space_id, TUPLE offset=None)
-
-        Set the offset of a dataspace.  The length of the given tuple must
-        match the rank of the dataspace. If None is provided (default), 
-        the offsets on all axes will be set to 0.
+        Represents a dataspace identifier
     """
-    cdef int rank
-    cdef int i
-    cdef hssize_t *dims
-    dims = NULL
-
-    try:
-        if H5Sis_simple(space_id) == 0:
-            raise ValueError("%d is not a simple dataspace" % space_id)
-
-        rank = H5Sget_simple_extent_ndims(space_id)
-        
-        require_tuple(offset, 1, rank, "offset")
-        dims = <hssize_t*>emalloc(sizeof(hssize_t)*rank)
-        if(offset is not None):
-            convert_tuple(offset, <hsize_t*>dims, rank)
-        else:
-            # The HDF5 docs say passing in NULL resets the offset to 0.  
-            # Instead it raises an exception.  Imagine my surprise. We'll 
-            # do this manually.
-            for i from 0<=i<rank:
-                dims[i] = 0
-
-        H5Soffset_simple(space_id, dims)
 
-    finally:
-        efree(dims)
+    def close(self):
+        H5Sclose(self.id)
 
-def get_simple_extent_ndims(hid_t space_id):
-    """ (INT space_id) => INT rank
-        
-        Determine the rank of a "simple" (slab) dataspace.
-    """
-    return H5Sget_simple_extent_ndims(space_id)
+    def copy(self):
+        """ () => SpaceID
 
-def get_simple_extent_dims(hid_t space_id, int maxdims=0):
-    """ (INT space_id, BOOL maxdims=False) => TUPLE shape
+            Create a new copy of this dataspace.
+        """
+        return SpaceID(H5Scopy(self.id))
 
-        Determine the shape of a "simple" (slab) dataspace.  If "maxdims" is
-        True, retrieve the maximum dataspace size instead.
-    """
-    cdef int rank
-    cdef hsize_t* dims
-    dims = NULL
+# === Simple dataspaces =======================================================
 
-    rank = H5Sget_simple_extent_dims(space_id, NULL, NULL)
+    def is_simple(self):
+        """ () => BOOL is_simple
+
+            Determine if an existing dataspace is "simple" (including scalar
+            dataspaces). Currently all HDF5 dataspaces are simple.
+        """
+        return pybool(H5Sis_simple(self.id))
+
+    def offset_simple(self, object offset=None):
+        """ (TUPLE offset=None)
+
+            Set the offset of a dataspace.  The length of the given tuple must
+            match the rank of the dataspace. If None is provided (default), 
+            the offsets on all axes will be set to 0.
+        """
+        cdef int rank
+        cdef int i
+        cdef hssize_t *dims
+        dims = NULL
+
+        try:
+            if not self.is_simple():
+                raise ValueError("%d is not a simple dataspace" % self.id)
+
+            rank = H5Sget_simple_extent_ndims(self.id)
+            
+            require_tuple(offset, 1, rank, "offset")
+            dims = <hssize_t*>emalloc(sizeof(hssize_t)*rank)
+            if(offset is not None):
+                convert_tuple(offset, <hsize_t*>dims, rank)
+            else:
+                # The HDF5 docs say passing in NULL resets the offset to 0.  
+                # Instead it raises an exception.  Imagine my surprise. We'll 
+                # do this manually.
+                for i from 0<=i<rank:
+                    dims[i] = 0
+
+            H5Soffset_simple(self.id, dims)
+
+        finally:
+            efree(dims)
+
+    def get_simple_extent_ndims(self):
+        """ () => INT rank
+            
+            Determine the rank of a "simple" (slab) dataspace.
+        """
+        return H5Sget_simple_extent_ndims(self.id)
+
+    def get_simple_extent_dims(self, int maxdims=0):
+        """ (INT space_id, BOOL maxdims=False) => TUPLE shape
+
+            Determine the shape of a "simple" (slab) dataspace.  If "maxdims" is
+            True, retrieve the maximum dataspace size instead.
+        """
+        cdef int rank
+        cdef hsize_t* dims
+        dims = NULL
+
+        rank = H5Sget_simple_extent_dims(self.id, NULL, NULL)
 
-    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-    try:
-        if maxdims:
-            H5Sget_simple_extent_dims(space_id, NULL, dims)
-        else:
-            H5Sget_simple_extent_dims(space_id, dims, NULL)
+        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        try:
+            if maxdims:
+                H5Sget_simple_extent_dims(self.id, NULL, dims)
+            else:
+                H5Sget_simple_extent_dims(self.id, dims, NULL)
 
-        return convert_dims(dims, rank)
+            return convert_dims(dims, rank)
 
-    finally:
-        efree(dims)
+        finally:
+            efree(dims)
     
-def get_simple_extent_npoints(hid_t space_id):
-    """ (INT space_id) => LONG npoints
+    def get_simple_extent_npoints(self):
+        """ () => LONG npoints
 
-        Determine the total number of elements in a dataspace.
-    """
-    return H5Sget_simple_extent_npoints(space_id)
+            Determine the total number of elements in a dataspace.
+        """
+        return H5Sget_simple_extent_npoints(self.id)
 
-def get_simple_extent_type(hid_t space_id):
-    """ (INT space_id) => INT class_code
+    def get_simple_extent_type(self):
+        """ () => INT class_code
 
-        Class code is either SCALAR or SIMPLE.
-    """
-    return <int>H5Sget_simple_extent_type(space_id)
+            Class code is either SCALAR or SIMPLE.
+        """
+        return <int>H5Sget_simple_extent_type(self.id)
 
 # === Extents =================================================================
 
-def extent_copy(hid_t dest_id, hid_t source_id):
-    """ (INT dest_id, INT source_id)
+    def extent_copy(self, SpaceID source_id not None):
+        """ (SpaceID source_id)
 
-        Copy one dataspace's extent to another, changing its type if necessary.
-    """
-    H5Sextent_copy(dest_id, source_id)
+            Replace this dataspace's extent with another's, changing its
+            typecode if necessary.
+        """
+        H5Sextent_copy(self.id, source_id.id)
 
-def set_extent_simple(hid_t space_id, object dims_tpl, object max_dims_tpl=None):
-    """ (INT space_id, TUPLE dims_tpl, TUPLE max_dims_tpl=None)
+    def set_extent_simple(self, object dims_tpl, object max_dims_tpl=None):
+        """ (INT space_id, TUPLE dims_tpl, TUPLE max_dims_tpl=None)
 
-        Reset the dataspace extent via a tuple of dimensions.  
-        Every element of dims_tpl must be a positive integer.  
+            Reset the dataspace extent via a tuple of dimensions.  
+            Every element of dims_tpl must be a positive integer.  
 
-        You can optionally specify the maximum dataspace size. The 
-        special value UNLIMITED, as an element of max_dims, indicates 
-        an unlimited dimension.
-    """
-    cdef int rank
-    cdef hsize_t* dims
-    cdef hsize_t* max_dims
-    dims = NULL
-    max_dims = NULL
+            You can optionally specify the maximum dataspace size. The 
+            special value UNLIMITED, as an element of max_dims, indicates 
+            an unlimited dimension.
+        """
+        cdef int rank
+        cdef hsize_t* dims
+        cdef hsize_t* max_dims
+        dims = NULL
+        max_dims = NULL
 
-    require_tuple(dims_tpl, 0, -1, "dims_tpl")
-    rank = len(dims_tpl)
-    require_tuple(max_dims_tpl, 1, rank, "max_dims_tpl")
+        require_tuple(dims_tpl, 0, -1, "dims_tpl")
+        rank = len(dims_tpl)
+        require_tuple(max_dims_tpl, 1, rank, "max_dims_tpl")
 
-    try:
-        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-        convert_tuple(dims_tpl, dims, rank)
+        try:
+            dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+            convert_tuple(dims_tpl, dims, rank)
 
-        if max_dims_tpl is not None:
-            max_dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-            convert_tuple(max_dims_tpl, max_dims, rank)
+            if max_dims_tpl is not None:
+                max_dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+                convert_tuple(max_dims_tpl, max_dims, rank)
 
-        H5Sset_extent_simple(space_id, rank, dims, max_dims)
+            H5Sset_extent_simple(self.id, rank, dims, max_dims)
 
-    finally:
-        efree(dims)
-        efree(max_dims)
+        finally:
+            efree(dims)
+            efree(max_dims)
 
-def set_extent_none(hid_t space_id):
-    """ (INT space_id)
+    def set_extent_none(self):
+        """ (INT space_id)
 
-        Remove the dataspace extent; class changes to NO_CLASS.
-    """
-    H5Sset_extent_none(space_id)
+            Remove the dataspace extent; typecode changes to NO_CLASS.
+        """
+        H5Sset_extent_none(self.id)
 
 # === General selection operations ============================================
 
-def get_select_type(hid_t space_id):
-    """ (INT space_id) => INT select_code
+    def get_select_type(self):
+        """ () => INT select_code
 
-        Determine selection type.  Return values are:
-        SEL_NONE:       No selection.
-        SEL_ALL:        All points selected
-        SEL_POINTS:     Point-by-point element selection in use
-        SEL_HYPERSLABS: Hyperslab selection in use
-    """
-    return <int>H5Sget_select_type(space_id)
+            Determine selection type.  Return values are:
+            SEL_NONE:       No selection.
+            SEL_ALL:        All points selected
+            SEL_POINTS:     Point-by-point element selection in use
+            SEL_HYPERSLABS: Hyperslab selection in use
+        """
+        return <int>H5Sget_select_type(self.id)
 
-def get_select_npoints(hid_t space_id):
-    """ (INT space_id) => LONG npoints
+    def get_select_npoints(self):
+        """ () => LONG npoints
 
-        Determine the total number of points currently selected.  
-        Works for all selection techniques.
-    """
-    return H5Sget_select_npoints(space_id)
+            Determine the total number of points currently selected.  
+            Works for all selection techniques.
+        """
+        return H5Sget_select_npoints(self.id)
 
-def get_select_bounds(hid_t space_id):
-    """ (INT space_id) => (TUPLE start, TUPLE end)
+    def get_select_bounds(self):
+        """ () => (TUPLE start, TUPLE end)
 
-        Determine the bounding box which exactly contains 
-        the current selection.
-    """
-    cdef int rank
-    cdef hsize_t *start
-    cdef hsize_t *end
-    start = NULL
-    end = NULL
+            Determine the bounding box which exactly contains 
+            the current selection.
+        """
+        cdef int rank
+        cdef hsize_t *start
+        cdef hsize_t *end
+        start = NULL
+        end = NULL
 
-    rank = H5Sget_simple_extent_ndims(space_id)
+        rank = H5Sget_simple_extent_ndims(self.id)
 
-    start = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-    end = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        start = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        end = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
 
-    try:
-        H5Sget_select_bounds(space_id, start, end)
+        try:
+            H5Sget_select_bounds(self.id, start, end)
 
-        start_tpl = convert_dims(start, rank)
-        end_tpl = convert_dims(end, rank)
-        return (start_tpl, end_tpl)
+            start_tpl = convert_dims(start, rank)
+            end_tpl = convert_dims(end, rank)
+            return (start_tpl, end_tpl)
 
-    finally:
-        efree(start)
-        efree(end)
+        finally:
+            efree(start)
+            efree(end)
 
-def select_all(hid_t space_id):
-    """ (INT space_id)
+    def select_all(self):
+        """ ()
 
-        Select all points in the dataspace.
-    """
-    H5Sselect_all(space_id)
+            Select all points in the dataspace.
+        """
+        H5Sselect_all(self.id)
 
-def select_none(hid_t space_id):
-    """ (INT space_id)
+    def select_none(self):
+        """ ()
 
-        Deselect entire dataspace.
-    """
-    H5Sselect_none(space_id)
+            Deselect entire dataspace.
+        """
+        H5Sselect_none(self.id)
 
-def select_valid(hid_t space_id):
-    """ (INT space_id) => BOOL select_valid
-        
-        Determine if the current selection falls within the dataspace extent.
-    """
-    return pybool(H5Sselect_valid(space_id))
+    def select_valid(self):
+        """ () => BOOL select_valid
+            
+            Determine if the current selection falls within the dataspace extent.
+        """
+        return pybool(H5Sselect_valid(self.id))
 
 # === Point selection functions ===============================================
 
-def get_select_elem_npoints(hid_t space_id):
-    """ (INT space_id) => LONG npoints
+    def get_select_elem_npoints(self):
+        """ () => LONG npoints
 
-        Determine the number of elements selected in point-selection mode.
-    """
-    return H5Sget_select_elem_npoints(space_id)
+            Determine the number of elements selected in point-selection mode.
+        """
+        return H5Sget_select_elem_npoints(self.id)
 
-def get_select_elem_pointlist(hid_t space_id):
-    """ (INT space_id) => LIST elements_list
+    def get_select_elem_pointlist(self):
+        """ () => LIST elements_list
 
-        Get a list of all selected elements, in point-selection mode.
-        List entries are <rank>-length tuples containing point coordinates.
-    """
-    cdef int rank
-    cdef hssize_t npoints
-    cdef hsize_t *buf
-    cdef int i_point
-    cdef int i_entry
+            Get a list of all selected elements, in point-selection mode.
+            List entries are <rank>-length tuples containing point coordinates.
+        """
+        cdef int rank
+        cdef hssize_t npoints
+        cdef hsize_t *buf
+        cdef int i_point
+        cdef int i_entry
 
-    npoints = H5Sget_select_elem_npoints(space_id)
-    if npoints == 0:
-        return []
+        npoints = H5Sget_select_elem_npoints(self.id)
+        if npoints == 0:
+            return []
 
-    rank = H5Sget_simple_extent_ndims(space_id)
-    
-    buf = <hsize_t*>emalloc(sizeof(hsize_t)*rank*npoints)
+        rank = H5Sget_simple_extent_ndims(self.id)
+        
+        buf = <hsize_t*>emalloc(sizeof(hsize_t)*rank*npoints)
 
-    try:
-        H5Sget_select_elem_pointlist(space_id, 0, <hsize_t>npoints, buf)
+        try:
+            H5Sget_select_elem_pointlist(self.id, 0, <hsize_t>npoints, buf)
 
-        retlist = []
-        for i_point from 0<=i_point<npoints:
-            tmp_tpl = []
-            for i_entry from 0<=i_entry<rank:
-                tmp_tpl.append( long( buf[i_point*rank + i_entry] ) )
-            retlist.append(tuple(tmp_tpl))
+            retlist = []
+            for i_point from 0<=i_point<npoints:
+                tmp_tpl = []
+                for i_entry from 0<=i_entry<rank:
+                    tmp_tpl.append( long( buf[i_point*rank + i_entry] ) )
+                retlist.append(tuple(tmp_tpl))
 
-    finally:
-        efree(buf)
+        finally:
+            efree(buf)
 
-    return retlist
+        return retlist
 
-def select_elements(hid_t space_id, object coord_list, int op=H5S_SELECT_SET):
-    """ (INT space_id, LIST coord_list, INT op=SELECT_SET)
+    def select_elements(self, object coord_list, int op=H5S_SELECT_SET):
+        """ (LIST coord_list, INT op=SELECT_SET)
 
-        Select elements using a list of points.  List entries should be
-        tuples containing point coordinates. A zero-length list is 
-        apparently not allowed by the HDF5 library.
-    """
-    cdef size_t nelements   # Number of point coordinates
-    cdef hsize_t *coords    # Contiguous 2D array nelements x rank x sizeof(hsize_t)
+            Select elements using a list of points.  List entries should be
+            tuples containing point coordinates. A zero-length list is 
+            apparently not allowed by the HDF5 library.
+        """
+        cdef size_t nelements   # Number of point coordinates
+        cdef hsize_t *coords    # Contiguous 2D array nelements x rank x sizeof(hsize_t)
 
-    cdef int rank
-    cdef int i_point
-    cdef int i_entry
-    coords = NULL
+        cdef int rank
+        cdef int i_point
+        cdef int i_entry
+        coords = NULL
 
-    require_list(coord_list, 0, -1, "coord_list")
-    nelements = len(coord_list)
+        require_list(coord_list, 0, -1, "coord_list")
+        nelements = len(coord_list)
 
-    rank = H5Sget_simple_extent_ndims(space_id)
+        rank = H5Sget_simple_extent_ndims(self.id)
 
-    # The docs say this should be an hsize_t**, but it seems that
-    # HDF5 expects the coordinates to be a static, contiguous
-    # array.  We'll simulate that by malloc'ing a contiguous chunk
-    # and using pointer arithmetic to initialize it.
-    coords = <hsize_t*>emalloc(sizeof(hsize_t)*rank*nelements)
+        # The docs say this should be an hsize_t**, but it seems that
+        # HDF5 expects the coordinates to be a static, contiguous
+        # array.  We'll simulate that by malloc'ing a contiguous chunk
+        # and using pointer arithmetic to initialize it.
+        coords = <hsize_t*>emalloc(sizeof(hsize_t)*rank*nelements)
 
-    try:
-        for i_point from 0<=i_point<nelements:
+        try:
+            for i_point from 0<=i_point<nelements:
 
-            tpl = coord_list[i_point]
-            lmsg = "List element %d" % i_point
-            require_tuple(tpl, 0, rank, lmsg)
+                tpl = coord_list[i_point]
+                lmsg = "List element %d" % i_point
+                require_tuple(tpl, 0, rank, lmsg)
 
-            for i_entry from 0<=i_entry<rank:
-                coords[(i_point*rank) + i_entry] = tpl[i_entry]
+                for i_entry from 0<=i_entry<rank:
+                    coords[(i_point*rank) + i_entry] = tpl[i_entry]
 
-        H5Sselect_elements(space_id, <H5S_seloper_t>op, nelements, <hsize_t**>coords)
+            H5Sselect_elements(self.id, <H5S_seloper_t>op, nelements, <hsize_t**>coords)
 
-    finally:
-        efree(coords)
+        finally:
+            efree(coords)
 
 # === Hyperslab selection functions ===========================================
 
-def get_select_hyper_nblocks(hid_t space_id):
-    """ (INT space_id) => LONG nblocks
+    def get_select_hyper_nblocks(self):
+        """ () => LONG nblocks
 
-        Get the number of hyperslab blocks currently selected.
-    """
-    return H5Sget_select_hyper_nblocks(space_id)
+            Get the number of hyperslab blocks currently selected.
+        """
+        return H5Sget_select_hyper_nblocks(self.id)
 
-def get_select_hyper_blocklist(hid_t space_id):
-    """ (INT space_id) => LIST hyperslab_blocks
+    def get_select_hyper_blocklist(self):
+        """ () => LIST hyperslab_blocks
 
-        Get a Python list containing selected hyperslab blocks.
-        List entries are 2-tuples in the form:
-            ( corner_coordinate, opposite_coordinate )
-        where corner_coordinate and opposite_coordinate are <rank>-length
-        tuples.
-    """
-    cdef hssize_t nblocks
-    cdef herr_t retval
-    cdef hsize_t *buf
+            Get a Python list containing selected hyperslab blocks.
+            List entries are 2-tuples in the form:
+                ( corner_coordinate, opposite_coordinate )
+            where corner_coordinate and opposite_coordinate are <rank>-length
+            tuples.
+        """
+        cdef hssize_t nblocks
+        cdef herr_t retval
+        cdef hsize_t *buf
 
-    cdef int rank
-    cdef int i_block
-    cdef int i_entry
+        cdef int rank
+        cdef int i_block
+        cdef int i_entry
 
-    rank = H5Sget_simple_extent_ndims(space_id)
-    nblocks = H5Sget_select_hyper_nblocks(space_id)
+        rank = H5Sget_simple_extent_ndims(self.id)
+        nblocks = H5Sget_select_hyper_nblocks(self.id)
 
-    buf = <hsize_t*>emalloc(sizeof(hsize_t)*2*rank*nblocks)
-    
-    try:
-        H5Sget_select_hyper_blocklist(space_id, 0, nblocks, buf)
-
-        outlist = []
-        for i_block from 0<=i_block<nblocks:
-            corner_list = []
-            opposite_list = []
-            for i_entry from 0<=i_entry<(2*rank):
-                entry = long(buf[ i_block*(2*rank) + i_entry])
-                if i_entry < rank:
-                    corner_list.append(entry)
-                else:
-                    opposite_list.append(entry)
-            outlist.append( (tuple(corner_list), tuple(opposite_list)) )
-    finally:
-        efree(buf)
+        buf = <hsize_t*>emalloc(sizeof(hsize_t)*2*rank*nblocks)
+        
+        try:
+            H5Sget_select_hyper_blocklist(self.id, 0, nblocks, buf)
+
+            outlist = []
+            for i_block from 0<=i_block<nblocks:
+                corner_list = []
+                opposite_list = []
+                for i_entry from 0<=i_entry<(2*rank):
+                    entry = long(buf[ i_block*(2*rank) + i_entry])
+                    if i_entry < rank:
+                        corner_list.append(entry)
+                    else:
+                        opposite_list.append(entry)
+                outlist.append( (tuple(corner_list), tuple(opposite_list)) )
+        finally:
+            efree(buf)
 
     return outlist
     
 
-def select_hyperslab(hid_t space_id, object start, object count, 
-    object stride=None, object block=None, int op=H5S_SELECT_SET):
-    """ (INT space_id, TUPLE start, TUPLE count, TUPLE stride=None, 
-            TUPLE block=None, INT op=SELECT_SET)
-     
-        Select a block region from an existing dataspace.  See the HDF5
-        documentation for the meaning of the "block" and "op" keywords.
-    """
-    cdef int rank
-    cdef hsize_t* start_array
-    cdef hsize_t* count_array
-    cdef hsize_t* stride_array
-    cdef hsize_t* block_array
-
-    start_array = NULL
-    count_array = NULL
-    stride_array = NULL
-    block_array = NULL
-
-    # Dataspace rank.  All provided tuples must match this.
-    rank = H5Sget_simple_extent_ndims(space_id)
-
-    require_tuple(start, 0, rank, "start")
-    require_tuple(count, 0, rank, "count")
-    require_tuple(stride, 1, rank, "stride")
-    require_tuple(block, 1, rank, "block")
-
-    try:
-        start_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-        count_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-        convert_tuple(start, start_array, rank)
-        convert_tuple(count, count_array, rank)
-
-        if stride is not None:
-            stride_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-            convert_tuple(stride, stride_array, rank)
-        if block is not None:
-            block_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-            convert_tuple(block, block_array, rank)
-
-        H5Sselect_hyperslab(space_id, <H5S_seloper_t>op, start_array, 
-                                     stride_array, count_array, block_array)
-
-    finally:
-        efree(start_array)
-        efree(count_array)
-        efree(stride_array)
-        efree(block_array)
-
-# === Python extensions =======================================================
-
-PY_CLASS = DDict({H5S_ALL: 'ALL', H5S_UNLIMITED: 'UNLIMITED',
-            H5S_NO_CLASS: 'NO CLASS', H5S_SCALAR: 'CLASS SCALAR',
-            H5S_SIMPLE: 'CLASS SIMPLE'})
-PY_SEL = DDict({ H5S_SEL_ERROR: 'SELECTION ERROR', H5S_SEL_NONE: 'SELECT NONE', 
-            H5S_SEL_POINTS: 'POINT SELECTION', 
-            H5S_SEL_HYPERSLABS: 'HYPERSLAB SELECTION',
-            H5S_SEL_ALL: 'SELECT ALL' })
-
-PY_SELECT = DDict({ H5S_SELECT_NOOP: 'NO-OP SELECT', 
-                    H5S_SELECT_SET: 'SET SELECT', 
-                    H5S_SELECT_OR: 'OR SELECT',
-                    H5S_SELECT_AND: 'AND SELECT', H5S_SELECT_XOR: 'XOR SELECT', 
-                    H5S_SELECT_NOTB: 'NOTB SELECT', H5S_SELECT_NOTA: 'NOTA SELECT', 
-                    H5S_SELECT_APPEND: 'APPEND SELECTION',
-                    H5S_SELECT_PREPEND: 'PREPEND SELECTION', 
-                    H5S_SELECT_INVALID: 'INVALID SELECTION' })
-
+    def select_hyperslab(self, object start, object count, 
+        object stride=None, object block=None, int op=H5S_SELECT_SET):
+        """ (INT space_id, TUPLE start, TUPLE count, TUPLE stride=None, 
+                TUPLE block=None, INT op=SELECT_SET)
+         
+            Select a block region from an existing dataspace.  See the HDF5
+            documentation for the meaning of the "block" and "op" keywords.
+        """
+        cdef int rank
+        cdef hsize_t* start_array
+        cdef hsize_t* count_array
+        cdef hsize_t* stride_array
+        cdef hsize_t* block_array
+
+        start_array = NULL
+        count_array = NULL
+        stride_array = NULL
+        block_array = NULL
+
+        # Dataspace rank.  All provided tuples must match this.
+        rank = H5Sget_simple_extent_ndims(self.id)
+
+        require_tuple(start, 0, rank, "start")
+        require_tuple(count, 0, rank, "count")
+        require_tuple(stride, 1, rank, "stride")
+        require_tuple(block, 1, rank, "block")
+
+        try:
+            start_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+            count_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+            convert_tuple(start, start_array, rank)
+            convert_tuple(count, count_array, rank)
+
+            if stride is not None:
+                stride_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+                convert_tuple(stride, stride_array, rank)
+            if block is not None:
+                block_array = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+                convert_tuple(block, block_array, rank)
+
+            H5Sselect_hyperslab(self.id, <H5S_seloper_t>op, start_array, 
+                                         stride_array, count_array, block_array)
+
+        finally:
+            efree(start_array)
+            efree(count_array)
+            efree(stride_array)
+            efree(block_array)
 
 
 
diff --git a/h5py/h5t.pxd b/h5py/h5t.pxd
index aee72e3..63b0825 100644
--- a/h5py/h5t.pxd
+++ b/h5py/h5t.pxd
@@ -15,6 +15,10 @@
 # directory.
 
 include "std_defs.pxi"
+from h5 cimport LockableID
+
+cdef class TypeID(LockableID):
+    pass
 
 cdef extern from "hdf5.h":
 
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
index 121a8ff..ecf09a1 100644
--- a/h5py/h5t.pyx
+++ b/h5py/h5t.pyx
@@ -94,6 +94,12 @@ cdef herr_t PY_H5Tclose(hid_t type_id) except *:
 
     return retval
     
+cdef object lockid(hid_t id_in):
+    cdef TypeID tid
+    tid = TypeID(id_in)
+    tid._locked = 1
+    return tid
+
 # === Public constants and data structures ====================================
 
 # Enumeration H5T_class_t
@@ -137,45 +143,45 @@ else:
 # --- Built-in HDF5 datatypes -------------------------------------------------
 
 # IEEE floating-point
-IEEE_F32LE = H5T_IEEE_F32LE
-IEEE_F32BE = H5T_IEEE_F32BE
-IEEE_F64LE = H5T_IEEE_F64LE 
-IEEE_F64BE = H5T_IEEE_F64BE
+IEEE_F32LE = lockid(H5T_IEEE_F32LE)
+IEEE_F32BE = lockid(H5T_IEEE_F32BE)
+IEEE_F64LE = lockid(H5T_IEEE_F64LE)
+IEEE_F64BE = lockid(H5T_IEEE_F64BE)
 
 # Signed 2's complement integer types
-STD_I8LE  = H5T_STD_I8LE
-STD_I16LE = H5T_STD_I16LE
-STD_I32LE = H5T_STD_I32LE
-STD_I64LE = H5T_STD_I64LE
+STD_I8LE  = lockid(H5T_STD_I8LE)
+STD_I16LE = lockid(H5T_STD_I16LE)
+STD_I32LE = lockid(H5T_STD_I32LE)
+STD_I64LE = lockid(H5T_STD_I64LE)
 
-STD_I8BE  = H5T_STD_I8BE
-STD_I16BE = H5T_STD_I16BE
-STD_I32BE = H5T_STD_I32BE
-STD_I64BE = H5T_STD_I64BE
+STD_I8BE  = lockid(H5T_STD_I8BE)
+STD_I16BE = lockid(H5T_STD_I16BE)
+STD_I32BE = lockid(H5T_STD_I32BE)
+STD_I64BE = lockid(H5T_STD_I64BE)
 
 # Unsigned integers
-STD_U8LE  = H5T_STD_U8LE
-STD_U16LE = H5T_STD_U16LE
-STD_U32LE = H5T_STD_U32LE
-STD_U64LE = H5T_STD_U64LE
+STD_U8LE  = lockid(H5T_STD_U8LE)
+STD_U16LE = lockid(H5T_STD_U16LE)
+STD_U32LE = lockid(H5T_STD_U32LE)
+STD_U64LE = lockid(H5T_STD_U64LE)
 
-STD_U8BE  = H5T_STD_U8BE
-STD_U16BE = H5T_STD_U16BE
-STD_U32BE = H5T_STD_U32BE
-STD_U64BE = H5T_STD_U64BE
+STD_U8BE  = lockid(H5T_STD_U8BE)
+STD_U16BE = lockid(H5T_STD_U16BE)
+STD_U32BE = lockid(H5T_STD_U32BE)
+STD_U64BE = lockid(H5T_STD_U64BE)
 
 # Native integer types by bytesize
-NATIVE_INT8 = H5T_NATIVE_INT8
-NATIVE_UINT8 = H5T_NATIVE_UINT8
-NATIVE_INT16 = H5T_NATIVE_INT16
-NATIVE_UINT16 = H5T_NATIVE_UINT16
-NATIVE_INT32 = H5T_NATIVE_INT32
-NATIVE_UINT32 = H5T_NATIVE_UINT32
-NATIVE_INT64 = H5T_NATIVE_INT64
-NATIVE_UINT64 = H5T_NATIVE_UINT64
+NATIVE_INT8 = lockid(H5T_NATIVE_INT8)
+NATIVE_UINT8 = lockid(H5T_NATIVE_UINT8)
+NATIVE_INT16 = lockid(H5T_NATIVE_INT16)
+NATIVE_UINT16 = lockid(H5T_NATIVE_UINT16)
+NATIVE_INT32 = lockid(H5T_NATIVE_INT32)
+NATIVE_UINT32 = lockid(H5T_NATIVE_UINT32)
+NATIVE_INT64 = lockid(H5T_NATIVE_INT64)
+NATIVE_UINT64 = lockid(H5T_NATIVE_UINT64)
 
 # Null terminated (C) string type
-C_S1 = H5T_C_S1
+C_S1 = lockid(H5T_C_S1)
 
 # === General datatype operations =============================================
 
@@ -185,331 +191,333 @@ def create(int classtype, size_t size):
         Create a new HDF5 type object.  Legal values are 
         COMPOUND, OPAQUE, and ENUM.
     """
-    return H5Tcreate(<H5T_class_t>classtype, size)
+    return TypeID(H5Tcreate(<H5T_class_t>classtype, size))
 
-def open(hid_t group_id, char* name):
-    """ (INT group_id, STRING name) => INT type_id
+def open(ObjectID group not None, char* name):
+    """ (ObjectID group, STRING name) => TypeID
 
         Open a named datatype from a file.
     """
-    return H5Topen(group_id, name)
+    return TypeID(H5Topen(group.id, name))
 
-def commit(hid_t loc_id, char* name, hid_t type_id):
-    """ (INT group_id, STRING name, INT type_id)
+def array_create(TypeID base not None, object dims_tpl):
+    """ (TypeID base, TUPLE dimensions)
 
-        Commit a transient datatype to a named datatype in a file.
+        Create a new array datatype, of parent type <base_type_id> and
+        dimensions given via a tuple of non-negative integers.  "Unlimited" 
+        dimensions are not allowed.
     """
-    return H5Tcommit(loc_id, name, type_id)
+    cdef hsize_t rank
+    cdef hsize_t *dims
+    dims = NULL
 
-def committed(hid_t type_id):
-    """ (INT type_id) => BOOL is_comitted
+    require_tuple(dims_tpl, 0, -1, "dims_tpl")
+    rank = len(dims_tpl)
+    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
 
-        Determine if a given type object is named (T) or transient (F).
-    """
-    return H5Tcommitted(type_id)
+    try:
+        convert_tuple(dims_tpl, dims, rank)
+        return TypeID(H5Tarray_create(base.id, rank, dims, NULL))
+    finally:
+        efree(dims)
 
-def copy(hid_t type_id):
-    """ (INT type_id) => INT new_type_id
+def enum_create(TypeID base not None):
+    """ (TypeID base) => INT new_type_id
 
-        Copy an existing HDF type object.
+        Create a new enumerated type based on an (integer) parent type.
     """
-    return H5Tcopy(type_id)
+    return TypeID(H5Tenum_create(base.id))
+
+# === XXXX ====
 
-def equal(hid_t typeid_1, hid_t typeid_2):
-    """ (INT typeid_1, INT typeid_2) => BOOL types_are_equal
+cdef class TypeID(LockableID):
 
-        Test whether two identifiers point to the same datatype object.  
-        Note this does NOT perform any kind of logical comparison.
     """
-    return pybool(H5Tequal(typeid_1, typeid_2))
+        Represents an HDF5 datatype identifier.
+    """
 
-def lock(hid_t type_id):
-    """ (INT type_id)
+    def commit(self, ObjectID group not None, char* name):
+        """ (ObjectID group, STRING name)
 
-        Lock a datatype, which makes it immutable and indestructible.
-        Once locked, it can't be unlocked.
-    """
-    H5Tlock(type_id)
+            Commit this (transient) datatype to a named datatype in a file.
+        """
+        return H5Tcommit(group.id, name, self.id)
 
-def get_class(hid_t type_id):
-    """ (INT type_id) => INT class
+    def committed(self):
+        """ () => BOOL is_comitted
 
-        Determine the datatype's class.
-    """
-    return <int>H5Tget_class(type_id)
+            Determine if a given type object is named (T) or transient (F).
+        """
+        return pybool(H5Tcommitted(self.id))
 
-def get_size(hid_t type_id):
-    """ (INT type_id) => INT size
+    def copy(self):
+        """ () => TypeID
 
-        Determine the total size of a datatype, in bytes.
-    """
-    return H5Tget_size(type_id)
+            Create a copy of this type object.
+        """
+        return TypeID(H5Tcopy(self.id))
 
-def get_super(hid_t type_id):
-    """ (INT type_id) => INT super_type_id
+    def equal(self, TypeID typeid):
+        """ (TypeID typeid) => BOOL
 
-        Determine the parent type of an array or enumeration datatype.
-    """
-    return H5Tget_super(type_id)
+            Test whether two identifiers point to the same datatype object.  
+            Note this does NOT perform any kind of logical comparison.
+        """
+        return pybool(H5Tequal(self.id, typeid.id))
 
-def get_native_type(hid_t type_id, int direction):
-    """ (INT type_id, INT direction) => INT new_type_id
+    def lock(self):
+        """ (self)
 
-        Determine the native C equivalent for the given datatype.
-        Legal values for "direction" are:
-          DIR_DEFAULT
-          DIR_ASCEND
-          DIR_DESCEND
-        These determine which direction the list of native datatypes is
-        searched; see the HDF5 docs for a definitive list.
+            Lock a datatype, which makes it immutable and indestructible.
+            Once locked, it can't be unlocked.
+        """
+        H5Tlock(self.id)
+        self._locked = 1
 
-        The returned datatype is always a copy one of NATIVE_*, and must
-        eventually be closed.
-    """
-    return H5Tget_native_type(type_id, <H5T_direction_t>direction)
+    def get_class(self):
+        """ () => INT classcode
 
-def detect_class(hid_t type_id, int classtype):
-    """ (INT type_id, INT class) => BOOL class_is_present
+            Determine the datatype's class code.
+        """
+        return <int>H5Tget_class(self.id)
 
-        Determine if a member of the given class exists in a compound
-        datatype.  The search is recursive.
-    """
-    return pybool(H5Tdetect_class(type_id, <H5T_class_t>classtype))
+    def get_size(self):
+        """ () => INT size
 
-def close(hid_t type_id, int force=1):
-    """ (INT type_id, BOOL force=True)
+            Determine the total size of a datatype, in bytes.
+        """
+        return H5Tget_size(self.id)
 
-        Close this datatype.  If "force" is True (default), ignore errors 
-        commonly associated with attempting to close immutable types.
-    """
-    try:
-        H5Tclose(type_id)
-    except ArgsError, e:
-        if not (force and e.errno == 1005):  # ArgsError, bad value
-            raise
+    def get_super(self):
+        """ () => TypeID
 
-# === Atomic datatype operations ==============================================
+            Determine the parent type of an array or enumeration datatype.
+        """
+        return TypeID(H5Tget_super(self.id))
 
+    def get_native_type(self, int direction=H5T_DIR_DEFAULT):
+        """ (INT direction) => INT new_type_id
 
-def set_size(hid_t type_id, size_t size):
-    """ (INT type_id, INT size)
+            Determine the native C equivalent for the given datatype.
+            Legal values for "direction" are:
+              DIR_DEFAULT*
+              DIR_ASCEND
+              DIR_DESCEND
+            These determine which direction the list of native datatypes is
+            searched; see the HDF5 docs for a definitive list.
 
-        Set the total size of the datatype, in bytes.  Useful mostly for
-        string types.
-    """
-    H5Tset_size(type_id, size)
+            The returned datatype is always an unlocked copy of one of NATIVE_*.
+        """
+        return TypeID(H5Tget_native_type(self.id, <H5T_direction_t>direction))
 
-def get_order(hid_t type_id):
-    """ (INT type_id) => INT order
+    def detect_class(self, int classtype):
+        """ (INT class) => BOOL class_is_present
 
-        Obtain the byte order of the datatype; one of:
-         ORDER_LE
-         ORDER_BE
-         ORDER_NATIVE
-    """
-    return <int>H5Tget_order(type_id)
+            Determine if a member of the given class exists in a compound
+            datatype.  The search is recursive.
+        """
+        return pybool(H5Tdetect_class(self.id, <H5T_class_t>classtype))
 
-def set_order(hid_t type_id, int order):
-    """ (INT type_id, INT order)
+    def close(self):
+        """ Close this datatype.  If it's locked, nothing happens.
+        """
+        if not self._locked:
+            H5Tclose(self.id)
 
-        Set the byte order of the datatype. "order" must be one of
-         ORDER_LE
-         ORDER_BE
-         ORDER_NATIVE
-    """
-    H5Tset_order(type_id, <H5T_order_t>order)
+# === Atomic datatype operations ==============================================
 
-def get_sign(hid_t type_id):
-    """ (INT type_id) => INT sign
 
-        Obtain the "signedness" of the datatype; one of:
-          SGN_NONE:  Unsigned
-          SGN_2:     Signed 2's complement
-    """
-    return <int>H5Tget_sign(type_id)
+    def set_size(self, size_t size):
+        """ (INT size)
 
-def set_sign(hid_t type_id, int sign):
-    """ (INT type_id, INT sign)
+            Set the total size of the datatype, in bytes.  Useful mostly for
+            string types.
+        """
+        H5Tset_size(self.id, size)
 
-        Set the "signedness" of the datatype; one of:
-          SGN_NONE:  Unsigned
-          SGN_2:     Signed 2's complement
-    """
-    H5Tset_sign(type_id, <H5T_sign_t>sign)
+    def get_order(self):
+        """ () => INT order
 
-def is_variable_str(hid_t type_id):
-    """ (INT type_id) => BOOL is_variable
+            Obtain the byte order of the datatype; one of:
+             ORDER_LE
+             ORDER_BE
+             ORDER_NATIVE
+        """
+        return <int>H5Tget_order(self.id)
 
-        Determine if the given string datatype is a variable-length string.
-        Please note that reading/writing data in this format is impossible;
-        only fixed-length strings are currently supported.
-    """
-    return pybool(H5Tis_variable_str(type_id))
+    def set_order(self, int order):
+        """ (INT type_id, INT order)
 
-# === Compound datatype operations ============================================
+            Set the byte order of the datatype. "order" must be one of
+             ORDER_LE
+             ORDER_BE
+             ORDER_NATIVE
+        """
+        H5Tset_order(self.id, <H5T_order_t>order)
 
+    def get_sign(self):
+        """ (INT type_id) => INT sign
 
-def get_nmembers(hid_t type_id):
-    """ (INT type_id) => INT number_of_members
+            Obtain the "signedness" of the datatype; one of:
+              SGN_NONE:  Unsigned
+              SGN_2:     Signed 2's complement
+        """
+        return <int>H5Tget_sign(self.id)
 
-        Determine the number of members in a compound or enumerated type.
-    """
-    return H5Tget_nmembers(type_id)
+    def set_sign(self, int sign):
+        """ (INT sign)
 
-def get_member_class(hid_t type_id, int member):
-    """ (INT type_id, INT member) => INT class
+            Set the "signedness" of the datatype; one of:
+              SGN_NONE:  Unsigned
+              SGN_2:     Signed 2's complement
+        """
+        H5Tset_sign(self.id, <H5T_sign_t>sign)
 
-        Determine the datatype class of the member of a compound type,
-        identified by its index (0 <= member < nmembers).
-    """
-    if member < 0:
-        raise ValueError("Member index must be non-negative.")
-    return H5Tget_member_class(type_id, member)
+    def is_variable_str(self):
+        """ () => BOOL is_variable
 
-    
-def get_member_name(hid_t type_id, int member):
-    """ (INT type_id, INT member) => STRING name
-    
-        Determine the name of a member of a compound or enumerated type,
-        identified by its index (0 <= member < nmembers).
-    """
-    cdef char* name
-    name = NULL
+            Determine if the given string datatype is a variable-length string.
+            Please note that reading/writing data in this format is impossible;
+            only fixed-length strings are currently supported.
+        """
+        return pybool(H5Tis_variable_str(self.id))
 
-    if member < 0:
-        raise ValueError("Member index must be non-negative.")
+# === Compound datatype operations ============================================
 
-    try:
-        name = H5Tget_member_name(type_id, member)
-        assert name != NULL
-        pyname = name
-    finally:
-        free(name)
 
-    return pyname
+    def get_nmembers(self):
+        """ () => INT number_of_members
 
-def get_member_index(hid_t type_id, char* name):
-    """ (INT type_id, STRING name) => INT index
+            Determine the number of members in a compound or enumerated type.
+        """
+        return H5Tget_nmembers(self.id)
 
-        Determine the index of a member of a compound or enumerated datatype
-        identified by a string name.
-    """
-    return H5Tget_member_index(type_id, name)
+    def get_member_class(self, int member):
+        """ (INT member) => INT class
 
-def get_member_offset(hid_t type_id, int member):
-    """ (INT type_id, INT member_index) => INT offset
+            Determine the datatype class of the member of a compound type,
+            identified by its index (0 <= member < nmembers).
+        """
+        if member < 0:
+            raise ValueError("Member index must be non-negative.")
+        return H5Tget_member_class(self.id, member)
 
-        Determine the offset, in bytes, of the beginning of the specified
-        member of a compound datatype.
-    """
-    return H5Tget_member_offset(type_id, member)
+    
+    def get_member_name(self, int member):
+        """ (INT member) => STRING name
+        
+            Determine the name of a member of a compound or enumerated type,
+            identified by its index (0 <= member < nmembers).
+        """
+        cdef char* name
+        name = NULL
 
+        if member < 0:
+            raise ValueError("Member index must be non-negative.")
 
-def get_member_type(hid_t type_id, int member):
-    """ (INT type_id, INT member_index) => INT type_id
+        try:
+            name = H5Tget_member_name(self.id, member)
+            assert name != NULL
+            pyname = name
+        finally:
+            free(name)
 
-        Create a copy of a member of a compound datatype, identified by its
-        index.  You are responsible for closing it when finished.
-    """
-    if member < 0:
-        raise ValueError("Member index must be non-negative.")
-    return H5Tget_member_type(type_id, member)
+        return pyname
 
-def insert(hid_t type_id, char* name, size_t offset, hid_t field_id):
-    """ (INT compound_type_id, STRING name, INT offset, INT member_type)
+    def get_member_index(self, char* name):
+        """ (STRING name) => INT index
 
-        Add a named member datatype to a compound datatype.  The parameter
-        offset indicates the offset from the start of the compound datatype,
-        in bytes.
-    """
-    H5Tinsert(type_id, name, offset, field_id)
+            Determine the index of a member of a compound or enumerated datatype
+            identified by a string name.
+        """
+        return H5Tget_member_index(self.id, name)
 
-def pack(hid_t type_id):
-    """ (INT type_id)
+    def get_member_offset(self, int member):
+        """ (INT member_index) => INT offset
 
-        Recursively removes padding (introduced on account of e.g. compiler
-        alignment rules) from a compound datatype.
-    """
-    H5Tpack(type_id)
+            Determine the offset, in bytes, of the beginning of the specified
+            member of a compound datatype.
+        """
+        return H5Tget_member_offset(self.id, member)
 
-# === Array datatype operations ===============================================
+    def get_member_type(self, int member):
+        """ (INT member_index) => INT type_id
 
-def array_create(hid_t base, object dims_tpl):
-    """ (INT base_type_id, TUPLE dimensions)
+            Create a copy of a member of a compound datatype, identified by its
+            index.
+        """
+        if member < 0:
+            raise ValueError("Member index must be non-negative.")
+        return TypeID(H5Tget_member_type(self.id, member))
 
-        Create a new array datatype, of parent type <base_type_id> and
-        dimensions given via a tuple of non-negative integers.  "Unlimited" 
-        dimensions are not allowed.
-    """
-    cdef hsize_t rank
-    cdef hsize_t *dims
-    dims = NULL
+    def insert(self, char* name, size_t offset, TypeID field not None):
+        """ (STRING name, INT offset, TypeID field)
 
-    require_tuple(dims_tpl, 0, -1, "dims_tpl")
-    rank = len(dims_tpl)
-    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+            Add a named member datatype to a compound datatype.  The parameter
+            offset indicates the offset from the start of the compound datatype,
+            in bytes.
+        """
+        H5Tinsert(self.id, name, offset, field.id)
 
-    try:
-        convert_tuple(dims_tpl, dims, rank)
-        return H5Tarray_create(base, rank, dims, NULL)
-    finally:
-        efree(dims)
+    def pack(self):
+        """ ()
 
-def get_array_ndims(hid_t type_id):
-    """ (INT type_id) => INT rank
+            Recursively removes padding (introduced on account of e.g. compiler
+            alignment rules) from a compound datatype.
+        """
+        H5Tpack(self.id)
 
-        Get the rank of the given array datatype.
-    """
-    return H5Tget_array_ndims(type_id)
+# === Array datatype operations ===============================================
 
-def get_array_dims(hid_t type_id):
-    """ (INT type_id) => TUPLE dimensions
+    def get_array_ndims(self):
+        """ () => INT rank
 
-        Get the dimensions of the given array datatype as a tuple of integers.
-    """
-    cdef hsize_t rank   
-    cdef hsize_t* dims
-    dims = NULL
+            Get the rank of the given array datatype.
+        """
+        return H5Tget_array_ndims(self.id)
 
-    rank = H5Tget_array_dims(type_id, NULL, NULL)
-    dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
-    try:
-        H5Tget_array_dims(type_id, dims, NULL)
-        return convert_dims(dims, rank)
-    finally:
-        efree(dims)
+    def get_array_dims(self):
+        """ (INT type_id) => TUPLE dimensions
 
-# === Enumeration datatypes ===================================================
+            Get the dimensions of the given array datatype as a tuple of integers.
+        """
+        cdef hsize_t rank   
+        cdef hsize_t* dims
+        dims = NULL
 
-def enum_create(hid_t base_id):
-    """ (INT base_type_id) => INT new_type_id
+        rank = H5Tget_array_dims(self.id, NULL, NULL)
+        dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+        try:
+            H5Tget_array_dims(self.id, dims, NULL)
+            return convert_dims(dims, rank)
+        finally:
+            efree(dims)
 
-        Create a new enumerated type based on parent type <base_type_id>
-    """
-    return H5Tenum_create(base_id)
+# === Enumeration datatypes ===================================================
 
-cdef int enum_convert(hid_t type_id, long long *buf, int reverse) except -1:
-    # Convert the long long value in "buf" to the native representation
-    # of type_id.  Conversion performed in-place.
-    # Reverse: false => llong->type; true => type->llong
 
-    cdef hid_t basetype
-    cdef H5T_class_t class_code
+    cdef int enum_convert(hid_t type_id, long long *buf, int reverse) except -1:
+        # Convert the long long value in "buf" to the native representation
+        # of type_id.  Conversion performed in-place.
+        # Reverse: false => llong->type; true => type->llong
 
-    class_code = H5Tget_class(type_id)
-    if class_code != H5T_ENUM:
-        raise ValueError("Type %d is not of class ENUM" % type_id)
+        cdef hid_t basetype
+        cdef H5T_class_t class_code
 
-    basetype = H5Tget_super(type_id)
-    assert basetype > 0
+        class_code = H5Tget_class(type_id)
+        if class_code != H5T_ENUM:
+            raise ValueError("Type %d is not of class ENUM" % type_id)
 
-    try:
-        if not reverse:
-            H5Tconvert(H5T_NATIVE_LLONG, basetype, 1, buf, NULL, H5P_DEFAULT)
-        else:
-            H5Tconvert(basetype, H5T_NATIVE_LLONG, 1, buf, NULL, H5P_DEFAULT)
-    finally:
-        PY_H5Tclose(basetype)
+        basetype = H5Tget_super(type_id)
+        assert basetype > 0
+
+        try:
+            if not reverse:
+                H5Tconvert(H5T_NATIVE_LLONG, basetype, 1, buf, NULL, H5P_DEFAULT)
+            else:
+                H5Tconvert(basetype, H5T_NATIVE_LLONG, 1, buf, NULL, H5P_DEFAULT)
+        finally:
+            PY_H5Tclose(basetype)
 
 def enum_insert(hid_t type_id, char* name, long long value):
     """ (INT type_id, STRING name, INT/LONG value)

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git



More information about the debian-science-commits mailing list