[h5py] 02/455: Initial import

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:11 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit f7d3f9f3330707a12bf3c12d725c986cc045368d
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Mon May 5 21:12:34 2008 +0000

    Initial import
---
 LICENSE.txt                     |  34 ++
 MANIFEST.in                     |   8 +
 README.txt                      | 237 ++++++++++
 docs.cfg                        |  10 +
 h5py/__init__.py                |  24 +
 h5py/defs_c.pxd                 |  33 ++
 h5py/errors.py                  |  64 +++
 h5py/h5.pxd                     |  45 ++
 h5py/h5.pyx                     |  89 ++++
 h5py/h5a.pxd                    |  45 ++
 h5py/h5a.pyx                    | 379 ++++++++++++++++
 h5py/h5d.pxd                    |  71 +++
 h5py/h5d.pyx                    | 519 +++++++++++++++++++++
 h5py/h5e.pxd                    |  39 ++
 h5py/h5f.pxd                    |  46 ++
 h5py/h5f.pyx                    | 129 ++++++
 h5py/h5g.pxd                    |  64 +++
 h5py/h5g.pyx                    | 389 ++++++++++++++++
 h5py/h5i.pxd                    |  40 ++
 h5py/h5i.pyx                    |  86 ++++
 h5py/h5p.pxd                    |  88 ++++
 h5py/h5p.pyx                    | 291 ++++++++++++
 h5py/h5s.pxd                    |  69 +++
 h5py/h5s.pyx                    | 262 +++++++++++
 h5py/h5t.pxd                    | 182 ++++++++
 h5py/h5t.pyx                    | 970 ++++++++++++++++++++++++++++++++++++++++
 h5py/h5z.pxd                    |  62 +++
 h5py/h5z.pyx                    | 102 +++++
 h5py/highlevel.py               | 795 ++++++++++++++++++++++++++++++++
 h5py/numpy.pxd                  |  99 ++++
 h5py/python.pxd                 |  68 +++
 h5py/tests/__init__.py          |  39 ++
 h5py/tests/common.py            |  36 ++
 h5py/tests/data/attributes.hdf5 | Bin 0 -> 2240 bytes
 h5py/tests/test_h5a.py          | 240 ++++++++++
 h5py/utils.c                    | 175 ++++++++
 h5py/utils.h                    |  30 ++
 h5py/utils.pxd                  |  20 +
 licenses/hdf5.txt               |  69 +++
 licenses/pytables.txt           |  33 ++
 meta/__init__.py                |   1 +
 meta/attrs.hdf5                 | Bin 0 -> 2240 bytes
 meta/block.txt                  |   9 +
 meta/gen_attributes.c           |  91 ++++
 meta/insertblock.py             |  81 ++++
 obsolete/attrs.hdf5             | Bin 0 -> 26656 bytes
 obsolete/definitions.pxd        |  62 +++
 obsolete/defs_h5common.pxd      |   5 +
 obsolete/file.hdf5              | Bin 0 -> 976 bytes
 obsolete/fragments.pyx          | 138 ++++++
 obsolete/test.h5                | Bin 0 -> 7768 bytes
 obsolete/test_h5a.pyx           | 138 ++++++
 obsolete/test_h5f.pyx           |  76 ++++
 obsolete/test_h5g.pyx           | 184 ++++++++
 obsolete/test_h5s.pyx           | 130 ++++++
 obsolete/test_simple.hdf5       | Bin 0 -> 12336 bytes
 setup.py                        | 215 +++++++++
 57 files changed, 7111 insertions(+)

diff --git a/LICENSE.txt b/LICENSE.txt
new file mode 100755
index 0000000..49e0846
--- /dev/null
+++ b/LICENSE.txt
@@ -0,0 +1,34 @@
+Copyright Notice and Statement for the h5py Project:
+
+Copyright (c) 2008 Andrew Collette
+http://h5py.alfven.org
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+a. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+b. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the
+   distribution.
+
+c. Neither the name of the author nor the names of contributors may 
+   be used to endorse or promote products derived from this software 
+   without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100755
index 0000000..daea32d
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,8 @@
+include MANIFEST.in
+include LICENSE.txt
+include README.txt
+recursive-include h5py *.py *.pyx *.pxd
+recursive-include h5py *.h *.c *.hdf5
+recursive-include licenses *
+recursive-include docs *
+
diff --git a/README.txt b/README.txt
new file mode 100755
index 0000000..8c75368
--- /dev/null
+++ b/README.txt
@@ -0,0 +1,237 @@
+README for the "h5py" Python/HDF5 interface
+===========================================
+Copyright (c) 2008 Andrew Collette
+http://h5py.alfven.org
+mail: "h5py" at the domain "alfven dot org"
+
+Version 0.1.0
+
+DISCLAIMER
+==========
+
+This is the first release of h5py.  Many functions are untested, and it's 
+quite possible that both the high- and low-level APIs will change in the 
+future.  Also, it hasn't been tested on platforms other than 32-bit x86 
+Linux.  For these reasons, you should treat it as an alpha.
+
+Contents
+========
+
+* `Introduction`_
+* `Features`_
+* `High-level interface`_
+* `Low-level interface`_
+
+Introduction
+============
+
+The h5py package provides both a high- and low-level interface to the NCSA
+HDF5 library (hdf.ncsa.uiuc.edu) from Python.  The low-level interface is
+intended to be a complete wrapping of the HDF5 1.6 API, while the high-
+level component supports Python-style object-oriented access to HDF5 files, 
+datasets and groups.
+
+Requires
+--------
+- Unix-like environment (created/tested on 32-bit Intel linux)
+- Numpy 1.0.3 or higher
+- HDF5 1.6.5 or higher (1.8 is untested)
+- Pyrex 0.9.6.4 or higher
+
+Installation
+------------
+See the file "INSTALL.txt"
+
+Documentation
+-------------
+Extensive documentation is available through docstrings, as well as in 
+HTML format on the web and in the "docs/" directory in this distribution.  
+This document is an overview of some of the package's features and 
+highlights.
+
+Features
+========
+
+- Low-level wrappings for most of the HDF5 1.6 C api.  You can call H5* 
+  functions directly from Python.  The wrapped APIs are:
+
+    =====   ==============  =================
+    HDF5        Purpose         Wrapping
+    =====   ==============  =================
+    H5A     Attributes      Module h5a
+    H5F     Files           Module h5f
+    H5D     Datasets        Module h5d
+    H5G     Groups          Module h5g
+    H5T     Datatypes       Module h5t
+    H5S     Dataspaces      Module h5s
+    H5I     Inspection      Module h5i
+    H5Z     Filters         Module h5z
+    H5P     Property lists  Module h5p
+    H5E     Errors          Python exceptions
+    =====   ==============  =================
+
+  See the section "Low-level interface" below for a better overview.
+
+- Calls that fail will raise exceptions; no more checking return values.
+  Wrapper functions have been carefully designed to provide a Pythonic
+  interface to the library.  Where multiple similar HDF5 functions exist
+  (i.e. link and link2) they have been merged into one function, with
+  additional Python keywords.
+
+- Many new, C-level Python functions which smooth some of the rough edges. 
+  For example, you can create a dataset with associated compression and
+  chunking in one function call, get an iterator over the names in a group, 
+  overwrite attributes without deleting them first, etc.
+
+- Conversion functions between HDF5 datatypes and Numpy dtypes, including
+  Numpy's complex numbers.  This lets you read/write data directly from an
+  HDF5 dataset to a Numpy array, with the HDF5 library performing any 
+  endianness or precision conversion for you automatically.
+
+- High-level interface allows Numpy/Python-style  access to HDF5 files and 
+  datasets, with automatic conversion between datatypes.  Slice into an 
+  HDF5 dataset and get a Numpy array back; no extra work required.  You can 
+  also create datasets which use chunking, compression, or other filters, 
+  and use them like any other dataset object.
+
+- High-level Group interface allows dictionary-style manipulation of HDF5
+  groups and links, including automatic creation of datasets and attributes
+  in response to assignment.
+
+- No additional layers of abstraction beyond the HDF5 and Numpy conventions.
+  I like PyTables a lot, but I don't speak database-ese. :) There are also 
+  no new datatypes; just the built-in Numpy ones.
+
+
+High-level interface
+====================
+
+The goal of this component is to present access to HDF5 data in a manner
+consistent with the conventions of Python and Numpy.  For example, "Group" 
+objects allow dictionary-style access to their members, both through the 
+familiar "Object['name']" slicing syntax and by iteration.  "Dataset" 
+objects support multidimensional slicing, Numpy dtype objects, and shape 
+tuples.
+
+This interface is extensively documented via module and class docstrings.
+Consult the online HTML documentation (or Python's `help` command) for a 
+more comprehensive guide.
+
+Here's a (mockup) example of some of the highlights:
+
+1. File objects support Python-like modes:
+
+>>> from h5py.highlevel import File, Dataset, Group
+>>> file = File('test_file.hdf5','r')
+>>> file
+File "test_file.hdf5", root members: "group1", "dataset"
+
+2. Group objects support things like __len__ and iteration, along with
+   dictionary-style access.
+
+>>> grp = file["group1"]
+>>> len(grp)
+4
+>>> list(grp)
+['array1', 'array2', 'array3', 'array4']
+
+3.  It's easy to add/remove members.  Datasets can even be automatically
+    created from Python objects at assignment time:
+
+>>> del grp['array2']
+>>> list(grp)
+['array1', 'array3', 'array4']
+
+>>> grp['My float array'] = [1.0, 2.0, 3.5]
+>>> list(grp)
+['array1', 'array3', 'array4', 'My float array']
+>>> grp['My float array']
+Dataset: (3L,)  dtype('<f4')
+
+4.  Datasets support the Numpy attributes shape and dtype.  Slicing a
+    dataset object returns a Numpy array.
+
+>>> dset = file["dataset"]
+>>> dset
+Dataset: (3L, 10L)  dtype('<f8')
+>>> dset.shape        # Numpy-style shape tuples for dimensions
+(3L, 10L)
+>>> dset.dtype        # Genuine Numpy dtype objects represent the datatype
+'<f8'
+
+>>> dset[2,7]         # Full multidimensional slicing allowed
+2.3
+>>> dset[:,7]    
+[-1.2, 0.5, 2.3]
+>>> dset[0,0:10:2]    # Start/stop/strides work, in any combination
+[-0.7, 9.1, 10.2, 2.6, 99.4]
+
+>>> type(dset)
+<class 'h5py.highlevel.Dataset'>
+>>> type(dset[:,10])      # Slicing produces Numpy ndarrays
+<type 'numpy.ndarray'>
+
+5.  Full support for HDF5 scalar and array attributes:
+
+>>> list(dset.attrs)
+['Name', 'Id', 'IntArray']
+>>> dset.attrs['Name']
+"My Dataset"
+>>> dset.attrs['Id']
+42
+>>> dset.attrs['IntArray']
+array([0,1,2,3,4,5])
+>>> dset.attrs['Name'] = "New name"
+>>> dset.attrs['Name']
+"New name"
+
+Low-Level Interface
+===================
+
+The HDF5 library is divided into a number of groups (H5A, H5F, etc) which map
+more-or-less directly into Python modules of the same name.  See the module
+and function docstrings (or the online HTML help) for details.
+
+Python extensions
+-----------------
+Most modules have several functions which are not part of the HDF5 spec.  
+These are prefixed with "py" to underscore their unofficial nature.  They 
+are designed to encapsulate common operations and provide a more Python/
+Numpy-style interface, even at the low level of this interface.  For 
+example, the functions h5t.py_h5t_to_dtype and h5t.py_dtype_to_h5t allow
+automatic conversion between Numpy dtypes and HDF5 type objects.
+
+Constants
+---------
+Constants are also available at the module level.  When a constant is part 
+of an C enum, the name of the enum is prepended to the constant name.  For 
+example, the dataspace-related enum H5S_class_t is wrapped like this:
+
+    =============== ==============  =============
+      H5S_class_t     h5s module        Value
+    =============== ==============  =============
+    H5S_NO_CLASS    CLASS_NO_CLASS   INT -1
+    H5S_SCALAR      CLASS_SCALAR     INT 0
+    H5S_SIMPLE      CLASS_SIMPLE     INT 1
+    H5S_COMPLEX     CLASS_COMPLEX    INT 2
+    <no equivalent> CLASS_MAPPER     DDict(...)
+    =============== ==============  =============
+
+The additional entry CLASS_MAPPER is a dictionary subclass "DDict" which 
+maps the integer values to string descriptions.  This simplifies debugging 
+and logging.  The DDict class overrides "dict" so that it will always 
+return a value; if the given integer is not in the CLASS enum, the returned 
+string is ``"*INVALID* (<given value>)"``.
+
+Exceptions
+----------
+Each HDF5 API function is individually wrapped; the return value
+is checked, and the appropriate exception is raised (from h5py.errors) if
+something has gone wrong.  This way you can write more Pythonic, exception-
+based code instead of checking return values. 
+
+Additionally, the HDF5 error stack is automatically attached to the 
+exception message, giving you a clear picture of what went wrong.  The 
+library will never print anything to stderr; everything goes through Python.
+
+
diff --git a/docs.cfg b/docs.cfg
new file mode 100755
index 0000000..6498abd
--- /dev/null
+++ b/docs.cfg
@@ -0,0 +1,10 @@
+[epydoc]
+
+name: h5py HDF5 interface for Python
+url: http://h5py.alfven.org
+link: <a href="http://h5py.alfven.org">h5py Project Page at Alfven.org</a>
+
+docformat: plaintext
+
+private: no
+
diff --git a/h5py/__init__.py b/h5py/__init__.py
new file mode 100755
index 0000000..ef1e563
--- /dev/null
+++ b/h5py/__init__.py
@@ -0,0 +1,24 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# h5py module __init__
+
+"""
+    This is the h5py package, a Python interface to the NCSA HDF5 
+    scientific data format.
+
+    See the docstring for the "version" module for a longer introduction.
+"""
+
+import h5, h5f, h5g, h5s, h5t, h5d, h5a, h5p, h5z, h5i, highlevel, errors
+import tests
+import version
diff --git a/h5py/defs_c.pxd b/h5py/defs_c.pxd
new file mode 100755
index 0000000..5d0b2ba
--- /dev/null
+++ b/h5py/defs_c.pxd
@@ -0,0 +1,33 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+# Standard C functions.
+
+cdef extern from "stdlib.h":
+  ctypedef long size_t
+  void *malloc(size_t size)
+  void free(void *ptr)
+
+cdef extern from "string.h":
+  char *strchr(char *s, int c)
+  char *strcpy(char *dest, char *src)
+  char *strncpy(char *dest, char *src, size_t n)
+  int strcmp(char *s1, char *s2)
+  char *strdup(char *s)
+  void *memcpy(void *dest, void *src, size_t n)
+
+cdef extern from "time.h":
+  ctypedef int time_t
diff --git a/h5py/errors.py b/h5py/errors.py
new file mode 100755
index 0000000..1534f3d
--- /dev/null
+++ b/h5py/errors.py
@@ -0,0 +1,64 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+from h5 import get_error_string
+
+class H5Error(StandardError):
+    pass
+
+class ConversionError(H5Error):
+    pass
+
+class H5LibraryError(H5Error):
+
+    def __init__(self, *args, **kwds):
+        arglist = list(args)
+        if len(arglist) == 0:
+            arglist = [""]
+        msg = arglist[0]
+        msg += "\n"+get_error_string()
+        arglist[0] = msg
+        args = tuple(arglist)
+        H5Error.__init__(self, *args, **kwds)
+
+class FileError(H5LibraryError):
+    pass
+
+class GroupError(H5LibraryError):
+    pass
+
+class DataspaceError(H5LibraryError):
+    pass
+
+class DatatypeError(H5LibraryError):
+    pass
+
+class DatasetError(H5LibraryError):
+    pass
+
+class PropertyError(H5LibraryError):
+    pass
+
+class H5AttributeError(H5LibraryError):
+    pass
+
+class FilterError(H5LibraryError):
+    pass
+
+class H5TypeError(H5LibraryError):
+    pass
+
+
+
+
+
+
diff --git a/h5py/h5.pxd b/h5py/h5.pxd
new file mode 100755
index 0000000..d649667
--- /dev/null
+++ b/h5py/h5.pxd
@@ -0,0 +1,45 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c cimport size_t
+
+# Common structs and types from HDF5
+cdef extern from "hdf5.h":
+
+
+  ctypedef int hid_t  # In H5Ipublic.h
+  ctypedef int hbool_t
+  ctypedef int herr_t
+  ctypedef int htri_t
+  # hsize_t should be unsigned, but Windows platform does not support
+  # such an unsigned long long type.
+  ctypedef long long hsize_t
+  ctypedef signed long long hssize_t
+
+  ctypedef struct hvl_t:
+    size_t len                 # Length of VL data (in base type units)
+    void *p                    # Pointer to VL data
+
+  herr_t H5open()
+  herr_t H5close()
+
+  # --- Version functions -----------------------------------------------------
+  herr_t H5get_libversion(unsigned *majnum, unsigned *minnum,
+                          unsigned *relnum )
+  herr_t H5check_version(unsigned majnum, unsigned minnum,
+                         unsigned relnum )
+
+
diff --git a/h5py/h5.pyx b/h5py/h5.pyx
new file mode 100755
index 0000000..52990e4
--- /dev/null
+++ b/h5py/h5.pyx
@@ -0,0 +1,89 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+
+"""
+    Private initialization module for the h5* family of modules.
+
+    ** Not for public use. **
+
+    Common module for the HDF5 low-level interface library.  This module
+    is imported at the top of every h5* sub-module.  Initializes the
+    library and defines common version info, classes and functions.
+"""
+
+from h5e cimport H5Eset_auto, H5E_walk_t, H5Ewalk, H5E_error_t, \
+                      H5E_WALK_DOWNWARD
+
+# Activate the library
+H5open()
+
+# Disable automatic error printing to stderr
+H5Eset_auto(NULL, NULL)
+
+def _getversionastuple():
+
+    cdef unsigned int major
+    cdef unsigned int minor
+    cdef unsigned int release
+    cdef herr_t retval
+    
+    retval = H5get_libversion(&major, &minor, &release)
+    if retval < 0:
+        raise RuntimeError("Error determining HDF5 library version")
+
+    return (major, minor, release)
+    
+hdf5version = _getversionastuple()
+
+def cycle():
+    """ ()
+
+        Force the HDF5 library to close all open objects and files, and re-
+        initialize the library.
+    """
+    cdef herr_t retval
+    H5close()
+    retval = H5open()
+    if retval < 0:
+        raise RuntimeError("Failed to re-initialize the HDF5 library")
+
+
+class DDict(dict):
+    def __missing__(self, key):
+        return '*INVALID* (%s)' % str(key)
+
+# === Error functions =========================================================
+
+cdef herr_t walk_cb(int n, H5E_error_t *err_desc, data):
+
+    cdef object hstring
+    hstring = err_desc.desc
+    if len(hstring) == 0:
+        hstring = "Error"
+    else:
+        hstring = '"'+hstring.capitalize()+'"'
+
+    data.append("    "+str(n)+": "+hstring+" at "+err_desc.func_name)
+
+    return 0
+
+def get_error_string():
+
+    elist = []
+
+    H5Ewalk(H5E_WALK_DOWNWARD, walk_cb, elist)
+
+    if len(elist) == 0:
+        return ""
+    return "HDF5 error stack:\n" + '\n'.join(elist)
+
diff --git a/h5py/h5a.pxd b/h5py/h5a.pxd
new file mode 100755
index 0000000..dd18667
--- /dev/null
+++ b/h5py/h5a.pxd
@@ -0,0 +1,45 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c  cimport size_t
+from h5 cimport hid_t, herr_t
+
+cdef extern from "hdf5.h":
+
+  # --- Attribute operations --------------------------------------------------
+  hid_t     H5Acreate(hid_t loc_id, char *name, hid_t type_id, hid_t space_id, hid_t create_plist  ) 
+  hid_t     H5Aopen_idx(hid_t loc_id, unsigned int idx)
+  hid_t     H5Aopen_name(hid_t loc_id, char *name)
+  herr_t    H5Aclose(hid_t attr_id)
+  herr_t    H5Adelete(hid_t loc_id, char *name)
+
+  herr_t    H5Aread(hid_t attr_id, hid_t mem_type_id, void *buf)
+  herr_t    H5Awrite(hid_t attr_id, hid_t mem_type_id, void *buf  )
+
+  int       H5Aget_num_attrs(hid_t loc_id)
+  size_t    H5Aget_name(hid_t attr_id, size_t buf_size, char *buf)
+  hid_t     H5Aget_space(hid_t attr_id)
+  hid_t     H5Aget_type(hid_t attr_id)
+
+  ctypedef herr_t (*H5A_operator_t)(hid_t loc_id, char *attr_name, operator_data)
+  herr_t    H5Aiterate(hid_t loc_id, unsigned * idx, H5A_operator_t op, op_data  )
+
+
+
+
+
+
+
diff --git a/h5py/h5a.pyx b/h5py/h5a.pyx
new file mode 100755
index 0000000..753c4b6
--- /dev/null
+++ b/h5py/h5a.pyx
@@ -0,0 +1,379 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Provides access to the low-level HDF5 "H5A" attribute interface.
+
+    Functions in this module will raise errors.H5AttributeError.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free
+from h5  cimport herr_t, hid_t
+from h5p cimport H5P_DEFAULT
+from h5t cimport H5Tclose
+from numpy cimport ndarray, import_array
+
+# Runtime imports
+import h5
+import h5t
+import h5s
+from errors import H5AttributeError
+
+import_array()
+
+# === General attribute operations ============================================
+
+def create(hid_t loc_id, char* name, hid_t type_id, hid_t space_id):
+    """ (INT loc_id, STRING name, INT type_id, INT space_id) => INT attr_id
+
+        Create a new attribute attached to a parent object, specifying an 
+        HDF5 datatype and dataspace.  For a friendlier version of this function
+        try py_create().
+    """
+    cdef hid_t retval
+    retval = H5Acreate(loc_id, name, type_id, space_id, H5P_DEFAULT)
+    if retval < 0:
+        raise H5AttributeError("Failed to create attribute '%s' on object %d" % (name, loc_id))
+    return retval
+
+def open_idx(hid_t loc_id, unsigned int idx):
+    """ (INT loc_id, UINT index) => INT attr_id
+
+        Open an existing attribute on an object, by zero-based index.
+    """
+    cdef hid_t retval
+    retval = H5Aopen_idx(loc_id, idx)
+    if retval < 0:
+        raise H5AttributeError("Failed to open attribute at index %d on object %d" % (idx, loc_id))
+    return retval
+
+def open_name(hid_t loc_id, char* name):
+    """ (INT loc_id, STRING name) => INT attr_id
+
+        Open an existing attribute on an object, by name.
+    """
+    cdef hid_t retval
+    retval = H5Aopen_name(loc_id, name)
+    if retval < 0:
+        raise H5AttributeError("Failed to open attribute '%s' on object %d" % (name, loc_id))
+    return retval
+
+def close(hid_t attr_id):
+    """ (INT attr_id)
+    """
+    cdef hid_t retval
+    retval = H5Aclose(attr_id)
+    if retval < 0:
+        raise H5AttributeError("Failed to close attribute %d" % attr_id)
+
+
+def delete(hid_t loc_id, char* name):
+    """ (INT loc_id, STRING name)
+
+        Remove an attribute from an object.
+    """
+    cdef herr_t retval
+    retval = H5Adelete(loc_id, name)
+    if retval < 0:
+        raise H5AttributeError("Failed delete attribute '%s' on object %d" % (name, loc_id))
+
+
+# === Attribute I/O ===========================================================
+
+def read(hid_t attr_id, ndarray arr_obj):
+    """ (INT attr_id, NDARRAY arr_obj)
+        
+        Read the attribute data into the given Numpy array.  Note that the 
+        Numpy array must have the same shape as the HDF5 attribute, and a 
+        conversion-compatible datatype.  It must also be writable and
+        C-contiguous.  This is not currently checked.
+    """
+    cdef hid_t mtype_id
+    cdef herr_t retval
+    mtype_id = 0
+
+    try:
+        mtype_id = h5t.py_dtype_to_h5t(arr_obj.dtype)
+        retval = H5Aread(attr_id, mtype_id, <void*>arr_obj.data)
+        if retval < 0:
+            raise H5AttributeError("Error reading from attribute %d" % attr_id)
+    finally:
+        if mtype_id:
+            H5Tclose(mtype_id)
+
+def write(hid_t attr_id, ndarray arr_obj):
+    """ (INT attr_id, NDARRAY arr_obj)
+
+        Write the contents of a Numpy array to the attribute.  Note that the 
+        Numpy array must have the same shape as the HDF5 attribute, and a 
+        conversion-compatible datatype.  The Numpy array must also be
+        C-contiguous; this is not currently checked.
+    """
+    
+    cdef hid_t mtype_id
+    cdef herr_t retval
+    mtype_id = 0
+    try:
+        mtype_id = h5t.py_dtype_to_h5t(arr_obj.dtype)
+        retval = H5Awrite(attr_id, mtype_id, <void*>arr_obj.data)
+        if retval < 0:
+            raise H5AttributeError("Error writing to attribute %d" % attr_id)
+    finally:
+        if mtype_id:
+            H5Tclose(mtype_id)
+
+# === Attribute inspection ====================================================
+
+def get_num_attrs(hid_t loc_id):
+    """ (INT loc_id) => INT number_of_attributes
+
+        Determine the number of attributes attached to an HDF5 object.
+    """
+    cdef int retval
+    retval = H5Aget_num_attrs(loc_id)
+    if retval < 0:
+        raise H5AttributeError("Failed to enumerate attributes of object %d" % loc_id)
+    return retval
+
+def get_name(hid_t attr_id):
+    """ (INT attr_id) => STRING name
+
+        Determine the name of an attribute, given its identifier.
+    """
+    cdef int blen
+    cdef char* buf
+    cdef object strout
+    buf = NULL
+
+    blen = H5Aget_name(attr_id, 0, NULL)
+    if blen < 0:
+        raise H5AttributeError("Failed to get name of attribute %d" % attr_id)
+    
+    buf = <char*>malloc(sizeof(char)*blen+1)
+    blen = H5Aget_name(attr_id, blen+1, buf)
+    strout = buf
+    free(buf)
+
+    return strout
+
+def get_space(hid_t attr_id):
+    """ (INT attr_id) => INT space_id
+
+        Create and return a copy of the attribute's dataspace.
+    """
+    cdef hid_t retval
+    retval = H5Aget_space(attr_id)
+    if retval < 0:
+        raise H5AttributeError("Failed to retrieve dataspace of attribute %d" % attr_id)
+    return retval
+
+def get_type(hid_t attr_id):
+    """ (INT attr_id) => INT type_id
+
+        Create and return a copy of the attribute's datatype.
+    """
+    cdef hid_t retval
+    retval = H5Aget_type(attr_id)
+    if retval < 0:
+        raise H5AttributeError("Failed to retrieve datatype of attribute %d" % attr_id)
+    return retval
+
+
+cdef herr_t iter_cb(hid_t loc_id, char *attr_name, object int_tpl):
+
+    func = int_tpl[0]
+    data = int_tpl[1]
+    exc_list = int_tpl[2]
+
+    try:
+        func(loc_id, attr_name, data)
+    except StopIteration:
+        return 1
+    except Exception, e:
+        exc_list.append(e)
+        return -1
+
+    return 0
+
+
+def iterate(hid_t loc_id, object func, object data=None, unsigned int startidx=0):
+    """ (INT loc_id, FUNCTION func, OBJECT data=None, UINT startidx=0)
+        => INT last_attribute_index
+
+        Iterate an arbitrary Python function over the attributes attached
+        to an object.  You can also start at an arbitrary attribute by
+        specifying its (zero-based) index.  The return value is the index of 
+        the last attribute processed.
+
+        Your function:
+        1.  Should accept three arguments: the (INT) id of the parent object, 
+            the (STRING) name of the attribute, and an arbitrary Python object
+            you provide as data.  Any return value is ignored.
+        2.  Raise StopIteration to bail out before all attributes are processed.
+        3.  Raising anything else immediately aborts iteration, and the
+            exception is propagated.
+    """
+    cdef unsigned int i
+    cdef herr_t retval
+    i = startidx
+
+    int_tpl = (func, data,[])
+
+    retval = H5Aiterate(loc_id, &i, <H5A_operator_t>iter_cb, int_tpl)
+
+    if retval < 0:
+        if len(int_tpl[2]) != 0:
+            raise int_tpl[2][0]
+        raise H5AttributeError("Error occured during iteration")
+    return i-2
+
+# === Python extensions =======================================================
+
+# Pyrex doesn't allow lambdas
+def _name_cb(hid_t loc_id, char* name, data):
+    data.append(name)
+
+def py_listattrs(hid_t loc_id):
+    """ (INT loc_id) => LIST attribute_list
+
+        Create a Python list of attribute names attached to this object.
+    """
+    nlist = []
+    iterate(loc_id, _name_cb, nlist)
+    return nlist
+    
+def py_create(hid_t loc_id, char* name, object dtype_in, object shape):
+    """ (INT loc_id, STRING name, DTYPE dtype_in, TUPLE shape)
+
+        Create an attribute from a Numpy dtype and a shape tuple.  To
+        create a scalar attribute, provide an empty tuple. If you're creating
+        an attribute from an existing array or scalar, consider using py_set().
+    """
+    cdef hid_t sid
+    cdef hid_t type_id
+    cdef hid_t aid
+    sid = 0
+    type_id = 0
+
+    try:
+        sid = h5s.create_simple(shape)
+        type_id = h5t.py_dtype_to_h5t(dtype_in)
+
+        aid = create(loc_id, name, type_id, sid)
+    finally:
+        if sid:
+            h5s.close(sid)
+        if type_id:
+            H5Tclose(type_id)
+
+    return aid
+
+def py_shape(hid_t attr_id):
+    """ (INT attr_id) => TUPLE shape
+
+        Retrieve the dataspace of this attribute, as a Numpy-style shape tuple.
+    """
+    cdef hid_t sid
+    sid = 0
+    
+    try:
+        sid = get_space(attr_id)
+        tpl = h5s.get_simple_extent_dims(sid)
+    finally:
+        if sid:
+            h5s.close(sid)
+    return tpl
+
+def py_dtype(hid_t attr_id):
+    """ (INT attr_id) => DTYPE numpy_dtype
+
+        Obtain the data-type of this attribute as a Numpy dtype.  Note that the
+        resulting dtype is not guaranteed to be byte-for-byte compatible with
+        the underlying HDF5 datatype, but is appropriate for use in e.g. the 
+        read() and write() functions defined in this module.
+    """
+    cdef hid_t type_id
+    type_id = 0
+    
+    try:
+        type_id = get_type(attr_id)
+        dtype_out = h5t.py_h5t_to_dtype(type_id)
+    finally:
+        if type_id:
+            H5Tclose(type_id)
+    return dtype_out
+
+def py_get(hid_t parent_id, char* name):
+    """ (INT parent_id, STRING name)
+
+        Read an attribute and return the contents as a Numpy ndarray.
+        A 0-dimensional array is returned in the case of a scalar attribute.
+    """
+    cdef hid_t attr_id
+    attr_id = open_name(parent_id, name)
+    try:
+        space = py_shape(attr_id)
+        dtype = py_dtype(attr_id)
+
+        arr = ndarray(space, dtype=dtype)
+        read(attr_id, arr)
+    finally:
+        H5Aclose(attr_id)
+    return arr
+
+def py_set(hid_t parent_id, char* name, ndarray arr):
+    """ (INT parent_id, STRING name, NDARRAY arr)
+
+        Create an attribute and initialize its type, space, and contents to
+        a Numpy ndarray.  Note that this function does not return an
+        identifier; the attribute is created and then closed.  Fails if an 
+        attribute of the same name already exists.
+    """
+    cdef hid_t attr_id
+    attr_id = 0
+    attr_id = py_create(parent_id, name, arr.dtype, arr.shape)
+    try:
+        write(attr_id, arr)
+    except:
+        H5Aclose(attr_id)
+        H5Adelete(parent_id, name)
+        raise
+
+    H5Aclose(attr_id)
+
+def py_exists(hid_t parent_id, char* name):
+    """ (INT parent_id, STRING name) => BOOL exists
+
+        Determine if the specified attribute exists.  Useful before calling
+        py_set().
+    """
+    cdef hid_t attr_id
+    response = None
+    attr_id = H5Aopen_name(parent_id, name)
+    if attr_id < 0:
+        response = False
+    else:
+        response = True
+        H5Aclose(attr_id)
+
+    return response
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5d.pxd b/h5py/h5d.pxd
new file mode 100755
index 0000000..46aca3d
--- /dev/null
+++ b/h5py/h5d.pxd
@@ -0,0 +1,71 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  # HDF5 layouts
+  cdef enum H5D_layout_t:
+    H5D_LAYOUT_ERROR    = -1,
+    H5D_COMPACT         = 0,    # raw data is very small
+    H5D_CONTIGUOUS      = 1,    # the default
+    H5D_CHUNKED         = 2,    # slow and fancy
+    H5D_NLAYOUTS        = 3     # this one must be last!
+
+  cdef enum H5D_alloc_time_t:
+    H5D_ALLOC_TIME_ERROR	=-1,
+    H5D_ALLOC_TIME_DEFAULT  =0,
+    H5D_ALLOC_TIME_EARLY	=1,
+    H5D_ALLOC_TIME_LATE	    =2,
+    H5D_ALLOC_TIME_INCR	    =3
+
+  cdef enum H5D_space_status_t:
+    H5D_SPACE_STATUS_ERROR	        =-1,
+    H5D_SPACE_STATUS_NOT_ALLOCATED	=0,
+    H5D_SPACE_STATUS_PART_ALLOCATED	=1,
+    H5D_SPACE_STATUS_ALLOCATED		=2
+
+  cdef enum H5D_fill_time_t:
+    H5D_FILL_TIME_ERROR	=-1,
+    H5D_FILL_TIME_ALLOC =0,
+    H5D_FILL_TIME_NEVER	=1,
+    H5D_FILL_TIME_IFSET	=2
+
+  cdef enum H5D_fill_value_t:
+    H5D_FILL_VALUE_ERROR        =-1,
+    H5D_FILL_VALUE_UNDEFINED    =0,
+    H5D_FILL_VALUE_DEFAULT      =1,
+    H5D_FILL_VALUE_USER_DEFINED =2
+
+
+  # --- Dataset operations ----------------------------------------------------
+  hid_t     H5Dcreate(hid_t loc, char* name, hid_t type_id, hid_t space_id, hid_t create_plist_id)
+  hid_t     H5Dopen(hid_t file_id, char *name)
+  herr_t    H5Dclose(hid_t dset_id)
+
+  hid_t     H5Dget_space(hid_t dset_id)
+  hid_t     H5Dget_type(hid_t dset_id)
+  hid_t     H5Dget_create_plist(hid_t dataset_id)
+
+  herr_t    H5Dread(hid_t dset_id, hid_t mem_type_id, hid_t mem_space_id,
+                  hid_t file_space_id, hid_t plist_id, void *buf)
+  herr_t    H5Dwrite(hid_t dset_id, hid_t mem_type, hid_t mem_space, hid_t file_space, hid_t xfer_plist, void* buf)
+
+  herr_t H5Dvlen_reclaim(hid_t type_id, hid_t space_id, hid_t plist_id,
+                         void *buf)
+
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
new file mode 100755
index 0000000..aabbd83
--- /dev/null
+++ b/h5py/h5d.pyx
@@ -0,0 +1,519 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Provides access to the low-level HDF5 "H5D" dataset interface
+
+    Most H5D calls are unchanged.  Since dataset I/O is done with Numpy objects,
+    read and write calls do not require you to explicitly define a datatype;
+    the type of the given Numpy array is used instead.
+
+    The py_* family of functions in this module provide a significantly 
+    simpler interface.  They should be sufficient for nearly all dataset
+    operations from Python.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free
+from h5  cimport herr_t, hid_t, size_t, hsize_t, htri_t
+from h5s cimport H5Sclose, H5S_ALL, H5S_UNLIMITED
+from h5t cimport H5Tclose
+from h5p cimport H5P_DEFAULT
+from numpy cimport ndarray, import_array
+
+# Runtime imports
+import h5
+from h5 import DDict
+from errors import DatasetError
+import h5t
+import h5s
+
+import_array()
+
+# === Public constants and data structures ====================================
+
+LAYOUT_COMPACT = H5D_COMPACT
+LAYOUT_CONTIGUOUS = H5D_CONTIGUOUS
+LAYOUT_CHUNKED = H5D_CHUNKED
+LAYOUT_MAPPER = { H5D_COMPACT: 'COMPACT', H5D_CONTIGUOUS: 'CONTIGUOUS',
+                  H5D_CHUNKED: 'CHUNKED'}
+LAYOUT_MAPPER = DDict(LAYOUT_MAPPER)
+
+ALLOC_TIME_DEFAULT  = H5D_ALLOC_TIME_DEFAULT
+ALLOC_TIME_LATE     = H5D_ALLOC_TIME_LATE
+ALLOC_TIME_EARLY    = H5D_ALLOC_TIME_EARLY
+ALLOC_TIME_INCR     = H5D_ALLOC_TIME_INCR
+ALLOC_TIME_MAPPER = { H5D_ALLOC_TIME_DEFAULT: 'DEFAULT', H5D_ALLOC_TIME_LATE:'LATE',
+                      H5D_ALLOC_TIME_EARLY: 'EARLY', H5D_ALLOC_TIME_INCR: 'INCR'}
+ALLOC_TIME_MAPPER = DDict(ALLOC_TIME_MAPPER)
+
+SPACE_STATUS_NOT_ALLOCATED  = H5D_SPACE_STATUS_NOT_ALLOCATED
+SPACE_STATUS_PART_ALLOCATED = H5D_SPACE_STATUS_PART_ALLOCATED
+SPACE_STATUS_ALLOCATED      = H5D_SPACE_STATUS_ALLOCATED
+SPACE_STATUS_MAPPER = { H5D_SPACE_STATUS_NOT_ALLOCATED: 'NOT ALLOCATED', 
+                        H5D_SPACE_STATUS_PART_ALLOCATED: 'PARTIALLY ALLOCATED',
+                        H5D_SPACE_STATUS_ALLOCATED: 'ALLOCATED'}
+SPACE_STATUS_MAPPER = DDict(SPACE_STATUS_MAPPER)
+
+FILL_TIME_ALLOC = H5D_FILL_TIME_ALLOC
+FILL_TIME_NEVER = H5D_FILL_TIME_NEVER
+FILL_TIME_IFSET = H5D_FILL_TIME_IFSET
+FILL_TIME_MAPPER = { H5D_FILL_TIME_ALLOC: 'ALLOCATION TIME',
+                     H5D_FILL_TIME_NEVER: 'NEVER',
+                     H5D_FILL_TIME_IFSET: 'IF SET' }
+FILL_TIME_MAPPER = DDict(FILL_TIME_MAPPER)
+
+FILL_VALUE_UNDEFINED    = H5D_FILL_VALUE_UNDEFINED
+FILL_VALUE_DEFAULT      = H5D_FILL_VALUE_DEFAULT
+FILL_VALUE_USER_DEFINED = H5D_FILL_VALUE_USER_DEFINED
+FILL_VALUE_MAPPER = { H5D_FILL_VALUE_UNDEFINED: 'UNDEFINED',
+                      H5D_FILL_VALUE_DEFAULT: 'DEFAULT',
+                      H5D_FILL_VALUE_USER_DEFINED: 'USER-DEFINED' }
+FILL_VALUE_MAPPER = DDict(FILL_VALUE_MAPPER)
+
+# === Basic dataset operations ================================================
+
+def create(int loc_id, char* name, hid_t type_id, hid_t space_id, hid_t plist=H5P_DEFAULT):
+    """ ( INT loc_id, STRING name, INT type_id, INT space_id,
+          INT plist=H5P_DEFAULT ) 
+        => INT dataset_id
+
+        Create a new dataset under an HDF5 file or group id.  Keyword plist 
+        should be a dataset creation property list.
+
+        For a friendlier version of this function, try py_create()
+    """
+    cdef hid_t dataset_id
+    dataset_id = H5Dcreate(loc_id, name, type_id, space_id, plist)
+    if dataset_id < 0:
+        raise DatasetError('Failed to create dataset "%s" under %d' % (name, loc_id))
+    return dataset_id
+
+def open(hid_t loc_id, char* name):
+    """ (INT loc_id, STRING name) => INT dataset_id
+
+        Open an existing dataset attached to a group or file object, by name.
+    """
+    cdef hid_t dset_id
+    dset_id = H5Dopen(loc_id, name)
+    if dset_id < 0:
+        raise DatasetError('Failed to open dataset "%s" under %d' % (name, loc_id))
+    return dset_id
+
+def close(hid_t dset_id):
+    """ (INT dset_id)
+    """
+    cdef herr_t retval
+    retval = H5Dclose(dset_id)
+    if retval < 0:
+        raise DatasetError("Failed to close dataset %d" % dset_id)
+
+# === Dataset I/O =============================================================
+
+def read(hid_t dset_id, hid_t mspace_id, hid_t fspace_id, ndarray arr_obj, hid_t plist=H5P_DEFAULT):
+    """ ( INT dset_id, INT mspace_id, INT fspace_id, NDARRAY arr_obj, 
+          INT plist=H5P_DEFAULT )
+
+        Read data from an HDF5 dataset into a Numpy array.  For maximum 
+        flexibility, you can specify dataspaces for the file and the Numpy
+        object. Keyword plist may be a dataset transfer property list.
+
+        It is your responsibility to ensure that the memory dataspace
+        provided is compatible with the shape of the Numpy array.  It is also
+        up to you to ensure that the Numpy array's dtype is conversion-
+        compatible with the file's datatype. 
+
+        The given Numpy array *must* be C-contiguous, writable and aligned 
+        ("NPY_BEHAVED").  This is not currently checked; anything else may
+        crash Python.
+
+        For a friendlier version of this function, try py_read_slab().
+    """
+    cdef hid_t mtype_id
+    cdef herr_t retval
+    mtype_id = 0
+
+    try:
+        mtype_id = h5t.py_dtype_to_h5t(arr_obj.dtype)
+        retval = H5Dread(dset_id, mtype_id, mspace_id, fspace_id, plist, <void*>arr_obj.data)
+        if retval < 0:
+            raise DatasetError("Error reading from dataset %d" % dset_id)
+    finally:
+        if mtype_id:
+            H5Tclose(mtype_id)
+        
+def write(hid_t dset_id, hid_t mspace_id, hid_t fspace_id, ndarray arr_obj, hid_t plist=H5P_DEFAULT):
+    """ ( INT dset_id, INT mspace_id, INT fspace_id, NDARRAY arr_obj, 
+          INT plist=H5P_DEFAULT )
+
+        Write data from a Numpy array to an HDF5 dataset. Keyword plist may be 
+        a dataset transfer property list.  All the caveats in h5d.read() apply 
+        here as well, in particular the restrictions on the data area of the 
+        Numpy array.
+
+        For a friendlier version of this function, try py_write_slab()
+    """
+    cdef hid_t mtype_id
+    cdef herr_t retval
+    mtype_id = 0
+    try:
+        mtype_id = h5t.py_dtype_to_h5t(arr_obj.dtype)
+        retval = H5Dwrite(dset_id, mtype_id, mspace_id, fspace_id, plist, <void*>arr_obj.data)
+        if retval < 0:
+            raise DatasetError("Error writing to dataset %d" % dset_id)
+    finally:
+        if mtype_id:
+            H5Tclose(mtype_id)
+
+# === Dataset inspection ======================================================
+
+def get_space(hid_t dset_id):
+    """ (INT dset_id) => INT space_id
+
+        Create and return a new copy of the dataspace for this dataset.  You're
+        responsible for closing it.
+    """
+    cdef hid_t space_id
+    space_id = H5Dget_space(dset_id)
+    if space_id < 0:
+        raise DatasetError("Error retrieving space of dataset %d" % dset_id)
+    return space_id
+
+def get_type(hid_t dset_id):
+    """ (INT dset_id) => INT type_id
+
+        Create and return a new copy of the datatype for this dataset.You're
+        responsible for closing it.
+    """
+    cdef hid_t type_id
+    type_id = H5Dget_type(dset_id)
+    if type_id < 0:
+        raise DatasetError("Error retrieving type of dataset %d" % dset_id)
+    return type_id
+
+def get_create_plist(hid_t dset_id):
+    """ (INT dset_id) => INT property_list_id
+
+        Create a new copy of the dataset creation property list used when this
+        dataset was created.  You're responsible for closing it.
+    """
+    cdef hid_t plist
+    plist = H5Dget_create_plist(dset_id)
+    if plist < 0:
+        raise DatasetError("Error retrieving creation property list for dataset %d" % dset_id)
+    return plist
+
+# === Python extensions =======================================================
+
+def py_create(hid_t parent_id, char* name, object data=None, object dtype=None,
+              object shape=None, object chunks=None, object compression=None,
+              object shuffle=False, object fletcher32=False):
+    """ ( INT parent_id, STRING name, NDARRAY data=None, DTYPE dtype=None,
+          TUPLE shape=None, TUPLE chunks=None, PY_INT compression=None,
+          BOOL shuffle=False, BOOL fletcher32=False )
+        => INT dataset_id
+
+        Create an HDF5 dataset from Python.  You must supply *either* a Numpy
+        array, in which case the dataset will be initialized to its type,
+        shape, and contents, *or* both a tuple giving the dimensions and a 
+        Numpy dtype object.
+
+        This function also works for scalar arrays; providing a "shape" tuple 
+        of () or a 0-dimensional array for "data" will result in a scalar 
+        (h5s.CLASS_SCALAR) dataspace for the new dataset, rather than a 
+        slab (h5s.CLASS_SIMPLE).
+
+        Additional options:
+        chunks          A tuple containing chunk sizes, or None
+        compression     Enable DEFLATE compression at this level (0-9), or None
+        shuffle         Enable/disable shuffle filter (default disabled)
+        fletcher32      Enable/disable Fletcher32 error detection (default disabled)
+    """
+    cdef hid_t dset_id
+    cdef hid_t type_id
+    cdef hid_t space_id
+    cdef hid_t plist
+    space_id = 0
+    type_id = 0
+    dset_id = 0
+    plist = 0
+
+    if (data is None and not (dtype and shape)) or (data is not None and (dtype or shape)):
+        raise ValueError("*Either* a Numpy array *or* both a dtype and shape must be provided.")
+
+    if data is not None:
+        shape = data.shape
+        dtype = data.dtype
+
+    try:
+        if len(shape) == 0:
+            space_id = h5s.create(h5s.CLASS_SCALAR)  # let's be explicit
+        else:
+            space_id = h5s.create_simple(shape)
+
+        type_id = h5t.py_dtype_to_h5t(dtype)
+    
+        if( chunks or compression or shuffle or fletcher32):
+            plist = h5p.create(H5P_DATASET_CREATE)
+            if chunks:
+                h5p.set_chunk(plist, chunks)    # required for compression
+            if shuffle:
+                h5p.set_shuffle(plist)          # must immediately precede compression
+            if compression:
+                h5p.set_deflate(plist, compression)
+            if fletcher32:
+                h5p.set_fletcher32(plist)
+        else:
+            plist = H5P_DEFAULT
+
+        dset_id = create(parent_id, name, type_id, space_id, plist)
+
+        if data is not None:
+            write(dset_id, H5S_ALL, H5S_ALL, data)
+
+    finally:
+        if space_id:
+            H5Sclose(space_id)
+        if type_id:
+            H5Tclose(type_id)
+        if plist:
+            H5Pclose(plist)
+
+    return dset_id
+
+def py_read_slab(hid_t ds_id, object start=None, object count=None, 
+                 object stride=None, **kwds):
+    """ (INT ds_id, TUPLE start, TUPLE count, TUPLE stride=None, **kwds)
+        => NDARRAY numpy_array_out
+    
+        Read a hyperslab from an existing HDF5 dataset, and return it as a
+        Numpy array. Dimensions are specified by:
+
+        start:  Tuple of integers indicating the start of the selection.
+                If None, the selection starts at the dataspace origin (0,0,..)
+        count:  Tuple of integers indicating how many elements to read.
+                If None, the selection will extend from <start> to the end of
+                the dataset.  Any of the members can also be None.
+        stride: Pitch of the selection.  Data points at <start> are always
+                selected.  If None, 1 will be used for all axes.
+
+        Any of the members of start/count/stride may also be None, in which
+        case the origin, full extent, and a stride of 1 will be inserted
+        respectively.
+
+        Any additional keywords (**kwds) are passed to the function which maps 
+        HDF5 types to Numpy dtypes; see the docstring for h5t.py_h5t_to_dtype.
+        These include force_native, compound_fields, and force_string_length.
+
+        As is customary when slicing into Numpy array objects, no dimensions 
+        with length 1 are present in the returned array.  Additionally, if the
+        HDF5 dataset has a scalar dataspace, then only None or empty tuples are
+        allowed for start, count and stride, and the returned array will be
+        0-dimensional (arr.shape == ()).
+    """
+    cdef hid_t mem_space
+    cdef hid_t file_space
+    cdef hid_t type_id
+    cdef int rank
+    cdef int i
+
+    mem_space  = 0
+    file_space = 0
+    type_id    = 0
+
+    try:
+        # Obtain the Numpy dtype of the array
+        type_id = get_type(ds_id)
+        dtype = h5t.py_h5t_to_dtype(type_id, **kwds)
+
+        # File dataspace
+        file_space = get_space(ds_id)
+
+        rank = h5s.get_simple_extent_ndims(file_space)
+        file_shape = h5s.get_simple_extent_dims(file_space)
+
+        # Validate arguments and create ones that weren't given
+        if start is None:
+            start = (0,)*rank
+        else:
+            if len(start) != rank:
+                raise ValueError("Length of 'start' tuple must match dataset rank %d (got '%s')" % (rank, repr(start)))
+
+        if count is None:
+            count = []
+            for i from 0<=i<rank:
+                count.append(file_shape[i] - start[i])
+            count = tuple(count)
+        else:
+            if len(count) != rank:
+                raise ValueError("Length of 'count' tuple must match dataset rank %d (got '%s')" % (rank, repr(count)))
+
+            countlist = list(count)
+            for i from 0<=i<rank:
+                if count[i] is None:
+                    countlist[i] = file_shape[i] - start[i]
+                else:
+                    countlist[i] = count[i]
+            count = tuple(countlist)
+
+        if stride is not None:      # Note that h5s.select_hyperslab allows None for stride
+            if len(stride) != rank:
+                raise ValueError("Length of 'stride' tuple must match dataset rank %d (got '%s')" % (rank, repr(stride)))
+
+        # Initialize Numpy array, and an HDF5 dataspace of the same size
+        npy_countlist = []
+        for i from 0<=i<len(count):
+            if count[i] != 0 and count[i] != 1:  # No singlet dimensions
+                npy_countlist.append(count[i])
+        npy_count = tuple(npy_countlist)
+
+        arr = ndarray(npy_count, dtype=dtype)
+        mem_space = h5s.create_simple(npy_count)
+            
+        space_type = h5s.get_simple_extent_type(file_space)
+        if space_type == h5s.CLASS_SIMPLE:
+            h5s.select_hyperslab(file_space, start, count, stride)
+            read(ds_id, mem_space, file_space, arr)
+        elif space_type == h5s.CLASS_SCALAR:
+            read(ds_id, H5S_ALL, H5S_ALL, arr)
+        else:
+            raise ValueError("Dataspace type %d is unsupported" % space_type)
+
+    finally:
+        # ignore return values on cleanup
+        if mem_space:
+            H5Sclose(mem_space)
+        if file_space:
+            H5Sclose(file_space)
+        if type_id:
+            H5Tclose(type_id)
+
+    return arr
+
+def py_write_slab(hid_t ds_id, ndarray arr_obj, object start=None, object stride=None):
+    """ (INT ds_id, NDARRAY arr_obj, TUPLE start, TUPLE stride=None)
+
+        Write the entire contents of a Numpy array into an HDF5 dataset.
+        The size of the given array must fit within the dataspace of the
+        HDF5 dataset.
+
+        start:  Tuple of integers giving offset for write.  If None, the
+                dataspace origin (0,0,...) will be used.
+        stride: Pitch of write in dataset.  The elements of "start" are always
+                selected.  If None, 1 will be used for all dimensions.
+
+        The underlying function depends on write access to the data area of the
+        Numpy array.  See the caveats in h5d.write.
+
+        Please note that this function does absolutely no array broadcasting;
+        if you want to write a (2,3) array to an (N,2,3) or (2,3,N) dataset,
+        you'll have to do it yourself from Numpy.
+    """
+    cdef hid_t mem_space
+    cdef hid_t file_space
+    cdef int rank
+    mem_space  = 0
+    file_space = 0
+
+    count = arr_obj.shape
+
+    try:
+        mem_space = h5s.create_simple(count)
+        file_space = get_space(ds_id)
+
+        rank = h5s.get_simple_extent_ndims(ds_id)
+        file_shape = h5s.get_simple_extent_dims(ds_id)
+
+        if len(count) != rank:
+            raise ValueError("Numpy array must have same rank as the HDF5 dataset")
+
+        if start is None:
+            start = (0,)*rank
+        else:
+            if len(start) != rank:
+                raise ValueError("Length of 'start' tuple must match dataset rank %d (got '%s')" % (rank, repr(start)))
+        
+        if stride is not None:
+            if len(stride) != rank:
+                raise ValueError("Length of 'stride' tuple must match dataset rank %d (got '%s')" % (rank, repr(stride)))
+
+        h5s.select_hyperslab(file_space, start, count, stride)
+        write(ds_id, mem_space, file_space, arr_obj)
+
+    finally:
+        # ignore return values on cleanup
+        if mem_space:
+            H5Sclose(mem_space)
+        if file_space:
+            H5Sclose(file_space)
+
+def py_shape(hid_t dset_id):
+    """ (INT dset_id) => TUPLE shape
+
+        Obtain the dataspace of an HDF5 dataset, as a tuple.
+    """
+    cdef int space_id
+    space_id = 0
+    shape = None
+    try:
+        space_id = get_space(dset_id)
+        shape = h5s.get_simple_extent_dims(space_id)
+    finally:
+        if space_id:
+            H5Sclose(space_id)
+    return shape
+
+def py_rank(hid_t dset_id):
+    """ (INT dset_id) => INT rank
+
+        Obtain the rank of an HDF5 dataset.
+    """
+    cdef int space_id
+    space_id = 0
+    rank = None
+    try:
+        space_id = get_space(dset_id)
+        rank = h5s.get_simple_extent_ndims(space_id)
+    finally:
+        if space_id:
+            H5Sclose(space_id)
+    return rank
+
+def py_dtype(hid_t dset_id):
+    """ (INT dset_id) => DTYPE numpy_dtype
+
+        Get the datatype of an HDF5 dataset, converted to a Numpy dtype.
+    """
+    cdef hid_t type_id
+    type_id = 0
+    dtype_out = None
+    try:
+        type_id = get_type(dset_id)
+        dtype_out = h5t.py_h5t_to_dtype(type_id)
+    finally:
+        if type_id:
+            H5Tclose(type_id)
+    return dtype_out
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5e.pxd b/h5py/h5e.pxd
new file mode 100755
index 0000000..591bf85
--- /dev/null
+++ b/h5py/h5e.pxd
@@ -0,0 +1,39 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file contains code or comments from the HDF5 library. The complete HDF5
+# license is available in the file licenses/hdf5.txt in the distribution
+# root directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  cdef enum H5E_direction_t:
+    H5E_WALK_UPWARD    = 0  #/*begin deep, end at API function    */
+    H5E_WALK_DOWNWARD = 1   #/*begin at API function, end deep    */
+
+  ctypedef struct H5E_error_t:
+    int      maj_num        # /*major error number             */
+    int      min_num        # /*minor error number             */
+    char    *func_name      # /*function in which error occurred   */
+    char    *file_name      # /*file in which error occurred       */
+    unsigned    line        # /*line in file where error occurs    */
+    char    *desc           # /*optional supplied description      */
+
+  # --- Error handling --------------------------------------------------------
+  herr_t    H5Eset_auto(void* opt1, void* opt2)
+  ctypedef herr_t (*H5E_walk_t)(int n, H5E_error_t *err_desc, client_data)  
+  herr_t    H5Ewalk(H5E_direction_t direction, H5E_walk_t func, client_data  )
+
+
diff --git a/h5py/h5f.pxd b/h5py/h5f.pxd
new file mode 100755
index 0000000..f11b118
--- /dev/null
+++ b/h5py/h5f.pxd
@@ -0,0 +1,46 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  # File constants
+  int H5F_ACC_TRUNC, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_EXCL
+  int H5F_ACC_DEBUG, H5F_ACC_CREAT
+
+  # The difference between a single file and a set of mounted files
+  cdef enum H5F_scope_t:
+    H5F_SCOPE_LOCAL     = 0,    # specified file handle only
+    H5F_SCOPE_GLOBAL    = 1,    # entire virtual file
+    H5F_SCOPE_DOWN      = 2     # for internal use only
+
+  cdef enum H5F_close_degree_t:
+    H5F_CLOSE_WEAK  = 0,
+    H5F_CLOSE_SEMI  = 1,
+    H5F_CLOSE_STRONG = 2,
+    H5F_CLOSE_DEFAULT = 3
+
+  # --- File operations -------------------------------------------------------
+  hid_t  H5Fcreate(char *filename, unsigned int flags,
+                   hid_t create_plist, hid_t access_plist)
+  hid_t  H5Fopen(char *name, unsigned flags, hid_t access_id)
+  herr_t H5Fclose (hid_t file_id)
+  htri_t H5Fis_hdf5(char *name)
+  herr_t H5Fflush(hid_t object_id, H5F_scope_t scope)
+
+
diff --git a/h5py/h5f.pyx b/h5py/h5f.pyx
new file mode 100755
index 0000000..11b61a0
--- /dev/null
+++ b/h5py/h5f.pyx
@@ -0,0 +1,129 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Low-level operations on HDF5 file objects.
+
+    Functions in this module raise errors.FileError.
+"""
+
+# Pyrex compile-time imports
+from h5  cimport herr_t, hid_t, htri_t
+from h5p cimport H5P_DEFAULT
+
+# Runtime imports
+import h5
+from h5 import DDict
+from errors import FileError
+
+# === Public constants and data structures ====================================
+
+ACC_TRUNC   = H5F_ACC_TRUNC
+ACC_EXCL    = H5F_ACC_EXCL
+ACC_RDWR    = H5F_ACC_RDWR
+ACC_RDONLY  = H5F_ACC_RDONLY
+ACC_MAPPER  = {H5F_ACC_TRUNC: 'TRUNCATE', H5F_ACC_EXCL: 'EXCLUSIVE',
+               H5F_ACC_RDWR: 'READ-WRITE', H5F_ACC_RDONLY: 'READ-ONLY' }
+ACC_MAPPER  = DDict(ACC_MAPPER)
+
+SCOPE_LOCAL     = H5F_SCOPE_LOCAL
+SCOPE_GLOBAL    = H5F_SCOPE_GLOBAL
+SCOPE_MAPPER    = {H5F_SCOPE_LOCAL: 'LOCAL SCOPE', H5F_SCOPE_GLOBAL: 'GLOBAL SCOPE'}
+SCOPE_MAPPER    = DDict(SCOPE_MAPPER)
+
+CLOSE_WEAK = H5F_CLOSE_WEAK
+CLOSE_SEMI = H5F_CLOSE_SEMI
+CLOSE_STRONG = H5F_CLOSE_STRONG
+CLOSE_DEFAULT = H5F_CLOSE_DEFAULT
+CLOSE_MAPPER = {H5F_CLOSE_WEAK: 'WEAK', H5F_CLOSE_SEMI: 'SEMI', 
+                H5F_CLOSE_STRONG: 'STRONG', H5F_CLOSE_DEFAULT: 'DEFAULT'}
+CLOSE_MAPER = DDict(CLOSE_MAPPER)
+
+# === File operations =========================================================
+
+def open(char* name, unsigned int flags=H5F_ACC_RDWR, hid_t access_id=H5P_DEFAULT):
+    """ (STRING name, UINT flags=ACC_RDWR, INT access_id=H5P_DEFAULT)
+        => INT file_id
+
+        Open an existing HDF5 file.  Keyword "flags" may be ACC_RWDR or
+        ACC_RDONLY.  Keyword "access_id" may be a file access property list.
+    """
+    cdef hid_t retval
+    retval = H5Fopen(name, flags, access_id)
+
+    if retval < 0:
+        raise FileError("Failed to open file '%s'" % name)
+    return retval
+
+def close(hid_t file_id):
+    """ (INT file_id)
+    """
+    cdef herr_t retval
+    retval = H5Fclose(file_id)
+    if retval < 0:
+        raise FileError("Failed to close file id %d" % file_id)
+
+def create(char* name, int flags=H5F_ACC_TRUNC, hid_t create_id=H5P_DEFAULT, hid_t access_id=H5P_DEFAULT):
+    """ (STRING name, INT flags=ACC_TRUNC, INT create_id=H5P_DEFAULT,
+            INT access_id=H5P_DEFAULT)
+        => INT file_id
+
+        Create a new HDF5 file.  Keyword "flags" may be either ACC_TRUNC, in
+        which case any existing file will be destroyed, or ACC_EXCL, which
+        will force the creation to fail if the file already exists.
+        Keywords create_id and access_id may be dataset creation and access
+        property lists, respectively.
+    """
+    cdef hid_t retval
+    retval = H5Fcreate(name, flags, create_id, access_id)
+
+    if retval < 0:
+        raise FileError('Failed to create file "%s" mode %d' % (name,flags))
+    return retval
+
+def flush(hid_t file_id, int scope=H5F_SCOPE_LOCAL):
+    """ (INT file_id, INT scope=SCOPE_LOCAL)
+
+        Tell the HDF5 library to flush file buffers to disk.  See the HDF5
+        docs for the meaning of the scope keyword.
+    """
+    cdef herr_t retval
+    retval = H5Fflush(file_id, <H5F_scope_t>scope)
+
+    if retval < 0:
+        raise FileError("Failed to flush file %d" % file_id)
+
+def is_hdf5(char* name):
+    """ (STRING name) => BOOL is_hdf5
+
+        Determine if a given file is an HDF5 file.  Note this raises an 
+        exception if the file doesn't exist.
+    """
+    cdef htri_t retval
+    retval = H5Fis_hdf5(name)
+    if retval < 0:
+        raise FileError("Can't determine status of file '%s'" % name)
+    return bool(retval)
+
+    
+
+
+
+
+
+
+
+
+
+
+
+    
diff --git a/h5py/h5g.pxd b/h5py/h5g.pxd
new file mode 100755
index 0000000..b100327
--- /dev/null
+++ b/h5py/h5g.pxd
@@ -0,0 +1,64 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  cdef enum H5G_link_t:
+    H5G_LINK_ERROR      = -1,
+    H5G_LINK_HARD       = 0,
+    H5G_LINK_SOFT       = 1
+
+  cdef enum H5G_obj_t:
+    H5G_UNKNOWN = -1,           # Unknown object type
+    H5G_LINK,                   # Object is a symbolic link
+    H5G_GROUP,                  # Object is a group
+    H5G_DATASET,                # Object is a dataset
+    H5G_TYPE,                   # Object is a named data type
+
+  ctypedef struct H5G_stat_t:
+    unsigned long fileno[2]
+    unsigned long objno[2]
+    unsigned nlink
+    H5G_obj_t type              # new in HDF5 1.6
+    time_t mtime
+    size_t linklen
+    #H5O_stat_t ohdr            # Object header information. New in HDF5 1.6
+
+  # --- Group operations ------------------------------------------------------
+  hid_t  H5Gcreate(hid_t loc_id, char *name, size_t size_hint )
+  hid_t  H5Gopen(hid_t loc_id, char *name )
+  herr_t H5Gclose(hid_t group_id)
+  herr_t H5Glink (hid_t file_id, H5G_link_t link_type,
+                  char *current_name, char *new_name)
+  herr_t H5Glink2( hid_t curr_loc_id, char *current_name, 
+                   H5G_link_t link_type, hid_t new_loc_id, char *new_name )
+
+  herr_t H5Gunlink (hid_t file_id, char *name)
+  herr_t H5Gmove(hid_t loc_id, char *src, char *dst)
+  herr_t H5Gmove2(hid_t src_loc_id, char *src_name,
+                  hid_t dst_loc_id, char *dst_name )
+  herr_t H5Gget_num_objs(hid_t loc_id, hsize_t*  num_obj)
+  int    H5Gget_objname_by_idx(hid_t loc_id, hsize_t idx, char *name, size_t size )
+  int    H5Gget_objtype_by_idx(hid_t loc_id, hsize_t idx )
+
+  ctypedef herr_t (*H5G_iterate_t)(hid_t group, char *name, op_data)
+  herr_t H5Giterate(hid_t loc_id, char *name, int *idx, H5G_iterate_t operator, operator_data  )
+  herr_t H5Gget_objinfo(hid_t loc_id, char* name, int follow_link, H5G_stat_t *statbuf)
+
+
diff --git a/h5py/h5g.pyx b/h5py/h5g.pyx
new file mode 100755
index 0000000..741d910
--- /dev/null
+++ b/h5py/h5g.pyx
@@ -0,0 +1,389 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Low-level HDF5 "H5G" group interface.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free, time_t
+from h5  cimport herr_t, hid_t, size_t, hsize_t
+
+# Runtime imports
+import h5
+from h5 import DDict
+from errors import GroupError
+
+# === Public constants and data structures ====================================
+
+# Enumerated object types for groups "H5G_obj_t"
+OBJ_UNKNOWN  = H5G_UNKNOWN
+OBJ_LINK     = H5G_LINK
+OBJ_GROUP    = H5G_GROUP
+OBJ_DATASET  = H5G_DATASET
+OBJ_DATATYPE = H5G_TYPE
+# Maps type codes to human-readable names; wrapped in DDict so unknown
+# codes still produce a usable string (see h5.DDict).
+OBJ_MAPPER = { H5G_UNKNOWN: "UNKNOWN", H5G_LINK: "LINK", H5G_GROUP: "GROUP",
+                 H5G_DATASET: "DATASET", H5G_TYPE: "DATATYPE" }
+OBJ_MAPPER = DDict(OBJ_MAPPER)
+
+# Enumerated link types "H5G_link_t"
+LINK_ERROR = H5G_LINK_ERROR
+LINK_HARD  = H5G_LINK_HARD
+LINK_SOFT  = H5G_LINK_SOFT
+LINK_MAPPER = { H5G_LINK_ERROR: "ERROR", H5G_LINK_HARD: "HARDLINK", 
+                H5G_LINK_SOFT: "SOFTLINK" }
+LINK_MAPPER = DDict(LINK_MAPPER)
+
+cdef class GroupStat:
+    """ Represents the H5G_stat_t structure containing group member info.
+
+        Fields:
+        fileno -> 2-tuple uniquely* identifying the current file
+        objno  -> 2-tuple uniquely* identifying this object
+        nlink  -> Number of hard links to this object
+        mtime  -> Modification time of this object (flaky)
+        type   -> Object type code; one of the OBJ_* module constants
+        linklen -> Length of the symbolic link value, if any
+
+        *"Uniquely identifying" means unique among currently open files, 
+        not universally unique.
+    """
+    cdef public object fileno  # will be a 2-tuple
+    cdef public object objno   # will be a 2-tuple
+    cdef public unsigned nlink
+    cdef public int type       # H5G_obj_t value, stored as plain int
+    cdef public time_t mtime
+    cdef public size_t linklen
+
+
+# === Basic group management ==================================================
+
+def open(hid_t loc_id, char* name):
+    """ (INT loc_id, STRING name) => INT group_id
+
+        Open an existing HDF5 group, attached to some other group.
+        Returns the identifier of the newly opened group; raises GroupError
+        on failure.
+    """
+    # H5Gopen returns an object identifier (hid_t), not a status code
+    # (herr_t); use the correct type for the local so wide identifiers are
+    # not truncated.
+    cdef hid_t retval
+    
+    retval = H5Gopen(loc_id, name)
+    if retval < 0:
+        raise GroupError("Failed to open group %s at %d" % (name, loc_id))
+    return retval
+
+def close(hid_t group_id):
+    """ (INT group_id)
+
+        Terminate access to the given group.  Raises GroupError on failure.
+    """
+    cdef herr_t retval
+
+    retval = H5Gclose(group_id)
+    if retval < 0:
+        raise GroupError("Can't close group %d" % group_id)
+
+def create(hid_t loc_id, char* name, int size_hint=-1):
+    """ (INT loc_id, STRING name, INT size_hint=-1) => INT group_id
+
+        Create a new group named "name", under a parent group identified by
+        "loc_id".  See the HDF5 documentation for the meaning of size_hint.
+        Returns the identifier of the new group.
+    """
+    # H5Gcreate returns an object identifier (hid_t), not a status code
+    # (herr_t); use the correct type for the local so wide identifiers are
+    # not truncated.
+    cdef hid_t retval
+    
+    retval = H5Gcreate(loc_id, name, size_hint)
+    if retval < 0:
+        raise GroupError("Can't create group %s under %d" % (name, loc_id))
+    return retval
+
+# === Group member management =================================================
+
+def link(hid_t loc_id, char* current_name, char* new_name, int link_type=H5G_LINK_HARD, hid_t remote_id=-1):
+    """ ( INT loc_id, STRING current_name, STRING new_name, 
+          INT link_type=LINK_HARD, INT remote_id=-1) 
+
+        Create a new hard or soft link.  The link target (object the link will
+        point to) is identified by its parent group "loc_id", and the string
+        current_name.  The name of the new link is new_name.  If you want to
+        create the link in another group, pass its identifier through
+        remote_id.
+
+        Hard links are created by default (link_type=LINK_HARD).  To create a
+        symbolic link, pass in link_type=LINK_SOFT.
+    """
+    cdef herr_t retval
+    
+    # A negative remote_id means "create the link in the same group".
+    if remote_id < 0:
+        remote_id = loc_id
+
+    retval = H5Glink2(loc_id, current_name, <H5G_link_t>link_type, remote_id, new_name)
+    if retval < 0:
+        raise GroupError('Failed to link %d=>"%s" to %d=>"%s"' % (loc_id, current_name, remote_id, new_name))
+
+def unlink(hid_t loc_id, char* name):
+    """ (INT loc_id, STRING name)
+
+        Remove a link to an object from the given group.
+    """
+    cdef herr_t retval
+
+    retval = H5Gunlink(loc_id, name)
+    if retval < 0:
+        raise GroupError("Failed to unlink member '%s' from group %d" % (name, loc_id))
+
+
+def move(hid_t loc_id, char* current_name, char* new_name, hid_t remote_id=-1):
+    """ (INT loc_id, STRING current_name, STRING new_name, INT remote_id=-1)
+
+        Relink an object, identified by its parent group loc_id and string
+        current_name.  The new name of the link is new_name.  You can create
+        the link in a different group by passing its identifier to remote_id.
+    """
+    cdef int retval
+    # A negative remote_id means "move within the same group".
+    if remote_id < 0:
+        remote_id = loc_id
+
+    retval = H5Gmove2(loc_id, current_name, remote_id, new_name)
+    if retval < 0:
+        raise GroupError('Failed to move %d=>"%s" to %d=>"%s"' % (loc_id, current_name, remote_id, new_name))
+
+# === Member inspection and iteration =========================================
+
+def get_num_objs(hid_t loc_id):
+    """ (INT loc_id) => INT number_of_objects
+
+        Get the number of objects attached to a given group.
+    """
+    cdef hsize_t size
+    cdef herr_t retval
+    
+    retval = H5Gget_num_objs(loc_id, &size)
+    if retval < 0:
+        raise GroupError("Group enumeration failed: %d" % loc_id)
+
+    return size
+
+def get_objname_by_idx(hid_t loc_id, hsize_t idx):
+    """ (INT loc_id, INT idx) => STRING object_name
+
+        Get the name of a group member given its zero-based index.
+        Returns None for an anonymous (zero-length name) member.
+    """
+    cdef int retval
+    cdef char* buf
+    cdef object pystring
+
+    # First call with a NULL buffer returns the name length (excluding the
+    # NUL terminator).
+    retval = H5Gget_objname_by_idx(loc_id, idx, NULL, 0)
+    if retval < 0:
+        raise GroupError("Error accessing element %d of group %d" % (idx, loc_id))
+    elif retval == 0:
+        return None
+    else:
+        # +1 for the NUL terminator; the buffer is copied into a Python
+        # string before being freed.
+        buf = <char*>malloc(retval+1)
+        retval = H5Gget_objname_by_idx(loc_id, idx, buf, retval+1)
+        pystring = buf
+        free(buf)
+        return pystring
+
+def get_objtype_by_idx(hid_t loc_id, hsize_t idx):
+    """ (INT loc_id, INT idx) => INT object_type_code
+
+        Get the type of an object attached to a group, given its zero-based
+        index.  Return value is one of the OBJ_* constants.
+    """
+    cdef int retval
+
+    retval = H5Gget_objtype_by_idx(loc_id, idx)
+    if retval < 0:
+        raise GroupError("Error accessing element %d of group %d" % (idx, loc_id))
+
+    return retval
+
+def get_objinfo(hid_t loc_id, char* name, int follow_link=1):
+    """ (INT loc_id, STRING name, BOOL follow_link=True)
+        => GroupStat object
+
+        Obtain information about an arbitrary object attached to a group. The
+        return value is a GroupStat object; see that class's docstring
+        for a description of its attributes.  If follow_link is True (default)
+        and the object is a symbolic link, the information returned describes 
+        its target.  Otherwise the information describes the link itself.
+    """
+    cdef int retval
+    cdef H5G_stat_t stat
+    cdef object statobj
+
+    retval = H5Gget_objinfo(loc_id, name, follow_link, &stat)
+    if retval < 0:
+        raise GroupError("Can't stat member \"%s\" of group %d" % (name, loc_id))
+
+    # Copy the C struct field-by-field into the Python-visible GroupStat.
+    statobj = GroupStat()
+    statobj.fileno = (stat.fileno[0], stat.fileno[1])
+    statobj.objno = (stat.objno[0], stat.objno[1])
+    statobj.nlink = stat.nlink
+    statobj.type = <int>stat.type
+    statobj.mtime = stat.mtime
+    statobj.linklen = stat.linklen
+
+    return statobj
+
+cdef herr_t iter_cb_helper(hid_t gid, char *name, object int_tpl):
+    # C-level trampoline passed to H5Giterate.  int_tpl is the 3-tuple
+    # (user_function, user_data, exception_list) built by iterate() below.
+    # Python exceptions cannot propagate through the C iteration machinery,
+    # so they are stashed in exception_list and re-raised by the caller.
+
+    cdef object func
+    cdef object data
+    cdef object outval
+
+    func = int_tpl[0]
+    data = int_tpl[1]
+    exc_list = int_tpl[2]
+
+    try:
+        func(gid, name, data)
+    except StopIteration:
+        # Positive return tells H5Giterate to stop early, without error.
+        return 1
+    except Exception, e:
+        # Negative return aborts iteration; the exception is re-raised later.
+        exc_list.append(e)
+        return -1
+
+    return 0
+
+def iterate(hid_t loc_id, char* name, object func, object data=None, int startidx=0):
+    """ (INT loc_id, STRING name, FUNCTION func, OBJECT data=None, 
+            UINT startidx=0) => INT last_index_processed
+
+        Iterate an arbitrary Python function over a group.  Note that the
+        group is specified by a parent and a name; if you have a group
+        identifier and want to iterate over it; pass in "." for the name.
+
+        You can also start at an arbitrary member by specifying its 
+        (zero-based) index.  The return value is the index of the last 
+        group member processed.
+
+        Your function:
+        1.  Should accept three arguments: the (INT) id of the group, the 
+            (STRING) name of the member, and an arbitary Python object you 
+            provide as data.  Any return value is ignored.
+        2.  Raise StopIteration to bail out before all members are processed.
+        3.  Raising anything else immediately aborts iteration, and the
+            exception is propagated.
+    """
+    cdef int i
+    cdef herr_t retval
+
+    i = startidx
+
+    # (func, data, exception_list) -- see iter_cb_helper above.
+    int_tpl = (func, data, [])
+
+    retval = H5Giterate(loc_id, name, &i, <H5G_iterate_t>iter_cb_helper, int_tpl)
+
+    if retval < 0:
+        # Prefer re-raising the Python exception captured in the callback;
+        # fall back to a generic error if HDF5 itself failed.
+        if len(int_tpl[2]) != 0:
+            raise int_tpl[2][0]
+        raise GroupError("Error occured during iteration")
+    # NOTE(review): H5Giterate leaves i one past the last member processed;
+    # the extra -1 here (i-2 instead of i-1) looks suspicious -- TODO confirm
+    # against the HDF5 1.6 H5Giterate semantics.
+    return i-2
+
+# === Custom extensions =======================================================
+
+def py_listnames(hid_t group_id):
+    """ (INT group_id) => LIST names_list
+
+        Create a Python list of the object names directly attached to a group.
+    """
+    cdef int nitems
+    cdef object thelist
+    cdef int i
+
+    thelist = []
+    nitems = get_num_objs(group_id)
+
+    # Pyrex integer for-loop syntax; equivalent to range(nitems).
+    for i from 0 <= i < nitems:
+        thelist.append(get_objname_by_idx(group_id, i))
+
+    return thelist
+
+cdef class _GroupIterator:
+
+    """ Iterator object which yields names of group members.
+        These objects are created by py_iternames; don't create them yourself.
+    """
+
+    cdef hid_t gid      # group being iterated
+    cdef int idx        # index of the next member to yield
+    cdef int nitems     # member count captured at construction time
+
+    def __init__(self, int gid):
+        self.gid = gid
+        self.idx = 0
+        self.nitems = get_num_objs(gid)
+
+    def __next__(self):
+        cdef hsize_t nobjs
+        # NOTE(review): hsize_t is unsigned, so -1 wraps to the maximum
+        # value; it only serves as an "uninitialized" sentinel here.
+        nobjs = -1
+        # Guard against the group membership changing mid-iteration.
+        H5Gget_num_objs(self.gid, &nobjs)
+        if nobjs != self.nitems:
+            raise GroupError("Group length changed during iteration")
+        if self.idx >= self.nitems:
+            raise StopIteration()
+        name = get_objname_by_idx(self.gid, self.idx)
+        self.idx  = self.idx + 1
+        return name
+
+    def __iter__(self):
+        return self
+
+def py_iternames(hid_t group_id):
+    """ (INT group_id) => ITERATOR names_iterator
+
+        Create an iterator object which yields names attached to the current
+        group.  Mutating group members is OK, but do *NOT* change the group 
+        membership while iterating over it.
+    """
+    return _GroupIterator(group_id)
+
+def py_exists(hid_t group_id, char* name, int follow_link=1):
+    """ (INT group_id, STRING name, BOOL follow_link=True) => BOOL exists
+
+        Determine if a named member exists in the given group.  If follow_link
+        is True (default), symbolic links will be dereferenced.
+    """
+    cdef int retval
+    # NULL statbuf: we only care about success/failure, not the object info.
+    retval = H5Gget_objinfo(group_id, name, follow_link, NULL)
+    if retval < 0:
+        return False
+    return True
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    
+
+
diff --git a/h5py/h5i.pxd b/h5py/h5i.pxd
new file mode 100755
index 0000000..cb01e1e
--- /dev/null
+++ b/h5py/h5i.pxd
@@ -0,0 +1,40 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file contains code or comments from the HDF5 library. The complete HDF5
+# license is available in the file licenses/hdf5.txt in the distribution
+# root directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  # reflection
+  cdef enum H5I_type_t:
+    H5I_BADID        = -1,  # /*invalid Group                    */
+    H5I_FILE        = 1,    # /*group ID for File objects            */
+    H5I_GROUP,              # /*group ID for Group objects            */
+    H5I_DATATYPE,           # /*group ID for Datatype objects            */
+    H5I_DATASPACE,          # /*group ID for Dataspace objects        */
+    H5I_DATASET,            # /*group ID for Dataset objects            */
+    H5I_ATTR,               # /*group ID for Attribute objects        */
+    H5I_REFERENCE,          # /*group ID for Reference objects        */
+    H5I_VFL,                # /*group ID for virtual file layer        */
+    H5I_GENPROP_CLS,        # /*group ID for generic property list classes */
+    H5I_GENPROP_LST,        # /*group ID for generic property lists       */
+    H5I_NGROUPS             # /*number of valid groups, MUST BE LAST!        */
+
+  # --- Reflection ------------------------------------------------------------
+  H5I_type_t H5Iget_type(hid_t obj_id)
+  size_t H5Iget_name( hid_t obj_id, char *name, size_t size  )
+
diff --git a/h5py/h5i.pyx b/h5py/h5i.pyx
new file mode 100755
index 0000000..a4ff4d9
--- /dev/null
+++ b/h5py/h5i.pyx
@@ -0,0 +1,86 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Identifier interface for object inspection.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport size_t, malloc, free
+from h5  cimport hid_t
+
+import h5
+from h5 import DDict
+from errors import H5TypeError
+
+# === Public constants and data structures ====================================
+
+# Python-level aliases for the H5I_type_t identifier-type codes.
+TYPE_BADID = H5I_BADID
+TYPE_FILE = H5I_FILE
+TYPE_GROUP = H5I_GROUP
+# Bug fix: TYPE_DATASPACE previously aliased H5I_GROUP (copy-paste error).
+TYPE_DATASPACE = H5I_DATASPACE
+TYPE_DATATYPE = H5I_DATATYPE
+TYPE_DATASET = H5I_DATASET
+TYPE_ATTR = H5I_ATTR
+TYPE_REFERENCE = H5I_REFERENCE
+# Maps type codes to human-readable names; wrapped in DDict so unknown
+# codes still produce a usable string.
+TYPE_MAPPER = { H5I_BADID: 'BAD ID', H5I_FILE: 'FILE', H5I_GROUP: 'GROUP',
+                 H5I_DATASPACE: 'DATASPACE', H5I_DATATYPE: 'DATATYPE',
+                 H5I_DATASET: 'DATASET', H5I_ATTR: 'ATTRIBUTE', 
+                 H5I_REFERENCE: 'REFERENCE' }
+TYPE_MAPPER = DDict(TYPE_MAPPER)
+
+# === Introspection API =======================================================
+
+def get_type(hid_t obj_id):
+    """ (INT obj_id) => INT type_code
+
+        Determine the type of an arbitrary HDF5 object.  The return value is
+        always one of TYPE_*; if the ID is invalid, TYPE_BADID is returned.
+    """
+    cdef int retval
+    # H5Iget_type never raises; an invalid id simply maps to H5I_BADID.
+    retval = <int>H5Iget_type(obj_id)
+    return retval
+
+def get_name(hid_t obj_id):
+    """ (INT obj_id) => STRING name or None
+
+        Determine (a) name of an HDF5 object.  Because an object has as many
+        names as there are hard links to it, this may not be unique.  If the
+        object does not have a name (transient datatypes, etc.), the 
+        return value is None.
+    """
+    # Bug fix: H5Iget_name signals failure with a negative value, but the
+    # local was declared with the unsigned size_t type, so "namelen < 0"
+    # could never fire.  Store the result in a signed local instead.
+    cdef int namelen
+    cdef char* name
+
+    # First call with a NULL buffer returns the name length.
+    namelen = <int>H5Iget_name(obj_id, NULL, 0)
+    if namelen < 0:
+        raise H5TypeError("Failed to determine name of object %d" % obj_id)
+    if namelen == 0:
+        return None
+
+    # +1 for the NUL terminator; the buffer is copied into a Python string
+    # before being freed.
+    name = <char*>malloc(namelen+1)
+    namelen = <int>H5Iget_name(obj_id, name, namelen+1)
+    retstring = name
+    free(name)
+
+    return retstring
+
+    
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5p.pxd b/h5py/h5p.pxd
new file mode 100755
index 0000000..c0007c4
--- /dev/null
+++ b/h5py/h5p.pxd
@@ -0,0 +1,88 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c   cimport size_t, time_t
+from h5  cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+from h5d cimport H5D_layout_t, H5D_fill_value_t, H5D_fill_time_t, H5D_alloc_time_t
+from h5z cimport H5Z_filter_t, H5Z_EDC_t
+from h5f cimport H5F_close_degree_t
+
+cdef extern from "hdf5.h":
+
+  int H5P_DEFAULT
+
+  # Property list classes
+  int H5P_NO_CLASS
+  int H5P_FILE_CREATE 
+  int H5P_FILE_ACCESS 
+  int H5P_DATASET_CREATE 
+  int H5P_DATASET_XFER 
+  int H5P_MOUNT      
+
+  # --- Property list operations ----------------------------------------------
+  # General operations
+  hid_t  H5Pcreate(hid_t plist_id)
+  hid_t  H5Pcopy(hid_t plist_id)
+  int    H5Pget_class(hid_t plist_id)
+  herr_t H5Pclose(hid_t plist_id)
+  htri_t H5Pequal( hid_t id1, hid_t id2  )
+
+  # Dataset creation properties
+  herr_t        H5Pset_layout(hid_t plist, H5D_layout_t layout )
+  H5D_layout_t  H5Pget_layout(hid_t plist)
+  herr_t        H5Pset_chunk(hid_t plist, int ndims, hsize_t * dim)
+  int           H5Pget_chunk(hid_t plist, int max_ndims, hsize_t * dims  )
+  herr_t        H5Pset_deflate( hid_t plist, int level)
+  herr_t        H5Pset_fill_value(hid_t plist_id, hid_t type_id, void *value  )
+  herr_t        H5Pget_fill_value(hid_t plist_id, hid_t type_id, void *value  )
+  herr_t        H5Pfill_value_defined(hid_t plist_id, H5D_fill_value_t *status  )
+  herr_t        H5Pset_fill_time(hid_t plist_id, H5D_fill_time_t fill_time  )
+  herr_t        H5Pget_fill_time(hid_t plist_id, H5D_fill_time_t *fill_time  )
+  herr_t        H5Pset_alloc_time(hid_t plist_id, H5D_alloc_time_t alloc_time  )
+  herr_t        H5Pget_alloc_time(hid_t plist_id, H5D_alloc_time_t *alloc_time  )
+  herr_t        H5Pset_filter(hid_t plist, H5Z_filter_t filter, unsigned int flags,
+                              size_t cd_nelmts, unsigned int cd_values[]  )
+  htri_t        H5Pall_filters_avail(hid_t dcpl_id)
+  int           H5Pget_nfilters(hid_t plist)
+  H5Z_filter_t  H5Pget_filter(hid_t plist, unsigned int filter_number, 
+                              unsigned int *flags, size_t *cd_nelmts, 
+                              unsigned int *cd_values, size_t namelen, char name[]  )
+  herr_t        H5Pget_filter_by_id( hid_t plist_id, H5Z_filter_t filter, 
+                                     unsigned int *flags, size_t *cd_nelmts, 
+                                     unsigned int cd_values[], size_t namelen, char name[]  )
+  herr_t        H5Pmodify_filter(hid_t plist, H5Z_filter_t filter, unsigned int flags,
+                                 size_t cd_nelmts, unsigned int cd_values[]  )
+  herr_t        H5Premove_filter(hid_t plist, H5Z_filter_t filter  )
+  herr_t        H5Pset_fletcher32(hid_t plist)
+  herr_t        H5Pset_shuffle(hid_t plist_id)
+  herr_t        H5Pset_szip(hid_t plist, unsigned int options_mask, unsigned int pixels_per_block)
+                # external files not implemented
+
+  # File access
+  herr_t    H5Pset_fclose_degree(hid_t fapl_id, H5F_close_degree_t fc_degree)
+
+  # Transfer properties
+  herr_t    H5Pset_edc_check(hid_t plist, H5Z_EDC_t check)
+  H5Z_EDC_t H5Pget_edc_check(hid_t plist)
+
+  # Other properties
+  herr_t H5Pset_cache(hid_t plist_id, int mdc_nelmts, int rdcc_nelmts,
+                      size_t rdcc_nbytes, double rdcc_w0)
+  herr_t H5Pset_sieve_buf_size(hid_t fapl_id, hsize_t size)
+  herr_t H5Pset_fapl_log(hid_t fapl_id, char *logfile,
+                         unsigned int flags, size_t buf_size)
+
+
diff --git a/h5py/h5p.pyx b/h5py/h5p.pyx
new file mode 100755
index 0000000..4b11686
--- /dev/null
+++ b/h5py/h5p.pyx
@@ -0,0 +1,291 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+
+"""
+    HDF5 property list interface.
+
+    This module is currently incomplete; functions exist for generic operations
+    and dataset creation property lists, but not much else.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free, size_t
+from h5  cimport herr_t, hid_t, htri_t, herr_t, hsize_t
+from h5d cimport H5D_layout_t
+from h5z cimport H5Z_filter_t
+from utils cimport tuple_to_dims, dims_to_tuple
+from numpy cimport PyArray_CheckScalar, PyArray_ScalarAsCtype
+
+# Runtime imports
+import h5
+import h5t
+from h5 import DDict
+from errors import PropertyError, ConversionError
+
+# === Public constants and data structures ====================================
+
+# Property list classes (I'm surprised there's no enum for this)
+CLASS_NO_CLASS       = H5P_NO_CLASS
+CLASS_FILE_CREATE    = H5P_FILE_CREATE
+CLASS_FILE_ACCESS    = H5P_FILE_ACCESS
+CLASS_DATASET_CREATE = H5P_DATASET_CREATE
+CLASS_DATASET_XFER   = H5P_DATASET_XFER
+CLASS_MOUNT          = H5P_MOUNT
+
+# Maps class codes to human-readable names; wrapped in DDict so unknown
+# codes still produce a usable string.
+CLASS_MAPPER = { H5P_NO_CLASS: 'ERROR', H5P_FILE_CREATE: 'FILE CREATION',
+                  H5P_FILE_ACCESS: 'FILE ACCESS', H5P_DATASET_CREATE: 'DATASET CREATION',
+                  H5P_DATASET_XFER: 'DATASET TRANSFER', H5P_MOUNT: 'MOUNT'}
+CLASS_MAPPER = DDict(CLASS_MAPPER)
+
+DEFAULT = H5P_DEFAULT # not really a "class"
+
+# === Generic property list operations ========================================
+
+def create(hid_t cls_id):
+    """ (INT cls_id) => INT property_list_id
+    
+        Create a new property list as an instance of a class, which should be
+        one of CLASS_*.
+    """
+    cdef hid_t retval
+    retval = H5Pcreate(cls_id)
+    if retval < 0:
+        raise PropertyError("Failed to create instance of property list class %d" % cls_id)
+    return retval
+
+def copy(hid_t plist):
+    """ (INT plist) => INT new_property_list_id
+
+        Create a new copy of an existing property list object.
+    """
+    cdef hid_t retval
+    retval = H5Pcopy(plist)
+    if retval < 0:
+        raise PropertyError("Failed to copy property list %d" % plist)
+    return retval
+
+def close(hid_t plist):
+    """ (INT plist)
+
+        Terminate access to a property list.
+    """
+    cdef herr_t retval
+    retval = H5Pclose(plist)
+    if retval < 0:
+        raise PropertyError("Failed to close property list %d" % plist)
+    # Returns the raw herr_t status; callers generally ignore it.
+    return retval
+
+def get_class(hid_t plist):
+    """ (INT plist) => INT class_code
+
+        Determine the class of a property list object (one of CLASS_*).
+    """
+    cdef int retval
+    retval = H5Pget_class(plist)
+    if retval < 0:
+        raise PropertyError("Failed to determine class of property list %d" % plist)
+    return retval
+
+def equal(hid_t plist1, hid_t plist2):
+    """ (INT plist1, INT plist2) => BOOL lists_are_equal
+
+        Compare two existing property lists for equality.
+    """
+    cdef htri_t retval
+    retval = H5Pequal(plist1, plist2)
+    if retval < 0:
+        raise PropertyError("Could not compare for equality: %d vs %d" % (plist1, plist2))
+    return bool(retval)
+
+# === Dataset creation properties =============================================
+
+def set_layout(hid_t plist, int layout_code):
+    """ (INT plist, INT layout_code)    [Dataset creation]
+
+        Set dataset storage strategy; legal values are:
+        * h5d.LAYOUT_COMPACT
+        * h5d.LAYOUT_CONTIGUOUS
+        * h5d.LAYOUT_CHUNKED
+    """
+    cdef herr_t retval
+    retval = H5Pset_layout(plist, <H5D_layout_t>layout_code)
+    if retval < 0:
+        raise PropertyError("Failed to set layout of list %d to %d" % (plist, layout_code))
+    
+def get_layout(hid_t plist):
+    """ (INT plist) => INT layout_code   [Dataset creation]
+
+        Determine the storage strategy of a dataset; legal values are:
+        * h5d.LAYOUT_COMPACT
+        * h5d.LAYOUT_CONTIGUOUS
+        * h5d.LAYOUT_CHUNKED
+    """
+    cdef int retval
+    retval = <int>H5Pget_layout(plist)
+    if retval < 0:
+        raise PropertyError("Failed to get layout of list %d" % plist)
+    # Bug fix: the layout code was computed but never returned, so callers
+    # always got None.
+    return retval
+
+def set_chunk(hid_t plist, object chunksize):
+    """ (INT plist_id, TUPLE chunksize)    [Dataset creation]
+
+        Set the dataset chunk size.  It's up to you to provide values which
+        are compatible with your dataset.
+    """
+    cdef herr_t retval
+    cdef int rank
+    cdef hsize_t* dims
+    dims = NULL
+
+    rank = len(chunksize)
+    # tuple_to_dims mallocs the hsize_t array; it must be freed on every
+    # exit path below.
+    dims = tuple_to_dims(chunksize)
+    if dims == NULL:
+        raise ValueError("Bad input dimensions tuple: %s" % repr(chunksize))
+
+    retval = H5Pset_chunk(plist, rank, dims)
+    if retval < 0:
+        free(dims)
+        raise PropertyError("Failed to set chunk size to %s on list %d" % (str(chunksize), plist))
+    
+    free(dims)
+    
+def get_chunk(hid_t plist):
+    """ (INT plist_id) => TUPLE chunk_dimensions    [Dataset creation]
+
+        Obtain the dataset chunk size, as a tuple.
+    """
+    cdef int rank
+    cdef hsize_t *dims
+
+    # First call with a NULL buffer returns the rank only.
+    rank = H5Pget_chunk(plist, 0, NULL)
+    if rank < 0:
+        raise PropertyError("Failed to get chunk size on list %d" % plist)
+
+    # NOTE(review): malloc result is not checked for NULL here.
+    dims = <hsize_t*>malloc(sizeof(hsize_t)*rank)
+    rank = H5Pget_chunk(plist, rank, dims)
+    if rank < 0:
+        free(dims)
+        raise PropertyError("Failed to get chunk size on list %d" % plist)
+
+    tpl = dims_to_tuple(dims, rank)
+    if tpl is None:
+        free(dims)
+        raise ConversionError("Bad dims/tuple conversion (plist %d rank %d)" % (plist, rank))
+
+    free(dims)
+    return tpl
+
+# === Filter functions ========================================================
+
+def set_deflate(hid_t plist, unsigned int level=5):
+    """ (INT plist_id, UINT level=5)    [Dataset creation]
+
+        Enable DEFLATE (gzip) compression, at the given level (0-9, default 5).
+    """
+    cdef herr_t retval
+    retval = H5Pset_deflate(plist, level)
+    if retval < 0:
+        raise PropertyError("Error enabling DEFLATE (level %d) on list %d" % (level, plist))
+    
+def set_fletcher32(hid_t plist):
+    """ (INT plist_id)    [Dataset creation]
+
+        Enable Fletcher32 error correction on an existing list.
+    """
+    cdef herr_t retval
+    retval = H5Pset_fletcher32(plist)
+    if retval < 0:
+        raise PropertyError("Error enabling Fletcher32 checksum filter on list %d" % plist)
+
+def set_shuffle(hid_t plist):
+    """ (INT plist_id)    [Dataset creation]
+
+        Enable to use of the shuffle filter.  Use this immediately before the
+        DEFLATE filter to increase the compression ratio.
+    """
+    cdef herr_t retval
+    retval = H5Pset_shuffle(plist)
+    if retval < 0:
+        raise PropertyError("Error enabling shuffle filter on list %d" % plist)
+
+def set_szip(hid_t plist, unsigned int options, unsigned int pixels_per_block):
+    """ (INT plist, UINT options, UINT pixels_per_block)   [Dataset creation]
+
+        Enable SZIP compression.  See the HDF5 docs for argument meanings, and
+        general restrictions on use of the SZIP format.
+    """
+    cdef herr_t retval
+    retval = H5Pset_szip(plist, options, pixels_per_block)
+    if retval < 0:
+        raise PropertyError("Error enabling szip filter on list %d" % plist)
+
+def remove_filter(hid_t plist, int filter_class):
+    """ (INT plist, INT filter_class)    [Dataset creation]
+
+        Remove a filter from the pipeline.  The class code is one of 
+        h5z.FILTER_*.
+    """
+    cdef herr_t retval
+    retval = H5Premove_filter(plist, <H5Z_filter_t>filter_class)
+    if retval < 0:
+        raise PropertyError("Error removing filter %d from list %d" % (filter_class, plist))
+
+# === File access =============================================================
+
+def set_fclose_degree(hid_t fapl_id, int close_degree):
+    """ (INT fapl_id, INT close_degree)
+
+        Set the file-close degree, which determines the library behavior when
+        a file is closed when objects are still open.  See the HDF5 docs for 
+        a full explanation.  Legal values:
+
+        * h5f.CLOSE_WEAK
+        * h5f.CLOSE_SEMI
+        * h5f.CLOSE_STRONG
+        * h5f.CLOSE_DEFAULT
+    """
+    cdef herr_t retval
+    retval = H5Pset_fclose_degree(fapl_id, <H5F_close_degree_t>close_degree)
+    if retval < 0:
+        raise PropertyError("Failed to set file close degree on list %d to %d" % (fapl_id, close_degree))
+    
+
+# === Python extensions =======================================================
+
+def py_has_filter(hid_t plist, int filter_class):
+    """ (INT plist_id, INT filter_class_code) 
+        => BOOL has_filter    [Dataset creation]
+        
+        Determine if a property list has the given filter.
+    """
+    cdef herr_t retval
+    cdef unsigned int flags
+    cdef size_t dmp
+    dmp = 0
+    # Any failure (including "filter not present") is treated as False.
+    retval = H5Pget_filter_by_id(plist, filter_class, &flags, &dmp, NULL, 0, NULL)
+    if retval <= 0:
+        return False
+    return True
+    
+    
+
+    
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5s.pxd b/h5py/h5s.pxd
new file mode 100755
index 0000000..1758985
--- /dev/null
+++ b/h5py/h5s.pxd
@@ -0,0 +1,69 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+# This file contains code or comments from the HDF5 library. The complete HDF5
+# license is available in the file licenses/hdf5.txt in the distribution
+# root directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  int H5S_ALL, H5S_UNLIMITED, H5S_MAX_RANK
+
+  # Codes for defining selections
+  cdef enum H5S_seloper_t:
+    H5S_SELECT_NOOP      = -1,
+    H5S_SELECT_SET       = 0,
+    H5S_SELECT_OR,
+    H5S_SELECT_AND,
+    H5S_SELECT_XOR,
+    H5S_SELECT_NOTB,
+    H5S_SELECT_NOTA,
+    H5S_SELECT_APPEND,
+    H5S_SELECT_PREPEND,
+    H5S_SELECT_INVALID    # Must be the last one
+
+  cdef enum H5S_class_t:
+    H5S_NO_CLASS         = -1,  #/*error                                      */
+    H5S_SCALAR           = 0,   #/*scalar variable                            */
+    H5S_SIMPLE           = 1,   #/*simple data space                          */
+    H5S_COMPLEX          = 2    #/*complex data space                         */
+
+  cdef enum H5S_sel_type:
+    H5S_SEL_ERROR	= -1, 	    #/* Error			*/
+    H5S_SEL_NONE	= 0,        #/* Nothing selected 		*/
+    H5S_SEL_POINTS	= 1,        #/* Sequence of points selected	*/
+    H5S_SEL_HYPERSLABS  = 2,    #/* "New-style" hyperslab selection defined	*/
+    H5S_SEL_ALL		= 3,        #/* Entire extent selected	*/
+    H5S_SEL_N		= 4	        #/*THIS MUST BE LAST		*/
+
+
+  # --- Dataspace operations --------------------------------------------------
+  hid_t H5Screate(H5S_class_t type)
+  hid_t H5Screate_simple(int rank, hsize_t dims[], hsize_t maxdims[])
+  int H5Sget_simple_extent_ndims(hid_t space_id)
+  int H5Sget_simple_extent_dims(hid_t space_id, hsize_t dims[],
+                                hsize_t maxdims[])
+  herr_t H5Sselect_hyperslab(hid_t space_id, H5S_seloper_t op,
+                             hsize_t start[], hsize_t _stride[],
+                             hsize_t count[], hsize_t _block[])
+  herr_t H5Sclose(hid_t space_id)
+  herr_t H5Sget_select_bounds(hid_t space_id, hsize_t *start, hsize_t *end)
+  herr_t H5Sselect_none(hid_t space_id)
+  H5S_class_t H5Sget_simple_extent_type(hid_t space_id)
+
diff --git a/h5py/h5s.pyx b/h5py/h5s.pyx
new file mode 100755
index 0000000..e3a3c6e
--- /dev/null
+++ b/h5py/h5s.pyx
@@ -0,0 +1,262 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+
+"""
+    Low-level interface to the "H5S" family of data-space functions.
+
+    This module is incomplete; it currently only implements hyperslab and 
+    scalar operations.
+"""
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free
+from h5  cimport herr_t, hid_t, size_t, hsize_t
+from utils cimport tuple_to_dims, dims_to_tuple
+
+# Runtime imports
+import h5
+from h5 import DDict
+from errors import DataspaceError
+
+
+# === Public constants and data structures ====================================
+
+#enum H5S_seloper_t:
+SELECT_NOOP     = H5S_SELECT_NOOP
+SELECT_SET      = H5S_SELECT_SET      
+SELECT_OR       = H5S_SELECT_OR
+SELECT_AND      = H5S_SELECT_AND
+SELECT_XOR      = H5S_SELECT_XOR
+SELECT_NOTB     = H5S_SELECT_NOTB
+SELECT_NOTA     = H5S_SELECT_NOTA
+SELECT_APPEND   = H5S_SELECT_APPEND
+SELECT_PREPEND  = H5S_SELECT_PREPEND
+SELECT_INVALID  = H5S_SELECT_INVALID 
+SELECT_MAPPER = {H5S_SELECT_NOOP: 'NO-OP', H5S_SELECT_SET: 'SET', H5S_SELECT_OR: 'OR',
+                 H5S_SELECT_AND: 'AND', H5S_SELECT_XOR: 'XOR', H5S_SELECT_NOTB: 'NOTB',
+                 H5S_SELECT_NOTA: 'NOTA', H5S_SELECT_APPEND: 'APPEND',
+                 H5S_SELECT_PREPEND: 'PREPEND', H5S_SELECT_INVALID: 'INVALID' }
+SELECT_MAPPER = DDict(SELECT_MAPPER)
+
+SPACE_ALL       = H5S_ALL
+SPACE_UNLIMITED = H5S_UNLIMITED
+SPACE_MAPPER = DDict({H5S_ALL: 'ALL', H5S_UNLIMITED: 'UNLIMITED'})
+
+#enum H5S_class_t
+CLASS_NO_CLASS = H5S_NO_CLASS
+CLASS_SCALAR   = H5S_SCALAR
+CLASS_SIMPLE   = H5S_SIMPLE
+CLASS_COMPLEX  = H5S_COMPLEX
+CLASS_MAPPER = {H5S_NO_CLASS: 'NO CLASS', H5S_SCALAR: 'SCALAR',
+                H5S_SIMPLE: 'SIMPLE', H5S_COMPLEX: 'COMPLEX' }
+CLASS_MAPPER = DDict(CLASS_MAPPER)
+
+# === Basic dataspace operations ==============================================
+
+def close(hid_t space_id):
+    """ (INT space_id)
+
+        Terminate access to the given dataspace identifier.
+        Raises DataspaceError if the HDF5 call reports failure.
+    """
+    if H5Sclose(space_id) < 0:
+        raise DataspaceError("Failed to close dataspace %d" % space_id)
+
+def create(int class_code):
+    """ (INT class_code) => INT new_space_id
+
+        Create a new HDF5 dataspace object, of the given class.  Legal values
+        are CLASS_SCALAR and CLASS_SIMPLE.
+
+        Raises DataspaceError if the HDF5 call fails.
+    """
+    cdef hid_t retval
+    retval = H5Screate(<H5S_class_t>class_code)
+    if retval < 0:
+        # Bug fix: the format expression previously read `... %d`, which
+        # referenced an undefined name and raised NameError instead of the
+        # intended DataspaceError.
+        raise DataspaceError("Failed to create dataspace of class %d" % class_code)
+    return retval
+
+def create_simple(object dims_tpl, object max_dims_tpl=None):
+    """ (TUPLE dims_tpl, TUPLE max_dims_tpl) => INT new_space_id
+
+        Create a simple (slab) dataspace from a tuple of dimensions.  Every
+        element of dims_tpl must be a positive integer.  You can also specify
+        the maximum dataspace size, via the tuple max_dims.  The special
+        integer h5s.SPACE_UNLIMITED, as an element of max_dims, indicates an
+        unlimited dimension.
+    """
+    cdef hid_t space_id
+    cdef int rank
+    cdef hsize_t* dims
+    cdef hsize_t* max_dims
+    # Pre-set to NULL so the finally-clause can test/free unconditionally,
+    # even when an exception fires before either allocation happens.
+    dims = NULL
+    max_dims = NULL
+
+    rank = len(dims_tpl)
+    if max_dims_tpl is not None and len(max_dims_tpl) != rank:
+        raise ValueError("Dims/max dims tuples must be the same rank: %s vs %s" % (repr(dims_tpl),repr(max_dims_tpl)))
+
+    try:
+        # tuple_to_dims returns a malloc'ed hsize_t array, or NULL on bad input.
+        dims = tuple_to_dims(dims_tpl)
+        if dims == NULL:
+            raise ValueError("Bad dimensions tuple: %s" % repr(dims_tpl))
+
+        if max_dims_tpl is not None:
+            max_dims = tuple_to_dims(max_dims_tpl)
+            if max_dims == NULL:
+                raise ValueError("Bad max dimensions tuple: %s" % repr(max_dims_tpl))
+
+        # max_dims may legitimately remain NULL; H5Screate_simple accepts that.
+        space_id = H5Screate_simple(rank, dims, max_dims)
+
+        if space_id < 0:
+            raise DataspaceError("Failed to create dataspace with dimensions %s" % str(dims_tpl))
+    finally:
+        # Release the C-side dimension buffers on both success and failure.
+        if dims != NULL:
+            free(dims)
+        if max_dims != NULL:
+            free(max_dims)
+
+    return space_id
+
+def get_simple_extent_ndims(hid_t space_id):
+    """ (INT space_id) => INT rank
+
+        Determine the rank of a "simple" (slab) dataspace.
+    """
+    cdef int rank
+    rank = H5Sget_simple_extent_ndims(space_id)
+    if rank < 0:
+        raise DataspaceError("Failed to retrieve dimension info for dataspace %d" % space_id)
+    return rank
+
+
+def get_simple_extent_dims(hid_t space_id, int maxdims=0):
+    """ (INT space_id, BOOL maxdims=False) => TUPLE shape
+
+        Determine the shape of a "simple" (slab) dataspace.  If "maxdims" is
+        True, retrieve the maximum dataspace size instead.
+    """
+    cdef int rank
+    cdef hsize_t* dims
+    dims = NULL
+    dims_tpl = None
+
+    # First call with NULL buffers only queries the rank, so we know how
+    # much to allocate below.
+    rank = H5Sget_simple_extent_dims(space_id, NULL, NULL)
+    if rank < 0:
+        raise DataspaceError("Failed to retrieve dimension info for dataspace %d" % space_id)
+
+    dims = <hsize_t*>malloc(sizeof(hsize_t)*rank)
+    try:
+        # Second call fills either the current-dims or the max-dims slot,
+        # depending on what the caller asked for.
+        if maxdims:
+            rank = H5Sget_simple_extent_dims(space_id, NULL, dims)
+        else:
+            rank = H5Sget_simple_extent_dims(space_id, dims, NULL)
+        if rank < 0:
+            raise DataspaceError("Failed to retrieve dimension info for dataspace %d" % space_id)
+
+        dims_tpl = dims_to_tuple(dims, rank)
+        if dims_tpl is None:
+            raise DataspaceError("Can't unwrap dimensions on dataspace %d rank %d" % (space_id, rank))
+    finally:
+        # Free the scratch buffer whether or not conversion succeeded.
+        if dims != NULL:
+            free(dims)
+
+    return dims_tpl
+    
+def get_simple_extent_type(hid_t space_id):
+    """ (INT space_id) => INT class_code
+
+        Class code is either CLASS_SCALAR or CLASS_SIMPLE.
+    """
+    cdef int class_code
+    class_code = <int>H5Sget_simple_extent_type(space_id)
+    if class_code < 0:
+        raise DataspaceError("Can't determine type of dataspace %d" % space_id)
+    return class_code
+
+# === Dataspace manipulation ==================================================
+
+def select_hyperslab(hid_t space_id, object start, object count, 
+    object stride=None, object block=None, int op=H5S_SELECT_SET):
+    """ (INT space_id, TUPLE start, TUPLE count, TUPLE stride=None, 
+            TUPLE block=None, INT op=SELECT_SET)
+     
+        Select a block region from an existing dataspace.  See the HDF5
+        documentation for the meaning of the "block" and "op" keywords.
+    """
+    cdef herr_t retval
+    cdef int rank
+    cdef hsize_t* start_array
+    cdef hsize_t* count_array
+    cdef hsize_t* stride_array
+    cdef hsize_t* block_array
+
+    # Pre-set all four buffers to NULL so the finally-clause can free
+    # whichever subset was actually allocated before an exception fired.
+    start_array = NULL
+    count_array = NULL
+    stride_array = NULL
+    block_array = NULL
+
+    # Validate tuple lengths up front, before any C-side allocation.
+    rank = get_simple_extent_ndims(space_id)
+    if len(start) != rank:
+        raise DataspaceError('Dimensions of input "%s" must match rank of dataspace (%d)' % (repr(start), rank))
+
+    if len(count) != rank:
+        raise DataspaceError("Dimensions of all arguments must be the same and of rank %d" % rank)
+
+    if stride is not None:
+        if len(stride) != rank:
+            raise DataspaceError("Dimensions of all arguments must be the same and of rank %d" % rank)
+    
+    if block is not None:
+        if len(block) != rank:
+            raise DataspaceError("Dimensions of all arguments must be the same and of rank %d" % rank)
+
+    try:
+        # tuple_to_dims returns a malloc'ed hsize_t array, or NULL on bad input.
+        start_array = tuple_to_dims(start)
+        if start_array == NULL:
+            raise ValueError("Invalid start tuple: %s" % repr(start))
+
+        count_array = tuple_to_dims(count)
+        if count_array == NULL:
+            raise ValueError("Invalid count tuple: %s" % repr(count))
+
+        if stride is not None:
+            stride_array = tuple_to_dims(stride)
+            if stride_array == NULL:
+                raise ValueError("Invalid stride tuple: %s" % repr(stride))
+
+        if block is not None:
+            block_array = tuple_to_dims(block)
+            if block_array == NULL:
+                raise ValueError("Invalid block tuple: %s" % repr(block))
+
+        # stride_array/block_array may remain NULL; H5Sselect_hyperslab
+        # accepts NULL for those parameters.
+        retval = H5Sselect_hyperslab(space_id, <H5S_seloper_t>op, start_array, 
+                                     stride_array, count_array, block_array)
+        if retval < 0:
+            raise DataspaceError("Failed to select hyperslab on dataspace %d" % space_id)
+    finally:
+        # Release every buffer that was allocated, success or failure.
+        if start_array != NULL:
+            free(start_array)
+        if count_array != NULL:
+            free(count_array)
+        if stride_array != NULL:
+            free(stride_array)
+        if block_array != NULL:
+            free(block_array)
+
+
+
+
+
+
+
+
diff --git a/h5py/h5t.pxd b/h5py/h5t.pxd
new file mode 100755
index 0000000..6a5dcd7
--- /dev/null
+++ b/h5py/h5t.pxd
@@ -0,0 +1,182 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+from defs_c cimport size_t, time_t
+from h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+cdef extern from "hdf5.h":
+
+  cdef enum:
+    H5T_C_S1
+    H5T_NATIVE_B8
+    H5T_NATIVE_CHAR
+    H5T_NATIVE_SCHAR
+    H5T_NATIVE_UCHAR
+    H5T_NATIVE_SHORT
+    H5T_NATIVE_USHORT
+    H5T_NATIVE_INT
+    H5T_NATIVE_UINT
+    H5T_NATIVE_LONG
+    H5T_NATIVE_ULONG
+    H5T_NATIVE_LLONG
+    H5T_NATIVE_ULLONG
+    H5T_NATIVE_FLOAT
+    H5T_NATIVE_DOUBLE
+    H5T_NATIVE_LDOUBLE
+
+  # Byte orders
+  cdef enum H5T_order_t:
+    H5T_ORDER_ERROR      = -1,  # error
+    H5T_ORDER_LE         = 0,   # little endian
+    H5T_ORDER_BE         = 1,   # big endian
+    H5T_ORDER_VAX        = 2,   # VAX mixed endian
+    H5T_ORDER_NONE       = 3    # no particular order (strings, bits,..)
+
+  # HDF5 signed enums
+  cdef enum H5T_sign_t:
+    H5T_SGN_ERROR        = -1,  # error
+    H5T_SGN_NONE         = 0,   # this is an unsigned type
+    H5T_SGN_2            = 1,   # two's complement
+    H5T_NSGN             = 2    # this must be last!
+
+  # HDF5 type classes
+  cdef enum H5T_class_t:
+    H5T_NO_CLASS         = -1,  # error
+    H5T_INTEGER          = 0,   # integer types
+    H5T_FLOAT            = 1,   # floating-point types
+    H5T_TIME             = 2,   # date and time types
+    H5T_STRING           = 3,   # character string types
+    H5T_BITFIELD         = 4,   # bit field types
+    H5T_OPAQUE           = 5,   # opaque types
+    H5T_COMPOUND         = 6,   # compound types
+    H5T_REFERENCE        = 7,   # reference types
+    H5T_ENUM             = 8,   # enumeration types
+    H5T_VLEN             = 9,   # variable-length types
+    H5T_ARRAY            = 10,  # array types
+    H5T_NCLASSES                # this must be last
+
+  # "Standard" types
+  cdef enum:
+    H5T_STD_I8LE
+    H5T_STD_I16LE
+    H5T_STD_I32LE
+    H5T_STD_I64LE
+    H5T_STD_U8LE
+    H5T_STD_U16LE
+    H5T_STD_U32LE
+    H5T_STD_U64LE
+    H5T_STD_B8LE
+    H5T_STD_B16LE
+    H5T_STD_B32LE
+    H5T_STD_B64LE
+    H5T_IEEE_F32LE
+    H5T_IEEE_F64LE
+    H5T_STD_I8BE
+    H5T_STD_I16BE
+    H5T_STD_I32BE
+    H5T_STD_I64BE
+    H5T_STD_U8BE
+    H5T_STD_U16BE
+    H5T_STD_U32BE
+    H5T_STD_U64BE
+    H5T_STD_B8BE
+    H5T_STD_B16BE
+    H5T_STD_B32BE
+    H5T_STD_B64BE
+    H5T_IEEE_F32BE
+    H5T_IEEE_F64BE
+
+  cdef enum:
+    H5T_NATIVE_INT8
+    H5T_NATIVE_UINT8
+    H5T_NATIVE_INT16
+    H5T_NATIVE_UINT16
+    H5T_NATIVE_INT32
+    H5T_NATIVE_UINT32
+    H5T_NATIVE_INT64
+    H5T_NATIVE_UINT64
+
+  # Types which are particular to UNIX (for Time types)
+  cdef enum:
+    H5T_UNIX_D32LE
+    H5T_UNIX_D64LE
+    H5T_UNIX_D32BE
+    H5T_UNIX_D64BE
+
+ # --- Datatype operations ---------------------------------------------------
+  # General operations
+  hid_t         H5Tcreate(H5T_class_t type, size_t size)
+  hid_t         H5Topen(hid_t loc, char* name)
+  herr_t        H5Tcommit(hid_t loc_id, char* name, hid_t type)
+  htri_t        H5Tcommitted(hid_t type)
+  hid_t         H5Tcopy(hid_t type_id)
+  H5T_class_t   H5Tget_class(hid_t type_id)
+  hid_t         H5Tget_super(hid_t type)
+  htri_t        H5Tdetect_class(hid_t type_id, H5T_class_t dtype_class)
+  herr_t        H5Tclose(hid_t type_id)
+  herr_t        H5Tconvert(hid_t src_id, hid_t dst_id, size_t nelmts, void *buf, void *background, hid_t plist_id  )
+
+  # Atomic datatypes
+  size_t        H5Tget_size(hid_t type_id)
+  herr_t        H5Tset_size(hid_t type_id, size_t size)
+  H5T_order_t   H5Tget_order(hid_t type_id)
+  herr_t        H5Tset_order(hid_t type_id, H5T_order_t order)
+  hsize_t       H5Tget_precision(hid_t type_id)
+  herr_t        H5Tset_precision(hid_t type_id, size_t prec)
+  int           H5Tget_offset(hid_t type_id)
+  herr_t        H5Tset_offset(hid_t type_id, size_t offset)
+                # missing: get_pad
+                # missing: set_pad
+  H5T_sign_t    H5Tget_sign(hid_t type_id)
+  herr_t        H5Tset_sign(hid_t type_id, H5T_sign_t sign)
+                # missing: bunch of floating-point crap nobody uses
+                # missing: g/s strpad
+
+  # VLENs
+  hid_t     H5Tvlen_create(hid_t base_type_id)
+  htri_t    H5Tis_variable_str(hid_t dtype_id)
+
+  # Compound data types
+  int           H5Tget_nmembers(hid_t type_id)
+  H5T_class_t   H5Tget_member_class(hid_t type_id, int member_no)
+  char*         H5Tget_member_name(hid_t type_id, unsigned membno)
+  hid_t         H5Tget_member_type(hid_t type_id, unsigned membno)
+  #hid_t         H5Tget_native_type(hid_t type_id, H5T_direction_t direction)
+  int           H5Tget_member_offset(hid_t type_id, int membno)
+  int           H5Tget_member_index(hid_t type_id, char* name)
+  herr_t        H5Tinsert(hid_t parent_id, char *name, size_t offset,
+                   hid_t member_id)
+  herr_t        H5Tpack(hid_t type_id)
+
+  # Enumerated types
+  hid_t     H5Tenum_create(hid_t base_id)
+  herr_t    H5Tenum_insert(hid_t type, char *name, void *value)
+  herr_t    H5Tenum_nameof( hid_t type, void *value, char *name, size_t size  )
+  herr_t    H5Tenum_valueof( hid_t type, char *name, void *value  )
+  herr_t    H5Tget_member_value(hid_t type,  unsigned int memb_no, void *value  )
+  #char*     H5Tget_member_name(hid_t type_id, unsigned field_idx  )
+  #int       H5Tget_member_index(hid_t type_id, char * field_name  )
+
+  # Array data types
+  hid_t H5Tarray_create(hid_t base_id, int ndims, hsize_t dims[], int perm[])
+  int   H5Tget_array_ndims(hid_t type_id)
+  int   H5Tget_array_dims(hid_t type_id, hsize_t dims[], int perm[])
+
+  # Opaque data types
+  herr_t    H5Tset_tag(hid_t type_id, char* tag)
+  char*     H5Tget_tag(hid_t type_id)
+
+
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
new file mode 100755
index 0000000..e6c5095
--- /dev/null
+++ b/h5py/h5t.pyx
@@ -0,0 +1,970 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    HDF5 "H5T" data-type API
+
+    Provides access to the HDF5 data-type object interface.  Functions
+    are provided to convert HDF5 datatype object back and forth from Numpy
+    dtype objects.  Constants are also defined in this module for a variety
+    of HDF5 native types and classes. Points of interest:
+
+    (1) Enumerations
+    There is no native Numpy or Python type for enumerations.  Since an
+    enumerated type is simply a mapping between string names and integer
+    values, I have implemented enum support through dictionaries.  An HDF5
+    H5T_ENUM type is converted to the appropriate Numpy integer type (e.g.
+    <u4, etc.), and a dictionary mapping names to values is also generated.
+    Since dtype objects cannot be subclassed (why?) and have no provision
+    for directly attached metadata, the dtype is given a single named field 
+    ("enum") and the dictionary stored in the metadata for that field. An
+    example dtype declaration for this is:
+
+        enum_dict = {'RED': 0L, 'GREEN': 1L}
+
+        dtype( ('<i4', [ ( (enum_dict, 'enum'),   '<i4' )] ) )
+                  ^             ^         ^         ^
+             (main type)  (metadata) (field name) (field type)
+
+    The functions py_attach_enum and py_recover_enum simplify the attachment
+    and recovery of enumeration dictionaries from integer dtype objects.
+
+    (2) Complex numbers
+    Since HDF5 has no native complex types defined, and the native Numpy
+    representation is a struct with two floating-point members, complex
+    numbers are saved as HDF5 compound objects with IEEE 32/64 floating point
+    and field names (by default) "r" and "i".  Complex numbers can be auto-
+    recovered from HDF5 objects provided they match this format and have
+    compatible field names.  Since other people may have named their fields
+    e.g. "img" and "real", these names can be changed.  The function
+    py_set_complex_names(real_name, imaginary_name) allows you to select at
+    runtime what names are used to read in and write out complex-compound
+    objects.  To turn off this behavior completely, simply call 
+    py_set_complex_names with no arguments.
+
+"""
+
+
+# Pyrex compile-time imports
+from defs_c   cimport malloc, free
+from h5  cimport herr_t, hid_t, size_t, hsize_t, htri_t
+from h5e cimport H5Eset_auto
+from python cimport PyTuple_New, PyTuple_SetItem, Py_INCREF
+from h5p cimport H5P_DEFAULT
+cimport numpy
+from numpy cimport dtype
+from utils cimport create_ieee_complex64, create_ieee_complex128,\
+                        tuple_to_dims, dims_to_tuple
+
+# Runtime imports
+import h5
+from errors import DatatypeError, ConversionError
+import sys
+
+H5Eset_auto(NULL,NULL)
+
+# === Public constants and data structures ====================================
+
+# Enumeration H5T_class_t
+CLASS_NO_CLASS  = H5T_NO_CLASS
+CLASS_INTEGER   = H5T_INTEGER
+CLASS_FLOAT     = H5T_FLOAT
+CLASS_TIME      = H5T_TIME
+CLASS_STRING    = H5T_STRING
+CLASS_BITFIELD  = H5T_BITFIELD
+CLASS_OPAQUE    = H5T_OPAQUE
+CLASS_COMPOUND  = H5T_COMPOUND
+CLASS_REFERENCE = H5T_REFERENCE
+CLASS_ENUM      = H5T_ENUM
+CLASS_VLEN      = H5T_VLEN
+CLASS_ARRAY     = H5T_ARRAY
+CLASS_MAPPER = {H5T_NO_CLASS: "ERROR", H5T_INTEGER: "INTEGER", H5T_FLOAT: "FLOAT",
+                H5T_TIME: "TIME", H5T_STRING: "STRING", H5T_BITFIELD: "BITFIELD",
+                H5T_OPAQUE: "OPAQUE", H5T_COMPOUND: "COMPOUND", H5T_REFERENCE: "REFERENCE",
+                H5T_ENUM: "ENUM", H5T_VLEN: "VLEN", H5T_ARRAY: "ARRAY"}
+
+# Enumeration H5T_sign_t
+SIGN_NONE   = H5T_SGN_NONE
+SIGN_2      = H5T_SGN_2
+SIGN_MAPPER = {H5T_SGN_NONE: "UNSIGNED", H5T_SGN_2: "SIGNED"}
+
+# Enumeration H5T_order_t
+ORDER_LE    = H5T_ORDER_LE
+ORDER_BE    = H5T_ORDER_BE
+ORDER_VAX   = H5T_ORDER_VAX
+ORDER_NONE  = H5T_ORDER_NONE
+ORDER_MAPPER = {H5T_ORDER_LE: "LITTLE-ENDIAN", H5T_ORDER_BE: "BIG-ENDIAN",
+                H5T_ORDER_VAX: "VAX MIXED-ENDIAN" }
+if sys.byteorder == "little":    # Custom python addition
+    pyORDER_NATIVE = H5T_ORDER_LE
+else:
+    pyORDER_NATIVE = H5T_ORDER_BE
+
+# --- Built-in HDF5 datatypes -------------------------------------------------
+
+# IEEE floating-point
+IEEE_F32LE = H5T_IEEE_F32LE
+IEEE_F32BE = H5T_IEEE_F32BE
+IEEE_F64LE = H5T_IEEE_F64LE 
+IEEE_F64BE = H5T_IEEE_F64BE
+
+# Signed 2's complement integer types
+STD_I8LE  = H5T_STD_I8LE
+STD_I16LE = H5T_STD_I16LE
+STD_I32LE = H5T_STD_I32LE
+STD_I64LE = H5T_STD_I64LE
+
+STD_I8BE  = H5T_STD_I8BE
+STD_I16BE = H5T_STD_I16BE
+STD_I32BE = H5T_STD_I32BE
+STD_I64BE = H5T_STD_I64BE
+
+# Unsigned integers
+STD_U8LE  = H5T_STD_U8LE
+STD_U16LE = H5T_STD_U16LE
+STD_U32LE = H5T_STD_U32LE
+STD_U64LE = H5T_STD_U64LE
+
+STD_U8BE  = H5T_STD_U8BE
+STD_U16BE = H5T_STD_U16BE
+STD_U32BE = H5T_STD_U32BE
+STD_U64BE = H5T_STD_U64BE
+
+# Native integer types by bytesize
+NATIVE_INT8 = H5T_NATIVE_INT8
+NATIVE_UINT8 = H5T_NATIVE_UINT8
+NATIVE_INT16 = H5T_NATIVE_INT16
+NATIVE_UINT16 = H5T_NATIVE_UINT16
+NATIVE_INT32 = H5T_NATIVE_INT32
+NATIVE_UINT32 = H5T_NATIVE_UINT32
+NATIVE_INT64 = H5T_NATIVE_INT64
+NATIVE_UINT64 = H5T_NATIVE_UINT64
+
+# Null terminated (C) string type
+CSTRING = H5T_C_S1
+
+# === General datatype operations =============================================
+
+def create(int classtype, size_t size):
+    """ (INT class, INT size) => INT type_id
+
+        Create a new HDF5 type object of the given class and size in bytes.
+        Legal values are CLASS_COMPOUND, CLASS_OPAQUE, CLASS_ENUM.
+    """
+    cdef hid_t new_id
+    new_id = H5Tcreate(<H5T_class_t>classtype, size)
+    if new_id < 0:
+        raise DatatypeError("Failed to create datatype of class %s, size %d" % (str(classtype), size))
+    return new_id
+
+def open(hid_t group_id, char* name):
+    """ (INT group_id, STRING name) => INT type_id
+
+        Open a named (committed) datatype stored in a file.
+    """
+    cdef hid_t type_id
+    type_id = H5Topen(group_id, name)
+    if type_id < 0:
+        raise DatatypeError('Failed to open datatype "%s" on group %d' % (name, group_id))
+    return type_id
+
+def copy(hid_t type_id):
+    """ (INT type_id) => INT new_type_id
+
+        Create a transient copy of an existing HDF type object.
+    """
+    cdef hid_t new_id
+    new_id = H5Tcopy(type_id)
+    if new_id < 0:
+        raise DatatypeError("Failed to copy datatype %d" % type_id)
+    return new_id
+
+def close(hid_t type_id):
+    """ (INT type_id)
+
+        Release the given datatype identifier.
+    """
+    if H5Tclose(type_id) < 0:
+        raise DatatypeError("Failed to close datatype %d" % type_id)
+
+def get_class(hid_t type_id):
+    """ (INT type_id) => INT class
+
+        Get <type_id>'s class, one of h5t.CLASS_*
+    """
+    cdef int class_code
+    class_code = <int>H5Tget_class(type_id)
+    if class_code < 0:
+        raise DatatypeError("Failed to determine class of datatype %d" % type_id)
+    return class_code
+
+def get_super(hid_t type_id):
+    """ (INT type_id) => INT super_type_id
+
+        Determine the parent type of an array or enumeration datatype.
+    """
+    cdef hid_t parent_id
+    parent_id = H5Tget_super(type_id)
+    if parent_id < 0:
+        raise DatatypeError("Can't determine base datatype of %d" % type_id)
+    return parent_id
+
+def detect_class(hid_t type_id, int classtype):
+    """ (INT type_id, INT class) => BOOL class_is_present
+
+        Determine if a member of class <class> exists in <type_id>
+    """
+    cdef htri_t present
+    present = H5Tdetect_class(type_id, <H5T_class_t>classtype)
+    if present < 0:
+        raise DatatypeError("Couldn't inspect datatype %d for class %s" % (type_id, str(classtype)))
+    return bool(present)
+
+def commit(hid_t loc_id, char* name, hid_t type_id):
+    """ (INT group_id, STRING name, INT type_id)
+
+        Commit a transient datatype to a named datatype in a file.
+    """
+    if H5Tcommit(loc_id, name, type_id) < 0:
+        raise DatatypeError("Failed to commit datatype %d under group %d with name '%s'" % (type_id, loc_id, name))
+
+def committed(hid_t type_id):
+    """ (INT type_id) => BOOL is_committed
+
+        Determine if a given type object is named (T) or transient (F).
+    """
+    cdef htri_t result
+    result = H5Tcommitted(type_id)
+    if result < 0:
+        raise DatatypeError("Failed to determine status of datatype %d" % type_id)
+    return bool(result)
+
+# === Atomic datatype operations ==============================================
+#     H5Tget_size, H5Tset_size, H5Tget_order, H5Tset_order, H5Tget_precision, \
+#     H5Tset_precision, H5Tget_offset, H5Tset_offset, H5Tget_sign, H5Tset_sign
+
+def get_size(hid_t type_id):
+    """ (INT type_id) => INT size_in_bytes
+
+        Obtain the total size of the datatype in bytes.
+    """
+    cdef size_t nbytes
+    nbytes = H5Tget_size(type_id)
+    # Failure is signalled by a zero return, not a negative one.
+    if nbytes == 0:
+        raise DatatypeError("Failed to get size of datatype %d" % type_id)
+    return nbytes
+
+def set_size(hid_t type_id, size_t size):
+    """ (INT type_id, INT size)
+
+        Set the total size of the datatype, in bytes.  Useful mostly for
+        string types.
+    """
+    cdef herr_t status
+    status = H5Tset_size(type_id, size)
+    if status < 0:
+        raise DatatypeError("Failed to set size of datatype %d to %s" % (type_id, size))
+    return status
+
+def get_order(hid_t type_id):
+    """ (INT type_id) => INT order
+
+        Obtain the byte order of the datatype; one of h5t.ORDER_* or
+        h5t.pyORDER_NATIVE
+    """
+    cdef int byte_order
+    byte_order = <int>H5Tget_order(type_id)
+    if byte_order < 0:
+        raise DatatypeError("Failed to determine order of datatype %d" % type_id)
+    return byte_order
+
+def set_order(hid_t type_id, int order):
+    """ (INT type_id, INT order)
+
+        Set the byte order of the datatype. <order> must be one of 
+        h5t.ORDER_* or h5t.pyORDER_NATIVE
+    """
+    if H5Tset_order(type_id, <H5T_order_t>order) < 0:
+        raise DatatypeError("Failed to set order of datatype %d" % type_id)
+
+def get_sign(hid_t type_id):
+    """ (INT type_id) => INT sign
+
+        Obtain the "signedness" of the datatype; one of h5t.SIGN_*
+    """
+    cdef int sign_code
+    sign_code = <int>H5Tget_sign(type_id)
+    if sign_code < 0:
+        raise DatatypeError("Failed to get sign of datatype %d" % type_id)
+    return sign_code
+
+def set_sign(hid_t type_id, int sign):
+    """ (INT type_id, INT sign)
+
+        Set the "signedness" of the datatype; one of h5t.SIGN_*
+    """
+    if H5Tset_sign(type_id, <H5T_sign_t>sign) < 0:
+        raise DatatypeError("Failed to set sign of datatype %d" % type_id)
+
+def is_variable_str(hid_t type_id):
+    """ (INT type_id) => BOOL is_variable
+
+        Determine if the given string datatype is a variable-length string.
+        Please note that reading/writing data in this format is impossible;
+        only fixed-length strings are currently supported.
+    """
+    cdef htri_t result
+    result = H5Tis_variable_str(type_id)
+    if result < 0:
+        raise DatatypeError("Failed to inspect type %d" % type_id)
+    return bool(result)
+
+# === Compound datatype operations ============================================
+# get_nmembers
+# get_member_class
+# get_member_name
+# get_member_index
+# get_member_offset
+# get_member_type
+# insert
+# pack
+
+def get_nmembers(hid_t type_id):
+    """ (INT type_id) => INT number_of_members
+
+        Determine the number of members in a compound or enumerated type.
+    """
+    cdef int count
+    count = H5Tget_nmembers(type_id)
+    if count < 0:
+        raise DatatypeError("Failed to determine members of datatype %d" % type_id)
+    return count
+
+def get_member_class(hid_t type_id, int member):
+    """ (INT type_id, INT member_index) => INT class
+
+        Determine the datatype class of the member of a compound type,
+        identified by its index (must be 0 <= idx <= nmembers).  Returns
+        one of h5t.CLASS_*
+    """
+    cdef int retval
+    retval = H5Tget_member_class(type_id, member)
+    if retval < 0:
+        # Include context in the error, matching the other wrappers here.
+        raise DatatypeError("Failed to determine class of member %d of datatype %d" % (member, type_id))
+    # Bug fix: the class code was previously computed but never returned,
+    # so callers always received None.
+    return retval
+    
+def get_member_name(hid_t type_id, int member):
+    """ (INT type_id, INT member_index) => STRING name
+    
+        Determine the name of a member of a compound or enumerated type,
+        identified by its index.
+    """
+
+    cdef char* name
+    cdef object pyname
+    name = NULL
+    name = H5Tget_member_name(type_id, member)
+    if name != NULL:
+        # Copy the C string into a Python object before freeing the buffer
+        # that the HDF5 library allocated for us.
+        pyname = name
+        free(name)
+        return pyname
+    raise DatatypeError()
+
+def get_member_index(hid_t type_id, char* name):
+    """ (INT type_id, STRING name) => INT index
+
+        Determine the index of a member of a compound or enumerated datatype
+        identified by a string name.
+    """
+    cdef int retval
+    retval = H5Tget_member_index(type_id, name)
+    if retval < 0:
+        # Bug fix: the format string previously read "'%'" (a literal percent
+        # sign), leaving only one conversion for two arguments and raising
+        # TypeError instead of the intended DatatypeError.
+        raise DatatypeError("Failed to determine index of field '%s' in datatype %d" % (name, type_id))
+    return retval
+
+def get_member_offset(hid_t type_id, int member):
+    """ (INT type_id, INT member_index) => INT offset
+
+        Determine the offset, in bytes, of the beginning of the specified
+        member of a compound datatype.  Due to a limitation of the HDF5
+        library, this function will never raise an exception.  It returns
+        0 on failure; be careful as this is also a legal offset value.
+    """
+    cdef size_t byte_offset
+    byte_offset = H5Tget_member_offset(type_id, member)
+    return byte_offset
+
+def get_member_type(hid_t type_id, int member):
+    """ (INT type_id, INT member_index) => INT type_id
+
+        Create a copy of a member of a compound datatype, identified by its
+        index.  You are responsible for closing it when finished.
+    """
+    cdef hid_t retval
+    retval = H5Tget_member_type(type_id, member)
+    if retval < 0:
+        # Bug fix: previously raised the misspelled name "DataTypeError",
+        # which is undefined and produced a NameError.  Use the imported
+        # DatatypeError with a descriptive message, like the other wrappers.
+        raise DatatypeError("Failed to get type of member %d of datatype %d" % (member, type_id))
+    return retval
+
+
+def insert(hid_t type_id, char* name, size_t offset, hid_t field_id):
+    """ (INT compound_type_id, STRING name, INT offset, INT member_type)
+
+        Add a member <member_type> named <name> to an existing compound
+        datatype, at <offset> bytes from the beginning of the compound type.
+    """
+    if H5Tinsert(type_id, name, offset, field_id) < 0:
+        raise DatatypeError("Failed to insert field %s into compound type" % name)
+
+def pack(hid_t type_id):
+    """ (INT type_id)
+
+        Recursively removes padding (introduced on account of e.g. compiler
+        alignment rules) from a compound datatype.
+    """
+    cdef herr_t status
+    status = H5Tpack(type_id)
+    if status < 0:
+        raise DatatypeError("Failed to pack datatype %d" % type_id)
+    return status
+
+# === Array datatype operations ===============================================
+# array_create
+# get_array_ndims
+# get_array_dims
+
+
+
+def array_create(hid_t base, object dims_tpl):
+    """ (INT base_type_id, TUPLE dimensions)
+
+        Create a new array datatype, of parent type <base_type_id> and
+        dimensions given via a tuple of non-negative integers.  "Unlimited" 
+        dimensions are not allowed.
+    """
+    cdef int rank
+    cdef hsize_t *dims
+    cdef hid_t type_id
+
+    # Convert the Python tuple into a malloc'ed hsize_t array.
+    dims = tuple_to_dims(dims_tpl)
+    if dims == NULL:
+        raise ValueError("Invalid dimensions tuple: %s" % str(dims_tpl))
+
+    rank = len(dims_tpl)
+    type_id = H5Tarray_create(base, rank, dims, NULL)
+    # The C-side buffer is no longer needed once the call returns.
+    free(dims)
+
+    if type_id < 0:
+        raise DatatypeError("Failed to create datatype based on %d, dimensions %s" % (base, str(dims_tpl)))
+    return type_id
+
+def get_array_ndims(hid_t type_id):
+    """ (INT type_id) => INT rank
+
+        Get the rank of the given array datatype.
+    """
+    cdef int rank
+    rank = H5Tget_array_ndims(type_id)
+    if rank < 0:
+        raise DatatypeError("Failed to determine rank of array datatype %d" % type_id)
+    return rank
+
+def get_array_dims(hid_t type_id):
+    """ (INT type_id) => TUPLE dimensions
+
+        Get the dimensions of the given array datatype as a tuple of integers.
+    """
+    cdef int rank   
+    cdef hsize_t* dims
+    cdef object dims_tpl
+    dims = NULL
+
+    # First call with NULL buffers only queries the rank.
+    rank = H5Tget_array_dims(type_id, NULL, NULL)
+    if rank < 0:
+        raise DatatypeError("Failed to determine dimensions of datatype %d" % type_id)
+
+    dims = <hsize_t*>malloc(sizeof(hsize_t)*rank)
+    # Second call fills in the actual dimensions.
+    rank = H5Tget_array_dims(type_id, dims, NULL)
+    dims_tpl = dims_to_tuple(dims, rank)
+    # Free before the error check: dims_tpl already owns the Python copy.
+    free(dims)
+    if dims_tpl is None:
+        raise DatatypeError("Failed to determine dimensions of datatype %d: tuple conversion error" % type_id)
+
+    return dims_tpl
+
+# === Enumeration datatypes ===================================================
+#  hid_t     H5Tenum_create(hid_t base_id)
+#  herr_t    H5Tenum_insert(hid_t type, char *name, void *value)
+#  herr_t    H5Tenum_nameof( hid_t type, void *value, char *name, size_t size  )
+#  herr_t    H5Tenum_valueof( hid_t type, char *name, void *value  )
+
+#  char*     H5Tget_member_name(hid_t type_id, unsigned field_idx  )
+#  int       H5Tget_member_index(hid_t type_id, char * field_name  )
+
+def enum_create(hid_t base_id):
+    """ (INT base_type_id) => INT new_type_id
+
+        Create a new enumerated type based on parent type <base_type_id>
+    """
+    cdef hid_t enum_id
+    enum_id = H5Tenum_create(base_id)
+    if enum_id >= 0:
+        return enum_id
+    raise DatatypeError("Failed to create enum of class %d" % base_id)
+
+def enum_insert(hid_t type_id, char* name, long long value):
+    """ (INT type_id, STRING name, INT/LONG value)
+
+        Define a new member of an enumerated type.  <value> will be
+        automatically converted to the base type defined for this enum.
+    """
+    cdef herr_t retval
+    cdef hid_t ptype          # enum's base (parent) type; 0 => never opened
+    cdef long long *data_ptr  # scratch buffer holding value during conversion
+    ptype = 0
+
+    # NOTE(review): malloc return is not checked here -- on OOM the write
+    # below would dereference NULL.
+    data_ptr = <long long*>malloc(sizeof(long long))
+    try:
+        data_ptr[0] = value
+        # H5Tenum_insert expects the value already expressed in the enum's
+        # base type, so convert the native long long in place first.
+        ptype = H5Tget_super(type_id)
+        retval = H5Tconvert(H5T_NATIVE_LLONG, ptype, 1, data_ptr, NULL, H5P_DEFAULT)
+        if retval < 0:
+            raise DatatypeError("Can't preconvert integer for enum insert")
+        retval = H5Tenum_insert(type_id, name, data_ptr)
+        if retval < 0:
+            raise DatatypeError("Failed to insert '%s' (value %d) into enum %d" % (name, value, type_id))
+    finally:
+        # Close the parent type only if it was successfully opened.
+        if ptype:
+            H5Tclose(ptype)
+        free(data_ptr)
+
+#  herr_t    H5Tget_member_value(hid_t type  unsigned memb_no, void *value  )
+def get_member_value(hid_t type_id, unsigned int idx):
+    """ (INT type_id, UINT index) => LONG value
+
+        Determine the value for the member at <index> of enumerated type
+        <type_id>
+    """
+    cdef herr_t retval
+    cdef hid_t ptype          # enum's base (parent) type
+    cdef long long *data_ptr  # scratch buffer for the raw member value
+    data_ptr = <long long*>malloc(sizeof(long long))
+
+    try:
+        ptype = H5Tget_super(type_id)
+        if ptype < 0:
+            raise DatatypeError("Failed to get parent type of enum %d" % type_id)
+
+        # The raw value comes back expressed in the enum's base type...
+        retval = H5Tget_member_value(type_id, idx, data_ptr)
+        if retval < 0:
+            raise DatatypeError("Failed to obtain value of element %d of enum %d" % (idx, type_id))
+
+        # ...and is converted in place to a native long long for Python.
+        retval = H5Tconvert(ptype, H5T_NATIVE_LLONG, 1, data_ptr, NULL, H5P_DEFAULT)
+        if retval < 0:
+            raise DatatypeError("Failed to postconvert integer for enum retrieval")
+    finally:
+        # NOTE(review): if H5Tget_super failed, ptype is negative here and
+        # H5Tclose simply returns an (ignored) error code.
+        H5Tclose(ptype)
+        interm = data_ptr[0]  # copy out before freeing the buffer
+        free(data_ptr)
+    return interm
+
+# === Opaque datatypes ========================================================
+
+def set_tag(hid_t type_id, char* tag):
+    """ (INT type_id, STRING tag)
+
+        Attach a descriptive string to an opaque datatype.
+    """
+    cdef herr_t status
+    status = H5Tset_tag(type_id, tag)
+    if status >= 0:
+        return status
+    raise DatatypeError("Failed to set opaque data tag '%s' on type %d" % (tag, type_id))
+
+def get_tag(hid_t type_id):
+    """ (INT type_id) => STRING tag
+
+        Get the tag associated with an opaque datatype
+    """
+    cdef char* buf            # library-allocated copy of the tag string
+    cdef object tag
+    buf = NULL
+
+    buf = H5Tget_tag(type_id)
+    if buf == NULL:
+        raise DatatypeError("Failed to get opaque data tag for type %d" % type_id)
+    tag = buf                 # copies the C string into a Python string
+    free(buf)                 # caller owns the buffer returned by H5Tget_tag
+    return tag
+
+
+# === Custom Python additions =================================================
+
+
+# Map array protocol strings to their HDF5 atomic equivalents
+# Not sure why LE/BE versions of I8/U8 exist; I'll include them anyway.
+_code_map = {"<i1": H5T_STD_I8LE, "<i2": H5T_STD_I16LE, "<i4": H5T_STD_I32LE, "<i8": H5T_STD_I64LE,
+            ">i1": H5T_STD_I8BE, ">i2": H5T_STD_I16BE, ">i4": H5T_STD_I32BE, ">i8": H5T_STD_I64BE,
+            "|i1": H5T_NATIVE_INT8, "|u1": H5T_NATIVE_UINT8, 
+            "<u1": H5T_STD_U8LE, "<u2": H5T_STD_U16LE, "<u4": H5T_STD_U32LE, "<u8": H5T_STD_U64LE,
+            ">u1": H5T_STD_U8BE, ">u2": H5T_STD_U16BE, ">u4": H5T_STD_U32BE, ">u8": H5T_STD_U64BE,
+            "<f4": H5T_IEEE_F32LE, "<f8": H5T_IEEE_F64LE, ">f4": H5T_IEEE_F32BE, ">f8": H5T_IEEE_F64BE }
+
+# Intermediate mapping which takes complex types to their components
+_complex_map = { "<c8": H5T_IEEE_F32LE, "<c16": H5T_IEEE_F64LE, ">c8": H5T_IEEE_F32BE, ">c16": H5T_IEEE_F64BE }
+
+# Byte-order and sign characters used to assemble array-protocol strings
+_order_map = { H5T_ORDER_NONE: '|', H5T_ORDER_LE: '<', H5T_ORDER_BE: '>'}
+_sign_map  = { H5T_SGN_NONE: 'u', H5T_SGN_2: 'i' }
+
+# Module-level pair of compound field names used for complex-number
+# round-tripping; intended to be reconfigured (or set to None to disable)
+# via py_set_complex_names().
+_complex_names = ('r','i')
+
+# For an HDF5 compound object to be considered complex, the following must be
+# true:
+# (1) Must have exactly two fields
+# (2) Both must be IEEE floating-point, of the same precision and byteorder
+# (3) The field names must match the contents of _complex_names
+
+def py_set_complex_names(char* real_name=NULL, char* imag_name=NULL):
+    """ (STRING real_name, STRING imag_name) or ()
+
+        Sets the field names used to read and write complex numbers from HDF5
+        compound datatypes.  To disable all complex conversion, call with no
+        arguments.
+    """
+    # Bug fix: without this declaration the assignments below create a
+    # function-local variable, silently leaving the module-level setting
+    # (read by py_h5t_to_dtype / py_dtype_to_h5t) unchanged.
+    global _complex_names
+
+    if real_name == NULL and imag_name == NULL:
+        _complex_names = None
+    elif real_name != NULL and imag_name != NULL:
+        _complex_names = (real_name, imag_name)
+    else:
+        raise ValueError("Must be called with no arguments or exactly 2: STRING real_name, STRING imag_name")
+
+    
+def py_h5t_to_dtype(hid_t type_id, object force_native=False, int force_string_length=-1, object compound_fields=None):
+    """ (INT type_id, BOOL force_native=False, INT force_string_length=-1,
+            TUPLE compound_fields=None) 
+        => DTYPE dtype_out
+
+        Produce a Numpy dtype of the same general kind as an HDF5 datatype.
+        Note that the result is *NOT* guaranteed to be memory-compatible with
+        the HDF5 type; for that use py_dtype_to_h5t.  
+
+        If force_native is True, all byte-orders in the returned dtype will be
+        in native order. Variable-length (VLEN) strings are currently not
+        supported, but by providing a value for <force_string_length> they
+        can be converted to fixed-length strings compatible with Numpy.
+
+        If compound_fields is provided, it must be a tuple of names which 
+        correspond to fields in the HDF5 object.  Only HDF5 field names which
+        are present in this tuple will be copied, and will be inserted into the
+        dtype in the order that they appear in the tuple.  Fields which are
+        not present in the HDF5 type are discarded.  As a side effect, this
+        disables automatic conversion of compound types to complex numbers,
+        even if they have the appropriate names.
+    """
+    cdef int classtype
+    cdef int sign
+    cdef int size
+    cdef int order
+    cdef int nfields
+    cdef int i
+    cdef hid_t tmp_id
+
+    classtype = get_class(type_id)
+    
+    if classtype == H5T_INTEGER:
+        # Assemble an array-protocol string such as "<i4" from order/sign/size.
+        size = get_size(type_id)
+        sign = get_sign(type_id)
+        order = get_order(type_id)
+        typeobj = dtype(_order_map[order] + _sign_map[sign] + str(size))
+
+    elif classtype == H5T_FLOAT:
+        size = get_size(type_id)
+        order = get_order(type_id)
+        typeobj = dtype(_order_map[order] + "f" + str(size))
+
+    elif classtype == H5T_STRING:
+        if is_variable_str(type_id):
+            if force_string_length <= 0:
+                raise ConversionError("Variable-length strings are unsupported; try using a fixed size via force_string_length")
+            else:
+                size = force_string_length
+        else:
+            size = get_size(type_id)
+        typeobj = dtype("|S" + str(size))
+
+    elif classtype == H5T_OPAQUE:
+        # Opaque data maps to a Numpy void type of the same size.
+        size = get_size(type_id)
+        typeobj = dtype("|V" + str(size))
+
+    elif classtype == H5T_COMPOUND:
+        nfields = get_nmembers(type_id)
+        field_list = []
+
+        # NOTE(review): compound_fields is not forwarded to the recursive
+        # call, so field filtering applies only at the top level.
+        for i from 0 <= i < nfields:
+            tmp_id = get_member_type(type_id, i)
+            try:
+                tmp_name = get_member_name(type_id, i)
+                field_list.append( (tmp_name, py_h5t_to_dtype(tmp_id, force_native, force_string_length)) )
+            finally:
+                H5Tclose(tmp_id)
+
+        if compound_fields is not None:
+            # If only specific fields are requested, provide them
+            # in the order specified
+            name_dict = dict(field_list)
+            field_list = []
+            for name in compound_fields:
+                if name in name_dict:
+                    field_list.append((name, name_dict[name]))
+            
+        elif len(field_list) == 2:
+            # Special case: complex type.  Note this changes "field_list" to a string.
+            if _complex_names is not None                       and \
+               field_list[0][1].str     == field_list[1][1].str and \
+               field_list[0][1].str[1]  == 'f'                  and \
+               field_list[0][0].lower() == _complex_names[0]    and \
+               field_list[1][0].lower() == _complex_names[1]:
+
+                    # e.g. two "<f4" component fields become a single "<c8"
+                    bstring = field_list[0][1].str
+                    blen = int(bstring[2:])
+                    nstring = bstring[0] + "c" + str(2*blen)
+                    field_list = nstring
+
+        typeobj = dtype(field_list)
+
+    elif classtype == H5T_ENUM:
+        # Enumerated types are treated as their parent type, with an additional
+        # enum field entry carrying a dictionary as metadata
+        # NOTE(review): calls H5Tget_super directly here, but the get_super
+        # wrapper in the H5T_ARRAY branch below -- presumably equivalent.
+        super_tid = H5Tget_super(type_id)
+        try:
+            edct = py_enum_to_dict(type_id)
+            typeobj = py_attach_enum(edct, py_h5t_to_dtype(super_tid))
+        finally:
+            H5Tclose(super_tid)
+
+    elif classtype == H5T_ARRAY:
+        super_tid = get_super(type_id)
+        try:
+            base_dtype = py_h5t_to_dtype(super_tid)
+        finally:
+            H5Tclose(super_tid)
+        shape = get_array_dims(type_id)
+        typeobj = dtype( (base_dtype, shape) )
+
+    else:
+        raise ConversionError('Unsupported datatype class "%s"' % CLASS_MAPPER[classtype])
+
+    if force_native:
+        return typeobj.newbyteorder('=')
+    return typeobj
+
+def py_dtype_to_h5t(numpy.dtype dtype_in):
+    """ ( DTYPE dtype_in ) => INT type_id
+
+        Given a Numpy dtype object, generate a byte-for-byte memory-compatible
+        HDF5 transient datatype object.
+    """
+    cdef hid_t type_out
+    cdef hid_t tmp
+    cdef hid_t basetype
+    cdef object names
+    cdef int retval
+
+    cdef char* type_str       # array-protocol string, e.g. "<f8"
+    cdef char kind            # type-kind character: 'i', 'u', 'f', 'c', 'V', 'S'
+    cdef char byteorder       # '<', '>' or '|'
+    cdef int length           # item size in bytes
+
+    type_out = -1
+
+    type_str = dtype_in.str
+    kind = type_str[1]
+    byteorder = type_str[0]
+
+    length = int(dtype_in.str[2:])  # is there a better way to do this?
+
+    names = dtype_in.names
+
+    # Anything with field names is considered to be a compound type, except enums
+    if names is not None:
+
+        # Check for enumerated type first
+        if (kind == c'u' or kind == c'i') and len(names) == 1 and names[0] == 'enum':
+            basetype = _code_map[dtype_in.str]
+            type_out = py_dict_to_enum(py_recover_enum(dtype_in), basetype)
+
+        # Otherwise it's just a compound type
+        # NOTE(review): the unpacking below assumes untitled fields
+        # (2-tuples); a titled field other than 'enum' would break it.
+        else:
+            type_out = create(H5T_COMPOUND, length)
+            for name, (dt, offset) in dtype_in.fields.iteritems():
+                tmp = py_dtype_to_h5t(dt)
+                try:
+                    insert(type_out, name, offset, tmp)
+                finally:
+                    H5Tclose(tmp)
+
+    # Integers and floats map directly to HDF5 atomic types
+    # NOTE(review): this branch returns a predefined (library-owned) type id
+    # rather than a transient copy; callers which H5Tclose() the result
+    # (e.g. py_can_convert_dtype) may get a library error -- confirm.
+    elif kind == c'u' or kind  == c'i' or kind == c'f': 
+        try:
+            type_out =  _code_map[dtype_in.str]
+        except KeyError:
+            raise ConversionError("Failed to find '%s' in atomic code map" % dtype_in.str)
+
+    # Complex numbers are stored as HDF5 structs, with names defined at runtime
+    elif kind == c'c':
+        if _complex_names is None:
+            raise ConversionError("Support for writing complex numbers is turned off.  Use py_set_complex_names to turn it back on.")
+
+        if length == 8:
+            type_out = create_ieee_complex64(byteorder, _complex_names[0], _complex_names[1])
+        elif length == 16:
+            type_out = create_ieee_complex128(byteorder, _complex_names[0], _complex_names[1])
+        else:
+            raise ConversionError("Unsupported length %d for complex dtype: %s" % (length, repr(dtype_in)))
+
+        if type_out < 0:
+            raise ConversionError("Failed to create complex equivalent for dtype: %s" % repr(dtype_in))
+
+    # Opaque/array types are differentiated by the presence of a subdtype
+    elif kind == c'V':
+
+        if dtype_in.subdtype:
+            basetype = py_dtype_to_h5t(dtype_in.subdtype[0])
+            try:
+                type_out = array_create(basetype, dtype_in.subdtype[1])
+            finally:
+                H5Tclose(basetype)
+        else:
+            type_out = create(H5T_OPAQUE, length)
+                
+    # Strings are assumed to be stored C-style.
+    elif kind == c'S':
+        type_out = copy(H5T_C_S1)
+        set_size(type_out, length)
+
+    else:
+        raise ConversionError("No conversion path for dtype: %s" % repr(dtype_in))
+
+    return type_out
+
+
+def py_enum_to_dict(hid_t type_id):
+    """ (INT type_id) => DICT enum
+
+        Extract the members of an HDF5 enumerated type into a dictionary
+        mapping STRING name => LONG value.
+    """
+    cdef int nmem
+    cdef int i
+    nmem = get_nmembers(type_id)
+
+    members = {}
+    for i from 0 <= i < nmem:
+        members[get_member_name(type_id, i)] = get_member_value(type_id, i)
+    return members
+
+def py_dict_to_enum(object enumdict, hid_t basetype):
+    """ (DICT enum, INT base_type_id) => INT new_type_id
+
+        Build an HDF5 enumeration from a Python dictionary mapping string
+        names to integer values, using <base_type_id> as the underlying type.
+    """
+    cdef hid_t new_id
+    new_id = enum_create(basetype)
+    for name in enumdict:
+        enum_insert(new_id, str(name), enumdict[name])
+    return new_id
+
+def py_attach_enum(object enumdict, object basetype):
+    """ (DICT enum, DTYPE base_dtype) => DTYPE new_dtype
+
+        Build a Numpy dtype equivalent to <base_dtype> which carries the enum
+        dictionary along as metadata: a single field named 'enum' whose field
+        title is the dictionary itself.
+    """
+    field_spec = [( (enumdict, 'enum'), basetype )]
+    return dtype( (basetype, field_spec) )
+
+def py_recover_enum(numpy.dtype dtype_in):
+    """ (DTYPE dtype_with_enum) => DICT enum
+
+        Extract the enum dictionary from a Numpy dtype object
+    """
+    cdef object names
+    names = dtype_in.names
+
+    # The dictionary is stashed as the field's title, i.e. the third entry
+    # of the fields record (see py_attach_enum).
+    if names is not None and len(names) == 1 and names[0] == 'enum':
+        return dtype_in.fields['enum'][2]
+
+    raise ValueError("Type %s is not an enumerated type" % repr(dtype_in))
+
+def py_list_compound_names(hid_t type_in):
+    """ (INT type_id) => LIST compound_names
+
+        Obtain a Python list of member names for a compound or enumeration
+        type, in member-index order.
+    """
+    cdef int nmem
+    cdef int i
+
+    nmem = get_nmembers(type_in)
+
+    names = []
+    for i from 0 <= i < nmem:
+        names.append(get_member_name(type_in, i))
+    return names
+
+def py_can_convert_dtype(object dt):
+    """ (DTYPE dt) => BOOL
+
+        Test whether py_dtype_to_h5t can produce an HDF5 datatype from the
+        given Numpy dtype, by attempting the conversion and discarding the
+        result.
+    """
+    cdef hid_t tid
+    tid = 0
+    can_convert = False
+    try:
+        tid = py_dtype_to_h5t(dt)
+        can_convert = True
+    except ConversionError:
+        pass
+
+    if tid:
+        # NOTE(review): for atomic dtypes py_dtype_to_h5t returns a
+        # predefined library type id, not a copy; closing it here may be
+        # rejected by the library -- confirm the error is harmless.
+        H5Tclose(tid)
+
+    return can_convert
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/h5z.pxd b/h5py/h5z.pxd
new file mode 100755
index 0000000..6b480a6
--- /dev/null
+++ b/h5py/h5z.pxd
@@ -0,0 +1,62 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+from h5 cimport herr_t, htri_t
+
+cdef extern from "hdf5.h":
+
+    ctypedef int H5Z_filter_t
+
+    int H5Z_FILTER_ERROR
+    int H5Z_FILTER_NONE
+    int H5Z_FILTER_ALL
+    int H5Z_FILTER_DEFLATE
+    int H5Z_FILTER_SHUFFLE 
+    int H5Z_FILTER_FLETCHER32
+    int H5Z_FILTER_SZIP
+    int H5Z_FILTER_RESERVED
+    int H5Z_FILTER_MAX
+    int H5Z_MAX_NFILTERS
+
+    int H5Z_FLAG_DEFMASK
+    int H5Z_FLAG_MANDATORY
+    int H5Z_FLAG_OPTIONAL
+
+    int H5Z_FLAG_INVMASK
+    int H5Z_FLAG_REVERSE
+    int H5Z_FLAG_SKIP_EDC
+
+    int H5_SZIP_ALLOW_K13_OPTION_MASK   #1
+    int H5_SZIP_CHIP_OPTION_MASK        #2
+    int H5_SZIP_EC_OPTION_MASK          #4
+    int H5_SZIP_NN_OPTION_MASK          #32
+    int H5_SZIP_MAX_PIXELS_PER_BLOCK    #32
+
+    int H5Z_FILTER_CONFIG_ENCODE_ENABLED #(0x0001)
+    int H5Z_FILTER_CONFIG_DECODE_ENABLED #(0x0002)
+
+    cdef enum H5Z_EDC_t:
+        H5Z_ERROR_EDC       = -1,
+        H5Z_DISABLE_EDC     = 0,
+        H5Z_ENABLE_EDC      = 1,
+        H5Z_NO_EDC          = 2 
+
+    # --- Filter API ----------------------------------------------------------
+    htri_t H5Zfilter_avail(H5Z_filter_t id_)
+    herr_t H5Zget_filter_info(H5Z_filter_t filter_, unsigned int *filter_config_flags)
+
+
+
+
+
+
+
diff --git a/h5py/h5z.pyx b/h5py/h5z.pyx
new file mode 100755
index 0000000..603a881
--- /dev/null
+++ b/h5py/h5z.pyx
@@ -0,0 +1,102 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+
+"""
+    Filter API and constants
+"""
+
+# Pyrex compile-time imports
+from h5  cimport herr_t, htri_t
+
+# Runtime imports
+import h5
+from h5 import DDict
+from errors import FilterError
+
+# === Public constants and data structures ====================================
+
+FILTER_ERROR    = H5Z_FILTER_ERROR
+FILTER_NONE     = H5Z_FILTER_NONE
+FILTER_ALL      = H5Z_FILTER_ALL
+FILTER_DEFLATE  = H5Z_FILTER_DEFLATE
+FILTER_SHUFFLE  = H5Z_FILTER_SHUFFLE
+FILTER_FLETCHER32 = H5Z_FILTER_FLETCHER32
+FILTER_SZIP     = H5Z_FILTER_SZIP
+FILTER_RESERVED = H5Z_FILTER_RESERVED
+FILTER_MAX      = H5Z_FILTER_MAX
+FILTER_NMAX     = H5Z_MAX_NFILTERS
+# Reverse mapping from filter codes to human-readable names
+_FILTER_MAPPER = { H5Z_FILTER_ERROR: 'ERROR', H5Z_FILTER_NONE: 'NONE',
+                   H5Z_FILTER_ALL: 'ALL', H5Z_FILTER_DEFLATE: 'DEFLATE',
+                   H5Z_FILTER_SHUFFLE: 'SHUFFLE', H5Z_FILTER_FLETCHER32: 'FLETCHER32',
+                   H5Z_FILTER_SZIP: 'SZIP', H5Z_FILTER_RESERVED: 'RESERVED'}
+FILTER_MAPPER = DDict(_FILTER_MAPPER)
+
+FLAG_DEFMASK    = H5Z_FLAG_DEFMASK
+FLAG_MANDATORY  = H5Z_FLAG_MANDATORY
+FLAG_OPTIONAL   = H5Z_FLAG_OPTIONAL
+FLAG_INVMASK    = H5Z_FLAG_INVMASK
+FLAG_REVERSE    = H5Z_FLAG_REVERSE
+FLAG_SKIP_EDC   = H5Z_FLAG_SKIP_EDC
+_FLAG_MAPPER = {H5Z_FLAG_DEFMASK: 'DEFMASK', H5Z_FLAG_MANDATORY: 'MANDATORY',
+                H5Z_FLAG_OPTIONAL: 'OPTIONAL', H5Z_FLAG_INVMASK: 'INVMASK',
+                H5Z_FLAG_REVERSE: 'REVERSE', H5Z_FLAG_SKIP_EDC: 'SKIP EDC' }
+FLAG_MAPPER = DDict(_FLAG_MAPPER)
+
+#skip SZIP options
+
+CONFIG_ENCODE_ENABLED = H5Z_FILTER_CONFIG_ENCODE_ENABLED
+CONFIG_DECODE_ENABLED = H5Z_FILTER_CONFIG_DECODE_ENABLED
+# Bug fix: the decode flag was previously mislabeled 'ENCODE DISABLED'
+_CONFIG_MAPPER = { H5Z_FILTER_CONFIG_ENCODE_ENABLED: 'ENCODE ENABLED',
+                   H5Z_FILTER_CONFIG_DECODE_ENABLED: 'DECODE ENABLED' }
+CONFIG_MAPPER = DDict(_CONFIG_MAPPER)
+
+EDC_ERROR   = H5Z_ERROR_EDC
+EDC_DISABLE = H5Z_DISABLE_EDC
+EDC_ENABLE  = H5Z_ENABLE_EDC
+EDC_NONE    = H5Z_NO_EDC
+_EDC_MAPPER = { H5Z_ERROR_EDC: 'ERROR', H5Z_DISABLE_EDC: 'DISABLE EDC',
+                H5Z_ENABLE_EDC: 'ENABLE EDC', H5Z_NO_EDC: 'NO EDC' }
+EDC_MAPPER = DDict(_EDC_MAPPER)
+
+# === Filter API  =============================================================
+
+def filter_avail(int filter_id):
+    """ (INT filter_id) => BOOL available
+
+        Determine whether the given filter is available to the library.
+    """
+    cdef htri_t result
+    result = H5Zfilter_avail(<H5Z_filter_t>filter_id)
+    if result < 0:
+        raise FilterError("Can't determine availability of filter %d" % filter_id)
+    return bool(result)
+
+def get_filter_info(int filter_id):
+    """ (INT filter_id) => INT flags
+
+        Retrieve the configuration flags (CONFIG_*) for the given filter.
+    """
+    cdef unsigned int config_flags
+    cdef herr_t status
+    status = H5Zget_filter_info(<H5Z_filter_t>filter_id, &config_flags)
+    if status < 0:
+        raise FilterError("Can't determine flags of filter %d" % filter_id)
+    return config_flags
+
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
new file mode 100755
index 0000000..fbbebca
--- /dev/null
+++ b/h5py/highlevel.py
@@ -0,0 +1,795 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Provides high-level Python objects for HDF5 files, groups, and datasets.  
+
+    Highlights:
+
+    - Groups provide dictionary-like __getitem__ access to their members, and 
+      allow iteration over member names.  File objects also perform these
+      operations, implicitly on the root ('/') group.
+
+    - Datasets support Numpy dtype and shape access, along with read/write 
+      access to the underlying HDF5 dataset (including slicing for partial I/0),
+      and reading/writing specified fields of a compound type object.
+
+    - Both group and dataset attributes can be accessed via a dictionary-style
+      attribute manager (group_obj.attrs and dataset_obj.attrs).  See the
+      highlevel.AttributeManager docstring for more information.
+
+    - Named datatypes are reached through the NamedType class, which allows
+      attribute access like Group and Dataset objects.
+
+    - An interactive command-line utility "browse(filename)" allows access to 
+      HDF5 datasets, with commands like "cd, ls", etc.  Also allows you to
+      import groups and datasets directly into Python as highlevel.Group and
+      highlevel.Dataset objects.
+
+    - There are no "h5py.highlevel" exceptions; classes defined here raise 
+      native Python exceptions.  However, they will happily propagate 
+      exceptions from the underlying h5py.h5* modules, which are (mostly) 
+      subclasses of h5py.errors.H5Error.
+"""
+
+__revision__ = "$Id$"
+
+import os
+import cmd
+import random
+import string
+import numpy
+
+import h5f
+import h5g
+import h5i
+import h5d
+import h5t
+import h5a
+import h5p
+from errors import H5Error
+
+# === Main classes (Dataset/Group/File) =======================================
+
+class Dataset(object):
+
+    """ High-level interface to an HDF5 dataset
+
+        A Dataset object is designed to permit "Numpy-like" access to the 
+        underlying HDF5 dataset.  It supports array-style indexing, which 
+        returns Numpy ndarrays.  For the case of arrays containing compound
+        data, it also allows a "compound mask" to be set, allowing you to 
+        only extract elements which match names in the mask.  The underlying
+        array can also be written to using the indexing syntax.
+
+        HDF5 attribute access is provided through the property obj.attrs.  See
+        the AttributeManager class documentation for more information.
+
+        Read-only properties:
+        names       Compound fields defined in this object (tuple or None)
+        names_mask  Current mask controlling compound access (tuple or None)
+        shape       Tuple containing array dimensions
+        dtype       A Numpy dtype representing the array data-type.
+
+        Writable properties:
+        force_native    
+            Returned data will be automatically converted
+            to the native platform byte order
+
+        force_string_length     
+            Variable-length strings will be converted to
+            Numpy strings of this length.
+    """
+
+    # --- Properties (Dataset) ------------------------------------------------
+
+    def _set_native(self, val):
+        self._force_native = bool(val) if val is not None else None
+
+    def _set_string_length(self, val):
+        self._string_length = val
+
+    names_mask = property(lambda self: self._fields)
+    names = property(lambda self: self.dtype.names)
+
+    shape = property(lambda self: h5d.py_shape(self.id))
+    dtype = property(lambda self: h5d.py_dtype(self.id))
+
+    force_native = property(lambda self: self._force_native, _set_native)
+    force_string_length = property(lambda self: self._string_length, _set_string_length)
+
+    attrs = property(lambda self: self._attrs)
+
+    # --- Public interface (Dataset) ------------------------------------------
+
+    def __init__(self, group, name, create=False, force=False,
+                    data=None, dtype=None, shape=None, 
+                    chunks=None, compression=None, shuffle=False, fletcher32=False):
+        """ Create a new Dataset object.  There are two modes of operation:
+
+            1.  Open an existing dataset
+                If "create" is false, open an existing dataset.  An exception
+                will be raised if it doesn't exist.
+
+            2.  Create a dataset
+                If "create" is True, create a new dataset.  You must supply
+                *either* "data", which must be a Numpy array from which the 
+                shape, dtype and initial contents will be determined, or *both* 
+                "dtype" (Numpy dtype object) and "shape" (tuple of dimensions).
+                Chunks/compression/shuffle/fletcher32 can also be specified.
+
+                By default, creating a dataset will fail if another of the
+                same name already exists. If you specify force=True, any 
+                existing dataset will be unlinked, and the new one created.
+                This is as close as possible to an atomic operation; if the 
+                dataset creation fails, the old dataset isn't destroyed.
+
+            Creation keywords (* is default):
+
+            chunks:        Tuple of chunk dimensions or None*
+            compression:   DEFLATE (gzip) compression level, int or None*
+            shuffle:       Use the shuffle filter? (requires compression) T/F*
+            fletcher32:    Enable Fletcher32 error detection? T/F*
+        """
+        if create:
+            if force and h5g.py_exists(group.id,name):
+                tmpname = 'h5py_temp_' + ''.join(random.sample(string.ascii_letters, 30))
+                tmpid = h5d.py_create(group.id, tmpname, data, shape, 
+                                    chunks, compression, shuffle, fletcher32)
+                h5g.unlink(group.id, name)
+                h5g.link(group.id, tmpname, name)
+                h5g.unlink(group.id, tmpname)
+
+            else:
+                self.id = h5d.py_create(group.id, name, data, shape, 
+                                        chunks, compression, shuffle, fletcher32)
+        else:
+            if any((data,dtype,shape,chunks,compression,shuffle,fletcher32)):
+                raise ValueError('You cannot specify keywords when opening a dataset.')
+            self.id = h5d.open(group.id, name)
+
+        self._fields = None
+        self._attrs = AttributeManager(self)
+        self.force_native = None
+        self.force_string_length = None
+
+    def __getitem__(self, *args):
+        """ Read a slice from the underlying HDF5 array.  Currently only
+            numerical slices are supported; for recarray-style access consider
+            using set_names_mask().
+        """
+        if any( [isinstance(x, basestring) for x in args] ):
+            raise TypeError("Slices must be numbers; recarray-style indexing is not yet supported.")
+
+        start, count, stride = _slices_to_tuples(args)
+
+        return h5d.py_read_slab(self.id, start, count, stride, 
+                                compound_fields=self.names_mask,
+                                force_native=self.force_native)
+
+    def __setitem__(self, *args):
+        """ Write to the underlying array from an existing Numpy array.  The
+            shape of the Numpy array must match the shape of the selection,
+            and the Numpy array's datatype must be convertible to the HDF5
+            array's datatype.
+        """
+        start, count, stride = _slices_to_tuples(args[0:len(args)-1])
+        h5d.py_write_slab(self.id, args[-1], start, stride)
+
+    def set_names_mask(self, iterable=None):
+        """ Determine which fields of a compound datatype will be read. Only 
+            compound fields whose names match those provided by the given 
+            iterable will be read.  Any given names which do not exist in the
+            HDF5 compound type are simply ignored.
+
+            If the argument is a single string, it will be correctly processed
+            (i.e. not exploded).
+        """
+        if iterable == None:
+            self._fields = None
+        else:
+            if isinstance(iterable, basestring):
+                iterable = (iterable,)    # not 'i','t','e','r','a','b','l','e'
+            self._fields = tuple(iterable)
+
+    def close(self):
+        """ Force the HDF5 library to close and free this object.  You 
+            shouldn't need to do this in normal operation; HDF5 objects are 
+            automatically closed when their Python counterparts are deallocated.
+        """
+        h5d.close(self.id)
+
+    def __del__(self):
+        try:
+            h5d.close(self.id)
+        except H5Error:
+            pass
+
+    def __str__(self):
+        return 'Dataset: '+str(self.shape)+'  '+repr(self.dtype)
+
+    def __repr__(self):
+        return self.__str__()
+
+class Group(object):
+    """ Represents an HDF5 group object
+
+        Group members are accessed through dictionary-style syntax.  Iterating
+        over a group yields the names of its members, while the method
+        iteritems() yields (name, value) pairs. Examples:
+
+            highlevel_obj = group_obj["member_name"]
+            member_list = list(group_obj)
+            member_dict = dict(group_obj.iteritems())
+
+        - Accessing items: generally a Group or Dataset object is returned. In
+          the special case of a scalar dataset, a Numpy array scalar is
+          returned.
+
+        - Setting items: See the __setitem__ docstring; the rules are:
+            1. Existing Group or Dataset: create a hard link in this group
+            2. Numpy array: create a new dataset here, overwriting any old one
+            3. Anything else: try to create a Numpy array.  Also works with
+               Python scalars which have Numpy type equivalents.
+
+        - Deleting items: unlinks the object from this group.
+
+        - Attribute access: through the property obj.attrs.  See the 
+          AttributeManager class documentation for more information.
+
+        - len(obj) returns the number of group members
+    """
+
+    # --- Public interface (Group) --------------------------------------------
+
+    def __init__(self, parent_object, name, create=False):
+        """ Create a new Group object, from a parent object and a name.
+
+            If "create" is False (default), try to open the given group,
+            raising an exception if it doesn't exist.  If "create" is True,
+            create a new HDF5 group and link it into the parent group.
+        """
+        self.id = 0
+        if create:
+            self.id = h5g.create(parent_object.id, name)
+        else:
+            self.id = h5g.open(parent_object.id, name)
+        
+        #: Group attribute access (dictionary-style)
+        self.attrs = AttributeManager(self)
+
+    def __delitem__(self, name):
+        """ Unlink a member from the HDF5 group.
+        """
+        h5g.unlink(self.id, name)
+
+    def __setitem__(self, name, obj):
+        """ Add the given object to the group.  Here are the rules:
+
+            1. If "obj" is a Dataset or Group object, a hard link is created
+                in this group which points to the given object.
+            2. If "obj" is a Numpy ndarray, it is converted to a dataset
+                object, with default settings (contiguous storage, etc.).
+            3. If "obj" is anything else, attempt to convert it to an ndarray
+                and store it.  Scalar values are stored as scalar datasets.
+                Raise ValueError if we can't understand the resulting array 
+                dtype.
+        """
+        if isinstance(obj, Group) or isinstance(obj, Dataset):
+            h5g.link(self.id, name, h5i.get_name(obj.id), link_type=h5g.LINK_HARD)
+
+        elif isinstance(obj, numpy.ndarray):
+            if h5t.py_can_convert_dtype(obj.dtype):
+                dset = Dataset(self, name, data=obj, create=True, force=True)
+                dset.close()
+            else:
+                raise ValueError("Don't know how to store data of this type in a dataset: " + repr(obj.dtype))
+
+        else:
+            arr = numpy.array(obj)
+            if h5t.py_can_convert_dtype(arr.dtype):
+                dset = Dataset(self, name, data=arr, create=True, force=True)
+                dset.close()
+            else:
+                raise ValueError("Don't know how to store data of this type in a dataset: " + repr(arr.dtype))
+
+    def __getitem__(self, name):
+        """ Retrive the Group or Dataset object.  If the Dataset is scalar,
+            returns its value instead.
+        """
+        retval = _open_arbitrary(self, name)
+        if isinstance(retval, Dataset) and retval.shape == ():
+            value = h5d.py_read_slab(retval.id, ())
+            value = value.astype(value.dtype.type)
+            retval.close()
+            return value
+        return retval
+
+    def __iter__(self):
+        """ Yield the names of group members.
+        """
+        return h5g.py_iternames(self.id)
+
+    def iteritems(self):
+        """ Yield 2-tuples of (member_name, member_value).
+        """
+        for name in self:
+            yield (name, self[name])
+
+    def __len__(self):
+        return h5g.get_num_objs(self.id)
+
+    def close(self):
+        """ Immediately close the underlying HDF5 object.  Further operations
+            on this Group object will raise an exception.  You don't typically
+            have to use this, as these objects are automatically closed when
+            their Python equivalents are deallocated.
+        """
+        h5g.close(self.id)
+
+    def __del__(self):
+        try:
+            h5g.close(self.id)
+        except H5Error:
+            pass
+
+    def __str__(self):
+        return 'Group (%d members): ' % self.nmembers + ', '.join(['"%s"' % name for name in self])
+
+    def __repr__(self):
+        return self.__str__()
+
+class File(Group):
+
+    """ Represents an HDF5 file on disk.
+
+        File objects inherit from Group objects; Group-like methods all
+        operate on the HDF5 root group ('/').  Like Python file objects, you
+        must close the file ("obj.close()") when you're done with it.
+    """
+
+    _modes = ('r','r+','w','w+','a')
+                      
+    # --- Public interface (File) ---------------------------------------------
+
+    def __init__(self, name, mode, noclobber=False):
+        """ Create a new file object.  
+
+            Valid modes (like Python's file() modes) are: 
+            - 'r'   Readonly, file must exist
+            - 'r+'  Read/write, file must exist
+            - 'w'   Write, create/truncate file
+            - 'w+'  Read/write, create/truncate file
+            - 'a'   Read/write, file must exist (='r+')
+
+            If "noclobber" is specified, file truncation (w/w+) will fail if 
+            the file already exists.  Note this is NOT the default.
+        """
+        if not mode in self._modes:
+            raise ValueError("Invalid mode; must be one of %s" % ', '.join(self._modes))
+              
+        plist = h5p.create(h5p.CLASS_FILE_ACCESS)
+        try:
+            h5p.set_fclose_degree(plist, h5f.CLOSE_STRONG)
+            if mode == 'r':
+                self.id = h5f.open(name, h5f.ACC_RDONLY, access_id=plist)
+            elif 'r' in mode or 'a' in mode:
+                self.id = h5f.open(name, h5f.ACC_RDWR, access_id=plist)
+            elif noclobber:
+                self.id = h5f.create(name, h5f.ACC_EXCL, access_id=plist)
+            else:
+                self.id = h5f.create(name, h5f.ACC_TRUNC, access_id=plist)
+        finally:
+            h5p.close(plist)
+
+        # For __str__ and __repr__
+        self.filename = name
+        self.mode = mode
+        self.noclobber = noclobber
+
+    def close(self):
+        """ Close this HDF5 object.  Note that any further access to objects
+            defined in this file will raise an exception.
+        """
+        h5f.close(self.id)
+
+    def flush(self):
+        """ Instruct the HDF5 library to flush disk buffers for this file.
+        """
+        h5f.flush(self.id)
+
+    def __del__(self):
+        """ This docstring is here to remind you that THE HDF5 FILE IS NOT 
+            AUTOMATICALLY CLOSED WHEN IT'S GARBAGE COLLECTED.  YOU MUST
+            CALL close() WHEN YOU'RE DONE WITH THE FILE.
+        """
+        pass
+
+    def __str__(self):
+        return 'File "%s", root members: %s' % (self.filename, ', '.join(['"%s"' % name for name in self]))
+
+    def __repr_(self):
+        return 'File("%s", "%s", noclobber=%s)' % (self.filename, self.mode, str(self.noclobber))
+
+
+class AttributeManager(object):
+
+    """ Allows dictionary-style access to an HDF5 object's attributes.
+
+        You should never have to create one of these; they come attached to
+        Group, Dataset and NamedType objects as "obj.attrs".
+
+        - Access existing attributes with "obj.attrs['attr_name']".  If the
+          attribute is scalar, a scalar value is returned, else an ndarray.
+
+        - Set attributes with "obj.attrs['attr_name'] = value".  Note that
+          this will overwrite an existing attribute.
+
+        - Delete attributes with "del obj.attrs['attr_name']".
+
+        - Iterating over obj.attrs yields the names of the attributes. The
+          method iteritems() yields (name, value) pairs.
+        
+        - len(obj.attrs) returns the number of attributes.
+    """
+    def __init__(self, parent_object):
+        self.id = parent_object.id
+
+    def __getitem__(self, name):
+        obj = h5a.py_get(self.id, name)
+        if len(obj.shape) == 0:
+            return obj.dtype.type(obj)
+        return obj
+
+    def __setitem__(self, name, value):
+        if not isinstance(value, numpy.ndarray):
+            value = numpy.array(value)
+        if h5a.py_exists(self.id, name):
+            h5a.delete(self.id, name)
+        h5a.py_set(self.id, name, value)
+
+    def __delitem__(self, name):
+        h5a.delete(self.id, name)
+
+    def __len__(self):
+        return h5a.get_num_attrs(self.id)
+
+    def __iter__(self):
+        return h5a.py_listattrs(self.id)
+
+    def iteritems(self):
+        for name in self:
+            yield (name, self[name])
+
+    def __str__(self):
+        return "Attributes: "+', '.join(['"%s"' % x for x in self])
+
+class NamedType(object):
+
+    """ Represents a named datatype, stored in a file.  
+
+        HDF5 datatypes are typically represented by their Numpy dtype
+        equivalents; this class exists mainly to provide access to attributes
+        stored on HDF5 named types.  Properties:
+
+        dtype:   Equivalent Numpy dtype for this HDF5 type
+        attrs:   AttributeManager instance for attribute access
+
+        Mutating the returned dtype object has no effect on the underlying
+        HDF5 datatype.
+    """ 
+        
+    def _get_dtype(self):
+        # Lazily translate the HDF5 type to a Numpy dtype on first access;
+        # see the comment in __init__ for why this is deferred.
+        if self._dtype is None:
+            self._dtype = h5t.py_h5t_to_dtype(self.id)
+        return self._dtype
+
+    # Read-only property; there is deliberately no setter.
+    dtype = property(_get_dtype)
+
+    def __init__(self, group, name, dtype=None):
+        """ Open an existing HDF5 named type, or create one.
+
+            If no value is provided for "dtype", try to open a named type
+            called "name" under the given group.  If "dtype" is anything
+            which can be converted to a Numpy dtype, create a new datatype
+            based on it and store it in the group.
+        """
+        self.id = None
+        self._dtype = None  # Defer initialization; even if the named type 
+                            # isn't Numpy-compatible, we can still get at the
+                            # attributes.
+
+        if dtype is not None:
+            # Create-and-commit path; the committed type is re-opened below
+            # exactly like the open-existing path.
+            dtype = numpy.dtype(dtype)
+            tid = h5t.py_dtype_to_h5t(dtype)
+            try:
+                h5t.commit(group.id, name, tid)
+            finally:
+                h5t.close(tid)
+
+        self.id = h5t.open(group.id, name)
+        self.attrs = AttributeManager(self)
+
+    def close(self):
+        """ Force the library to close this object.  Not ordinarily required.
+        """
+        if self.id is not None:
+            h5t.close(self.id)
+
+    def __del__(self):
+        # Best-effort close; H5Error means the id was already closed.
+        if self.id is not None:
+            try:
+                h5t.close(self.id)
+            except H5Error:
+                pass
+    
+
+
+# === Browsing and interactivity ==============================================
+
+import inspect
+import string
+import posixpath
+
+
+class _H5Browse(object):
+
+    def __init__(self):
+        self.filename = None
+        self.file_obj = None
+        self.path = None
+
+    def _loadfile(self, filename):
+        if self.file_obj is not None:
+            self.file_obj.close()
+            self.filename = None
+
+        self.file_obj = File(filename, 'r+')
+        self.filename = filename
+
+    def __call__(self, filename=None, importdict=None):
+        """ Browse a new file, or the current one.
+        """
+        if filename is not None:
+            self._loadfile(filename)
+        else:
+            if self.file_obj is None:
+                raise ValueError("Must provide filename if no file is currently open")
+
+        if importdict is None:  # hang on tight... here we go...
+            importdict = inspect.currentframe().f_back.f_globals
+
+        cmdinstance = _H5Cmd(self.file_obj, self.filename, importdict, self.path)
+        cmdinstance.browse()
+        self.path = cmdinstance.path
+
+class _H5Cmd(cmd.Cmd):
+
+    def __init__(self, file_obj, filename, importdict, groupname=None):
+        cmd.Cmd.__init__(self)
+        self.file = file_obj
+        self.filename = filename
+
+        if groupname is None:
+            groupname = '/'
+        self.group = self.file[groupname]
+        self.path = groupname
+
+        self.prompt = os.path.basename(self.filename)+' '+os.path.basename(self.path)+'> '
+
+        self.importdict = importdict
+
+    def browse(self):
+        self.cmdloop('Browsing "%s". Type "help" for commands, "exit" to exit.' % os.path.basename(self.filename))
+
+    def _safename(self, name):
+        legal = string.ascii_letters + '0123456789'
+        instring = list(name)
+        for idx, x in enumerate(instring):
+            if x not in legal:
+                instring[idx] = '_'
+        if instring[0] not in string.ascii_letters:
+            instring = ['_']+instring
+        return ''.join(instring)
+
+    def do_ls(self, line):
+
+        def padline(line, width, trunc=True):
+            slen = len(line)
+            if slen >= width:
+                if trunc:
+                    line = line[0:width-4]+'... '
+                else:
+                    line = line+' '
+            else:
+                line = line + ' '*(width-slen)
+            return line
+
+        extended = False
+        trunc = True
+        if line.strip() == '-l':
+            extended = True
+        if line.strip() == '-ll':
+            extended = True
+            trunc = False
+
+        for name in self.group:
+            outstring = name
+            type_code = h5g.get_objinfo(self.group.id, name).type
+            if type_code == h5g.OBJ_GROUP:
+                outstring += "/"
+
+            if extended:
+                outstring = padline(outstring, 20, trunc)
+                codestring = str(self.group[name])
+                outstring += padline(codestring, 60, trunc)
+
+            print outstring
+
+    def do_cd(self, path):
+        """ cd <path>
+        """
+        path = posixpath.normpath(posixpath.join(self.path, path))
+        try:
+            group = Group(self.file, path)
+            self.prompt = os.path.basename(self.filename)+' '+os.path.basename(path)+'> '
+        except H5Error, e:
+            print e.message
+        self.path = path
+        self.group = group
+
+    def do_import(self, line):
+        if self.importdict is None:
+            print "Can't import variables (no import dict provided)."
+        line = line.strip()
+        objname, as_string, newname = line.partition(' as ')
+        newname = newname.strip()
+        objname = objname.strip()
+        if len(newname) == 0:
+            newname = objname
+        try:
+            self.importdict[newname] = self.group[objname]
+        except H5Error, e:
+            print e.message
+
+    def do_exit(self, line):
+        return True
+
+    def do_EOF(self, line):
+        return self.do_exit(line)
+
+    def do_pwd(self, line):
+        print self.path
+
+    def complete_import(self, text, line, begidx, endidx):
+        return [x for x in self.group if x.find(text)==0]
+
+    def complete_cd(self, text, line, begidx, endidx):
+        return [x for x in self.group if x.find(text)==0 \
+                    and h5g.get_objinfo(self.group.id,x).type == h5g.OBJ_GROUP]
+
+    def help_cd(self):
+        print ""
+        print "cd <name>"
+        print "    Enter a subgroup of the current group"
+        print ""
+
+    def help_pwd(self):
+        print ""
+        print "pwd"
+        print "    Print current path"
+        print ""
+
+    def help_ls(self):
+        print ""
+        print "ls [-l] [-ll]"
+        print "    Print the contents of the current group."
+        print "    Optional long format with -l (80 columns)"
+        print "    Very long format (-ll) has no column limit."
+        print ""
+
+    def help_import(self):
+        print ""
+        print "import <name> [as <python_name>]"
+        print "    Import a member of the current group as a Python object" 
+        print "    at the interactive level, optionally under a different"
+        print "    name."
+        print ""
+
+
+
+# === Utility functions =======================================================
+
+def _open_arbitrary(group_obj, name):
+    """ Figure out the type of an object attached to an HDF5 group and return 
+        the appropriate high-level interface object.
+
+        Currently supports Group, Dataset, and NamedDatatype
+    """
+    info = h5g.get_objinfo(group_obj.id, name)
+
+    if info.type == h5g.OBJ_GROUP:      # group
+        return Group(group_obj, name)
+
+    elif info.type == h5g.OBJ_DATASET:  # dataset
+        return Dataset(group_obj, name)
+
+    elif info.type == h5g.OBJ_DATATYPE: # named type
+        return NamedDatatype(group_obj, name)
+
+    raise NotImplementedError('Object type "%s" unsupported by the high-level interface.' % h5g.OBJ_MAPPER[info.type])
+
+def _slices_to_tuples(args):
+    """ Turns a series of slice objects into the start, count, stride tuples
+        expected by py_read/py_write
+    """
+
+    startlist = []
+    countlist = []
+    stridelist = []
+    
+    if len(args) == 1 and isinstance(args[0], tuple):
+        args = args[0]
+
+    for arg in args:
+
+        if isinstance(arg, slice):
+
+            if arg.start is None:
+                start=0
+            else:
+                if arg.start < 0:
+                    raise ValueError("Negative dimensions are not allowed")
+                start=arg.start
+
+            if arg.step is None:
+                step = 1
+            else:
+                if arg.step < 0:
+                    raise ValueError("Negative step sizes are not allowed")
+                step = arg.step
+
+            startlist.append(start)
+            stridelist.append(step)
+
+            if arg.stop is None:
+                countlist.append(None)
+            else:
+                if arg.stop < 0:
+                    raise ValueError("Negative dimensions are not allowed")
+                count = (arg.stop-start)/step
+                if count == 0:
+                    raise ValueError("Zero-length selections are not allowed")
+                countlist.append(count)
+
+        else:
+            startlist.append(arg)
+            countlist.append(1)
+            stridelist.append(1)
+
+    return (tuple(startlist), tuple(countlist), tuple(stridelist))
+
+#: Command-line HDF5 file "shell": browse(name) (or browse() for last file).
+browse = _H5Browse()  # module-level singleton; remembers the last file/path
+
+
+
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/numpy.pxd b/h5py/numpy.pxd
new file mode 100755
index 0000000..def7164
--- /dev/null
+++ b/h5py/numpy.pxd
@@ -0,0 +1,99 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+# API for NumPy objects
+cdef extern from "numpy/arrayobject.h":
+
+  # Platform independent types
+  ctypedef int npy_intp
+  ctypedef signed int npy_int8
+  ctypedef unsigned int npy_uint8
+  ctypedef signed int npy_int16
+  ctypedef unsigned int npy_uint16
+  ctypedef signed int npy_int32
+  ctypedef unsigned int npy_uint32
+  ctypedef signed long long npy_int64
+  ctypedef unsigned long long npy_uint64
+  ctypedef float npy_float32
+  ctypedef double npy_float64
+
+  cdef enum NPY_TYPES:
+    NPY_BOOL
+    NPY_BYTE
+    NPY_UBYTE
+    NPY_SHORT
+    NPY_USHORT
+    NPY_INT
+    NPY_UINT
+    NPY_LONG
+    NPY_ULONG
+    NPY_LONGLONG
+    NPY_ULONGLONG
+    NPY_FLOAT
+    NPY_DOUBLE
+    NPY_LONGDOUBLE
+    NPY_CFLOAT
+    NPY_CDOUBLE
+    NPY_CLONGDOUBLE
+    NPY_OBJECT
+    NPY_STRING
+    NPY_UNICODE
+    NPY_VOID
+    NPY_NTYPES
+    NPY_NOTYPE
+
+  # Platform independent types
+  cdef enum:
+    NPY_INT8, NPY_INT16, NPY_INT32, NPY_INT64,
+    NPY_UINT8, NPY_UINT16, NPY_UINT32, NPY_UINT64,
+    NPY_FLOAT32, NPY_FLOAT64, NPY_COMPLEX64, NPY_COMPLEX128
+
+  # Classes
+  ctypedef extern class numpy.dtype [object PyArray_Descr]:
+    cdef int type_num, elsize, alignment
+    cdef char type, kind, byteorder, hasobject
+    cdef object fields, typeobj
+
+  ctypedef extern class numpy.ndarray [object PyArrayObject]:
+    cdef char *data
+    cdef int nd
+    cdef npy_intp *dimensions
+    cdef npy_intp *strides
+    cdef object base
+    cdef dtype descr
+    cdef int flags
+
+  ctypedef struct npy_cfloat:
+    float real
+    float imag
+
+  ctypedef struct npy_cdouble:
+    double real
+    double imag
+
+  # Functions
+  object PyArray_GETITEM(object arr, void *itemptr)
+  int PyArray_SETITEM(object arr, void *itemptr, object obj)
+  dtype PyArray_DescrFromType(int type)
+  object PyArray_Scalar(void *data, dtype descr, object base)
+
+  int PyArray_CheckScalar(object sclr)
+  void PyArray_ScalarAsCtype(object sclr, void* ptr)
+
+  # The NumPy initialization function
+  void import_array()
+
+
diff --git a/h5py/python.pxd b/h5py/python.pxd
new file mode 100755
index 0000000..2185722
--- /dev/null
+++ b/h5py/python.pxd
@@ -0,0 +1,68 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+# This file is based on code from the PyTables project.  The complete PyTables
+# license is available at licenses/pytables.txt, in the distribution root
+# directory.
+
+# Some helper routines from the Python API
+cdef extern from "Python.h":
+
+  # special types
+  ctypedef int Py_ssize_t
+
+  # references
+  void Py_INCREF(object)
+  void Py_DECREF(object)
+
+  # To release global interpreter lock (GIL) for threading
+  void Py_BEGIN_ALLOW_THREADS()
+  void Py_END_ALLOW_THREADS()
+
+  # Functions for integers
+  object PyInt_FromLong(long)
+  long PyInt_AsLong(object)
+  object PyLong_FromLongLong(long long)
+  long long PyLong_AsLongLong(object)
+
+  # Functions for floating points
+  object PyFloat_FromDouble(double)
+
+  # Functions for strings
+  object PyString_FromStringAndSize(char *s, int len)
+  char *PyString_AsString(object string)
+  object PyString_FromString(char *)
+
+  # Functions for lists
+  int PyList_Append(object list, object item)
+
+  # Functions for tuples
+  object PyTuple_New(int)
+  int PyTuple_SetItem(object, int, object)
+  object PyTuple_GetItem(object, int)
+  int PyTuple_Size(object tuple)
+  int PyTuple_Check(object tpl)
+
+  # Functions for dicts
+  int PyDict_Contains(object p, object key)
+  object PyDict_GetItem(object p, object key)
+
+  # Functions for objects
+  object PyObject_GetItem(object o, object key)
+  int PyObject_SetItem(object o, object key, object v)
+  int PyObject_DelItem(object o, object key)
+  long PyObject_Length(object o)
+  int PyObject_Compare(object o1, object o2)
+  int PyObject_AsReadBuffer(object obj, void **buffer, Py_ssize_t *buffer_len)
+
+
+
diff --git a/h5py/tests/__init__.py b/h5py/tests/__init__.py
new file mode 100755
index 0000000..e58680e
--- /dev/null
+++ b/h5py/tests/__init__.py
@@ -0,0 +1,39 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+import unittest
+import sys
+import test_h5a
+import h5py.h5a, h5py.h5f, h5py.h5g, h5py.h5d, h5py.h5s, h5py.h5t, h5py.h5z, h5py.h5p
+
+TEST_CASES = (test_h5a.TestH5A,)
+
+def buildsuite(cases):
+
+    loader = unittest.TestLoader()
+    suite = unittest.TestSuite()
+    for test_case in cases:
+        suite.addTests(loader.loadTestsFromTestCase(test_case))
+    return suite
+
+def runtests():
+    suite = buildsuite(TEST_CASES)
+    retval = unittest.TextTestRunner(verbosity=3).run(suite)
+    return retval.wasSuccessful()
+
+def autotest():
+    # Exit with a nonzero status if any test fails (for scripted builds).
+    if not runtests():
+        sys.exit(1)
+
+__all__ = ['test_h5a','runtests']
+
+
diff --git a/h5py/tests/common.py b/h5py/tests/common.py
new file mode 100755
index 0000000..445ca55
--- /dev/null
+++ b/h5py/tests/common.py
@@ -0,0 +1,36 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+import tempfile
+import os
+import shutil
+from h5py import h5f
+
+def getcopy(filename):
+    """ Create a temporary working copy of "filename". Return is a 2-tuple
+        containing (HDF5 file id, file name)
+    """
+    newname = tempfile.mktemp('.hdf5')
+    shutil.copy(filename, newname)
+    fid = h5f.open(newname, h5f.ACC_RDWR)
+    return (fid, newname)
+
+def deletecopy(fid, newname):
+    """ Close and remove a temporary working copy created by getcopy(). """
+    h5f.close(fid)
+    os.unlink(newname)
+
+def errstr(arg1, arg2, msg=''):
+    """ Used to mimic assertEqual-style auto-repr, where assertEqual doesn't
+        work (i.e. for Numpy arrays where all() must be used)
+    """
+    return msg+'%s != %s' % (repr(arg1), repr(arg2))
+
diff --git a/h5py/tests/data/attributes.hdf5 b/h5py/tests/data/attributes.hdf5
new file mode 100755
index 0000000..c431d49
Binary files /dev/null and b/h5py/tests/data/attributes.hdf5 differ
diff --git a/h5py/tests/test_h5a.py b/h5py/tests/test_h5a.py
new file mode 100755
index 0000000..22be4e5
--- /dev/null
+++ b/h5py/tests/test_h5a.py
@@ -0,0 +1,240 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+import unittest
+from numpy import array, ndarray, dtype, all
+import os
+
+from h5py import h5a
+from h5py import h5f, h5g, h5i, h5t, h5s
+import h5py
+from common import getcopy, deletecopy, errstr
+
+from h5py.errors import DatatypeError
+
+HDFNAME = os.path.join(os.path.dirname(h5py.__file__), 'tests/data/attributes.hdf5')
+OBJECTNAME = 'Group'
+# Attributes known to exist in the fixture file: name -> (value, dtype, shape)
+ATTRIBUTES = {  'String Attribute': ("This is a string.", dtype('S18'), ()),
+                'Integer': (42, dtype('<i4'), ()),
+                'Integer Array': ( [0,1,2,3], dtype('<i4'), (4,) ),
+                'Byte': (-34, dtype('|i1'), ()) }
+# Creation order of the attributes above, as stored in the fixture file.
+ATTRIBUTES_ORDER = ['String Attribute', 'Integer', 'Integer Array', 'Byte']
+NEW_ATTRIBUTES = {'New float': ( 3.14, dtype('<f4'), ()) }
+
+class TestH5A(unittest.TestCase):
+
+    # Each test gets a fresh read-only handle on the fixture file; tests
+    # which need to write make their own copy via getcopy()/deletecopy().
+    def setUp(self):
+        self.fid = h5f.open(HDFNAME, h5f.ACC_RDONLY)
+        self.obj = h5g.open(self.fid, OBJECTNAME)
+
+    def tearDown(self):
+        h5g.close(self.obj)
+        h5f.close(self.fid)
+
+    def is_attr(self, aid):
+        # True if "aid" is a live attribute identifier.
+        return (h5i.get_type(aid) == h5i.TYPE_ATTR)
+
+    # === General attribute operations ========================================
+
+    def test_create_write(self):
+        fid, filename = getcopy(HDFNAME)
+        obj = h5g.open(fid, OBJECTNAME)
+        for name, (value, dt, shape) in NEW_ATTRIBUTES.iteritems():
+            arr_ref = array(value, dtype=dt)
+
+            sid = h5s.create(h5s.CLASS_SCALAR)
+            tid = h5t.py_dtype_to_h5t(dt)
+
+            aid = h5a.create(obj, name, tid, sid)
+            self.assert_(self.is_attr(aid))
+            h5a.write(aid, arr_ref)
+            h5a.close(aid)
+
+            # Round-trip: what we read back must equal what we wrote.
+            arr_val = h5a.py_get(obj,name)
+            self.assert_(all(arr_val == arr_ref), errstr(arr_val, arr_ref))
+            try:
+                h5t.close(tid)
+            except DatatypeError:
+                # Immutable/predefined types refuse to be closed; ignore.
+                pass
+            h5s.close(sid)
+        h5g.close(obj)
+        deletecopy(fid, filename)
+        
+    def test_open_idx(self):
+        for idx, name in enumerate(ATTRIBUTES_ORDER):
+            aid = h5a.open_idx(self.obj, idx)
+            self.assert_(self.is_attr(aid), "Open: index %d" % idx)
+            h5a.close(aid)
+
+    def test_open_name(self):
+        for name in ATTRIBUTES:
+            aid = h5a.open_name(self.obj, name)
+            self.assert_(self.is_attr(aid), 'Open: name "%s"' % name)
+            h5a.close(aid)
+
+    def test_close(self):
+        aid = h5a.open_idx(self.obj, 0)
+        self.assert_(self.is_attr(aid))
+        h5a.close(aid)
+        # After closing, the identifier must no longer be a valid attribute.
+        self.assert_(not self.is_attr(aid))
+    
+    def test_delete(self):
+        fid, filename = getcopy(HDFNAME)
+        obj = h5g.open(fid, OBJECTNAME)
+        self.assert_(h5a.py_exists(obj, ATTRIBUTES_ORDER[0]))
+        h5a.delete(obj, ATTRIBUTES_ORDER[0])
+        self.assert_(not h5a.py_exists(obj, ATTRIBUTES_ORDER[0]))
+        deletecopy(fid, filename)
+
+    # === Attribute I/O =======================================================
+
+    def test_read(self):
+        for name in ATTRIBUTES:
+            value, dt, shape = ATTRIBUTES[name]
+            aid = h5a.open_name(self.obj, name)
+            # h5a.read fills a pre-allocated array of matching shape/dtype.
+            arr_holder = ndarray(shape, dtype=dt)
+            arr_reference = array(value, dtype=dt)
+            h5a.read(aid, arr_holder)
+            self.assert_( all(arr_holder == arr_reference),
+                errstr(arr_reference, arr_holder, 'Attr "%s"):\n' % name, ))
+            h5a.close(aid)
+        
+    # h5a.write is done by test_create_write
+
+    # === Attribute inspection ================================================
+
+    def test_get_num_attrs(self):
+        n = h5a.get_num_attrs(self.obj)
+        self.assertEqual(n, len(ATTRIBUTES))
+
+    def test_get_name(self):
+    
+        for name in ATTRIBUTES:
+            aid = h5a.open_name(self.obj, name)
+            supposed_name = h5a.get_name(aid)
+            self.assertEqual(supposed_name, name)
+            h5a.close(aid)
+
+    def test_get_space(self):
+
+        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
+            aid = h5a.open_name(self.obj, name)
+            sid = h5a.get_space(aid)
+            shape_tpl = h5s.get_simple_extent_dims(sid)
+            self.assertEqual(shape_tpl, shape)
+            h5s.close(sid)
+            h5a.close(aid)
+
+    def test_get_type(self):
+
+        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
+            aid = h5a.open_name(self.obj, name)
+            tid = h5a.get_type(aid)
+            supposed_dtype = h5t.py_h5t_to_dtype(tid)
+            self.assertEqual(supposed_dtype, dt)
+            h5t.close(tid)
+            h5a.close(aid)
+
+    def test_iterate(self):
+
+        # Callback that visits every attribute.
+        def iterate_all(id, name, namelist):
+            namelist.append(name)
+
+        # Callback that stops cleanly after two attributes; raising
+        # StopIteration is the documented way to end h5a.iterate early.
+        def iterate_two(id, name, namelist):
+            if len(namelist) == 2:
+                raise StopIteration
+            namelist.append(name)
+
+        # Callback whose exception must propagate out of h5a.iterate.
+        def iterate_fault(id, name, namelist):
+            if len(namelist) == 2:
+                raise RuntimeError("Intentional fault")
+            namelist.append(name)
+
+        namelist = []
+        n = h5a.iterate(self.obj, iterate_all, namelist)
+        self.assertEqual(namelist, ATTRIBUTES_ORDER)
+        self.assertEqual(n, len(ATTRIBUTES_ORDER)-1)
+
+        namelist = []
+        n = h5a.iterate(self.obj, iterate_two, namelist)
+        self.assertEqual(namelist, ATTRIBUTES_ORDER[0:2])
+        self.assertEqual(n, 1)
+
+        namelist = []
+        self.assertRaises(RuntimeError, h5a.iterate, self.obj, iterate_fault, namelist)
+        self.assertEqual(namelist, ATTRIBUTES_ORDER[0:2])
+        
+        # Final argument is the starting index for iteration.
+        namelist = []
+        n = h5a.iterate(self.obj, iterate_two, namelist, 1)
+        self.assertEqual(namelist, ATTRIBUTES_ORDER[1:3])
+        self.assertEqual(n, 2)
+
+
+    # === Python extensions ===================================================
+
+    def test_py_listattrs(self):
+        self.assertEqual(h5a.py_listattrs(self.obj), ATTRIBUTES_ORDER)
+
+    def test_py_shape(self):
+        
+        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
+            aid = h5a.open_name(self.obj, name)
+            retshape = h5a.py_shape(aid)
+            self.assertEqual(retshape, shape) 
+            h5a.close(aid)
+
+    def test_py_dtype(self):
+
+        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
+            aid = h5a.open_name(self.obj, name)
+            self.assertEqual(h5a.py_dtype(aid),dt)
+            h5a.close(aid)
+
+    def test_py_get(self):
+
+        for name, (value, dt, shape) in ATTRIBUTES.iteritems():
+            arr_reference = array(value, dtype=dt)
+            arr_returned = h5a.py_get(self.obj, name)
+            self.assert_(all(arr_returned == arr_reference), 
+                errstr(arr_reference, arr_returned))
+
+    def test_py_set(self):
+
+        fid, filename = getcopy(HDFNAME)
+        obj = h5g.open(fid, OBJECTNAME)
+
+        for name, (value, dt, shape) in NEW_ATTRIBUTES.iteritems():
+            arr_reference = array(value, dtype=dt)
+            h5a.py_set(obj, name, arr_reference)
+            arr_ret = h5a.py_get(obj, name)
+            self.assert_( all( arr_ret == arr_reference), errstr(arr_ret, arr_reference))
+        h5g.close(obj)
+        deletecopy(fid, filename)
+
+
+    def test_py_exists(self):
+
+        for name in ATTRIBUTES:
+            self.assert_(h5a.py_exists(self.obj, name), name)
+
+        self.assert_(not h5a.py_exists(self.obj, 'SOME OTHER ATTRIBUTE') )
+            
+
+
+
+
+
+
+
+
+
+
diff --git a/h5py/utils.c b/h5py/utils.c
new file mode 100755
index 0000000..b9a04dc
--- /dev/null
+++ b/h5py/utils.c
@@ -0,0 +1,175 @@
+/***** Preamble block *********************************************************
+* 
+* This file is part of h5py, a low-level Python interface to the HDF5 library.
+* 
+* Copyright (C) 2008 Andrew Collette
+* http://h5py.alfven.org
+* License: BSD  (See LICENSE.txt for full license)
+* 
+* $Date$
+* 
+****** End preamble block ****************************************************/
+
+/* 
+   Utilities which are difficult or impossible to implement in pure Pyrex, 
+   such as functions requiring HDF5 C macros.
+
+   This file contains code based on utils.c from the PyTables project.  The
+   complete PyTables license is available under licenses/pytables.txt in the
+   distribution root directory.
+*/
+
+#include "Python.h"
+#include "numpy/arrayobject.h"
+#include "utils.h"
+#include "hdf5.h"
+
+
+/* Check to make sure we can reliably copy data from this array.
+   Returns 1 iff arr is a numpy ndarray that is contiguous and
+   "behaved" (per PyArray_ISBEHAVED: aligned and writeable); 0 otherwise.
+*/
+int check_array(PyObject* arr){
+
+    if(!PyArray_Check(arr)) return 0;
+
+    if(!PyArray_ISCONTIGUOUS(arr)) return 0;
+
+    if(!PyArray_ISBEHAVED(arr)) return 0;
+
+    return 1;
+
+}
+
+/* Convert an hsize_t array to a Python tuple of long ints.
+   Returns None on failure (note: None, not NULL -- callers must check
+   for None rather than relying on an exception).
+*/
+PyObject* dims_to_tuple(hsize_t* dims, hsize_t rank) {
+
+    PyObject* tpl;
+    PyObject* plong;
+    int i;
+    tpl = NULL;
+    plong = NULL;
+    
+    tpl = PyTuple_New(rank);
+    if(tpl == NULL) goto err;
+
+    for(i=0; i<rank; i++){
+        plong = PyLong_FromLong((long) dims[i]);
+        if(plong == NULL) goto err;
+        PyTuple_SET_ITEM(tpl, i, plong); /* steals reference */
+    }
+    
+    /* PyTuple_New already returned an owned (new) reference; the extra
+       Py_INCREF that used to sit here leaked one reference per call. */
+    return tpl;
+
+    err:
+    Py_XDECREF(tpl);
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+/* Convert a Python tuple to a malloc'ed hsize_t array 
+   Returns NULL on failure.  The caller owns the returned buffer and
+   must free() it.
+*/
+hsize_t* tuple_to_dims(PyObject* tpl){
+
+    int rank;
+    hsize_t* dims;
+    PyObject* temp;
+    int i;
+    dims = NULL;
+    temp = NULL;
+
+    if(tpl == NULL) goto err;
+    if(!PyTuple_Check(tpl)) goto err;
+
+    rank = (int)PyTuple_GET_SIZE(tpl);
+
+    dims = (hsize_t*)malloc(sizeof(hsize_t)*rank);
+    if(dims == NULL) goto err;  /* malloc failure was previously unchecked */
+
+    for(i=0; i<rank; i++){
+        temp = PyTuple_GetItem(tpl, i);  /* borrowed reference */
+        if(temp == NULL) goto err;
+
+        /* Conditions now properly parenthesized; "if PyLong_Check(temp)"
+           only compiled because the macro expansion happened to be
+           parenthesized. */
+        if(PyLong_Check(temp))
+            dims[i] = (hsize_t)PyLong_AsLong(temp);
+        else if(PyInt_Check(temp))
+            dims[i] = (hsize_t)PyLong_AsLong(temp);
+        else if(PyFloat_Check(temp))
+            dims[i] = (hsize_t)PyFloat_AsDouble(temp);
+        else
+            goto err;
+    }
+
+    return dims;
+
+    err:
+      if(dims!=NULL) free(dims);
+      return NULL;
+}
+
+/* Rewritten versions of create_ieee_complex64/128 from Pytables, to support 
+   standard array-interface typecodes and variable names for real/imag parts.  
+   Also removed unneeded datatype copying.
+   Both return -1 on failure.
+*/
+hid_t create_ieee_complex64(const char byteorder, const char* real_name, const char* img_name) {
+  hid_t float_id = -1;
+  hid_t complex_id = -1;
+  herr_t retval = -1;
+
+  /* Compound type laid out exactly like NumPy's npy_complex64 struct. */
+  complex_id = H5Tcreate(H5T_COMPOUND, sizeof(npy_complex64));
+  if(complex_id < 0) goto err;
+
+  /* byteorder uses array-interface codes: '<' LE, '>' BE, '='/'|' native. */
+  if (byteorder == '<')
+    float_id = H5T_IEEE_F32LE;
+  else if (byteorder == '>')
+    float_id = H5T_IEEE_F32BE;
+  else if (byteorder == '=' || byteorder == '|')
+    float_id = H5T_NATIVE_FLOAT;
+  else
+    goto err;
+
+  retval = H5Tinsert(complex_id, real_name, HOFFSET(npy_complex64, real), float_id);
+  if(retval<0) goto err;
+
+  retval = H5Tinsert(complex_id, img_name, HOFFSET(npy_complex64, imag), float_id);
+  if(retval<0) goto err;
+
+  return complex_id;
+
+  err:
+    /* float_id is a library-predefined type constant -- it was never
+       opened here, so only complex_id is closed. */
+    if(complex_id > 0)
+        H5Tclose(complex_id);
+    return -1;
+}
+
+/* 128-bit (double precision) twin of create_ieee_complex64 above:
+   builds an HDF5 compound type matching NumPy's npy_complex128.
+   Returns the new type id, or -1 on failure. */
+hid_t create_ieee_complex128(const char byteorder, const char* real_name, const char* img_name) {
+  hid_t float_id = -1;
+  hid_t complex_id = -1;
+  herr_t retval = -1;
+
+  complex_id = H5Tcreate(H5T_COMPOUND, sizeof(npy_complex128));
+  if(complex_id < 0) goto err;
+
+  /* byteorder uses array-interface codes: '<' LE, '>' BE, '='/'|' native. */
+  if (byteorder == '<')
+    float_id = H5T_IEEE_F64LE;
+  else if (byteorder == '>')
+    float_id = H5T_IEEE_F64BE;
+  else if (byteorder == '=' || byteorder == '|')
+    float_id = H5T_NATIVE_DOUBLE;
+  else
+    goto err;
+
+  retval = H5Tinsert(complex_id, real_name, HOFFSET(npy_complex128, real), float_id);
+  if(retval<0) goto err;
+
+  retval = H5Tinsert(complex_id, img_name, HOFFSET(npy_complex128, imag), float_id);
+  if(retval<0) goto err;
+
+  return complex_id;
+
+  err:
+    /* float_id is a predefined constant; only complex_id needs closing. */
+    if(complex_id > 0)
+        H5Tclose(complex_id);
+    return -1;
+}
diff --git a/h5py/utils.h b/h5py/utils.h
new file mode 100755
index 0000000..5cf78e8
--- /dev/null
+++ b/h5py/utils.h
@@ -0,0 +1,30 @@
+/***** Preamble block *********************************************************
+* 
+* This file is part of h5py, a low-level Python interface to the HDF5 library.
+* 
+* Copyright (C) 2008 Andrew Collette
+* http://h5py.alfven.org
+* License: BSD  (See LICENSE.txt for full license)
+* 
+* $Date$
+* 
+****** End preamble block ****************************************************/
+
+/*
+   This file contains code based on utils.c from the PyTables project.  The
+   complete PyTables license is available under licenses/pytables.txt in the
+   distribution root directory.
+*/
+
+
+#include "hdf5.h"
+
+hid_t create_ieee_complex64(const char byteorder, const char* real_name, const char* img_name);
+hid_t create_ieee_complex128(const char byteorder, const char* real_name, const char* img_name);
+
+
+hsize_t* tuple_to_dims(PyObject* tpl);
+PyObject* dims_to_tuple(hsize_t* dims, hsize_t rank);
+
+int check_array(PyObject* arr);
+
diff --git a/h5py/utils.pxd b/h5py/utils.pxd
new file mode 100755
index 0000000..6922ede
--- /dev/null
+++ b/h5py/utils.pxd
@@ -0,0 +1,20 @@
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+from h5 cimport hid_t, hsize_t
+
+cdef extern from "utils.h":
+
+    # NOTE(review): dims_to_tuple in utils.c returns Py_None on failure
+    # rather than NULL, so Pyrex callers cannot rely on an exception
+    # being raised -- check the result for None explicitly.
+    hid_t create_ieee_complex64(char byteorder, char* real_name, char* img_name)
+    hid_t create_ieee_complex128(char byteorder, char* real_name, char* img_name)
+    hsize_t* tuple_to_dims(object tpl)
+    object dims_to_tuple(hsize_t* dims, hsize_t rank)
diff --git a/licenses/hdf5.txt b/licenses/hdf5.txt
new file mode 100644
index 0000000..54126de
--- /dev/null
+++ b/licenses/hdf5.txt
@@ -0,0 +1,69 @@
+HDF5 (Hierarchical Data Format 5) Software Library and Utilities
+Copyright 2006-2007 by The HDF Group (THG).
+
+NCSA HDF5 (Hierarchical Data Format 5) Software Library and Utilities
+Copyright 1998-2006 by the Board of Trustees of the University of Illinois.
+
+All rights reserved.
+
+Contributors: National Center for Supercomputing Applications (NCSA)
+at the University of Illinois, Fortner Software, Unidata Program
+Center (netCDF), The Independent JPEG Group (JPEG), Jean-loup Gailly
+and Mark Adler (gzip), and Digital Equipment Corporation (DEC).
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted for any purpose (including commercial
+purposes) provided that the following conditions are met:
+
+   1. Redistributions of source code must retain the above copyright
+notice, this list of conditions, and the following disclaimer.
+   2. Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions, and the following
+disclaimer in the documentation and/or materials provided with the
+distribution.
+   3. In addition, redistributions of modified forms of the source or
+binary code must carry prominent notices stating that the original
+code was changed and the date of the change.
+   4. All publications or advertising materials mentioning features or
+use of this software are asked, but not required, to acknowledge that
+it was developed by The HDF Group and by the National Center for
+Supercomputing Applications at the University of Illinois at
+Urbana-Champaign and credit the contributors.
+   5. Neither the name of The HDF Group, the name of the University,
+nor the name of any Contributor may be used to endorse or promote
+products derived from this software without specific prior written
+permission from THG, the University, or the Contributor, respectively.
+
+DISCLAIMER: THIS SOFTWARE IS PROVIDED BY THE HDF GROUP (THG) AND THE
+CONTRIBUTORS "AS IS" WITH NO WARRANTY OF ANY KIND, EITHER EXPRESSED OR
+IMPLIED. In no event shall THG or the Contributors be liable for any
+damages suffered by the users arising out of the use of this software,
+even if advised of the possibility of such damage.
+
+Portions of HDF5 were developed with support from the University of
+California, Lawrence Livermore National Laboratory (UC LLNL). The
+following statement applies to those portions of the product and must
+be retained in any redistribution of source code, binaries,
+documentation, and/or accompanying materials:
+
+This work was partially produced at the University of California,
+Lawrence Livermore National Laboratory (UC LLNL) under contract
+no. W-7405-ENG-48 (Contract 48) between the U.S. Department of Energy
+(DOE) and The Regents of the University of California (University) for
+the operation of UC LLNL.
+
+DISCLAIMER: This work was prepared as an account of work sponsored by
+an agency of the United States Government. Neither the United States
+Government nor the University of California nor any of their
+employees, makes any warranty, express or implied, or assumes any
+liability or responsibility for the accuracy, completeness, or
+usefulness of any information, apparatus, product, or process
+disclosed, or represents that its use would not infringe privately-
+owned rights. Reference herein to any specific commercial products,
+process, or service by trade name, trademark, manufacturer, or
+otherwise, does not necessarily constitute or imply its endorsement,
+recommendation, or favoring by the United States Government or the
+University of California. The views and opinions of authors expressed
+herein do not necessarily state or reflect those of the United States
+Government or the University of California, and shall not be used for
+advertising or product endorsement purposes.
diff --git a/licenses/pytables.txt b/licenses/pytables.txt
new file mode 100755
index 0000000..9f2f482
--- /dev/null
+++ b/licenses/pytables.txt
@@ -0,0 +1,33 @@
+Copyright Notice and Statement for PyTables Software Library and Utilities:
+
+Copyright (c) 2002, 2003, 2004  Francesc Altet
+Copyright (c) 2005, 2006, 2007  Carabos Coop. V.
+All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+a. Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+b. Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the
+   distribution.
+
+c. Neither the name of the Carabos Coop. V. nor the names of its
+   contributors may be used to endorse or promote products derived
+   from this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/meta/__init__.py b/meta/__init__.py
new file mode 100755
index 0000000..b2ad2dc
--- /dev/null
+++ b/meta/__init__.py
@@ -0,0 +1 @@
+import insertblock
diff --git a/meta/attrs.hdf5 b/meta/attrs.hdf5
new file mode 100755
index 0000000..c431d49
Binary files /dev/null and b/meta/attrs.hdf5 differ
diff --git a/meta/block.txt b/meta/block.txt
new file mode 100755
index 0000000..1b5f191
--- /dev/null
+++ b/meta/block.txt
@@ -0,0 +1,9 @@
+
+This file is part of h5py, a low-level Python interface to the HDF5 library.
+
+Copyright (C) 2008 Andrew Collette
+http://h5py.alfven.org
+License: BSD  (See LICENSE.txt for full license)
+
+$Date$
+
diff --git a/meta/gen_attributes.c b/meta/gen_attributes.c
new file mode 100755
index 0000000..204c1ad
--- /dev/null
+++ b/meta/gen_attributes.c
@@ -0,0 +1,91 @@
+
+/* Generate an HDF5 test file for attributes unit test. */
+
+#include "hdf5.h"
+
+int attributes(char* filename){
+    
+    /* Create <filename> containing group "Group" with four attributes:
+       a fixed-length string, a scalar int, a 4-element int array and a
+       byte.  Returns 0 on success, 1 on any HDF5 failure. */
+    char val1[] = "This is a string.";
+    int val2 = 42;
+    hsize_t val3_dims = 4;
+    int val3[4];
+    int val4 = -34;
+    
+    val3[0] = 0;
+    val3[1] = 1;
+    val3[2] = 2;
+    val3[3] = 3;
+
+    /* NOTE(review): declarations after statements require C99; for C89
+       compilers the hid_t declarations must precede the val3 stores. */
+    hid_t sid_scalar=0;
+    hid_t sid_array=0;
+    hid_t fid=0;
+    hid_t gid=0;
+    hid_t string_id=0;
+    hid_t aid=0;
+
+    int retval=1;
+
+    sid_scalar = H5Screate(H5S_SCALAR);
+    if(sid_scalar<0) goto out;
+
+    sid_array = H5Screate_simple(1, &val3_dims, NULL);
+    if(sid_array<0) goto out;
+
+    string_id = H5Tcopy(H5T_C_S1);
+    if(string_id<0) goto out;
+
+    if(H5Tset_size(string_id, 18)<0) goto out; /* string is 17 chars, plus NULL */
+
+    fid = H5Fcreate(filename, H5F_ACC_TRUNC, H5P_DEFAULT, H5P_DEFAULT);
+    if(fid<0) goto out;
+
+    /* NOTE(review): size_hint of -1 wraps to SIZE_MAX; 0 is the usual
+       "use default" value for H5Gcreate -- confirm intent. */
+    gid = H5Gcreate(fid, "Group", -1);
+    if(gid<0) goto out;
+
+    /* 1: "String attribute", exactly 18 bytes with null terminator, scalar,
+          value "This is a string." */
+    aid = H5Acreate(gid, "String Attribute", string_id, sid_scalar, H5P_DEFAULT);
+    if(aid<0) goto out;
+    if(H5Awrite(aid, string_id, &val1) < 0) goto out;
+    if(H5Aclose(aid)<0) goto out;
+
+    /* 2: "Integer", 32-bit little-endian, scalar, value 42. */
+    aid = H5Acreate(gid, "Integer", H5T_STD_I32LE, sid_scalar, H5P_DEFAULT);
+    if(aid<0) goto out;
+    if(H5Awrite(aid, H5T_NATIVE_INT, &val2)<0) goto out;
+    if(H5Aclose(aid)<0) goto out;
+
+    /* 3: "Integer array", 4-element 32-bit little endian, value [0,1,2,3] */
+    aid = H5Acreate(gid, "Integer Array", H5T_STD_I32LE, sid_array, H5P_DEFAULT);
+    if(aid<0) goto out;
+    if(H5Awrite(aid, H5T_NATIVE_INT, &val3)<0) goto out;
+    if(H5Aclose(aid)<0) goto out;
+
+    /* 4: "Byte", 8-bit "little-endian" integer, value -34 */
+    aid = H5Acreate(gid, "Byte", H5T_STD_I8LE, sid_scalar, H5P_DEFAULT);
+    if(aid<0) goto out;
+    if(H5Awrite(aid, H5T_NATIVE_INT, &val4)<0) goto out;
+    if(H5Aclose(aid)<0) goto out;
+
+    retval = 0;  /* got here == success */
+
+    out:
+
+    /* Cleanup uses 0 as the "never opened" sentinel.  NOTE(review): fid
+       is closed before gid (the object it contains); HDF5 defers the
+       actual file close, but closing gid first would be cleaner.  aid
+       is not closed here -- a failed H5Awrite leaks the open attribute. */
+    if(sid_scalar) H5Sclose(sid_scalar);
+    if(sid_array)  H5Sclose(sid_array);
+    if(fid) H5Fclose(fid);
+    if(gid) H5Gclose(gid);
+    if(string_id) H5Tclose(string_id);
+
+    return retval;
+}
+
+int main(int argc, char **argv){
+
+    /* Usage: gen_attributes <output-filename>.
+       Exit status: 2 on bad usage, otherwise attributes()'s result
+       (0 = success, 1 = HDF5 failure). */
+    if(argc != 2)
+        return 2;
+    return attributes(argv[1]);
+}
+
+
+
+    
diff --git a/meta/insertblock.py b/meta/insertblock.py
new file mode 100755
index 0000000..83e8423
--- /dev/null
+++ b/meta/insertblock.py
@@ -0,0 +1,81 @@
+import os
+
+python_mode = ('.py', '.pyx', '.pxd', '.pxi')
+c_mode = ('.c','.h')
+
+firstline = { 'python': "#+",
+              'c': "/*"+"*"*4 + " Preamble block " + "*"*57}
+
+lastline = { 'python': "#-",
+             'c': '*'*6 + " End preamble block " + '*'*52 +'/' }
+
+beginline = { 'python': '# ', 'c': '* ' }
+
+def guessmode(filename):
+    ext = os.path.splitext(filename)[1]
+    if ext in python_mode:
+        return 'python'
+    if ext in c_mode:
+        return 'c'
+    raise ValueError("Can't determine mode of %s" % filename)
+
+def iterblock(file_obj, mode, output_block=True):
+
+    in_block = False
+
+    for idx, line in enumerate(file_obj):
+        matchline = line.strip()
+
+        if matchline == firstline[mode]:
+            in_block = True
+            continue
+        elif matchline == lastline[mode]:
+            in_block = False
+            continue
+
+        if in_block == output_block:
+            yield line
+
+def printblock(filename):
+    # Print the preamble block of <filename> to stdout.  iterblock
+    # yields lines that still carry their newlines, hence the empty-string
+    # join.  (Python 2 print statement.)
+    fh = open(filename,'r')
+    try:
+        print "".join(iterblock(fh, guessmode(filename), True))
+    finally:
+        fh.close()
+    
+def replaceblock(filename, block_filename):
+    # Replace (or insert) the preamble block of <filename> with the text
+    # of <block_filename>, each line prefixed with the mode's comment
+    # leader, then rewrite the file in place.
+    # NOTE(review): the file handles are not closed on exception here,
+    # unlike printblock which uses try/finally.
+    mode = guessmode(filename)
+    fh = open(filename,'r')
+
+    contents = list(iterblock(fh, mode, False))
+    fh.close()
+
+    fh_block = open(block_filename, 'r')
+    block_contents = list(fh_block)
+    fh_block.close()
+
+    # New file = marker line + commented block + marker line + old body.
+    contents =  [firstline[mode]+os.linesep] + \
+                [beginline[mode] + line for line in block_contents] + \
+                [lastline[mode]+os.linesep] + \
+                contents
+
+    fh = open(filename,'w')
+    fh.write("".join(contents))
+    fh.close()
+
+def eraseblock(filename):
+    # Strip the preamble block from <filename>, rewriting it in place.
+    # Silently does nothing if the file cannot be opened.
+    try:
+        fh = open(filename,'r')
+    except IOError:
+        return
+    try:
+        contents = list(iterblock(fh, guessmode(filename),False))
+    finally:
+        fh.close()
+
+    fh = open(filename, 'w')
+    fh.write("".join(contents))
+    fh.close()
+
+
+
diff --git a/obsolete/attrs.hdf5 b/obsolete/attrs.hdf5
new file mode 100755
index 0000000..3ecb615
Binary files /dev/null and b/obsolete/attrs.hdf5 differ
diff --git a/obsolete/definitions.pxd b/obsolete/definitions.pxd
new file mode 100755
index 0000000..fb39341
--- /dev/null
+++ b/obsolete/definitions.pxd
@@ -0,0 +1,62 @@
+#  Ei!, emacs, this is -*-Python-*- mode
+########################################################################
+#
+#       License: BSD
+#       Created: June 20, 2005
+#       Author:  Francesc Altet - faltet at carabos.com
+#
+#       $Id: definitions.pyd 1018 2005-06-20 09:43:34Z faltet $
+#
+########################################################################
+
+"""Here are some definitions for sharing between extensions.
+
+"""
+
+import sys
+
+from defs_c cimport size_t, time_t
+from defs_h5 cimport hid_t, hbool_t, herr_t, htri_t, hsize_t, hssize_t, hvl_t
+
+# Structs and types from HDF5
+cdef extern from "hdf5.h":
+
+
+  int H5FD_LOG_LOC_WRITE, H5FD_LOG_ALL
+  int H5I_INVALID_HID
+
+  # Native types
+  # NOT MOVED
+  cdef enum:
+    H5T_C_S1
+
+
+  # The order to retrieve atomic native datatype
+  # NOT MOVED
+  cdef enum H5T_direction_t:
+    H5T_DIR_DEFAULT     = 0,    # default direction is ascending
+    H5T_DIR_ASCEND      = 1,    # in ascending order
+    H5T_DIR_DESCEND     = 2     # in descending order
+
+
+
+
+
+
+  # === HDF5 API ==============================================================
+
+
+
+
+
+  
+
+
+
+
+
+
+
+
+
+
diff --git a/obsolete/defs_h5common.pxd b/obsolete/defs_h5common.pxd
new file mode 100755
index 0000000..eff2399
--- /dev/null
+++ b/obsolete/defs_h5common.pxd
@@ -0,0 +1,5 @@
+from defs_h5 cimport hsize_t
+
+cdef hsize_t* tuple_to_dims(object dims_tpl, int rank)
+cdef object dims_to_tuple(hsize_t* dims, int rank)
+
diff --git a/obsolete/file.hdf5 b/obsolete/file.hdf5
new file mode 100755
index 0000000..c11879d
Binary files /dev/null and b/obsolete/file.hdf5 differ
diff --git a/obsolete/fragments.pyx b/obsolete/fragments.pyx
new file mode 100755
index 0000000..db47b54
--- /dev/null
+++ b/obsolete/fragments.pyx
@@ -0,0 +1,138 @@
+def set_fill_value(hid_t plist, object value):
+    """ (INT plist, INT type_id, ARRAY_SCALAR value)
+        For lists of class CLASS_DATASET_CREATE
+
+        Set the fill value for the dataset. <value> should be a NumPy array 
+        scalar or 0-dimensional array.  It's up to you to make sure the dtype 
+        of the scalar is compatible with the type of whatever dataset you want 
+        to use this list on.
+
+        As a special exception, providing a value of None means the fill is
+        undefined (HDF5 default is otherwise zero-fill).
+    """
+    cdef hid_t type_id
+    cdef herr_t retval
+    cdef void* data_ptr
+
+    # Everything below the raise is unreachable -- kept as a sketch of
+    # the intended implementation (this file lives under obsolete/).
+    raise NotImplementedError()
+
+    if value is None:
+        retval = H5Pset_fill_value(plist, 0, NULL)
+        if retval < 0:
+            raise PropertyError("Failed to undefine fill value on list %d" % plist)
+        return
+
+    if not PyArray_CheckScalar(value):
+        raise ValueError("Given fill value must be a Numpy array scalar or 0-dimensional array")
+
+    # NOTE(review): assumes 128 bytes is enough for any scalar -- verify.
+    data_ptr = malloc(128)
+    PyArray_ScalarAsCtype(value, data_ptr)
+    type_id = h5t.py_dtype_to_h5t(value.dtype)
+
+    retval = H5Pset_fill_value(plist, type_id, data_ptr)
+    if retval < 0:
+        free(data_ptr)
+        H5Tclose(type_id)
+        raise PropertyError("Failed to set fill value on list %d to %s" % (plist, repr(value)))
+
+    free(data_ptr)
+    H5Tclose(type_id)
+
+def get_fill_value(hid_t plist, object dtype_in):
+    """ (INT plist_id, DTYPE dtype_in) => ARRAY_SCALAR value
+
+        Obtain the fill value.  Due to restrictions in the HDF5 library
+        design, you have to provide a Numpy dtype object specifying the
+        fill value type.  The function will raise an exception if this
+        type is not conversion-compatible with the fill value type recorded
+        in the list.
+    """
+    # Unimplemented stub: the cdef declarations below the raise are dead.
+    raise NotImplementedError()
+    cdef herr_t retval
+    cdef hid_t type_id
+
+def set_filter(hid_t plist, int filter_code, unsigned int flags, object data_tpl=()):
+    """ (INT plist_id, INT filter_type_code, UINT flags, TUPLE data)
+
+        Install a filter on the property list; <data> is a tuple of
+        unsigned ints passed through to the filter as client data.
+    """
+
+    cdef unsigned int *data
+    cdef size_t datalen
+    cdef int i
+    cdef herr_t retval
+
+    # Was "if !PyTuple_Check(...)" -- C syntax; Pyrex requires "not".
+    if not PyTuple_Check(data_tpl):
+        raise ValueError("Data for the filter must be a tuple of integers")
+
+    datalen = len(data_tpl)
+    data = <unsigned int*>malloc(sizeof(unsigned int)*datalen)
+
+    try:
+        for i from 0<=i<datalen:
+            data[i] = data_tpl[i]
+
+        # Was "data_len" (undefined name); the local variable is "datalen".
+        retval = H5Pset_filter(plist, filter_code, flags, datalen, data)
+        if retval < 0:
+            raise PropertyError("Failed to set filter code %d on list %d; flags %d, data %s" % (filter_code, plist, flags, str(data_tpl)))
+    finally:
+        free(data) 
+    
+def all_filters_avail(hid_t plist):
+
+    # Return True iff every filter configured on dataset-creation list
+    # <plist> is available in the running HDF5 library.
+    cdef htri_t retval
+    retval = H5Pall_filters_avail(plist)
+    if retval < 0:
+        raise PropertyError("Failed to determine filter status on list %d" % plist)
+    return bool(retval)
+
+def get_nfilters(hid_t plist):
+    """ Return the number of filters attached to dataset-creation
+        property list <plist>.  Raises PropertyError on failure.
+    """
+    # The original def line was missing its trailing colon (syntax error).
+    cdef int retval
+    retval = H5Pget_nfilters(plist)
+    if retval < 0:
+        raise PropertyError("Failed to determine number of filters in list %d" % plist)
+    return retval
+
+cdef class FilterInfo:
+
+    # Simple record describing one pipeline filter.
+    cdef object name            # filter name
+    cdef int code               # H5Z filter code
+    cdef unsigned int flags     # H5Z_FLAG_* bits
+    cdef object data            # tuple of unsigned int client data
+
+def get_filter_info(hid_t plist, unsigned int filter_no):
+
+    # Query filter <filter_no> on property list <plist> and package the
+    # result (name, code, flags, client data) in a FilterInfo record.
+    cdef char namearr[256]
+    cdef int namelen
+    cdef unsigned int flags
+    cdef size_t datalen
+    cdef unsigned int data[256]
+    cdef int retval
+    cdef int i
+
+    # In/out buffer capacities for H5Pget_filter.
+    datalen = 256
+    namelen = 256
+
+    retval = <int>H5Pget_filter(plist, filter_no, &flags, &datalen, &data, namelen, &namearr)
+    if retval < 0:
+        raise PropertyError("Failed to get info for filter %d on list %d" % (filter_no, plist))
+    
+    # HDF5 docs claim the string may not be properly terminated.
+    for i from 0<=i<namelen:
+        if namearr[i] == c'\0':
+            break
+    if i == namelen:
+        namearr[namelen-1] = c'\0'
+
+    tpl = PyTuple_New(datalen)
+    for i from 0<=i<datalen:
+        tmp = data[i]
+        Py_INCREF(tmp)  # to get around pyrex issues
+        PyTuple_SetItem(tpl, i, tmp)
+
+    info = FilterInfo()
+    # NOTE(review): this assigns the raw char* address, not a Python
+    # string -- the intended conversion is presumably str(namearr); verify.
+    info.name = &namearr
+    info.code = retval
+    info.flags = flags
+    info.data = tpl
+
+    return info
diff --git a/obsolete/test.h5 b/obsolete/test.h5
new file mode 100755
index 0000000..e386791
Binary files /dev/null and b/obsolete/test.h5 differ
diff --git a/obsolete/test_h5a.pyx b/obsolete/test_h5a.pyx
new file mode 100755
index 0000000..3b53697
--- /dev/null
+++ b/obsolete/test_h5a.pyx
@@ -0,0 +1,138 @@
+##### Preamble block ##########################################################
+# 
+# This file is part of the "h5py" HDF5 Interface for Python project.
+# 
+# Copyright 2008 Andrew Collette
+# http://software.alfven.org
+# License: BSD  (See file "LICENSE" for complete license, or the URL above)
+# 
+##### End preamble block ######################################################
+
+from defs_h5t cimport H5T_NATIVE_INT8
+from defs_h5i cimport H5Iget_type, H5I_ATTR, H5I_BADID
+from defs_h5a cimport H5Aclose, H5Acreate, H5Adelete, H5Awrite
+from defs_h5p cimport H5P_DEFAULT
+from defs_h5s cimport H5Screate
+
+import os
+import unittest
+import tempfile
+
+import numpy
+import h5f
+import h5g
+import h5a
+import h5s
+import h5t
+
+from errors import H5AttributeError
+
+SCL_NAME = 'SCALAR ATTRIBUTE'
+ARR_NAME = 'ARRAY ATTRIBUTE'
+TEST_NAME = 'TEST ATTRIBUTE'
+
+class TestH5A(unittest.TestCase):
+
+    """ Exercises the h5a wrappers and the raw H5A* C API side by side
+        on a fresh temp file holding one scalar and one 2x3 array
+        attribute of 8-bit signed ints.
+    """
+
+    def setUp(self):
+        self.fname = tempfile.mktemp(".hdf5")
+        self.fid = h5f.create(self.fname, h5f.ACC_TRUNC)
+        self.gid = h5g.create(self.fid, "GROUP")
+        sid = h5s.create(h5s.CLASS_SCALAR)
+        sid2 = h5s.create_simple((2,3))
+        self.scl_attr = H5Acreate(self.gid, SCL_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
+        self.arr_attr = H5Acreate(self.gid, ARR_NAME, H5T_NATIVE_INT8, sid2, H5P_DEFAULT)
+        h5s.close(sid2)
+        h5s.close(sid)
+
+    def tearDown(self):
+        H5Aclose(self.arr_attr)
+        H5Aclose(self.scl_attr)
+        h5g.close(self.gid)
+        h5f.close(self.fid)
+        os.unlink(self.fname)
+    
+    def testcreate(self):
+        sid = h5s.create(h5s.CLASS_SCALAR)
+        aid = h5a.create(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid)
+        self.assert_(H5Iget_type(aid) == H5I_ATTR)
+        H5Aclose(aid)
+        H5Adelete(self.gid, TEST_NAME)
+        h5s.close(sid)
+
+    def test_open_idx(self):
+        # Attributes should come back by creation index: 0 = scalar,
+        # 1 = array; index 2 is out of range and must raise.
+        aid = h5a.open_idx(self.gid, 0)
+        self.assert_(h5a.get_name(aid) == SCL_NAME)
+        H5Aclose(aid)
+        aid = h5a.open_idx(self.gid, 1)
+        self.assert_(h5a.get_name(aid) == ARR_NAME)
+        H5Aclose(aid)
+
+        self.assertRaises(H5AttributeError, h5a.open_idx, self.gid, 2)
+
+    def test_open_name(self):
+        aid = h5a.open_name(self.gid, SCL_NAME)
+        self.assert_(H5Iget_type(aid) == H5I_ATTR)
+        H5Aclose(aid)
+
+    def test_close(self):
+        # After h5a.close, the identifier should no longer be valid.
+        sid = H5Screate(h5s.CLASS_SCALAR)
+        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
+        h5s.close(sid)
+        self.assert_(H5Iget_type(aid) == H5I_ATTR)
+        h5a.close(aid)
+        self.assert_(H5Iget_type(aid) == H5I_BADID)
+
+    def test_delete(self):
+        cdef char foo
+        foo = 1
+
+        sid = H5Screate(h5s.CLASS_SCALAR)
+        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
+        h5s.close(sid)
+        self.assert_(H5Iget_type(aid) == H5I_ATTR)
+
+        retval = H5Awrite(aid, H5T_NATIVE_INT8, &foo)
+        assert retval >= 0
+        
+        H5Aclose(aid)
+
+        # Deleting the attribute should make open_name fail afterwards.
+        aid = h5a.open_name(self.gid, TEST_NAME)
+        h5a.close(aid)
+
+        h5a.delete(self.gid, TEST_NAME)
+        self.assertRaises(H5AttributeError, h5a.open_name, self.gid, TEST_NAME)
+
+    def test_read(self):
+
+        # Write 42 via the raw C API, then read it back through h5a.read
+        # into a 1-element ndarray of the matching dtype.
+        cdef char foo
+        foo = 42
+        sid = H5Screate(h5s.CLASS_SCALAR)
+        aid = H5Acreate(self.gid, TEST_NAME, H5T_NATIVE_INT8, sid, H5P_DEFAULT)
+        h5s.close(sid)
+        self.assert_(H5Iget_type(aid) == H5I_ATTR)
+
+        retval = H5Awrite(aid, H5T_NATIVE_INT8, &foo)
+        assert retval >= 0
+
+        a = numpy.ndarray((1,),dtype=h5t.py_h5t_to_dtype(H5T_NATIVE_INT8))
+        h5a.read(aid, a)
+
+        self.assert_(a[0] == 42)
+        H5Aclose(aid)
+        H5Adelete(self.gid, TEST_NAME)
+
+        
+
+        
+
+        
+
+
+
+
+
+
+
+
+
+
diff --git a/obsolete/test_h5f.pyx b/obsolete/test_h5f.pyx
new file mode 100755
index 0000000..8df1933
--- /dev/null
+++ b/obsolete/test_h5f.pyx
@@ -0,0 +1,76 @@
+##### Preamble block ##########################################################
+# 
+# This file is part of the "h5py" HDF5 Interface for Python project.
+# 
+# Copyright 2008 Andrew Collette
+# http://software.alfven.org
+# License: BSD  (See file "LICENSE" for complete license, or the URL above)
+# 
+##### End preamble block ######################################################
+
+from defs_h5f cimport H5Fopen, H5Fclose,\
+                      H5F_ACC_TRUNC, H5F_ACC_RDONLY, H5F_ACC_RDWR, H5F_ACC_EXCL
+from defs_h5p cimport H5P_DEFAULT
+from defs_h5i cimport H5Iget_type, H5I_FILE
+                 
+import unittest
+import os
+import tempfile
+
+import h5f
+from errors import FileError
+
+"""
+    Tests functions defined in h5f.  Requires HDF5 file; default name
+    is "test_simple.hdf5".
+
+"""
+TEST_FILE = "test_simple.hdf5"
+
+class TestH5F(unittest.TestCase):
+
+    """ Tests h5f.open/close/create against the shared TEST_FILE.
+        Octal literals (0600/0400) are Python 2 syntax.
+    """
+
+    def testopen(self):
+        os.chmod(TEST_FILE, 0600)
+
+        fid = h5f.open(TEST_FILE, flags=h5f.ACC_RDWR)
+        self.assert_(H5Iget_type(fid) == H5I_FILE)
+        H5Fclose(fid)
+
+        fid = h5f.open(TEST_FILE, flags=h5f.ACC_RDONLY)
+        self.assert_(H5Iget_type(fid) == H5I_FILE)
+        H5Fclose(fid)     
+
+        # NOTE(review): TEST_FILE is left read-only (0400) on exit; the
+        # other tests depend on re-chmodding it to 0600 themselves.
+        os.chmod(TEST_FILE, 0400)
+        
+        fid = h5f.open(TEST_FILE, flags=H5F_ACC_RDONLY)
+        self.assert_(H5Iget_type(fid) == H5I_FILE)
+        H5Fclose(fid)     
+
+        # Opening a read-only file for read-write must fail.
+        self.assertRaises(FileError, h5f.open, TEST_FILE, flags=h5f.ACC_RDWR)
+        
+    def testclose(self):
+        os.chmod(TEST_FILE, 0600)
+        fid = H5Fopen(TEST_FILE, H5F_ACC_RDWR, H5P_DEFAULT)
+        self.assert_(H5Iget_type(fid) == H5I_FILE)
+        h5f.close(fid)
+
+    def testcreate(self):
+
+        fd, name = tempfile.mkstemp('.hdf5')
+        os.close(fd)
+
+        fid = h5f.create(name, flags=h5f.ACC_TRUNC)
+        self.assert_(H5Iget_type(fid) == H5I_FILE)
+        H5Fclose(fid)   
+
+        # ACC_EXCL on an existing file must fail.
+        self.assertRaises(FileError, h5f.create, name, flags=h5f.ACC_EXCL)
+        
+        os.unlink(name)
+
+
+
+
+
+
+
+
diff --git a/obsolete/test_h5g.pyx b/obsolete/test_h5g.pyx
new file mode 100755
index 0000000..0dd36d4
--- /dev/null
+++ b/obsolete/test_h5g.pyx
@@ -0,0 +1,184 @@
+##### Preamble block ##########################################################
+# 
+# This file is part of the "h5py" HDF5 Interface for Python project.
+# 
+# Copyright 2008 Andrew Collette
+# http://software.alfven.org
+# License: BSD  (See file "LICENSE" for complete license, or the URL above)
+# 
+##### End preamble block ######################################################
+
+from defs_h5g cimport H5Gclose, H5Gopen, H5Gget_objinfo, H5Gunlink, \
+                      H5G_GROUP, H5G_stat_t
+from defs_h5i cimport H5Iget_type, H5I_GROUP, H5I_BADID
+
+import unittest
+import shutil
+import tempfile
+import os
+
+import h5f
+import h5g
+
+from errors import GroupError
+
+""" Depends on h5f
+"""
+
+TEST_FILE = "test_simple.hdf5"           # fixture file; must already exist
+TEST_GROUPS = ["columns", "detector"]    # groups present in the fixture
+NEW_GROUP_NAME = "XXXNEWGROUPXXX"        # scratch name for create tests
+NEW_LINK_NAME = "linked"                 # scratch name for link tests
+
+# Pyrex doesn't let you nest functions, so the H5Giterate callbacks used
+# by TestH5G.test_iterate live at module level.  Each appends the visited
+# name to `data`; the return value drives iteration (checked by the
+# assertions in test_iterate):
+#   None or 0 -> continue over all entries,
+#   positive  -> stop early after the current entry,
+#   negative  -> abort; the wrapper raises GroupError.
+def ifunc1(gid, name, data):
+    data.append(name)
+    return None
+
+def ifunc2(gid, name, data):
+    data.append(name)
+    return 0
+
+def ifunc3(gid, name, data):
+    data.append(name)
+    return 1
+
+def ifunc4(gid, name, data):
+    data.append(name)
+    return -1
+
+
+class TestH5G(unittest.TestCase):
+    # Tests for the h5g Pyrex wrapper.  Each test runs against a private
+    # copy of the fixture file so destructive operations (link/move/
+    # unlink) cannot corrupt TEST_FILE.  Wrapper behavior is cross-
+    # checked against the raw HDF5 C API (H5Gopen, H5Gget_objinfo,
+    # H5Iget_type).
+
+    def setUp(self):
+        # NOTE(review): mktemp is race-prone; acceptable in a test
+        # fixture, but mkstemp would be safer.
+        self.fname = tempfile.mktemp(".hdf5")
+        shutil.copyfile(TEST_FILE, self.fname)
+        self.fid = h5f.open(self.fname, flags=h5f.ACC_RDWR)
+
+    def tearDown(self):
+        h5f.close(self.fid)
+        os.unlink(self.fname)
+
+    def testopen(self):
+        for name in TEST_GROUPS:
+            gid = h5g.open(self.fid, name)
+            self.assert_(H5Iget_type(gid) == H5I_GROUP)
+            H5Gclose(gid)
+
+    def testclose(self):
+        for name in TEST_GROUPS:
+            gid = H5Gopen(self.fid, name)
+            h5g.close(gid)
+            # A closed identifier becomes invalid (H5I_BADID).
+            self.assert_(H5Iget_type(gid) == H5I_BADID)
+
+    def testcreate(self):
+        gid = h5g.create(self.fid, NEW_GROUP_NAME)
+        # H5Gget_objinfo returns >= 0 exactly when the name exists.
+        self.assert_( H5Gget_objinfo(self.fid, NEW_GROUP_NAME, 0, NULL) >= 0 )
+        H5Gclose(gid)
+        H5Gunlink(self.fid, NEW_GROUP_NAME)
+
+    def testlink(self):
+        # local link
+        h5g.link(self.fid, TEST_GROUPS[1], NEW_LINK_NAME, h5g.LINK_HARD)
+        self.assert_( H5Gget_objinfo(self.fid, NEW_LINK_NAME, 0, NULL) >= 0 )
+
+        # test local unlink
+        h5g.unlink(self.fid, NEW_LINK_NAME)
+        self.assert_( H5Gget_objinfo(self.fid, NEW_LINK_NAME, 0, NULL) < 0 )
+
+        # remote link: create the link inside another group (rgid)
+        rgid = H5Gopen(self.fid, TEST_GROUPS[0])
+        h5g.link(self.fid, TEST_GROUPS[0], NEW_LINK_NAME, h5g.LINK_HARD, rgid)
+        self.assert_( H5Gget_objinfo(rgid, NEW_LINK_NAME, 0, NULL) >= 0 )
+    
+        h5g.unlink(rgid, NEW_LINK_NAME)
+        self.assert_( H5Gget_objinfo(rgid, NEW_LINK_NAME, 0, NULL) < 0 )
+        # NOTE(review): rgid is never closed -- leaks a group id for the
+        # rest of the test run; TODO confirm intent.
+
+    def testmove(self):
+        tname = TEST_GROUPS[0]+'_2'
+
+        # local move (rename in place), then move back
+        h5g.move(self.fid, TEST_GROUPS[0], tname)
+        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) < 0 )        
+        self.assert_( H5Gget_objinfo(self.fid, tname, 0, NULL) >= 0 )
+
+        h5g.move(self.fid, TEST_GROUPS[0]+'_2', TEST_GROUPS[0])
+        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) >= 0 )        
+        self.assert_( H5Gget_objinfo(self.fid, tname, 0, NULL) <0 )
+
+        gid = H5Gopen(self.fid, TEST_GROUPS[1])
+
+        # remote move: same name, different destination group, then back
+        h5g.move(self.fid, TEST_GROUPS[0], TEST_GROUPS[0], gid)
+        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) < 0 )    
+        self.assert_( H5Gget_objinfo(gid, TEST_GROUPS[0], 0, NULL) >= 0 )
+
+        h5g.move(gid, TEST_GROUPS[0], TEST_GROUPS[0], self.fid)
+        self.assert_( H5Gget_objinfo(self.fid, TEST_GROUPS[0], 0, NULL) >= 0 )    
+        self.assert_( H5Gget_objinfo(gid, TEST_GROUPS[0], 0, NULL) < 0 )
+
+        H5Gclose(gid)
+
+    def test_get_num_objs(self):
+        # The fixture root contains exactly the two TEST_GROUPS.
+        self.assert_(h5g.get_num_objs(self.fid) == 2)
+
+    def test_get_objname_by_idx(self):
+
+        for idx, name in enumerate(TEST_GROUPS):
+            self.assert_(h5g.get_objname_by_idx(self.fid, idx) == name)
+
+    def test_get_objtype_by_idx(self):
+
+        for idx, name in enumerate(TEST_GROUPS):
+            self.assert_(h5g.get_objtype_by_idx(self.fid, idx) == h5g.OBJ_GROUP)
+        
+    def test_get_objinfo(self):
+
+        # Compare the wrapper's stat object field-by-field with the raw
+        # H5G_stat_t filled in by the C API (follow_link=1).
+        cdef H5G_stat_t stat
+        H5Gget_objinfo(self.fid, TEST_GROUPS[0], 1, &stat)
+        qstat = h5g.get_objinfo(self.fid, TEST_GROUPS[0])
+
+        self.assert_(qstat.fileno[0] == stat.fileno[0])
+        self.assert_(qstat.fileno[1] == stat.fileno[1])
+        self.assert_(qstat.nlink == stat.nlink)
+        self.assert_(qstat.type == <int>stat.type)
+        self.assert_(qstat.mtime == stat.mtime)
+        self.assert_(qstat.linklen == stat.linklen)
+
+    def test_iterate(self):
+
+        # None and 0 callback returns iterate over every entry ...
+        nlist = []
+        h5g.iterate(self.fid, '.', ifunc1, nlist)
+        self.assert_(nlist == TEST_GROUPS )
+        
+        nlist = []
+        h5g.iterate(self.fid, '.', ifunc2, nlist)
+        self.assert_(nlist == TEST_GROUPS)
+
+        # ... a positive return stops after the first entry ...
+        nlist = []
+        h5g.iterate(self.fid, '.', ifunc3, nlist)
+        self.assert_(nlist == [TEST_GROUPS[0]])
+
+        # ... and a negative return aborts with GroupError, leaving the
+        # entries visited so far in the list.
+        nlist = []
+        self.assertRaises(GroupError, h5g.iterate, self.fid, '.', ifunc4, nlist)
+        self.assert_(nlist == [TEST_GROUPS[0]])
+
+    def test_py_listnames(self):
+
+        thelist = h5g.py_listnames(self.fid)
+        self.assert_(thelist == TEST_GROUPS)
+
+        
+    def test_py_iternames(self):
+        iterator = h5g.py_iternames(self.fid)
+        thelist = list( iterator )
+        self.assert_(thelist == TEST_GROUPS)
+        # Python 2 iterator protocol: .next(), not __next__().
+        self.assertRaises(StopIteration, iterator.next)
+        
+
+
+
+
+    
+
diff --git a/obsolete/test_h5s.pyx b/obsolete/test_h5s.pyx
new file mode 100755
index 0000000..85a1363
--- /dev/null
+++ b/obsolete/test_h5s.pyx
@@ -0,0 +1,130 @@
+##### Preamble block ##########################################################
+# 
+# This file is part of the "h5py" HDF5 Interface for Python project.
+# 
+# Copyright 2008 Andrew Collette
+# http://software.alfven.org
+# License: BSD  (See file "LICENSE" for complete license, or the URL above)
+# 
+##### End preamble block ######################################################
+
+from defs_c   cimport malloc, free
+from defs_h5  cimport hsize_t
+from defs_h5i cimport H5Iget_type, H5I_BADID, H5I_DATASPACE
+from defs_h5s cimport H5Sget_simple_extent_ndims, H5Sget_simple_extent_dims, \
+                      H5Sclose, H5Screate_simple, H5Sget_select_bounds, \
+                      H5Sselect_none
+
+import unittest
+import h5s
+from errors import DataspaceError
+
+# Geometry for the 3-D test dataspace and a hyperslab selection on it.
+# The expected bounding box follows from start + (len-1)*stride per axis:
+#   axis 0: 1 + 1*1 = 2,  axis 1: 1 + 2*1 = 3,  axis 2: 5 + 3*2 = 11.
+cdef int NDIMS
+NDIMS = 3
+DIMS = (10,13,24)
+SELECT_START        = (1, 1, 5)
+SELECT_LEN          = (2, 3, 4)
+SELECT_STRIDE       = (1, 1, 2)
+SELECT_BBOX_START   = (1, 1, 5)
+SELECT_BBOX_END     = (2, 3, 11)
+
+class TestH5S(unittest.TestCase):
+    # Tests for the h5s Pyrex wrapper, run against a simple 3-D
+    # dataspace built directly with the HDF5 C API in setUp.
+
+    def setUp(self):
+
+        # Build a C hsize_t array from the DIMS tuple, hand it to
+        # H5Screate_simple, then free it (HDF5 copies the extents).
+        cdef hsize_t *dims
+        dims = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
+        for idx, val in enumerate(DIMS):
+            dims[idx] = val
+        self.sid = H5Screate_simple(NDIMS, dims, NULL)
+        free(dims)
+
+    def tearDown(self):
+        H5Sclose(self.sid)
+
+    def test_close(self):
+
+        self.assert_(H5Iget_type(self.sid) == H5I_DATASPACE)
+        h5s.close(self.sid)
+        self.assert_(H5Iget_type(self.sid) == H5I_BADID)
+        # Re-create the dataspace so tearDown doesn't close a dead id.
+        self.setUp()
+
+    def test_create(self):
+
+        cdef hsize_t *dims
+        sid = h5s.create_simple(DIMS)
+
+        # Read the extents back through the C API and compare to DIMS.
+        self.assert_(H5Sget_simple_extent_ndims(sid) == NDIMS)
+        dims = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
+        H5Sget_simple_extent_dims(sid, dims, NULL)
+        for idx, val in enumerate(DIMS):
+            self.assert_( dims[idx] == val )
+        free(dims)
+        H5Sclose(sid)
+
+    def test_ndims(self):
+        self.assert_(h5s.get_simple_extent_ndims(self.sid) == NDIMS)
+        # -1 is never a valid identifier.
+        self.assertRaises(DataspaceError, h5s.get_simple_extent_ndims, -1)
+
+    def test_dims(self):
+        self.assert_(h5s.get_simple_extent_dims(self.sid) == DIMS)
+        self.assertRaises(DataspaceError, h5s.get_simple_extent_dims, -1)
+
+    def test_hyperslab(self):
+
+        cdef hsize_t *start
+        cdef hsize_t *end
+
+        # Tuples whose lengths disagree with each other or with the
+        # dataspace rank must be rejected.
+        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, (1,), (1,) )
+        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START, SELECT_LEN, SELECT_STRIDE[0:2] )
+        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START, SELECT_LEN[0:2], SELECT_STRIDE )
+        self.assertRaises(DataspaceError, h5s.select_hyperslab, self.sid, SELECT_START[0:2], SELECT_LEN, SELECT_STRIDE )
+
+        H5Sselect_none(self.sid)
+
+        start = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
+        end = <hsize_t*>malloc(sizeof(hsize_t)*NDIMS)
+
+        # A valid selection's bounding box must match the precomputed
+        # SELECT_BBOX_* values (see the module constants).
+        h5s.select_hyperslab(self.sid, SELECT_START, SELECT_LEN, SELECT_STRIDE)
+        H5Sget_select_bounds(self.sid, start, end)
+
+        for idx in range(NDIMS):
+            self.assert_( start[idx] == SELECT_BBOX_START[idx] )
+            self.assert_( end[idx] == SELECT_BBOX_END[idx] )
+        free(start)
+        free(end)
+
+        H5Sselect_none(self.sid)
+
+        
+        
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+        
diff --git a/obsolete/test_simple.hdf5 b/obsolete/test_simple.hdf5
new file mode 100755
index 0000000..4786eea
Binary files /dev/null and b/obsolete/test_simple.hdf5 differ
diff --git a/setup.py b/setup.py
new file mode 100755
index 0000000..3158f3d
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,215 @@
+#!/usr/bin/env python
+
+#+
+# 
+# This file is part of h5py, a low-level Python interface to the HDF5 library.
+# 
+# Copyright (C) 2008 Andrew Collette
+# http://h5py.alfven.org
+# License: BSD  (See LICENSE.txt for full license)
+# 
+# $Date$
+# 
+#-
+
+"""
+    Setup script for the h5py package.
+
+    To install h5py, run "python setup.py build" followed by
+    "python setup.py install".  You may need sudo privileges for the second
+    command.
+
+    Implements a few new commands, in addition to standard commands like
+    "build" and "install":
+    
+    1.  "test"
+        Build the package locally (don't install it) and run unit tests. Exits
+        Python with a nonzero error code if any of the unit tests fail.  Test
+        output (unittest.TextTestRunner) is written to stdout/stderr.
+    
+    2.  "dev"
+        Developer commands.  Runs "build" and optionally:
+        --doc               Rebuilds HTML documentation
+        --readme <name>     Generates an HTML form of the README.txt document.
+
+    New option: "--revision" appends the SVN revision and current build number
+    to the version string; again mainly for development.
+"""
+
+__revision__ = "$Id"
+
+from distutils.cmd import Command
+from distutils.errors import DistutilsError, DistutilsExecError
+from distutils.core import setup
+from distutils.extension import Extension
+import os
+import sys
+
+# Distutils tries to use hard links when building source distributions, which 
+# fails under a wide variety of network filesystems under Linux.
+delattr(os, 'link') # goodbye!
+
+# === Global constants ========================================================
+
+NAME = 'h5py'
+VERSION = '0.1.0a'
+REVISION = "$Rev: 0$"
+
+# === Custom extensions for distutils =========================================
+
+class test(Command):
+    # Custom distutils command: build the package into build_lib, put
+    # that directory first on sys.path, and run the h5py.tests suite.
+    # A failing suite aborts setup with DistutilsError.
+    description = "Build %s and run unit tests" % NAME
+    user_options = []
+
+    def initialize_options(self):
+        pass
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        buildobj = self.distribution.get_command_obj('build')
+        buildobj.run()
+        oldpath = sys.path
+        # Prepend the freshly-built tree so it shadows any installed h5py.
+        sys.path = [os.path.abspath(buildobj.build_lib)] + oldpath
+        import h5py.tests
+        if not h5py.tests.runtests():
+            raise DistutilsError("Unit tests failed.")
+        # NOTE(review): only reached on success; on failure the raise
+        # skips the restore.  Harmless since setup terminates anyway.
+        sys.path = oldpath
+
+class dev(Command):
+    # Developer-only command: run "build", then optionally regenerate
+    # the epydoc HTML docs (--doc) and/or render README.txt to an HTML
+    # fragment via docutils (--readme=<outfile>).
+    description = "Developer commands (--doc, --readme=<file>)"
+    user_options = [('doc','d','Rebuild documentation'),
+                    ('readme=','r','Rebuild HTML readme file'),
+                    ('inspect', 'i', 'Don\'t use this.')]
+    boolean_options = ['doc', 'inspect']
+
+    def initialize_options(self):
+        self.doc = False
+        self.readme = False
+        # NOTE(review): 'inspect' appears in user_options and
+        # boolean_options but is never initialized here; distutils
+        # expects every declared option attribute to be set -- TODO
+        # confirm.
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        buildobj = self.distribution.get_command_obj('build')
+        buildobj.run()
+        
+        if self.doc:
+            # Run epydoc against the just-built package so the docs
+            # reflect the compiled extension modules.
+            retval = os.spawnlp(os.P_WAIT, 'epydoc', '-q', '--html', '-o', 'docs/',
+                               '--config', 'docs.cfg', os.path.join(buildobj.build_lib, NAME) )
+            if retval != 0:
+                raise DistutilsExecError("Could not run epydoc to build documentation.")
+
+        if self.readme:
+            import docutils.core
+            fh = open('README.txt','r')
+            parts = docutils.core.publish_parts(fh.read(),writer_name='html')
+            fh.close()
+            # Write only the body fragment (no <html>/<head> wrapper).
+            fh = open(self.readme,'w')
+            fh.write(parts['body'])
+            fh.close()
+
+# === Versioning setup ========================================================
+
+# "--revision": append the SVN revision (digits parsed out of the $Rev$
+# keyword in REVISION) to VERSION, then strip the flag from argv so
+# distutils never sees it.  Iterates a copy of argv because it mutates
+# the original.
+for arg in sys.argv[:]:
+    if arg.find('--revision') == 0:
+        REVDIGITS = '0'
+        try:
+            tmpstring = REVISION[5:-2].strip()
+            if tmpstring.isdigit(): REVDIGITS = tmpstring
+        except KeyError:
+            # NOTE(review): string slicing/strip never raises KeyError,
+            # so this handler appears dead -- TODO confirm what error
+            # was intended.
+            pass
+
+        VERSION = VERSION + '-r' + REVDIGITS
+        
+        sys.argv.remove(arg)
+
+# Automatically update the h5py source with current version info and
+# docstring from current README file
+vfile = open(os.path.join(NAME,'version.py'),'w')
+rdfile = open('README.txt','r')
+vfile.write('# This file is automatically generated; do not edit.\n')
+vfile.write('"""\nPackage "h5py" extended information\n\n%s"""\nversion = "%s"\n\n' % (rdfile.read(), VERSION))
+rdfile.close()
+vfile.close()
+
+# === Setup configuration =====================================================
+
+min_pyrex_version = '0.9.6.4'
+min_numpy_version = '1.0.3'
+
+def fatal(instring):
+    # Print a message and abort setup with exit status 2.
+    # (Python 2 print statement; exit() here is the site-module builtin.)
+    print "Fatal: "+instring
+    exit(2)
+
+# Python version
+# NOTE(review): this check requires minor >= 5 under *any* major
+# version, so e.g. a hypothetical 3.0-3.4 would be rejected -- fine for
+# 2.x-era code.
+if not (sys.version_info[0] >= 2 and sys.version_info[1] >= 5):
+    fatal("At least Python 2.5 is required to install h5py")
+
+# Check for Numpy (required)
+try:
+    import numpy
+    # NOTE(review): lexicographic string comparison of version numbers;
+    # breaks once a component reaches two digits ('1.10' < '1.3').
+    if numpy.version.version < min_numpy_version:
+        raise ImportError()
+except ImportError:
+    fatal("Numpy version >= %s required" % min_numpy_version)
+
+# Check for Pyrex (also required); same string-comparison caveat.
+try:
+    from Pyrex.Compiler.Main import Version
+    if Version.version < min_pyrex_version:
+        raise ImportError()
+    from Pyrex.Distutils import build_ext
+except ImportError:
+    fatal("Pyrex is unavailable or out of date (>= %s required)." % min_pyrex_version)
+
+ext_exten = '.pyx'
+
+# Pyrex extension modules
+pyx_modules = ['h5' , 'h5f', 'h5g', 'h5s', 'h5t', 
+               'h5d', 'h5a', 'h5p', 'h5z', 'h5i']
+
+pyx_src_path = 'h5py'
+pyx_extra_src = ['utils.c']         # C source files required for Pyrex code
+pyx_libraries = ['hdf5']            # Libraries to link into Pyrex code
+pyx_include = [numpy.get_include()] # Compile-time include dirs for Pyrex code
+
+# Additional compiler flags for Pyrex code
+pyx_extra_args = ['-Wno-unused', '-DH5_USE_16_API']
+
+
+# === Setup implementation ====================================================
+
+# Create extensions: one Extension per .pyx module, each also compiling
+# the shared C helper sources (pyx_extra_src).
+pyx_extensions = []
+for module_name in pyx_modules:
+    sources  = [os.path.join(pyx_src_path, module_name) + ext_exten]
+    sources += [os.path.join(pyx_src_path, x) for x in pyx_extra_src]
+
+    pyx_extensions.append(
+        Extension( 
+            NAME+'.'+module_name,
+            sources, 
+            include_dirs = pyx_include, 
+            libraries = pyx_libraries,
+            extra_compile_args = pyx_extra_args
+        )
+    )
+
+# Run setup
+setup(
+  name = NAME,
+  version = VERSION,
+  author = 'Andrew Collette',
+  url = 'h5py.alfven.org',
+  packages = ['h5py','h5py.tests'],
+  package_data = {'h5py': ['*.pyx'],  # so source is available for tracebacks
+                  'h5py.tests': ['data/*.hdf5']},
+  ext_modules= pyx_extensions,
+  requires = ['numpy (>=1.0.3)','Pyrex (>=0.9.6)'],  # "0.9.6.4 is not a valid version string"???
+  provides = ['h5py'],
+  cmdclass = {'build_ext': build_ext, 'dev': dev, 'test': test}
+)
+
+
+

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git



More information about the debian-science-commits mailing list