[h5py] 224/455: Major docs and tests update for 1.1

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:35 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit ac7203bd702d828833bdd36b68f13c2cd4806cbd
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Sun Feb 8 07:40:41 2009 +0000

    Major docs and tests update for 1.1
---
 ANN.txt                     | 10 ++++--
 docs/source/guide/hdf5.rst  | 10 ------
 docs/source/guide/hl.rst    | 11 ++++--
 docs/source/guide/quick.rst |  2 +-
 examples/compression.py     | 88 +++++++++++++++++++++++++++++++++++++++++++++
 examples/groups.py          | 50 ++++++++++++++++++++++++++
 h5py/filters.py             |  7 ++++
 h5py/tests/test_dataset.py  |  2 +-
 8 files changed, 162 insertions(+), 18 deletions(-)

diff --git a/ANN.txt b/ANN.txt
index 2316c0e..99310dc 100644
--- a/ANN.txt
+++ b/ANN.txt
@@ -19,7 +19,7 @@ accessed using the traditional POSIX /path/to/resource syntax.
 
 In addition to providing interoperability with existing HDF5 datasets
 and platforms, h5py is a convenient way to store and retrieve
-arbitrary NumPy data from disk.
+arbitrary NumPy data and metadata.
 
 
 New features in 1.1
@@ -30,12 +30,14 @@ New features in 1.1
     GZIP filter.
 
   - Efficient broadcasting using HDF5 hyperslab selections; for example,
-    you can write to a (100 x 100 x 50) selection from a (100 x 50) array.
+    you can write to a (2000 x 100 x 50) selection from a (100 x 50) array.
 
-  - Now installable via easy_install
+  - Installable via easy_install
 
   - Now supports the NumPy boolean type
 
+  - Auto-completion for IPython 0.9.X (contributed by Darren Dale)
+
 
 Standard features
 -----------------
@@ -66,6 +68,8 @@ Standard features
   - Supports many new features of HDF5 1.8, including recursive iteration
     over entire files and in-library copy operations on the file tree
 
+  - Thread-safe
+
 
 Where to get it
 ---------------
diff --git a/docs/source/guide/hdf5.rst b/docs/source/guide/hdf5.rst
deleted file mode 100644
index 153c052..0000000
--- a/docs/source/guide/hdf5.rst
+++ /dev/null
@@ -1,10 +0,0 @@
-.. _hdf5:
-
-****************
-Overview of HDF5
-****************
-
-The final authority on all things HDF-related is 
-`the HDF group <http://www.hdfgroup.org>`.
-
-
diff --git a/docs/source/guide/hl.rst b/docs/source/guide/hl.rst
index 0ea7499..1c91ae2 100644
--- a/docs/source/guide/hl.rst
+++ b/docs/source/guide/hl.rst
@@ -31,7 +31,7 @@ a POSIX filesystem, objects are specified by ``/``-separated names, with the
 root group ``/`` (represented by the :class:`File` class) at the base.
 
 Wherever a name or path is called for, it may be relative or absolute.
-Constructs like ``..`` (parent group) are allowed.
+Unfortunately, the construct ``..`` (parent group) is not allowed.
 
 
 Exceptions
@@ -334,7 +334,7 @@ Reference
         If an existing incompatible object (Group or Datatype) already exists
         with the given name, fails with H5Error.
 
-    .. method:: copy(source, dest)
+    .. method:: copy(source, dest, name=None)
 
         **Only available with HDF5 1.8**
 
@@ -350,6 +350,11 @@ Reference
             Destination.  Must be either Group or path.  If a Group object, it may
             be in a different file.
 
+        **name** (None or str)
+            If the destination is a Group object, you can override the name
+            for the newly created member.  Otherwise a new name will be chosen
+            using basename(source.name).
+
     .. method:: visit(func) -> None or return value from func
 
         **Only available with HDF5 1.8**
@@ -469,7 +474,7 @@ Like Numpy arrays, Dataset objects have attributes named "shape" and "dtype":
     >>> dset.dtype
     dtype('complex64')
     >>> dset.shape
-    (4L, 5L)
+    (4, 5)
 
 
 .. _dsetfeatures:
diff --git a/docs/source/guide/quick.rst b/docs/source/guide/quick.rst
index 56ecf76..1508589 100644
--- a/docs/source/guide/quick.rst
+++ b/docs/source/guide/quick.rst
@@ -105,7 +105,7 @@ along with some emulated advanced indexing features
 (see :ref:`sparse_selection`):
 
     * Boolean array indexing (``array[ array[...] > 0.5 ]``)
-    * Discrete coordinate selection (
+    * Discrete coordinate selection (see the ``selections`` module)
 
 Closing the file
 ----------------
diff --git a/examples/compression.py b/examples/compression.py
new file mode 100644
index 0000000..f9b8075
--- /dev/null
+++ b/examples/compression.py
@@ -0,0 +1,88 @@
+
+"""
+    Example demonstrating how to use compression and other special options
+    for storing datasets in HDF5.
+
+    Compression is supported in HDF5 via a "filter pipeline" which is applied
+    to data as it is written to and read from disk.  Each dataset in the
+    file has its own pipeline, which allows the compression strategy to be
+    specified on a per-dataset basis.
+
+    Compression is only available for the actual data, and not for attributes
+    or metadata.
+
+    As of h5py 1.1, three compression techniques are available, "gzip", "lzf",
+    and "szip".  The non-compression filters "shuffle" and "fletcher32" are
+    also available.  See the docstring for the module h5py.filters for more
+    information.
+
+    Please note LZF is a h5py-only filter.  While reference C source is
+    available, other HDF5-aware applications may be unable to read data in
+    this format.
+"""
+
+import os
+
+import numpy as np
+import h5py
+import h5py.filters
+
+SHAPE = (100,100,100,20)
+DTYPE = np.dtype('i')
+SIZE = np.product(SHAPE)
+
+f = h5py.File('compress_test.hdf5','w')
+
+mydata = np.arange(SIZE,dtype=DTYPE).reshape(SHAPE)
+
+datasets = []
+
+print "Creating dataset with gzip"
+dset = f.create_dataset("gzipped", data=mydata, compression="gzip",
+                         compression_opts=4)   # compression_opts is optional
+datasets.append(dset)
+
+print "Creating dataset with LZF"
+dset = f.create_dataset("lzfcompressed", data=mydata, compression="lzf")
+datasets.append(dset)
+
+if 'szip' in h5py.filters.encode:       # Not distributed with all versions of HDF5
+    print "Creating dataset with SZIP"
+    dset = f.create_dataset("szipped", data=mydata, compression="szip",
+                             compression_opts=('nn',8))
+    datasets.append(dset)
+
+print "Creating dataset with LZF and error detection"
+dset = f.create_dataset("gzip_error_detection", data=mydata,
+                        compression="gzip", fletcher32=True)
+datasets.append(dset)
+
+print "Creating uncompressed dataset"
+dset = f.create_dataset("uncompressed", data=mydata)
+datasets.append(dset)
+
+f.flush()
+
+def showsettings(dataset):
+    """ Demonstrate the public attributes of datasets """
+
+    print "="*60
+    print "Dataset      ", dataset.name
+    print '-'*30
+    print "Shape        ", dataset.shape
+    print "Chunk size   ", dataset.chunks
+    print "Datatype     ", dataset.dtype
+    print '-'*30
+    print "Compression  ", dataset.compression
+    print "Settings     ", dataset.compression_opts
+    print '-'*32
+    print "Shuffle      ", dataset.shuffle
+    print "Fletcher32   ", dataset.fletcher32
+
+for x in datasets:
+    showsettings(x)
+
+f.close()
+
+
+
diff --git a/examples/groups.py b/examples/groups.py
new file mode 100644
index 0000000..6bf1778
--- /dev/null
+++ b/examples/groups.py
@@ -0,0 +1,50 @@
+
+"""
+    Example demonstrating some features of groups in HDF5, and how to
+    use them from h5py.  
+"""
+
+import h5py
+
+groups = ['/foo', '/foo/bar', '/foo/bar/baz',
+          '/grp1', '/grp1/grp2', '/mygroup']
+
+f = h5py.File('group_test.hdf5','w')
+
+for grpname in groups:
+    f.create_group(grpname)
+
+print "Root group names:"
+
+for name in f:
+    print "   ", name
+
+print "Root group info:"
+
+for name, grp in f.iteritems():
+    print "    %s: %s items" % (name, len(grp))
+
+if h5py.version.api_version_tuple >= (1,8):
+
+    def treewalker(name):
+        """ Callback function for visit() """
+        print "    Called for %s" % name
+
+    print "Walking..."
+    f.visit(treewalker)
+
+    print "Copying /foo to /mygroup/newfoo..."
+    f.copy("/foo", "/mygroup/newfoo")
+
+    print "Walking again..."
+    f.visit(treewalker)
+
+    g = f['/grp1']
+
+    print "Walking from /grp1..."
+    g.visit(treewalker)
+
+else:
+    print "HDF5 1.8 is needed for extra demos"
+
+f.close()
diff --git a/h5py/filters.py b/h5py/filters.py
index 33bf171..eede7e0 100644
--- a/h5py/filters.py
+++ b/h5py/filters.py
@@ -96,6 +96,8 @@ def generate_dcpl(shape, dtype, chunks, compression, compression_opts,
         if compression not in _COMP_FILTERS:
             raise ValueError("Compression method must be one of %s" % ", ".join(_COMP_FILTERS))
         if compression == 'gzip':
+            if not "gzip" in encode:
+                raise ValueError("GZIP filter unavailable")
             if compression_opts is None:
                 gzip_level = DEFAULT_GZIP
             elif compression_opts in range(10):
@@ -103,9 +105,14 @@ def generate_dcpl(shape, dtype, chunks, compression, compression_opts,
             else:
                 raise ValueError("GZIP setting must be an integer from 0-9, not %r" % compression_opts)
         elif compression == 'lzf':
+            if not "lzf" in encode:
+                raise ValueError("LZF filter unavailable")
             if compression_opts is not None:
                 raise ValueError("LZF compression filter accepts no options")
         elif compression == 'szip':
+            if not "szip" in encode:
+                raise ValueError("SZIP filter unavailable")
+
             if compression_opts is None:
                 compression_opts = DEFAULT_SZIP
 
diff --git a/h5py/tests/test_dataset.py b/h5py/tests/test_dataset.py
index ad8d713..c2d5c95 100644
--- a/h5py/tests/test_dataset.py
+++ b/h5py/tests/test_dataset.py
@@ -26,7 +26,7 @@ class TestDataset(object):
     def test_create(self):
         # Test dataset creation from shape and type, or raw data
 
-        types = INTS + FLOATS + COMPLEX + STRINGS
+        types = INTS + FLOATS + COMPLEX + STRINGS + ('b',)
         shapes = ( (), (1,), (10,), (20,1,15), (7,200,1) )
 
         for s in shapes:

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git



More information about the debian-science-commits mailing list