[h5py] 105/455: Docstring updates, h5p additions/tests

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:22 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit 11dd238140e03c6794f0224a774d0421ad04f846
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Fri Aug 22 03:55:38 2008 +0000

    Docstring updates, h5p additions/tests
---
 h5py/h5p.pxd           |   1 +
 h5py/h5p.pyx           |  35 ++++++++++++++++-
 h5py/h5t.pxd           |   2 +
 h5py/h5t.pyx           |   1 -
 h5py/highlevel.py      |  14 +++++--
 h5py/tests/__init__.py |   7 +++-
 h5py/tests/test_h5p.py | 105 +++++++++++++++++++++++++++++++++++++++++++++++++
 7 files changed, 157 insertions(+), 8 deletions(-)

diff --git a/h5py/h5p.pxd b/h5py/h5p.pxd
index 0d2be85..0255652 100644
--- a/h5py/h5p.pxd
+++ b/h5py/h5p.pxd
@@ -16,6 +16,7 @@
 
 include "std_defs.pxi"
 from h5 cimport class ObjectID
+from numpy cimport ndarray, import_array
 
 cdef class PropID(ObjectID):
     """ Base class for all property lists """
diff --git a/h5py/h5p.pyx b/h5py/h5p.pyx
index 19c5f7d..7eb6a3b 100644
--- a/h5py/h5p.pyx
+++ b/h5py/h5p.pyx
@@ -18,10 +18,17 @@
 # Pyrex compile-time imports
 
 from utils cimport  require_tuple, convert_dims, convert_tuple, \
-                    emalloc, efree, pybool, require_list
+                    emalloc, efree, pybool, require_list, \
+                    check_numpy_write, check_numpy_read
+
+from h5t cimport TypeID
 
 # Runtime imports
 import h5
+import h5t
+import numpy
+
+import_array()
 
 # === C API ===================================================================
 
@@ -306,6 +313,32 @@ cdef class PropDCID(PropInstanceID):
         finally:
             efree(dims)
 
+    def set_fill_value(self, ndarray value not None):
+        """ (NDARRAY value)
+
+            Set the dataset fill value.  The object provided should be a
+            0-dimensional NumPy array; otherwise, the value will be read from
+            the first element.
+        """
+        cdef TypeID tid
+
+        check_numpy_read(value, -1)
+        tid = h5t.py_create(value.dtype)        
+        H5Pset_fill_value(self.id, tid.id, value.data)
+
+    def get_fill_value(self, ndarray value not None):
+        """ (NDARRAY value)
+
+            Read the dataset fill value into a NumPy array.  It will be
+            converted to match the array dtype.  If the array has nonzero
+            rank, only the first element will contain the value.
+        """
+        cdef TypeID tid
+
+        check_numpy_write(value, -1)
+        tid = h5t.py_create(value.dtype)
+        H5Pget_fill_value(self.id, tid.id, value.data)
+
     # === Filter functions ====================================================
     
     def set_deflate(self, unsigned int level=5):
diff --git a/h5py/h5t.pxd b/h5py/h5t.pxd
index 1072b4d..dfeab53 100644
--- a/h5py/h5t.pxd
+++ b/h5py/h5t.pxd
@@ -76,6 +76,8 @@ cdef object typewrap(hid_t id_)
 
 cdef extern from "hdf5.h":
 
+  hid_t H5P_DEFAULT
+
   # --- Enumerated constants --------------------------------------------------
 
   # Byte orders
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
index 79c99e4..06a2f3e 100644
--- a/h5py/h5t.pyx
+++ b/h5py/h5t.pyx
@@ -59,7 +59,6 @@ include "conditions.pxi"
 
 # Pyrex compile-time imports
 from defs_c cimport free
-from h5p cimport H5P_DEFAULT
 from h5 cimport err_c, pause_errors, resume_errors
 from numpy cimport dtype, ndarray
 
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
index 13d49a6..1dcffe7 100644
--- a/h5py/highlevel.py
+++ b/h5py/highlevel.py
@@ -246,6 +246,13 @@ class Group(HLObject):
             compression:   DEFLATE (gzip) compression level, int or None*
             shuffle:       Use the shuffle filter? (requires compression) T/F*
             fletcher32:    Enable Fletcher32 error detection? T/F*
+            maxshape:      Tuple giving dataset maximum dimensions or None*.
+                           You can grow each axis up to this limit using
+                           extend().  For each unlimited axis, provide None.
+
+            All these options require chunking.  If a chunk tuple is not
+            provided, the constructor will guess an appropriate chunk shape.
+            Please note none of these are allowed for scalar datasets.
         """
         return Dataset(self, name, *args, **kwds)
 
@@ -531,10 +538,9 @@ class Dataset(HLObject):
             compression:   DEFLATE (gzip) compression level, int or None*
             shuffle:       Use the shuffle filter? (requires compression) T/F*
             fletcher32:    Enable Fletcher32 error detection? T/F*
-
-            maxshape:      Tuple giving dataset maximum dimensions.  You can
-                           grow each axis up to this limit using extend(). For
-                           an unlimited axis, provide None.  Requires chunks.
+            maxshape:      Tuple giving dataset maximum dimensions or None*.
+                           You can grow each axis up to this limit using
+                           extend().  For each unlimited axis, provide None.
 
             All these options require chunking.  If a chunk tuple is not
             provided, the constructor will guess an appropriate chunk shape.
diff --git a/h5py/tests/__init__.py b/h5py/tests/__init__.py
index e4dc9c4..df9edfa 100644
--- a/h5py/tests/__init__.py
+++ b/h5py/tests/__init__.py
@@ -22,6 +22,8 @@ from h5py import *
 sections = {'h5a': test_h5a.TestH5A, 'h5d': test_h5d.TestH5D,
             'h5f': test_h5f.TestH5F, 'h5g': test_h5g.TestH5G,
             'h5i': test_h5i.TestH5I, 'h5p': test_h5p.TestH5P,
+            'h5p.fcid': test_h5p.TestFCID, 'h5p.faid': test_h5p.TestFAID,
+            'h5p.dcid': test_h5p.TestDCID, 'h5p.dxid': test_h5p.TestDXID,
             'h5s': test_h5s.TestH5S, 'h5t': test_h5t.TestH5T,
             'h5': test_h5.TestH5,
             'File': test_highlevel.TestFile,
@@ -29,8 +31,9 @@ sections = {'h5a': test_h5a.TestH5A, 'h5d': test_h5d.TestH5D,
             'Dataset': test_highlevel.TestDataset,
             'threads': test_threads.TestThreads }
 
-order = ('h5a', 'h5d', 'h5f', 'h5g', 'h5i', 'h5p', 'h5s', 'h5', 'File', 'Group',
-         'Dataset', 'threads')
+order = ('h5a', 'h5d', 'h5f', 'h5g', 'h5i', 'h5p', 'h5p.fcid', 'h5p.faid',
+         'h5p.dcid', 'h5p.dxid', 'h5s', 'h5', 'File', 'Group', 'Dataset',
+         'threads')
 
 def buildsuite(cases):
     """ cases should be an iterable of TestCase subclasses """
diff --git a/h5py/tests/test_h5p.py b/h5py/tests/test_h5p.py
index 9e75d05..4387e70 100644
--- a/h5py/tests/test_h5p.py
+++ b/h5py/tests/test_h5p.py
@@ -11,12 +11,117 @@
 #-
 
 import unittest
+import numpy
 
 from h5py import *
 from h5py.h5 import H5Error
 
 HDFNAME = 'attributes.hdf5'
 
+TYPES = {h5p.FILE_CREATE: h5p.PropFCID,
+         h5p.FILE_ACCESS: h5p.PropFAID,
+         h5p.DATASET_CREATE: h5p.PropDCID,
+         h5p.DATASET_XFER: h5p.PropDXID }
+
 class TestH5P(unittest.TestCase):
+
+
+    def test_create_get_class(self):
+        for typecode, cls in TYPES.iteritems():
+            instance = h5p.create(typecode)
+            self.assertEqual(type(instance), cls)
+            self.assert_(instance.get_class().equal(typecode))
+        
+
+class TestFCID(unittest.TestCase):
+
+    def setUp(self):
+        self.p = h5p.create(h5p.FILE_CREATE)
+
+    def test_version(self):
+        vers = self.p.get_version()
+        self.assertEqual(len(vers), 4)
+
+    def test_userblock(self):
+        for size in (512,1024,2048):
+            self.p.set_userblock(size)
+            self.assertEqual(self.p.get_userblock(), size)
+    
+    def test_sizes(self):
+        sizes = [(2,4), (8,16)]
+        for a, s in sizes:
+            self.p.set_sizes(a,s)
+            self.assertEqual(self.p.get_sizes(), (a,s))
+
+    def test_sym(self):
+        self.p.set_sym_k(2,3)
+        self.assertEqual(self.p.get_sym_k(), (2,3))
+
+    def test_istore(self):
+        for size in (2,4,8,16):
+            self.p.set_istore_k(size)
+            self.assertEqual(self.p.get_istore_k(), size)
+
+
+class TestFAID(unittest.TestCase):
+
+    CLOSE_DEGREES = (h5f.CLOSE_WEAK,
+                     h5f.CLOSE_SEMI,
+                     h5f.CLOSE_STRONG,
+                     h5f.CLOSE_DEFAULT)
+
+    def setUp(self):
+        self.p = h5p.create(h5p.FILE_ACCESS)
+
+    def test_fclose_degree(self):
+        for deg in self.CLOSE_DEGREES:
+            self.p.set_fclose_degree(deg)
+            self.assertEqual(self.p.get_fclose_degree(), deg)
+
+    def test_fapl_core(self):
+        settings = (2*1024*1024, 1)
+        self.p.set_fapl_core(*settings)
+        self.assertEqual(self.p.get_fapl_core(), settings)
+
+
+class TestDCID(unittest.TestCase):
+
+    LAYOUTS = (h5d.COMPACT,
+                h5d.CONTIGUOUS,
+                h5d.CHUNKED)
+
+    CHUNKSIZES = ((1,), (4,4), (16,32,4))
+
+    def setUp(self):
+        self.p = h5p.create(h5p.DATASET_CREATE)
+
+    def test_layout(self):
+        for l in self.LAYOUTS:
+            self.p.set_layout(l)
+            self.assertEqual(self.p.get_layout(), l)
+
+    def test_chunk(self):
+        
+        for c in self.CHUNKSIZES:
+            self.p.set_chunk(c)
+            self.assertEqual(self.p.get_chunk(), c)
+
+    def test_fill_value(self):
+        vals = [ numpy.array(1.0), numpy.array(2.0), numpy.array(4, dtype='=u8'),
+                 numpy.array( (1,2,3.5+6j), dtype=[('a','<i4'),('b','=f8'),('c','<c16')] )]
+
+        self.assertEqual(self.p.fill_value_defined(), h5d.FILL_VALUE_DEFAULT)
+
+        for val in vals:
+            self.p.set_fill_value(val)
+            holder = numpy.ndarray(val.shape, val.dtype)
+            self.p.get_fill_value(holder)
+            self.assertEqual(holder, val)
+
+        self.assertEqual(self.p.fill_value_defined(), h5d.FILL_VALUE_USER_DEFINED)
+
+
+class TestDXID(unittest.TestCase):
     pass
 
+

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git



More information about the debian-science-commits mailing list