[h5py] 165/455: Minor setup fixes; close files on exit. Feature freeze for release 12/1.

Ghislain Vaillant ghisvail-guest at moszumanska.debian.org
Thu Jul 2 18:19:29 UTC 2015


This is an automated email from the git hooks/post-receive script.

ghisvail-guest pushed a commit to annotated tag 1.3.0
in repository h5py.

commit 023bee45c268e060809ecba2205b286e1ae80fff
Author: andrewcollette <andrew.collette at gmail.com>
Date:   Wed Nov 26 06:01:56 2008 +0000

    Minor setup fixes; close files on exit.  Feature freeze for release 12/1.
---
 docs/source/guide/hl.rst     |  5 ++++-
 docs/source/guide/quick.rst  |  2 --
 h5py/h5.pyx                  |  2 +-
 h5py/highlevel.py            | 18 +++++++++++++-----
 h5py/tests/test_highlevel.py |  1 +
 setup.py                     | 16 +++++++++++-----
 6 files changed, 30 insertions(+), 14 deletions(-)

diff --git a/docs/source/guide/hl.rst b/docs/source/guide/hl.rst
index f4cbe06..42810d7 100644
--- a/docs/source/guide/hl.rst
+++ b/docs/source/guide/hl.rst
@@ -527,7 +527,8 @@ Compression
     further.
 
 Resizing
-    Datasets can be resized, up to a maximum value provided at creation time.
+    When using HDF5 1.8,
+    datasets can be resized, up to a maximum value provided at creation time.
     You can specify this maximum size via the *maxshape* argument to
     :meth:`create_dataset <Group.create_dataset>` or
     :meth:`require_dataset <Group.require_dataset>`. Shape elements with the
@@ -653,6 +654,8 @@ Reference
         the keyword *axis* is provided, the argument should be a single
         integer instead; that axis only will be modified.
 
+        **Only available with HDF5 1.8**
+
     .. method:: __len__
 
         The length of the first axis in the dataset (TypeError if scalar).
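The resizing behaviour documented above is easiest to see in a short example. A minimal
sketch, assuming an HDF5 1.8 build of h5py; the file name, dataset name and dtype are
illustrative:

    import numpy as np
    import h5py

    f = h5py.File("resizable.hdf5", "w")
    try:
        # maxshape entries of None mean "unlimited"; an integer caps how far
        # that axis may grow.  Resizable datasets must be chunked.
        dset = f.create_dataset("timeseries", shape=(100,), maxshape=(None,),
                                dtype="float64", chunks=(100,))
        dset[:] = np.arange(100, dtype="float64")
        dset.resize((200,))        # grow the first (and only) axis
    finally:
        f.close()

On an HDF5 1.6 build the resize() call raises NotImplementedError, matching the guard
added in highlevel.py further down.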
diff --git a/docs/source/guide/quick.rst b/docs/source/guide/quick.rst
index 5b06cc3..c5a728e 100644
--- a/docs/source/guide/quick.rst
+++ b/docs/source/guide/quick.rst
@@ -74,8 +74,6 @@ in it directly, or create subgroups to keep your data better organized.
 Create a dataset
 ----------------
 
-(Main chapter: :ref:`Datasets`)
-
 Datasets are like Numpy arrays which reside on disk; they are associated with
 a name, shape, and a Numpy dtype.  The easiest way to create them is with a
 method of the File object you already have::
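The diff context stops just before the literal block that follows "you already have::".
For context, a minimal sketch of that kind of call, with names, shapes and dtypes chosen
for illustration:

    import numpy as np
    import h5py

    f = h5py.File("quickstart.hdf5", "w")
    # Create from a name, a shape and a Numpy dtype ...
    dset = f.create_dataset("mydata", shape=(4, 5), dtype="int32")
    # ... or initialise directly from an existing Numpy array.
    ones = f.create_dataset("ones", data=np.ones((3, 3)))
    f.close()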
diff --git a/h5py/h5.pyx b/h5py/h5.pyx
index 218f240..b4fafc5 100644
--- a/h5py/h5.pyx
+++ b/h5py/h5.pyx
@@ -690,7 +690,7 @@ def _exithack():
         try:
             H5Fget_obj_ids(H5F_OBJ_ALL, H5F_OBJ_ALL, count, objs)
             for i from 0<=i<count:
-                while H5Iget_ref(objs[i]) > 1:
+                while H5Iget_type(objs[i]) != H5I_BADID and H5Iget_ref(objs[i]) > 0:
                     H5Idec_ref(objs[i])
         finally:
             free(objs)
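The change above makes the interpreter-exit hack keep decrementing each identifier's
reference count until the identifier becomes invalid, rather than stopping at a count of 1,
so files really are closed on exit. A rough pure-Python analogue of the same close-on-exit
idea, using a hypothetical weak-reference registry instead of the HDF5 identifier API:

    import atexit
    import weakref

    _open_files = weakref.WeakSet()     # registry only; does not keep files alive

    def _close_remaining_files():
        # At interpreter shutdown, close anything the user forgot to close.
        for f in list(_open_files):
            try:
                f.close()
            except Exception:
                pass                    # cleanup must never raise during exit

    atexit.register(_close_remaining_files)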
diff --git a/h5py/highlevel.py b/h5py/highlevel.py
index e5cec76..dab04af 100644
--- a/h5py/highlevel.py
+++ b/h5py/highlevel.py
@@ -16,7 +16,7 @@
     Groups provide dictionary-like access to and iteration over their members.
     File objects implicitly perform these operations on the root ('/') group.
 
-    Datasets support full Numpy-style slicing and partial I/O, including
+    Datasets support Numpy-style slicing and partial I/O, including
     recarray-style access to named fields.  A minimal Numpy interface is
     included, with shape and dtype properties.
 
@@ -40,10 +40,8 @@
 
     It is safe to import this module using "from h5py.highlevel import *"; it
     will export only the major classes.
-
-    Everything in this module is thread-safe, regardless of the HDF5
-    configuration or compile options.
 """
+
 from __future__ import with_statement
 
 import os
@@ -74,7 +72,7 @@ class LockableObject(object):
         Base class which provides rudimentary locking support.
     """
 
-    _lock = h5.get_phil()
+    _lock = threading.RLock()
 
 
 class HLObject(LockableObject):
@@ -566,6 +564,11 @@ class File(Group):
             return self.fid == other.fid
         return False
 
+    def __del__(self):
+        try:
+            self.close()
+        except Exception:
+            pass
 
 class Dataset(HLObject):
 
@@ -762,9 +765,14 @@ class Dataset(HLObject):
 
         Beware; if the array has more than one dimension, the indices of
         existing data can change.
+
+        Only available with HDF5 1.8.
         """
         with self._lock:
 
+            if not config.API_18:
+                raise NotImplementedError("Resizing is only available with HDF5 1.8.")
+
             if self.chunks is None:
                 raise TypeError("Only chunked datasets can be resized")
 
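Two patterns from the highlevel.py hunks above are worth spelling out: the shared lock is
now an ordinary threading.RLock rather than the HDF5 "phil" lock, and 1.8-only operations
fail fast with NotImplementedError. A minimal sketch of the locking pattern; the Example
class and its methods are illustrative, not part of h5py:

    import threading

    class LockableObject(object):
        """Base class sharing one re-entrant lock across all instances."""
        _lock = threading.RLock()

    class Example(LockableObject):
        def resize(self, shape):
            # RLock is re-entrant, so a locked method may call another
            # locked method on the same thread without deadlocking.
            with self._lock:
                self._apply(shape)

        def _apply(self, shape):
            with self._lock:
                print("resizing to %s" % (shape,))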
diff --git a/h5py/tests/test_highlevel.py b/h5py/tests/test_highlevel.py
index ef1608a..986543c 100644
--- a/h5py/tests/test_highlevel.py
+++ b/h5py/tests/test_highlevel.py
@@ -317,6 +317,7 @@ class TestDataset(HDF5TestCase):
             self.assert_(numpy.all(dset[:] == x))
             del self.f['TEST_DATA']
 
+    @api_18
     def test_Dataset_resize(self):
         """ Test extending datasets """
 
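The new @api_18 marker keeps the resize test from running on HDF5 1.6 builds. A sketch of
what such a decorator could look like; the module-level API_18 flag is a stand-in for
h5py's real compile-time setting and is an assumption here:

    import functools

    API_18 = False      # stand-in for the compile-time API_18 flag used in highlevel.py

    def api_18(func):
        """Run the wrapped test only when the HDF5 1.8 API is available."""
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            if not API_18:
                return                  # quietly skip on 1.6-only builds
            return func(self, *args, **kwargs)
        return wrapper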
diff --git a/setup.py b/setup.py
index 7c5d8c7..76936da 100644
--- a/setup.py
+++ b/setup.py
@@ -155,7 +155,7 @@ class cybuild(build):
                      ('cython','y','Run Cython'),
                      ('cython-only','Y', 'Run Cython and stop'),
                      ('diag', 'd','Enable library debug logging'),
-                     ('threads', 't', 'Make library thread-aware')]
+                     ('no-threads', 't', 'Build without thread support')]
 
     boolean_options = build.boolean_options + ['cython', 'cython-only', 'threads','diag']
 
@@ -189,14 +189,17 @@ class cybuild(build):
         self.cython = False
         self.cython_only = False
         self.diag = False
-        self.threads = False
+        self.no_threads = False
 
 
     def finalize_options(self):
 
         build.finalize_options(self)
 
-        if any((self.cython, self.cython_only, self.diag, self.threads,
+        if self.no_threads:
+            warn("Option --no-threads will disappear soon")
+
+        if any((self.cython, self.cython_only, self.diag, self.no_threads,
                 self.api, self.hdf5)):
             self._default = False
             self.cython = True
@@ -241,6 +244,9 @@ class cybuild(build):
 
         self.distribution.ext_modules = extensions
 
+        if not all(op.exists(op.join(SRC_PATH, x+'.c')) for x in modules):
+            self.cython = True
+
         # Rebuild the C source files if necessary
         if self.cython:
             self.compile_cython(sorted(modules))
@@ -269,7 +275,7 @@ DEF H5PY_THREADS = %(THREADS)d  # Enable thread-safety and non-blocking reads
 """
         return pxi_str % {"VERSION": VERSION, "API_MAX": self.api,
                     "API_16": True, "API_18": self.api == 18,
-                    "DEBUG": 10 if self.diag else 0, "THREADS": self.threads,
+                    "DEBUG": 10 if self.diag else 0, "THREADS": not self.no_threads,
                     "HDF5": "Default" if self.hdf5 is None else self.hdf5}
 
     def compile_cython(self, modules):
@@ -286,7 +292,7 @@ DEF H5PY_THREADS = %(THREADS)d  # Enable thread-safety and non-blocking reads
 
         print "Running Cython (%s)..." % Version.version
         print "  API level: %d" % self.api
-        print "  Thread-aware: %s" % ('yes' if self.threads else 'no')
+        print "  Thread-aware: %s" % ('yes' if not self.no_threads else 'no')
         print "  Diagnostic mode: %s" % ('yes' if self.diag else 'no')
         print "  HDF5: %s" % ('default' if self.hdf5 is None else self.hdf5)
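setup.py now also forces a Cython pass when any generated C source is missing, instead of
failing later in the C compiler. A standalone sketch of that check; SRC_PATH and the module
list are illustrative values:

    import os.path as op

    SRC_PATH = "h5py"
    modules = ["h5", "h5f", "h5d", "h5s"]

    def need_cython(src_path=SRC_PATH, mods=modules):
        """True if any expected generated C file is absent from the source tree."""
        return not all(op.exists(op.join(src_path, m + ".c")) for m in mods)

    if need_cython():
        print("Generated C sources missing; enabling the Cython pass")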
 

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git


