[h5py] 07/17: Import h5py_2.7.0~rc2.orig.tar.gz
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Thu Jan 5 19:13:37 UTC 2017
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to branch master
in repository h5py.
commit aa2d362fff97e9ab820932f646243c8b84377a05
Author: Ghislain Antony Vaillant <ghisvail at gmail.com>
Date: Thu Jan 5 14:00:24 2017 +0000
Import h5py_2.7.0~rc2.orig.tar.gz
---
.travis.yml | 68 ++++++--
README.rst | 2 +
appveyor.yml | 103 ++++++++++++
ci/appveyor/build.cmd | 21 +++
ci/appveyor/vs2008_patch/readme.txt | 15 ++
ci/appveyor/vs2008_patch/setup_x64.bat | 13 ++
ci/appveyor/vs2008_patch/setup_x86.bat | 13 ++
.../x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 614 bytes
.../x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 606 bytes
.../x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 592 bytes
.../x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 612 bytes
.../x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 610 bytes
.../x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 596 bytes
.../x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 612 bytes
.../x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 616 bytes
.../vs2008_patch/x64/VC_OBJECTS_PLATFORM_INFO.reg | Bin 0 -> 3460 bytes
.../x86/600dd186-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 578 bytes
.../x86/600dd187-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 570 bytes
.../x86/600dd188-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 556 bytes
.../x86/600dd189-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 576 bytes
.../x86/656d875f-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 574 bytes
.../x86/656d8760-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 560 bytes
.../x86/656d8763-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 576 bytes
.../x86/656d8766-2429-11d7-8bf6-00b0d03daa06.reg | Bin 0 -> 580 bytes
.../vs2008_patch/x86/VC_OBJECTS_PLATFORM_INFO.reg | Bin 0 -> 3340 bytes
ci/fix_paths.py | 22 +++
ci/get_hdf5.py | 134 ++++++++++++++++
docs/Makefile | 2 +-
docs/build.rst | 42 +++--
docs/conf.py | 2 +-
docs/contributing.rst | 22 +--
docs/high/dataset.rst | 38 +++++
docs/high/index.rst | 16 --
docs/quick.rst | 2 +-
docs/strings.rst | 10 +-
docs/whatsnew/2.1.rst | 14 +-
docs/whatsnew/2.5.rst | 67 ++++++++
docs/whatsnew/2.6.rst | 91 +++++++++++
docs/whatsnew/2.7.rst | 95 +++++++++++
docs/whatsnew/index.rst | 3 +
h5py/__init__.py | 2 +-
h5py/_hl/attrs.py | 24 +--
h5py/_hl/base.py | 75 ++++++---
h5py/_hl/compat.py | 98 ++++++++++++
h5py/_hl/dataset.py | 38 +++--
h5py/_hl/files.py | 53 +++---
h5py/_hl/group.py | 21 ++-
h5py/_hl/selections.py | 6 +-
h5py/_objects.pxd | 2 +
h5py/_objects.pyx | 87 +++++-----
h5py/api_compat.h | 12 +-
h5py/api_functions.txt | 7 +
h5py/api_types_ext.pxd | 7 +-
h5py/api_types_hdf5.pxd | 126 +++++++--------
h5py/h5d.pyx | 35 ++++
h5py/h5f.pyx | 21 +++
h5py/h5p.pyx | 30 ++++
h5py/h5s.pyx | 3 +
h5py/h5t.pxd | 2 +-
h5py/h5t.pyx | 128 +++++++++++----
h5py/ipy_completer.py | 14 +-
h5py/tests/common.py | 20 +++
h5py/tests/hl/test_dataset_getitem.py | 23 +--
h5py/tests/hl/test_datatype.py | 52 ++++++
h5py/tests/old/__init__.py | 4 +-
h5py/tests/old/test_attrs_data.py | 37 +++--
h5py/tests/old/test_base.py | 6 +-
h5py/tests/old/test_dataset.py | 21 +++
h5py/tests/old/test_file.py | 41 ++++-
h5py/tests/old/test_file_image.py | 26 +++
h5py/tests/old/test_group.py | 40 ++++-
h5py/tests/old/test_h5d_direct_chunk_write.py | 34 ++++
h5py/tests/old/test_h5t.py | 178 ++++++++++++++++++++-
h5py/tests/old/test_slicing.py | 23 +--
setup.py | 36 +++--
setup_build.py | 35 ++--
setup_configure.py | 11 +-
tox.ini | 60 ++++++-
78 files changed, 1849 insertions(+), 384 deletions(-)
diff --git a/.travis.yml b/.travis.yml
index 2e9460f..fd5a2ee 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -15,21 +15,67 @@ cache:
- $HOME/.cache/pip
env:
-# Commented out since setup_requires is controlled by easy_install
-# This should be uncommented when pip can use setup_requires
-# - TOXENV=py26-test-nodeps
-# - TOXENV=py27-test-nodeps
-# - TOXENV=py32-test-nodeps
-# - TOXENV=py33-test-nodeps
-# - TOXENV=py34-test-nodeps
- TOXENV=py26-test-deps
- TOXENV=py27-test-deps
- - TOXENV=py32-test-deps
- TOXENV=py33-test-deps
- TOXENV=py34-test-deps
- - TOXENV=py27-pylint-deps
- - TOXENV=py33-pylint-deps
- - TOXENV=py34-pylint-deps
+ - TOXENV=py26-test-mindeps
+ - TOXENV=py27-test-mindeps
+ - TOXENV=py33-test-mindeps
+ - TOXENV=py34-test-mindeps
+# - TOXENV=py35-test-deps
+# - TOXENV=py35-test-mindeps
+# commented out because of https://github.com/travis-ci/travis-ci/issues/4794
+ - TOXENV=py26-test-deps
+ TOX_TESTENV_PASSENV=LANG LC_ALL
+ LANG=C
+ LC_ALL=C
+ - TOXENV=py27-test-deps
+ TOX_TESTENV_PASSENV=LANG LC_ALL
+ LANG=C
+ LC_ALL=C
+ - TOXENV=py33-test-deps
+ TOX_TESTENV_PASSENV=LANG LC_ALL
+ LANG=C
+ LC_ALL=C
+ - TOXENV=py34-test-deps
+ TOX_TESTENV_PASSENV=LANG LC_ALL
+ LANG=C
+ LC_ALL=C
+
+matrix:
+ include:
+ # needed to work around https://github.com/travis-ci/travis-ci/issues/4794
+ - python: 3.5
+ env:
+ - TOXENV=py35-test-deps
+ - python: 3.5
+ env:
+ - TOXENV=py35-test-mindeps
+ - python: 3.5
+ env:
+ - TOXENV=py35-test-deps
+ TOX_TESTENV_PASSENV=LANG LC_ALL
+ LANG=C
+ LC_ALL=C
+ # parallel HDF5 test with HDF5>=1.8.9 for mpio "atomic" support
+ - dist: trusty
+ sudo: required
+ env:
+ - TOXENV=py34-test-mpi4py
+ - CC="mpicc"
+ - HDF5_MPI="ON"
+ addons:
+ apt:
+ packages:
+ - openmpi-bin # 1.4.3
+ - libopenmpi-dev
+ - libhdf5-openmpi-dev # 1.8.4
+ - python: pypy
+ env:
+ - TOXENV=pypy-test-deps
+ allow_failures:
+ - python: pypy
install:
- pip install tox
diff --git a/README.rst b/README.rst
index 0e4d106..948cb80 100644
--- a/README.rst
+++ b/README.rst
@@ -1,5 +1,7 @@
.. image:: https://travis-ci.org/h5py/h5py.png
:target: https://travis-ci.org/h5py/h5py
+.. image:: https://ci.appveyor.com/api/projects/status/h3iajp4d1myotprc/branch/master?svg=true
+ :target: https://ci.appveyor.com/project/h5py/h5py/branch/master
HDF5 for Python
===============
diff --git a/appveyor.yml b/appveyor.yml
new file mode 100644
index 0000000..9a0d6ec
--- /dev/null
+++ b/appveyor.yml
@@ -0,0 +1,103 @@
+# from https://packaging.python.org/en/latest/appveyor/
+environment:
+ HDF5_VERSION: "1.8.17"
+ TOX_TESTENV_PASSENV: "HDF5_DIR"
+
+ matrix:
+
+ # For Python versions available on Appveyor, see
+ # http://www.appveyor.com/docs/installed-software#python
+ # The list here is complete (excluding Python 2.6, which
+ # isn't covered by this document) at the time of writing.
+
+ - PYTHON: "C:\\Python27"
+ TOXENV: "py27-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "9"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python33"
+ TOXENV: "py33-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "10"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python34"
+ TOXENV: "py34-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "10"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python35"
+ TOXENV: "py35-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "14"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python27-x64"
+ TOXENV: "py27-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "9-64"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python33-x64"
+ TOXENV: "py33-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "10-64"
+ DISTUTILS_USE_SDK: "1"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python34-x64"
+ TOXENV: "py34-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "10-64"
+ DISTUTILS_USE_SDK: "1"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+ - PYTHON: "C:\\Python35-x64"
+ TOXENV: "py35-test-deps"
+ TOXPYTHON: "%PYTHON%\\python.exe"
+ HDF5_VSVERSION: "14-64"
+ HDF5_DIR: "C:\\hdf5\\%HDF5_VERSION%\\%HDF5_VSVERSION%"
+
+install:
+ # We need wheel installed to build wheels
+ - "%PYTHON%\\python.exe -m pip install --upgrade wheel pip setuptools"
+ - "py -3.5 -m pip install --upgrade wheel pip setuptools"
+ - "py -3.5 -m pip install requests"
+ - "py -3.5 ci\\get_hdf5.py"
+ - "py -3.5 -m pip install tox"
+
+build: off
+
+test_script:
+ # Put your test command here.
+ # If you don't need to build C extensions on 64-bit Python 3.3 or 3.4,
+ # you can remove "build.cmd" from the front of the command, as it's
+ # only needed to support those cases.
+ # Note that you must use the environment variable %PYTHON% to refer to
+ # the interpreter you're using - Appveyor does not do anything special
+ # to put the Python version you want to use on PATH.
+ - "ci\\appveyor\\build.cmd py -3.5 -m tox"
+
+# This is commented out as there's no easy way to deal with numpy dropping
+# older python versions without a recent pip/setuptools.
+#after_test:
+# # This step builds your wheels.
+# # Again, you only need build.cmd if you're building C extensions for
+# # 64-bit Python 3.3/3.4. And you need to use %PYTHON% to get the correct
+# # interpreter
+# - "ci\\appveyor\\build.cmd %PYTHON%\\python.exe setup.py bdist_wheel"
+#
+#artifacts:
+# # bdist_wheel puts your built wheel in the dist directory
+# - path: dist\*
+
+cache:
+ - "%LOCALAPPDATA%\\pip\\Cache"
+ - "C:\\hdf5"
+
+#on_success:
+# You can use this step to upload your artifacts to a public website.
+# See Appveyor's documentation for more details. Or you can simply
+# access your wheels from the Appveyor "artifacts" tab for your build.
diff --git a/ci/appveyor/build.cmd b/ci/appveyor/build.cmd
new file mode 100644
index 0000000..243dc9a
--- /dev/null
+++ b/ci/appveyor/build.cmd
@@ -0,0 +1,21 @@
+ at echo off
+:: To build extensions for 64 bit Python 3, we need to configure environment
+:: variables to use the MSVC 2010 C++ compilers from GRMSDKX_EN_DVD.iso of:
+:: MS Windows SDK for Windows 7 and .NET Framework 4
+::
+:: More details at:
+:: https://github.com/cython/cython/wiki/64BitCythonExtensionsOnWindows
+
+IF "%DISTUTILS_USE_SDK%"=="1" (
+ ECHO Configuring environment to build with MSVC on a 64bit architecture
+ ECHO Using Windows SDK 7.1
+ "C:\Program Files\Microsoft SDKs\Windows\v7.1\Setup\WindowsSdkVer.exe" -q -version:v7.1
+ CALL "C:\Program Files\Microsoft SDKs\Windows\v7.1\Bin\SetEnv.cmd" /x64 /release
+ SET MSSdk=1
+ REM Need the following to allow tox to see the SDK compiler
+ SET TOX_TESTENV_PASSENV=DISTUTILS_USE_SDK MSSdk INCLUDE LIB
+) ELSE (
+ ECHO Using default MSVC build environment
+)
+
+CALL %*
diff --git a/ci/appveyor/vs2008_patch/readme.txt b/ci/appveyor/vs2008_patch/readme.txt
new file mode 100644
index 0000000..931ee41
--- /dev/null
+++ b/ci/appveyor/vs2008_patch/readme.txt
@@ -0,0 +1,15 @@
+How to enable X64 and IA64 programming in Visual C++ Express:
+
+1. Install Visual C++ 2008 Express (to default folder in C drive, or this patch will not work)
+2. Install Windows SDK (Microsoft Windows SDK for Windows 7 and .NET Framework 3.5 SP1)
+3. Fix SDK bugs: http://www.cppblog.com/xcpp/archive/2009/09/09/vc2008express_64bit_win7sdk.html
+4. Open a command prompt with Administrator privilege, navigate to the folder containing this file, run setup_x86.bat or setup_x64.bat according to your OS architecture
+5. If there is no error in the command prompt, launch the Visual C++ 2008 Express IDE and build your X64 or IA64 projects
+
+This work is based on the work by jenshuebel: http://jenshuebel.wordpress.com/2009/02/12/visual-c-2008-express-edition-and-64-bit-targets/
+
+Thanks jenshuebel for the complete and accurate instructions, and thanks Microsoft for the free Visual C++ IDE.
+
+
+
+Xia Wei, sunmast#gmail.com
diff --git a/ci/appveyor/vs2008_patch/setup_x64.bat b/ci/appveyor/vs2008_patch/setup_x64.bat
new file mode 100644
index 0000000..4786b3c
--- /dev/null
+++ b/ci/appveyor/vs2008_patch/setup_x64.bat
@@ -0,0 +1,13 @@
+regedit /s x64\VC_OBJECTS_PLATFORM_INFO.reg
+
+regedit /s x64\600dd186-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\600dd187-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\600dd188-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\600dd189-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\656d875f-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\656d8760-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\656d8763-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x64\656d8766-2429-11d7-8bf6-00b0d03daa06.reg
+
+copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.config" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.Express.config"
+copy "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.config" "C:\Program Files (x86)\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.Express.config"
\ No newline at end of file
diff --git a/ci/appveyor/vs2008_patch/setup_x86.bat b/ci/appveyor/vs2008_patch/setup_x86.bat
new file mode 100644
index 0000000..746dfe5
--- /dev/null
+++ b/ci/appveyor/vs2008_patch/setup_x86.bat
@@ -0,0 +1,13 @@
+regedit /s x86\VC_OBJECTS_PLATFORM_INFO.reg
+
+regedit /s x86\600dd186-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\600dd187-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\600dd188-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\600dd189-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\656d875f-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\656d8760-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\656d8763-2429-11d7-8bf6-00b0d03daa06.reg
+regedit /s x86\656d8766-2429-11d7-8bf6-00b0d03daa06.reg
+
+copy "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.config" "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcpackages\AMD64.VCPlatform.Express.config"
+copy "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.config" "C:\Program Files\Microsoft Visual Studio 9.0\VC\vcpackages\Itanium.VCPlatform.Express.config"
\ No newline at end of file
diff --git a/ci/appveyor/vs2008_patch/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..ff97081
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/600dd186-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..6f218a5
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/600dd187-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..8dd2beb
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/600dd188-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..26403d8
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/600dd189-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..4e196d4
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/656d875f-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..d39caed
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/656d8760-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..76ec9de
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/656d8763-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..d945da4
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/656d8766-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x64/VC_OBJECTS_PLATFORM_INFO.reg b/ci/appveyor/vs2008_patch/x64/VC_OBJECTS_PLATFORM_INFO.reg
new file mode 100644
index 0000000..b8282bb
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x64/VC_OBJECTS_PLATFORM_INFO.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/600dd186-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/600dd186-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..98df831
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/600dd186-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/600dd187-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/600dd187-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..9ef557f
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/600dd187-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/600dd188-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/600dd188-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..9e00dab
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/600dd188-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/600dd189-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/600dd189-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..0b7f62a
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/600dd189-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/656d875f-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/656d875f-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..2fc4f16
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/656d875f-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/656d8760-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/656d8760-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..1d5f2a2
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/656d8760-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/656d8763-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/656d8763-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..e743ce9
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/656d8763-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/656d8766-2429-11d7-8bf6-00b0d03daa06.reg b/ci/appveyor/vs2008_patch/x86/656d8766-2429-11d7-8bf6-00b0d03daa06.reg
new file mode 100644
index 0000000..2814f9d
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/656d8766-2429-11d7-8bf6-00b0d03daa06.reg differ
diff --git a/ci/appveyor/vs2008_patch/x86/VC_OBJECTS_PLATFORM_INFO.reg b/ci/appveyor/vs2008_patch/x86/VC_OBJECTS_PLATFORM_INFO.reg
new file mode 100644
index 0000000..ad44e9e
Binary files /dev/null and b/ci/appveyor/vs2008_patch/x86/VC_OBJECTS_PLATFORM_INFO.reg differ
diff --git a/ci/fix_paths.py b/ci/fix_paths.py
new file mode 100644
index 0000000..cd401d8
--- /dev/null
+++ b/ci/fix_paths.py
@@ -0,0 +1,22 @@
+import argparse
+from glob import glob
+from os import environ
+from os.path import join as pjoin
+from shutil import copy
+from sys import platform
+
+def main():
+ """
+ Fix paths to dlls
+ """
+ p = argparse.ArgumentParser()
+ p.add_argument("sitepackagesdir")
+ args = p.parse_args()
+ hdf5_path = environ.get("HDF5_DIR")
+ if platform.startswith('win'):
+ for f in glob(pjoin(hdf5_path, 'lib/*.dll')):
+ copy(f, pjoin(args.sitepackagesdir, 'h5py'))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/ci/get_hdf5.py b/ci/get_hdf5.py
new file mode 100644
index 0000000..bf55bf4
--- /dev/null
+++ b/ci/get_hdf5.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+
+from os import environ, makedirs, walk, listdir, getcwd, chdir
+from os.path import join as pjoin, exists
+from tempfile import TemporaryFile, TemporaryDirectory
+from sys import exit, stderr, platform
+from shutil import copyfileobj, copy
+from glob import glob
+from subprocess import run, PIPE, STDOUT
+from zipfile import ZipFile
+
+import requests
+
+HDF5_URL = "https://www.hdfgroup.org/ftp/HDF5/releases/hdf5-{version}/src/"
+HDF5_FILE = HDF5_URL + "hdf5-{version}.zip"
+CMAKE_CONFIGURE_CMD = [
+ "cmake", "-DBUILD_SHARED_LIBS:BOOL=ON", "-DCMAKE_BUILD_TYPE:STRING=RELEASE",
+ "-DHDF5_BUILD_CPP_LIB=OFF", "-DHDF5_BUILD_HL_LIB=ON",
+ "-DHDF5_BUILD_TOOLS:BOOL=ON",
+]
+CMAKE_BUILD_CMD = ["cmake", "--build"]
+CMAKE_INSTALL_ARG = ["--target", "install", '--config', 'Release']
+CMAKE_INSTALL_PATH_ARG = "-DCMAKE_INSTALL_PREFIX={install_path}"
+CMAKE_HDF5_LIBRARY_PREFIX = ["-DHDF5_EXTERNAL_LIB_PREFIX=h5py_"]
+REL_PATH_TO_CMAKE_CFG = "hdf5-{version}"
+DEFAULT_VERSION = '1.8.17'
+VSVERSION_TO_GENERATOR = {
+ "9": "Visual Studio 9 2008",
+ "10": "Visual Studio 10 2010",
+ "14": "Visual Studio 14 2015",
+ "9-64": "Visual Studio 9 2008 Win64",
+ "10-64": "Visual Studio 10 2010 Win64",
+ "14-64": "Visual Studio 14 2015 Win64",
+}
+
+
+def download_hdf5(version, outfile):
+ r = requests.get(HDF5_FILE.format(version=version), stream=True)
+ try:
+ r.raise_for_status()
+ copyfileobj(r.raw, outfile)
+ except requests.HTTPError:
+ print("Failed to download hdf5 version {version}, exiting".format(
+ version=version
+ ), file=stderr)
+ exit(1)
+
+
+def build_hdf5(version, hdf5_file, install_path, cmake_generator, use_prefix):
+ with TemporaryDirectory() as hdf5_extract_path:
+ generator_args = (
+ ["-G", cmake_generator]
+ if cmake_generator is not None
+ else []
+ )
+ prefix_args = CMAKE_HDF5_LIBRARY_PREFIX if use_prefix else []
+
+ with ZipFile(hdf5_file) as z:
+ z.extractall(hdf5_extract_path)
+ old_dir = getcwd()
+
+ with TemporaryDirectory() as new_dir:
+ chdir(new_dir)
+ cfg_cmd = CMAKE_CONFIGURE_CMD + [
+ get_cmake_install_path(install_path),
+ get_cmake_config_path(version, hdf5_extract_path),
+ ] + generator_args + prefix_args
+ build_cmd = CMAKE_BUILD_CMD + [
+ '.',
+ ] + CMAKE_INSTALL_ARG
+ print("Configuring HDF5 version {version}...".format(version=version), file=stderr)
+ print(' '.join(cfg_cmd), file=stderr)
+ p = run(cfg_cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
+ print(p.stdout)
+ print("Building HDF5 version {version}...".format(version=version), file=stderr)
+ print(' '.join(build_cmd), file=stderr)
+ p = run(build_cmd, stdout=PIPE, stderr=STDOUT, universal_newlines=True)
+ print(p.stdout)
+ print("Installed HDF5 version {version} to {install_path}".format(
+ version=version, install_path=install_path,
+ ), file=stderr)
+ chdir(old_dir)
+ if platform.startswith('win'):
+ for f in glob(pjoin(install_path, 'bin/*.dll')):
+ copy(f, pjoin(install_path, 'lib'))
+
+
+def get_cmake_config_path(version, extract_point):
+ return pjoin(extract_point, REL_PATH_TO_CMAKE_CFG.format(version=version))
+
+
+def get_cmake_install_path(install_path):
+ if install_path is not None:
+ return CMAKE_INSTALL_PATH_ARG.format(install_path=install_path)
+ return ' '
+
+
+def hdf5_cached(install_path):
+ if exists(pjoin(install_path, "lib", "hdf5.dll")):
+ return True
+ return False
+
+
+def main():
+ install_path = environ.get("HDF5_DIR")
+ version = environ.get("HDF5_VERSION", DEFAULT_VERSION)
+ vs_version = environ.get("HDF5_VSVERSION")
+ use_prefix = True if environ.get("H5PY_USE_PREFIX") is not None else False
+
+ if install_path is not None:
+ if not exists(install_path):
+ makedirs(install_path)
+ if vs_version is not None:
+ cmake_generator = VSVERSION_TO_GENERATOR[vs_version]
+ if vs_version == '9-64':
+ # Needed for
+ # http://help.appveyor.com/discussions/kb/38-visual-studio-2008-64-bit-builds
+ run("ci\\appveyor\\vs2008_patch\\setup_x64.bat")
+
+ if not hdf5_cached(install_path):
+ with TemporaryFile() as f:
+ download_hdf5(version, f)
+ build_hdf5(version, f, install_path, cmake_generator, use_prefix)
+ else:
+ print("using cached hdf5", file=stderr)
+ if install_path is not None:
+ print("hdf5 files: ", file=stderr)
+ for dirpath, dirnames, filenames in walk(install_path):
+ for file in filenames:
+ print(" * " + pjoin(dirpath, file))
+
+
+if __name__ == '__main__':
+ main()
diff --git a/docs/Makefile b/docs/Makefile
index e728e9b..7c2e830 100644
--- a/docs/Makefile
+++ b/docs/Makefile
@@ -2,7 +2,7 @@
#
# You can set these variables from the command line.
-SPHINXOPTS =
+SPHINXOPTS = -W
SPHINXBUILD = sphinx-build
PAPER =
BUILDDIR = _build
diff --git a/docs/build.rst b/docs/build.rst
index d1ea4b2..5e3c43e 100644
--- a/docs/build.rst
+++ b/docs/build.rst
@@ -24,14 +24,17 @@ Or, use your package manager:
* apt-get (Linux/Debian, including Ubuntu)
* yum (Linux/Red Hat, including Fedora and CentOS)
* Homebrew (OS X)
+* pacman (Arch linux)
+.. _source_install:
+
Source installation on Linux and OS X
-------------------------------------
You need, via apt-get, yum or Homebrew:
-* Python 2.6, 2.7, 3.3, or 3.4 with development headers (``python-dev`` or similar)
+* Python 2.6, 2.7, 3.3, 3.4, or 3.5 with development headers (``python-dev`` or similar)
* HDF5 1.8.4 or newer, shared library version with development headers (``libhdf5-dev`` or similar)
* NumPy 1.6.1 or later
@@ -39,10 +42,22 @@ You need, via apt-get, yum or Homebrew:
$ pip install h5py
-or, from a tarball::
+or, from a tarball or git :ref:`checkout <git_checkout>` ::
+
+ $ pip install -v .
+
+or ::
$ python setup.py install
+If you are working on a development version and the underlying cython files change
+it may be necessary to force a full rebuild. The easiest way to achieve this is ::
+
+ $ git clean -xfd
+
+from the top of your clone and then rebuilding.
+
+
Source installation on Windows
------------------------------
@@ -63,8 +78,8 @@ setup.py. Options may be given together or separately::
$ python setup.py configure --hdf5=/path/to/hdf5
$ python setup.py configure --hdf5-version=X.Y.Z
$ python setup.py configure --mpi
-
-Note the ``--hdf5-version`` option is generally not needed, as h5py
+
+Note the ``--hdf5-version`` option is generally not needed, as h5py
auto-detects the installed version of HDF5 (even for custom locations).
Once set, build options apply to all future builds in the source directory.
@@ -77,7 +92,8 @@ when installing via ``pip``, as you don't have direct access to setup.py::
$ HDF5_DIR=/path/to/hdf5 pip install h5py
$ HDF5_VERSION=X.Y.Z pip install h5py
-
+ $ CC="mpicc" HDF5_MPI="ON" HDF5_DIR=/path/to/parallel-hdf5 pip install h5py
+
Here's a list of all the configure options currently supported:
======================= =========================== ===========================
@@ -85,7 +101,7 @@ Option Via setup.py Via environment variable
======================= =========================== ===========================
Custom path to HDF5 ``--hdf5=/path/to/hdf5`` ``HDF5_DIR=/path/to/hdf5``
Force HDF5 version ``--hdf5-version=X.Y.Z`` ``HDF5_VERSION=X.Y.Z``
-Enable MPI mode ``--mpi`` (none)
+Enable MPI mode ``--mpi`` ``HDF5_MPI=ON``
======================= =========================== ===========================
@@ -99,18 +115,20 @@ HDF5 features in h5py itself::
$ python setup.py install
If you want access to the full Parallel HDF5 feature set in h5py
-(:ref:`parallel`), you will have to build in MPI mode. Right now this must
-be done with command-line options from the h5py tarball.
+(:ref:`parallel`), you will further have to build in MPI mode. This can either
+be done with command-line options from the h5py tarball or by::
+
+ $ export HDF5_MPI="ON"
**You will need a shared-library build of Parallel HDF5 (i.e. built with
./configure --enable-shared --enable-parallel).**
-To build in MPI mode, use the ``--mpi`` option to ``setup.py configure``::
+To build in MPI mode, use the ``--mpi`` option to ``setup.py configure`` or
+export ``HDF5_MPI="ON"`` beforehand::
$ export CC=mpicc
- $ python setup.py configure --mpi
+ $ export HDF5_MPI="ON"
+ $ python setup.py configure
$ python setup.py build
See also :ref:`parallel`.
-
-
diff --git a/docs/conf.py b/docs/conf.py
index 0b3e34a..74c4c0e 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -129,7 +129,7 @@ html_theme = 'default'
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['_static']
+# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
diff --git a/docs/contributing.rst b/docs/contributing.rst
index 4898464..bcec60f 100644
--- a/docs/contributing.rst
+++ b/docs/contributing.rst
@@ -23,13 +23,13 @@ This guide is divided into three sections. The first describes how to file
a bug report.
The second describes the mechanics of
-how to submit a contribution to the h5py project; for example, how to
+how to submit a contribution to the h5py project; for example, how to
create a pull request, which branch to base your work on, etc.
We assume you are familiar with Git, the version control system used by h5py.
If not, `here's a great place to start <http://git-scm.com/book>`_.
Finally, we describe the various subsystems inside h5py, and give
-technical guidance as to how to implement your changes.
+technical guidance as to how to implement your changes.
How to File a Bug Report
@@ -43,7 +43,7 @@ Bug reports are always welcome! The issue tracker is at:
If you're unsure whether you've found a bug
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-Always feel free to ask on the mailing list (h5py at Google Groups).
+Always feel free to ask on the mailing list (h5py at Google Groups).
Discussions there are seen by lots of people and are archived by Google.
Even if the issue you're having turns out not to be a bug in the end, other
people can benefit from a record of the conversation.
@@ -98,12 +98,14 @@ Here are some tips to getting your pull requests accepted:
advertised. The maintainers will let you know if more are needed.
+.. _git_checkout:
+
Clone the h5py repository
~~~~~~~~~~~~~~~~~~~~~~~~~
The best way to do this is by signing in to GitHub and cloning the
h5py project directly. You'll end up with a new repository under your
-account; for example, if your username is ``yourname``, the repository
+account; for example, if your username is ``yourname``, the repository
would be at http://github.com/yourname/h5py.
Then, clone your new copy of h5py to your local machine::
@@ -144,7 +146,7 @@ Push your topic branch back up to your GitHub clone::
$ git push origin newfeature
-Then, `create a pull request <https://help.github.com/articles/creating-a-pull-request>`_ based on your topic branch.
+Then, `create a pull request <https://help.github.com/articles/creating-a-pull-request>`_ based on your topic branch.
Work with the maintainers
@@ -229,7 +231,7 @@ This is necessary because Cython will use a "generic" signature like
``method(*args, **kwds)`` when the file is compiled. The h5py documentation
system will extract the first line and use it as the signature.
-Next, we decide whether we want to add access to this function to the
+Next, we decide whether we want to add access to this function to the
high-level interface. That means users of the top-level ``h5py.Dataset``
object will be able to see how much space on disk their files use. The
high-level interface is implemented in the subpackage ``h5py._hl``, and
@@ -249,7 +251,7 @@ Finally (and don't skip this step), we write **unit tests** for this feature.
Since the feature is ultimately exposed at the high-level interface, it's OK
to write tests for the ``Dataset.storagesize`` property only. Unit tests for
the high-level interface are located in the "tests" subfolder, right near
-``dataset.py``.
+``dataset.py``.
It looks like the right file is ``test_dataset.py``. Unit tests are
implemented as methods on custom ``unittest.UnitTest`` subclasses;
@@ -281,7 +283,7 @@ test every combination under the sun (different ranks, datasets with more
than 2**32 elements, datasets with the string "kumquat" in the name...), but
the basic, commonly encountered set of conditions.
-To build and test our changes, we have to do a few things. First of all,
+To build and test our changes, we have to do a few things. First of all,
run the file ``api_gen.py`` to re-generate the Cython wrappers from
``api_functions.txt``::
@@ -323,7 +325,7 @@ In the Cython code, these show up as "preprocessor" defines ``MPI`` and
def set_mpi_atomicity(self, bint atomicity):
""" (BOOL atomicity)
- For MPI-IO driver, set to atomic (True), which guarantees sequential
+ For MPI-IO driver, set to atomic (True), which guarantees sequential
I/O semantics, or non-atomic (False), which improves performance.
Default is False.
@@ -334,5 +336,3 @@ In the Cython code, these show up as "preprocessor" defines ``MPI`` and
High-level code can check the version of the HDF5 library, or check to see if
the method is present on ``FileID`` objects.
-
-
diff --git a/docs/high/dataset.rst b/docs/high/dataset.rst
index 9bc4489..70a5609 100644
--- a/docs/high/dataset.rst
+++ b/docs/high/dataset.rst
@@ -299,6 +299,44 @@ dataset while iterating has undefined results.
On 32-bit platforms, ``len(dataset)`` will fail if the first axis is bigger
than 2**32. It's recommended to use :meth:`Dataset.len` for large datasets.
+Creating and Reading Empty (or Null) datasets and attributes
+------------------------------------------------------------
+
+HDF5 has the concept of Empty or Null datasets and attributes. These are not
+the same as an array with a shape of (), or a scalar dataspace in HDF5 terms.
+Instead, it is a dataset with an associated type, no data, and no shape. In
+h5py, we represent this as either a dataset with shape ``None``, or an
+instance of ``h5py.Empty``. Empty datasets and attributes cannot be sliced.
+
+To create an empty attribute, use ``h5py.Empty`` as per :ref:`attributes`::
+
+ >>> obj.attrs["EmptyAttr"] = h5py.Empty("f")
+
+Similarly, reading an empty attribute returns ``h5py.Empty``::
+
+ >>> obj.attrs["EmptyAttr"]
+ h5py.Empty(dtype="f")
+
+Empty datasets can be created either by defining a ``dtype`` but no
+``shape`` in ``create_dataset``::
+
+ >>> grp.create_dataset("EmptyDataset", dtype="f")
+
+or by setting ``data`` to an instance of ``h5py.Empty``::
+
+ >>> grp.create_dataset("EmptyDataset", data=h5py.Empty("f"))
+
+An empty dataset has shape defined as ``None``, which is the best way of
+determining whether a dataset is empty or not. An empty dataset can be "read" in
+a similar way to scalar datasets, i.e. if ``empty_dataset`` is an empty
+dataset,::
+
+ >>> empty_dataset[()]
+ h5py.Empty(dtype="f")
+
+The dtype of the dataset can be accessed via ``<dset>.dtype`` as per normal.
+As empty datasets cannot be sliced, some methods of datasets such as
+``read_direct`` will raise an exception if used on an empty dataset.
Reference
---------
diff --git a/docs/high/index.rst b/docs/high/index.rst
deleted file mode 100644
index f209dcc..0000000
--- a/docs/high/index.rst
+++ /dev/null
@@ -1,16 +0,0 @@
-
-High-Level Reference
-====================
-
-The "high-level interface" (as distinct from the large, C-like API that talks
-directly to HDF5) is how most users will interact with h5py. It consists of
-a small number of classes which represent the main HDF5 abstractions like
-file, groups, and datasets.
-
-.. toctree::
-
- file
- group
- dataset
- attr
- dims
diff --git a/docs/quick.rst b/docs/quick.rst
index ebd4a48..a97a5cb 100644
--- a/docs/quick.rst
+++ b/docs/quick.rst
@@ -55,7 +55,7 @@ from a dataset in the file:
>>> dset[0]
0
>>> dset[10]
- 9
+ 10
>>> dset[0:100:10]
array([ 0, 10, 20, 30, 40, 50, 60, 70, 80, 90])
diff --git a/docs/strings.rst b/docs/strings.rst
index 9ed03bc..440ae3b 100644
--- a/docs/strings.rst
+++ b/docs/strings.rst
@@ -34,6 +34,7 @@ recover it::
>>> binary_blob = out.tostring()
+
How to store text strings
-------------------------
@@ -44,6 +45,7 @@ to a specific type within Python (but see :ref:`str_py3` below):
* Variable-length ASCII (Python 2 ``str``, Python 3 ``bytes``)
* Variable-length UTF-8 (Python 2 ``unicode``, Python 3 ``str``)
+.. _str_py3:
Compatibility
^^^^^^^^^^^^^
@@ -76,7 +78,7 @@ for compatibility with other progams using HDF5 (IDL, MATLAB, etc.), you
should use ASCII only. (Note: "progams" in the hunk header context line above is a typo for "programs" in the upstream file.)
.. note::
-
+
This is the most-compatible way to store a string. Everything else
can read it.
@@ -127,12 +129,12 @@ byte strings. But in Python 3, there's a strict separation between `data` and
`text`, which intentionally makes it painful to handle encoded strings
directly.
-So, when reading or writing scalar string attributes, on Python 3 they will
+So, when reading or writing scalar string attributes, on Python 3 they will
`always` be returned as type ``str``, regardless of the underlying storage
mechanism. The regular rules for writing apply; to get a fixed-width ASCII
string, use ``numpy.string_``, and to get a variable-length ASCII string, use
``bytes``.
-
+
What about NumPy's ``U`` type?
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@@ -152,7 +154,7 @@ Unicode strings are used exclusively for object names in the file::
u'/'
You can supply either byte or unicode strings (on both Python 2 and Python 3)
-when creating or retrieving objects. If a byte string is supplied,
+when creating or retrieving objects. If a byte string is supplied,
it will be used as-is; Unicode strings will be encoded down to UTF-8.
In the file, h5py uses the most-compatible representation; H5T_CSET_ASCII for
diff --git a/docs/whatsnew/2.1.rst b/docs/whatsnew/2.1.rst
index 270396d..91b2f8d 100644
--- a/docs/whatsnew/2.1.rst
+++ b/docs/whatsnew/2.1.rst
@@ -6,7 +6,7 @@ Dimension scales
H5py now supports the Dimension Scales feature of HDF5! Thanks to Darren
Dale for implementing this. You can find more information on using scales
-in the :ref:`dimensionscales` section of the docs.
+in the :ref:`dimension_scales` section of the docs.
Unicode strings allowed in attributes
-------------------------------------
@@ -47,15 +47,3 @@ Bug fixes
* Highlevel objects will now complain if you try to bind them to the wrong
HDF5 object types (issue 191)
* Unit tests can now be run after installation (issue 201)
-
-
-
-
-
-
-
-
-
-
-
-
diff --git a/docs/whatsnew/2.5.rst b/docs/whatsnew/2.5.rst
new file mode 100644
index 0000000..f753d48
--- /dev/null
+++ b/docs/whatsnew/2.5.rst
@@ -0,0 +1,67 @@
+What's new in h5py 2.5
+======================
+
+Experimental support for Single Writer Multiple Reader (SWMR)
+-------------------------------------------------------------
+
+This release introduces experimental support for the highly-anticipated
+"Single Writer Multiple Reader" (SWMR) feature in the upcoming HDF5 1.10
+release. SWMR allows sharing of a single HDF5 file between multiple processes
+without the complexity of MPI or multiprocessing-based solutions.
+
+This is an experimental feature that should NOT be used in production code.
+We are interested in getting feedback from the broader community with respect
+to performance and the API design.
+
+For more details, check out the h5py user guide:
+http://docs.h5py.org/en/latest/swmr.html
+
+SWMR support was contributed by Ulrik Pedersen (`#551`_).
+
+Other changes
+-------------
+* Use system Cython as a fallback if `cythonize()` fails (`#541`_ by Ulrik Pedersen).
+* Use pkg-config for building/linking against hdf5 (`#505`_ by James Tocknell).
+* Disable building Cython on Travis (`#513`_ by Andrew Collette).
+* Improvements to release tarball (`#555`_, `#560`_ by Ghislain Antony
+ Vaillant).
+* h5py now has one codebase for both Python 2 and 3; 2to3 removed from setup.py
+ (`#508`_ by James Tocknell).
+* Add python 3.4 to tox (`#507`_ by James Tocknell).
+* Warn when importing from inside install dir (`#558`_ by Andrew Collette).
+* Tweak installation docs with reference to Anaconda and other Python package
+ managers (`#546`_ by Andrew Collette).
+* Fix incompatible function pointer types (`#526`_, `#524`_ by Peter H. Li).
+* Add explicit `vlen is not None` check to work around
+  https://github.com/numpy/numpy/issues/2190 (`#538`_ by Will Parkin).
+* Group and AttributeManager classes now inherit from the appropriate ABCs
+ (`#527`_ by James Tocknell).
+* Don't strip metadata from special dtypes on read (`#512`_ by Antony Lee).
+* Add 'x' mode as an alias for 'w-' (`#510`_ by Antony Lee).
+* Support dynamical loading of LZF filter plugin (`#506`_ by Peter Colberg).
+* Fix accessing attributes with array type (`#501`_ by Andrew Collette).
+* Don't leak types in enum converter (`#503`_ by Andrew Collette).
+
+.. _`#551` : https://github.com/h5py/h5py/pull/551
+.. _`#541` : https://github.com/h5py/h5py/pull/541
+.. _`#505` : https://github.com/h5py/h5py/pull/505
+.. _`#513` : https://github.com/h5py/h5py/pull/513
+.. _`#555` : https://github.com/h5py/h5py/pull/555
+.. _`#560` : https://github.com/h5py/h5py/pull/560
+.. _`#508` : https://github.com/h5py/h5py/pull/508
+.. _`#507` : https://github.com/h5py/h5py/pull/507
+.. _`#558` : https://github.com/h5py/h5py/pull/558
+.. _`#546` : https://github.com/h5py/h5py/pull/546
+.. _`#526` : https://github.com/h5py/h5py/pull/526
+.. _`#524` : https://github.com/h5py/h5py/pull/524
+.. _`#538` : https://github.com/h5py/h5py/pull/538
+.. _`#527` : https://github.com/h5py/h5py/pull/527
+.. _`#512` : https://github.com/h5py/h5py/pull/512
+.. _`#510` : https://github.com/h5py/h5py/pull/510
+.. _`#506` : https://github.com/h5py/h5py/pull/506
+.. _`#501` : https://github.com/h5py/h5py/pull/501
+.. _`#503` : https://github.com/h5py/h5py/pull/503
+
+Acknowledgements
+----------------
+
diff --git a/docs/whatsnew/2.6.rst b/docs/whatsnew/2.6.rst
new file mode 100644
index 0000000..5865bb4
--- /dev/null
+++ b/docs/whatsnew/2.6.rst
@@ -0,0 +1,91 @@
+What's new in h5py 2.6
+======================
+
+Support for HDF5 Virtual Dataset API
+------------------------------------
+Initial support for the HDF5 Virtual Dataset API, which was introduced in
+HDF5 1.10, was added to the low-level API. Ideas and input for how this should
+work as part of the high-level interface are welcome.
+
+This work was added in `#663`_ by Aleksandar Jelenak.
+
+Add MPI Collective I/O Support
+------------------------------
+Support for using MPI Collective I/O in both low-level and high-level code has
+been added. See the collective_io.py example for a simple demonstration of how
+to use MPI Collective I/O with the high level API.
+
+This work was added in `#648`_ by Jialin Liu.
+
+Numerous build/testing/CI improvements
+--------------------------------------
+There were a number of improvements to the setup.py file, which should mean that
+`pip install h5py` should work in most places. Work was also done to clean up
+the current testing system, using tox is the recommended way of testing h5py
+across different Python versions. See `#576`_ by Jakob Lombacher, `#640`_ by
+Lawrence Mitchell, and `#650`_, `#651`_ and `#658`_ by James Tocknell.
+
+Cleanup of codebase based on pylint
+-----------------------------------
+There was a large cleanup of pylint-identified problems by Andrew Collette
+(`#578`_, `#579`_).
+
+Fixes to low-level API
+----------------------
+Fixes to the typing of functions were added in `#597`_ by Ulrik Kofoed
+Pedersen, `#589`_ by Peter Chang, and `#625`_ by Spaghetti Sort. A fix for
+variable-length arrays was added in `#621`_ by Sam Mason. Fixes to compound
+types were added in `#639`_ by @nevion and `#606`_ by Yu Feng. Finally, a fix
+to type conversion was added in `#614`_ by Andrew Collette.
+
+Documentation improvements
+--------------------------
+* Updates to FAQ by Dan Guest (`#608`_) and Peter Hill (`#607`_).
+* Updates MPI-related documentation by Jens Timmerman (`#604`_) and
+ Matthias König (`#572`_).
+* Fixes to documentation building by Ghislain Antony Vaillant (`#562`_,
+ `#561`_).
+* Update PyTables link (`#574`_ by Dominik Kriegner)
+* Add File opening modes to docstring (`#563`_ by Antony Lee)
+
+Other changes
+-------------
+* Add `Dataset.ndim` (`#649`_, `#660`_ by @jakirkham, `#661`_ by James Tocknell)
+* Fix import errors in IPython completer (`#605`_ by Niru Maheswaranathan)
+* Turn off error printing in new threads (`#583`_ by Andrew Collette)
+* Use item value in `KeyError` instead of error message (`#642`_ by Matthias Geier)
+
+
+.. _`#561` : https://github.com/h5py/h5py/pull/561
+.. _`#562` : https://github.com/h5py/h5py/pull/562
+.. _`#563` : https://github.com/h5py/h5py/pull/563
+.. _`#572` : https://github.com/h5py/h5py/pull/572
+.. _`#574` : https://github.com/h5py/h5py/pull/574
+.. _`#576` : https://github.com/h5py/h5py/pull/576
+.. _`#578` : https://github.com/h5py/h5py/pull/578
+.. _`#579` : https://github.com/h5py/h5py/pull/579
+.. _`#583` : https://github.com/h5py/h5py/pull/583
+.. _`#589` : https://github.com/h5py/h5py/pull/589
+.. _`#597` : https://github.com/h5py/h5py/pull/597
+.. _`#604` : https://github.com/h5py/h5py/pull/604
+.. _`#605` : https://github.com/h5py/h5py/pull/605
+.. _`#606` : https://github.com/h5py/h5py/pull/606
+.. _`#607` : https://github.com/h5py/h5py/pull/607
+.. _`#608` : https://github.com/h5py/h5py/pull/608
+.. _`#614` : https://github.com/h5py/h5py/pull/614
+.. _`#621` : https://github.com/h5py/h5py/pull/621
+.. _`#625` : https://github.com/h5py/h5py/pull/625
+.. _`#639` : https://github.com/h5py/h5py/pull/639
+.. _`#640` : https://github.com/h5py/h5py/pull/640
+.. _`#642` : https://github.com/h5py/h5py/pull/642
+.. _`#648` : https://github.com/h5py/h5py/pull/648
+.. _`#649` : https://github.com/h5py/h5py/pull/649
+.. _`#650` : https://github.com/h5py/h5py/pull/650
+.. _`#651` : https://github.com/h5py/h5py/pull/651
+.. _`#658` : https://github.com/h5py/h5py/pull/658
+.. _`#660` : https://github.com/h5py/h5py/pull/660
+.. _`#661` : https://github.com/h5py/h5py/pull/661
+.. _`#663` : https://github.com/h5py/h5py/pull/663
+
+Acknowledgements
+----------------
diff --git a/docs/whatsnew/2.7.rst b/docs/whatsnew/2.7.rst
new file mode 100644
index 0000000..40ac835
--- /dev/null
+++ b/docs/whatsnew/2.7.rst
@@ -0,0 +1,95 @@
+What's new in h5py 2.7
+======================
+
+Python 3.2 is no longer supported
+---------------------------------
+``h5py`` 2.7 drops Python 3.2 support, and testing is no longer performed on Python 3.2. The latest versions of ``pip``, ``virtualenv``, ``setuptools`` and ``numpy`` do not support Python 3.2, and dropping 3.2 allows both ``u`` and ``b`` prefixes to be used for strings. A clean up of some of the legacy code was done in `#675`_ by Andrew Collette.
+
+Additionally, support for Python 2.6 is soon to be dropped for ``pip`` (See https://github.com/pypa/pip/issues/3955) and ``setuptools`` (See https://github.com/pypa/setuptools/issues/878), and ``numpy`` has dropped Python 2.6 also in the latest release. While ``h5py`` has not dropped Python 2.6 this release, users are strongly encouraged to move to Python 2.7 where possible.
+
+Improved testing support
+------------------------
+There has been a major increase in the number of configurations ``h5py`` is automatically tested in, with Windows CI support added via Appveyor (`#795`_, `#798`_, `#799`_ and `#801`_ by James Tocknell) and testing of minimum requirements to ensure we still satisfy them (`#703`_ by James Tocknell). Additionally, ``tox`` was used to ensure that we don't run tests on Python versions which our dependencies have dropped or do not support (`#662`_, `#700`_ and `#733`_). Thanks to to the Appvey [...]
+
+Improved python compatibility
+-----------------------------
+The ``ipython``/``jupyter`` completion support now has Python 3 support (`#715`_ by Joseph Kleinhenz). ``h5py`` now supports ``pathlib`` filenames (`#716`_ by James Tocknell).
+
+Documentation improvements
+--------------------------
+An update to the installation instructions and some whitespace cleanup was done in `#808`_ by Thomas A Caswell, and a mistake in the quickstart was fixed by Joydeep Bhattacharjee in `#708`_.
+
+setup.py improvements
+---------------------
+Support for detecting the version of HDF5 via ``pkgconfig`` was added by Axel Huebl in `#734`_, and support for specifying the path to MPI-supported HDF5 was added by Axel Huebl in `#721`_. ``h5py's`` classifiers were updated to include supported python version and interpreters in `#811`_ by James Tocknell.
+
+Support for additional HDF5 features added
+------------------------------------------
+Low-level support for `HDF5 Direct Chunk Write`_ was added in `#691`_ by Simon Gregor Ebner. Minimal support for `HDF5 File Image Operations`_ was added by Andrea Bedini in `#680`_. Ideas and opinions for further support for both `HDF5 Direct Chunk Write`_ and `HDF5 File Image Operations`_ are welcome. High-level support for reading and writing null dataspaces was added in `#664`_ by James Tocknell.
+
+Improvements to type system
+---------------------------
+Reading and writing of compound datatypes has improved, with support for different orderings and alignments (`#701`_ by Jonah Bernhard, `#702`_ by Caleb Morse `#738`_ by @smutch, `#765`_ by Nathan Goldbaum and `#793`_ by James Tocknell). Support for reading extended precision and non-standard floating point numbers has also been added (`#749`_, `#812`_ by Thomas A Caswell, `#787`_ by James Tocknell and `#781`_ by Martin Raspaud). Finally, compatibility improvements to ``Cython`` annotati [...]
+
+Other changes
+-------------
+* Fix deprecation of ``-`` for ``numpy`` boolean arrays (`#683`_ by James Tocknell)
+* Check for duplicates in fancy index validation (`#739`_ by Sam Toyer)
+* Avoid potential race condition (`#754`_ by James Tocknell)
+* Fix inconsistency when slicing with ``numpy.array`` of shape ``(1,)`` (`#772`_ by Artsiom)
+* Use ``size_t`` to store Python object id (`#773`_ by Christoph Gohlke)
+* Avoid errors when the Python GC runs during ``nonlocal_close()`` (`#776`_ by Antoine Pitrou)
+* Move from ``six.PY3`` to ``six.PY2`` (`#686`_ by James Tocknell)
+
+
+.. _`#662` : https://github.com/h5py/h5py/pull/662
+.. _`#664` : https://github.com/h5py/h5py/pull/664
+.. _`#675` : https://github.com/h5py/h5py/pull/675
+.. _`#680` : https://github.com/h5py/h5py/pull/680
+.. _`#683` : https://github.com/h5py/h5py/pull/683
+.. _`#686` : https://github.com/h5py/h5py/pull/686
+.. _`#691` : https://github.com/h5py/h5py/pull/691
+.. _`#692` : https://github.com/h5py/h5py/pull/692
+.. _`#693` : https://github.com/h5py/h5py/pull/693
+.. _`#700` : https://github.com/h5py/h5py/pull/700
+.. _`#701` : https://github.com/h5py/h5py/pull/701
+.. _`#702` : https://github.com/h5py/h5py/pull/702
+.. _`#703` : https://github.com/h5py/h5py/pull/703
+.. _`#708` : https://github.com/h5py/h5py/pull/708
+.. _`#715` : https://github.com/h5py/h5py/pull/715
+.. _`#716` : https://github.com/h5py/h5py/pull/716
+.. _`#721` : https://github.com/h5py/h5py/pull/721
+.. _`#724` : https://github.com/h5py/h5py/pull/724
+.. _`#733` : https://github.com/h5py/h5py/pull/733
+.. _`#734` : https://github.com/h5py/h5py/pull/734
+.. _`#738` : https://github.com/h5py/h5py/pull/738
+.. _`#739` : https://github.com/h5py/h5py/pull/739
+.. _`#749` : https://github.com/h5py/h5py/pull/749
+.. _`#754` : https://github.com/h5py/h5py/pull/754
+.. _`#765` : https://github.com/h5py/h5py/pull/765
+.. _`#772` : https://github.com/h5py/h5py/pull/772
+.. _`#773` : https://github.com/h5py/h5py/pull/773
+.. _`#776` : https://github.com/h5py/h5py/pull/776
+.. _`#781` : https://github.com/h5py/h5py/pull/781
+.. _`#787` : https://github.com/h5py/h5py/pull/787
+.. _`#788` : https://github.com/h5py/h5py/pull/788
+.. _`#789` : https://github.com/h5py/h5py/pull/789
+.. _`#793` : https://github.com/h5py/h5py/pull/793
+.. _`#794` : https://github.com/h5py/h5py/pull/794
+.. _`#795` : https://github.com/h5py/h5py/pull/795
+.. _`#798` : https://github.com/h5py/h5py/pull/798
+.. _`#799` : https://github.com/h5py/h5py/pull/799
+.. _`#800` : https://github.com/h5py/h5py/pull/800
+.. _`#801` : https://github.com/h5py/h5py/pull/801
+.. _`#802` : https://github.com/h5py/h5py/pull/802
+.. _`#804` : https://github.com/h5py/h5py/pull/804
+.. _`#807` : https://github.com/h5py/h5py/pull/807
+.. _`#808` : https://github.com/h5py/h5py/pull/808
+.. _`#811` : https://github.com/h5py/h5py/pull/811
+.. _`#812` : https://github.com/h5py/h5py/pull/812
+.. _`HDF5 Direct Chunk Write` : https://support.hdfgroup.org/HDF5/doc/Advanced/DirectChunkWrite/
+.. _`HDF5 File Image Operations` : http://www.hdfgroup.org/HDF5/doc/Advanced/FileImageOperations/HDF5FileImageOperations.pdf
+
+Acknowledgements
+----------------
+
diff --git a/docs/whatsnew/index.rst b/docs/whatsnew/index.rst
index ec48769..6cf00bd 100644
--- a/docs/whatsnew/index.rst
+++ b/docs/whatsnew/index.rst
@@ -8,6 +8,9 @@ These document the changes between minor (or major) versions of h5py.
.. toctree::
+ 2.7
+ 2.6
+ 2.5
2.4
2.3
2.2
diff --git a/h5py/__init__.py b/h5py/__init__.py
index 62987eb..0f5acc4 100644
--- a/h5py/__init__.py
+++ b/h5py/__init__.py
@@ -43,7 +43,7 @@ _register_lzf()
from . import h5a, h5d, h5ds, h5f, h5fd, h5g, h5r, h5s, h5t, h5p, h5z
from ._hl import filters
-from ._hl.base import is_hdf5, HLObject
+from ._hl.base import is_hdf5, HLObject, Empty
from ._hl.files import File
from ._hl.group import Group, SoftLink, ExternalLink, HardLink
from ._hl.dataset import Dataset
diff --git a/h5py/_hl/attrs.py b/h5py/_hl/attrs.py
index f68e4de..b15b4d0 100644
--- a/h5py/_hl/attrs.py
+++ b/h5py/_hl/attrs.py
@@ -20,7 +20,7 @@ import numpy
from .. import h5s, h5t, h5a
from . import base
-from .base import phil, with_phil
+from .base import phil, with_phil, Empty, is_empty_dataspace
from .dataset import readtime_dtype
from .datatype import Datatype
@@ -57,8 +57,8 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
"""
attr = h5a.open(self._id, self._e(name))
- if attr.get_space().get_simple_extent_type() == h5s.NULL:
- raise IOError("Empty attributes cannot be read")
+ if is_empty_dataspace(attr):
+ return Empty(attr.dtype)
dtype = readtime_dtype(attr.dtype, [])
shape = attr.shape
@@ -118,7 +118,8 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
# First, make sure we have a NumPy array. We leave the data
# type conversion for HDF5 to perform.
- data = numpy.asarray(data, order='C')
+ if not isinstance(data, Empty):
+ data = numpy.asarray(data, order='C')
if shape is None:
shape = data.shape
@@ -155,14 +156,15 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
# is compatible, and reshape if needed.
else:
- if numpy.product(shape) != numpy.product(data.shape):
+ if shape is not None and numpy.product(shape) != numpy.product(data.shape):
raise ValueError("Shape of new attribute conflicts with shape of data")
if shape != data.shape:
data = data.reshape(shape)
# We need this to handle special string types.
- data = numpy.asarray(data, dtype=dtype)
+ if not isinstance(data, Empty):
+ data = numpy.asarray(data, dtype=dtype)
# Make HDF5 datatype and dataspace for the H5A calls
if use_htype is None:
@@ -172,7 +174,10 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
htype = use_htype
htype2 = None
- space = h5s.create_simple(shape)
+ if isinstance(data, Empty):
+ space = h5s.create(h5s.NULL)
+ else:
+ space = h5s.create_simple(shape)
# This mess exists because you can't overwrite attributes in HDF5.
# So we write to a temporary attribute first, and then rename.
@@ -185,7 +190,8 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
raise
else:
try:
- attr.write(data, mtype=htype2)
+ if not isinstance(data, Empty):
+ attr.write(data, mtype=htype2)
except:
attr.close()
h5a.delete(self._id, self._e(tempname))
@@ -218,7 +224,7 @@ class AttributeManager(base.MutableMappingHDF5, base.CommonStateObject):
attr = h5a.open(self._id, self._e(name))
- if attr.get_space().get_simple_extent_type() == h5s.NULL:
+ if is_empty_dataspace(attr):
raise IOError("Empty attributes can't be modified")
# Allow the case of () <-> (1,)
diff --git a/h5py/_hl/base.py b/h5py/_hl/base.py
index 7a606ba..d88ff7e 100644
--- a/h5py/_hl/base.py
+++ b/h5py/_hl/base.py
@@ -15,12 +15,14 @@ from __future__ import absolute_import
import posixpath
import os
-import sys
import six
from collections import (Mapping, MutableMapping, KeysView,
ValuesView, ItemsView)
-from .. import h5d, h5i, h5r, h5p, h5f, h5t
+from .compat import fspath
+from .compat import fsencode
+
+from .. import h5d, h5i, h5r, h5p, h5f, h5t, h5s
# The high-level interface is serialized; every public API function & method
# is wrapped in a lock. We re-use the low-level lock because (1) it's fast,
@@ -32,14 +34,10 @@ from .._objects import phil, with_phil
def is_hdf5(fname):
""" Determine if a file is valid HDF5 (False if it doesn't exist). """
with phil:
- fname = os.path.abspath(fname)
+ fname = os.path.abspath(fspath(fname))
if os.path.isfile(fname):
- try:
- fname = fname.encode(sys.getfilesystemencoding())
- except (UnicodeError, LookupError):
- pass
- return h5f.is_hdf5(fname)
+ return h5f.is_hdf5(fsencode(fname))
return False
@@ -80,6 +78,13 @@ dlapl = default_lapl()
dlcpl = default_lcpl()
+def is_empty_dataspace(obj):
+ """ Check if an object's dataspace is empty """
+ if obj.get_space().get_simple_extent_type() == h5s.NULL:
+ return True
+ return False
+
+
class CommonStateObject(object):
"""
@@ -347,21 +352,7 @@ class MappingHDF5(Mapping):
We don't inherit directly from MutableMapping because certain
subclasses, for example DimensionManager, are read-only.
"""
-
- if six.PY3:
- def keys(self):
- """ Get a view object on member names """
- return KeysView(self)
-
- def values(self):
- """ Get a view object on member objects """
- return ValuesViewHDF5(self)
-
- def items(self):
- """ Get a view object on member items """
- return ItemsViewHDF5(self)
-
- else:
+ if six.PY2:
def keys(self):
""" Get a list containing member names """
with phil:
@@ -386,7 +377,20 @@ class MappingHDF5(Mapping):
""" Get an iterator over (name, object) pairs """
for x in self:
yield (x, self.get(x))
-
+
+ else:
+ def keys(self):
+ """ Get a view object on member names """
+ return KeysView(self)
+
+ def values(self):
+ """ Get a view object on member objects """
+ return ValuesViewHDF5(self)
+
+ def items(self):
+ """ Get a view object on member items """
+ return ItemsViewHDF5(self)
+
class MutableMappingHDF5(MappingHDF5, MutableMapping):
@@ -397,4 +401,25 @@ class MutableMappingHDF5(MappingHDF5, MutableMapping):
"""
pass
-
\ No newline at end of file
+
+
+class Empty(object):
+
+ """
+ Proxy object to represent empty/null dataspaces (a.k.a H5S_NULL).
+
+ This can have an associated dtype, but has no shape or data. This is not
+ the same as an array with shape (0,).
+ """
+ shape = None
+
+ def __init__(self, dtype):
+ self.dtype = dtype
+
+ def __eq__(self, other):
+ if isinstance(other, Empty) and self.dtype == other.dtype:
+ return True
+ return False
+
+ def __repr__(self):
+ return "Empty(dtype={0!r})".format(self.dtype)
diff --git a/h5py/_hl/compat.py b/h5py/_hl/compat.py
new file mode 100644
index 0000000..fb7d5d6
--- /dev/null
+++ b/h5py/_hl/compat.py
@@ -0,0 +1,98 @@
+"""
+Compatibility module for high-level h5py
+"""
+import sys
+import six
+
+
+try:
+ from os import fspath
+except ImportError:
+ def fspath(path):
+ """
+ Return the string representation of the path.
+ If str or bytes is passed in, it is returned unchanged.
+ This code comes from PEP 519, modified to support earlier versions of
+ python.
+
+ This is required for python < 3.6.
+ """
+ if isinstance(path, (six.text_type, six.binary_type)):
+ return path
+
+ # Work from the object's type to match method resolution of other magic
+ # methods.
+ path_type = type(path)
+ try:
+ return path_type.__fspath__(path)
+ except AttributeError:
+ if hasattr(path_type, '__fspath__'):
+ raise
+ try:
+ import pathlib
+ except ImportError:
+ pass
+ else:
+ if isinstance(path, pathlib.PurePath):
+ return six.text_type(path)
+
+ raise TypeError("expected str, bytes or os.PathLike object, not "
+ + path_type.__name__)
+
+# This is from python 3.5 stdlib (hence lacks PEP 519 changes)
+# This was introduced into python 3.2, so python < 3.2 does not have this
+# Effectively, this is only required for python 2.6 and 2.7, and can be removed
+# once support for them is dropped
+def _fscodec():
+ encoding = sys.getfilesystemencoding()
+ if encoding == 'mbcs':
+ errors = 'strict'
+ else:
+ try:
+ from codecs import lookup_error
+ lookup_error('surrogateescape')
+ except LookupError:
+ errors = 'strict'
+ else:
+ errors = 'surrogateescape'
+
+ def fsencode(filename):
+ """
+ Encode filename to the filesystem encoding with 'surrogateescape' error
+ handler, return bytes unchanged. On Windows, use 'strict' error handler if
+ the file system encoding is 'mbcs' (which is the default encoding).
+ """
+ if isinstance(filename, six.binary_type):
+ return filename
+ elif isinstance(filename, six.text_type):
+ return filename.encode(encoding, errors)
+ else:
+ raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
+
+ def fsdecode(filename):
+ """
+ Decode filename from the filesystem encoding with 'surrogateescape' error
+ handler, return str unchanged. On Windows, use 'strict' error handler if
+ the file system encoding is 'mbcs' (which is the default encoding).
+ """
+ if isinstance(filename, six.text_type):
+ return filename
+ elif isinstance(filename, six.binary_type):
+ return filename.decode(encoding, errors)
+ else:
+ raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
+
+ return fsencode, fsdecode
+
+_fsencode, _fsdecode = _fscodec()
+del _fscodec
+
+try:
+ from os import fsencode
+except ImportError:
+ fsencode = _fsencode
+
+try:
+ from os import fsdecode
+except ImportError:
+ fsdecode = _fsdecode
diff --git a/h5py/_hl/dataset.py b/h5py/_hl/dataset.py
index ab257ba..ee15759 100644
--- a/h5py/_hl/dataset.py
+++ b/h5py/_hl/dataset.py
@@ -22,7 +22,7 @@ from six.moves import xrange # pylint: disable=redefined-builtin
import numpy
from .. import h5, h5s, h5t, h5r, h5d, h5p, h5fd
-from .base import HLObject, phil, with_phil
+from .base import HLObject, phil, with_phil, Empty, is_empty_dataspace
from . import filters
from . import selections as sel
from . import selections2 as sel2
@@ -57,14 +57,16 @@ def make_new_dset(parent, shape=None, dtype=None, data=None,
"""
# Convert data to a C-contiguous ndarray
- if data is not None:
+ if data is not None and not isinstance(data, Empty):
from . import base
data = numpy.asarray(data, order="C", dtype=base.guess_dtype(data))
# Validate shape
if shape is None:
if data is None:
- raise TypeError("Either data or shape must be specified")
+ if dtype is None:
+ raise TypeError("One of data, shape or dtype must be specified")
+ data = Empty(dtype)
shape = data.shape
else:
shape = tuple(shape)
@@ -73,7 +75,9 @@ def make_new_dset(parent, shape=None, dtype=None, data=None,
tmp_shape = maxshape if maxshape is not None else shape
# Validate chunk shape
- if isinstance(chunks, tuple) and (-numpy.array([ i>=j for i,j in zip(tmp_shape,chunks) if i is not None])).any():
+ if isinstance(chunks, tuple) and any(
+ chunk > dim for dim, chunk in zip(tmp_shape,chunks) if dim is not None
+ ):
errmsg = "Chunk shape must not be greater than data shape in any dimension. "\
"{} is not compatible with {}".format(chunks, shape)
raise ValueError(errmsg)
@@ -123,12 +127,16 @@ def make_new_dset(parent, shape=None, dtype=None, data=None,
if maxshape is not None:
maxshape = tuple(m if m is not None else h5s.UNLIMITED for m in maxshape)
- sid = h5s.create_simple(shape, maxshape)
+
+ if isinstance(data, Empty):
+ sid = h5s.create(h5s.NULL)
+ else:
+ sid = h5s.create_simple(shape, maxshape)
dset_id = h5d.create(parent.id, None, tid, sid, dcpl=dcpl)
- if data is not None:
+ if (data is not None) and (not isinstance(data, Empty)):
dset_id.write(h5s.ALL, h5s.ALL, data)
return dset_id
@@ -399,11 +407,15 @@ class Dataset(HLObject):
* Boolean "mask" array indexing
"""
args = args if isinstance(args, tuple) else (args,)
+ if is_empty_dataspace(self.id):
+ if not (args == tuple() or args == (Ellipsis,)):
+ raise ValueError("Empty datasets cannot be sliced")
+ return Empty(self.dtype)
# Sort field indices from the rest of the args.
names = tuple(x for x in args if isinstance(x, six.string_types))
args = tuple(x for x in args if not isinstance(x, six.string_types))
- if not six.PY3:
+ if six.PY2:
names = tuple(x.encode('utf-8') if isinstance(x, six.text_type) else x for x in names)
new_dtype = getattr(self._local, 'astype', None)
@@ -504,7 +516,7 @@ class Dataset(HLObject):
# Sort field indices from the slicing
names = tuple(x for x in args if isinstance(x, six.string_types))
args = tuple(x for x in args if not isinstance(x, six.string_types))
- if not six.PY3:
+ if six.PY2:
names = tuple(x.encode('utf-8') if isinstance(x, six.text_type) else x for x in names)
# Generally we try to avoid converting the arrays on the Python
@@ -626,6 +638,8 @@ class Dataset(HLObject):
Broadcasting is supported for simple indexing.
"""
with phil:
+ if is_empty_dataspace(self.id):
+ raise TypeError("Empty datasets have no numpy representation")
if source_sel is None:
source_sel = sel.SimpleSelection(self.shape)
else:
@@ -649,6 +663,8 @@ class Dataset(HLObject):
Broadcasting is supported for simple indexing.
"""
with phil:
+ if is_empty_dataspace(self.id):
+ raise TypeError("Empty datasets cannot be written to")
if source_sel is None:
source_sel = sel.SimpleSelection(source.shape)
else:
@@ -691,9 +707,9 @@ class Dataset(HLObject):
name if name != six.u('') else six.u('/'))
r = six.u('<HDF5 dataset %s: shape %s, type "%s">') % \
(namestr, self.shape, self.dtype.str)
- if six.PY3:
- return r
- return r.encode('utf8')
+ if six.PY2:
+ return r.encode('utf8')
+ return r
if hasattr(h5d.DatasetID, "refresh"):
@with_phil
diff --git a/h5py/_hl/files.py b/h5py/_hl/files.py
index 8dd2c7e..55b7aec 100644
--- a/h5py/_hl/files.py
+++ b/h5py/_hl/files.py
@@ -16,6 +16,10 @@ from __future__ import absolute_import
import sys
import os
+from .compat import fspath
+from .compat import fsencode
+from .compat import fsdecode
+
import six
from .base import phil, with_phil
@@ -50,6 +54,11 @@ def make_fapl(driver, libver, **kwds):
plist.set_libver_bounds(low, high)
if driver is None or (driver == 'windows' and sys.platform == 'win32'):
+ # Prevent swallowing unused key arguments
+ if kwds:
+ msg = "'{key}' is an invalid keyword argument for this function" \
+ .format(key=next(iter(kwds)))
+ raise TypeError(msg)
return plist
if driver == 'sec2':
@@ -149,11 +158,7 @@ class File(Group):
@with_phil
def filename(self):
"""File name on disk"""
- name = h5f.get_name(self.fid)
- try:
- return name.decode(sys.getfilesystemencoding())
- except (UnicodeError, LookupError):
- return name
+ return fsdecode(h5f.get_name(self.fid))
@property
@with_phil
@@ -198,7 +203,7 @@ class File(Group):
@property
@with_phil
def atomic(self):
- """ Set/get MPI-IO atomic mode
+ """ Set/get MPI-IO atomic mode
"""
return self.id.get_mpi_atomicity()
@@ -207,13 +212,13 @@ class File(Group):
def atomic(self, value):
# pylint: disable=missing-docstring
self.id.set_mpi_atomicity(value)
-
+
if swmr_support:
@property
def swmr_mode(self):
""" Controls single-writer multiple-reader mode """
return self._swmr_mode
-
+
@swmr_mode.setter
@with_phil
def swmr_mode(self, value):
@@ -255,50 +260,44 @@ class File(Group):
"""
if swmr and not swmr_support:
raise ValueError("The SWMR feature is not available in this version of the HDF5 library")
-
+
with phil:
if isinstance(name, _objects.ObjectID):
fid = h5i.get_file_id(name)
else:
- try:
- # If the byte string doesn't match the default
- # encoding, just pass it on as-is. Note Unicode
- # objects can always be encoded.
- name = name.encode(sys.getfilesystemencoding())
- except (UnicodeError, LookupError):
- pass
+ name = fsencode(fspath(name))
fapl = make_fapl(driver, libver, **kwds)
fid = make_fid(name, mode, userblock_size, fapl, swmr=swmr)
-
+
if swmr_support:
self._swmr_mode = False
if swmr and mode == 'r':
- self._swmr_mode = True
-
+ self._swmr_mode = True
+
Group.__init__(self, fid)
def close(self):
""" Close the file. All open objects become invalid """
with phil:
# We have to explicitly murder all open objects related to the file
-
+
# Close file-resident objects first, then the files.
# Otherwise we get errors in MPI mode.
id_list = h5f.get_obj_ids(self.id, ~h5f.OBJ_FILE)
file_list = h5f.get_obj_ids(self.id, h5f.OBJ_FILE)
-
+
id_list = [x for x in id_list if h5i.get_file_id(x).id == self.id.id]
file_list = [x for x in file_list if h5i.get_file_id(x).id == self.id.id]
-
+
for id_ in id_list:
while id_.valid:
h5i.dec_ref(id_)
-
+
for id_ in file_list:
while id_.valid:
h5i.dec_ref(id_)
-
+
self.id.close()
_objects.nonlocal_close()
@@ -330,6 +329,6 @@ class File(Group):
r = six.u('<HDF5 file "%s" (mode %s)>') % (os.path.basename(filename),
self.mode)
- if six.PY3:
- return r
- return r.encode('utf8')
+ if six.PY2:
+ return r.encode('utf8')
+ return r
diff --git a/h5py/_hl/group.py b/h5py/_hl/group.py
index e59246c..f919ef0 100644
--- a/h5py/_hl/group.py
+++ b/h5py/_hl/group.py
@@ -16,7 +16,10 @@ from __future__ import absolute_import
import posixpath as pp
import six
import numpy
-import sys
+
+from .compat import fsdecode
+from .compat import fsencode
+from .compat import fspath
from .. import h5g, h5i, h5o, h5r, h5t, h5l, h5p
from . import base
@@ -234,11 +237,7 @@ class Group(HLObject, MutableMappingHDF5):
if getclass:
return ExternalLink
filebytes, linkbytes = self.id.links.get_val(self._e(name))
- try:
- filetext = filebytes.decode(sys.getfilesystemencoding())
- except (UnicodeError, LookupError):
- filetext = filebytes
- return ExternalLink(filetext, self._d(linkbytes))
+ return ExternalLink(fsdecode(filebytes), self._d(linkbytes))
elif typecode == h5l.TYPE_HARD:
return HardLink if getclass else HardLink()
@@ -281,7 +280,7 @@ class Group(HLObject, MutableMappingHDF5):
lcpl=lcpl, lapl=self._lapl)
elif isinstance(obj, ExternalLink):
- self.id.links.create_external(name, self._e(obj.filename),
+ self.id.links.create_external(name, fsencode(obj.filename),
self._e(obj.path), lcpl=lcpl, lapl=self._lapl)
elif isinstance(obj, numpy.dtype):
@@ -473,9 +472,9 @@ class Group(HLObject, MutableMappingHDF5):
) if self.name is not None else six.u("(anonymous)")
r = six.u('<HDF5 group %s (%d members)>') % (namestr, len(self))
- if six.PY3:
- return r
- return r.encode('utf8')
+ if six.PY2:
+ return r.encode('utf8')
+ return r
class HardLink(object):
@@ -526,7 +525,7 @@ class ExternalLink(object):
return self._filename
def __init__(self, filename, path):
- self._filename = str(filename)
+ self._filename = fspath(filename)
self._path = str(path)
def __repr__(self):
diff --git a/h5py/_hl/selections.py b/h5py/_hl/selections.py
index 614b236..8b52e9a 100644
--- a/h5py/_hl/selections.py
+++ b/h5py/_hl/selections.py
@@ -83,6 +83,8 @@ def select(shape, args, dsid):
if not isinstance(a, slice) and a is not Ellipsis:
try:
int(a)
+ if isinstance(a, np.ndarray) and a.shape == (1,):
+ raise Exception()
except Exception:
sel = FancySelection(shape)
sel[args]
@@ -353,7 +355,9 @@ class FancySelection(Selection):
except TypeError:
pass
else:
- if sorted(arg) != list(arg):
+ list_arg = list(arg)
+ adjacent = zip(list_arg[:-1], list_arg[1:])
+ if any(fst >= snd for fst, snd in adjacent):
raise TypeError("Indexing elements must be in increasing order")
if len(sequenceargs) > 1:
diff --git a/h5py/_objects.pxd b/h5py/_objects.pxd
index ed7407e..46ced0d 100644
--- a/h5py/_objects.pxd
+++ b/h5py/_objects.pxd
@@ -15,9 +15,11 @@ cdef class ObjectID:
cdef readonly hid_t id
cdef public int locked # Cannot be closed, explicitly or auto
cdef object _hash
+ cdef size_t _pyid
# Convenience functions
cdef hid_t pdefault(ObjectID pid)
+cdef int is_h5py_obj_valid(ObjectID obj)
# Inheritance scheme (for top-level cimport and import statements):
#
diff --git a/h5py/_objects.pyx b/h5py/_objects.pyx
index 438572c..6f4f9fd 100644
--- a/h5py/_objects.pyx
+++ b/h5py/_objects.pyx
@@ -115,16 +115,13 @@ def nonlocal_close():
"""
cdef ObjectID obj
- for python_id, ref in registry.items():
-
+ # list() needed because the registry can be mutated concurrently
+ for python_id, ref in list(registry.items()):
obj = ref()
- # Object somehow died without being removed from the registry.
- # I think this is impossible, but let's make sure.
+ # Object died while walking the registry list, presumably because
+ # the cyclic GC kicked in.
if obj is None:
- warnings.warn("Found murdered identifier %d of kind %s HDF5 id %d" %
- (python_id, type(obj), obj.id), RuntimeWarning)
- del registry[python_id]
continue
# Locked objects are immortal, as they generally are provided by
@@ -160,41 +157,32 @@ cdef class ObjectID:
property valid:
def __get__(self):
-
- # Locked objects are always valid, regardless of obj.id
- if self.locked:
- return True
-
- # Former zombie object
- if self.id == 0:
- return False
-
- # Ask HDF5. Note that H5Iis_valid only works for "user"
- # identifiers, hence the above checks.
- with _phil:
- return H5Iis_valid(self.id)
+ return is_h5py_obj_valid(self)
def __cinit__(self, id_):
with _phil:
self.id = id_
self.locked = 0
+ self._pyid = id(self)
IF DEBUG_ID:
- print("CINIT - registering %d of kind %s HDF5 id %d" % (id(self), type(self), id_))
- registry[id(self)] = weakref.ref(self)
+ print("CINIT - registering %d of kind %s HDF5 id %d" % (self._pyid, type(self), self.id))
+ registry[self._pyid] = weakref.ref(self)
def __dealloc__(self):
with _phil:
IF DEBUG_ID:
- print("DEALLOC - unregistering %d of kind %s HDF5 id %d" % (id(self), type(self), self.id))
- try:
- # There's no reason to expect it, but in principle H5Idec_ref
- # could raise an exception.
- if self.valid and (not self.locked):
- H5Idec_ref(self.id)
- finally:
- del registry[id(self)]
+ print("DEALLOC - unregistering %d HDF5 id %d" % (self._pyid, self.id))
+ if is_h5py_obj_valid(self) and (not self.locked):
+ if H5Idec_ref(self.id) < 0:
+ warnings.warn(
+ "Reference counting issue with HDF5 id {}".format(
+ self.id
+ )
+ )
+ if self._pyid is not None:
+ del registry[self._pyid]
def _close(self):
@@ -202,14 +190,15 @@ cdef class ObjectID:
with _phil:
IF DEBUG_ID:
- print("CLOSE - %d of kind %s HDF5 id %d" % (id(self), type(self), self.id))
- try:
- # There's no reason to expect it, but in principle H5Idec_ref
- # could raise an exception.
- if self.valid and (not self.locked):
- H5Idec_ref(self.id)
- finally:
- self.id = 0
+ print("CLOSE - %d HDF5 id %d" % (self._pyid, self.id))
+ if is_h5py_obj_valid(self) and (not self.locked):
+ if H5Idec_ref(self.id) < 0:
+ warnings.warn(
+ "Reference counting issue with HDF5 id {}".format(
+ self.id
+ )
+ )
+ self.id = 0
def close(self):
@@ -285,3 +274,25 @@ cdef hid_t pdefault(ObjectID pid):
if pid is None:
return <hid_t>H5P_DEFAULT
return pid.id
+
+
+cdef int is_h5py_obj_valid(ObjectID obj):
+ """
+ Check that h5py object is valid, i.e. HDF5 object wrapper is valid and HDF5
+ object is valid
+ """
+ # MUST BE CALLABLE AT ANY TIME, CANNOT USE PROPERTIES ETC. AS PER
+ # http://cython.readthedocs.io/en/latest/src/userguide/special_methods.html
+
+ # Locked objects are always valid, regardless of obj.id
+ if obj.locked:
+ return True
+
+ # Former zombie object
+ if obj.id == 0:
+ return False
+
+ # Ask HDF5. Note that H5Iis_valid only works for "user"
+ # identifiers, hence the above checks.
+ with _phil:
+ return H5Iis_valid(obj.id)
diff --git a/h5py/api_compat.h b/h5py/api_compat.h
index 96a2da7..52917f4 100644
--- a/h5py/api_compat.h
+++ b/h5py/api_compat.h
@@ -1,5 +1,5 @@
/***** Preamble block *********************************************************
-*
+*
* This file is part of h5py, a Python interface to the HDF5 library.
*
* http://www.h5py.org
@@ -8,7 +8,7 @@
*
* License: Standard 3-clause BSD; see "license.txt" for full license terms
* and contributor agreement.
-*
+*
****** End preamble block ****************************************************/
/* Contains compatibility macros and definitions for use by Cython code */
@@ -31,10 +31,18 @@ typedef void *PyMPI_MPI_Message;
#define h5py_size_n64 (sizeof(npy_complex64))
#define h5py_size_n128 (sizeof(npy_complex128))
+#ifdef NPY_COMPLEX256
+#define h5py_size_n256 (sizeof(npy_complex256))
+#endif
+
#define h5py_offset_n64_real (HOFFSET(npy_complex64, real))
#define h5py_offset_n64_imag (HOFFSET(npy_complex64, imag))
#define h5py_offset_n128_real (HOFFSET(npy_complex128, real))
#define h5py_offset_n128_imag (HOFFSET(npy_complex128, imag))
+#ifdef NPY_COMPLEX256
+#define h5py_offset_n256_real (HOFFSET(npy_complex256, real))
+#define h5py_offset_n256_imag (HOFFSET(npy_complex256, imag))
#endif
+#endif
diff --git a/h5py/api_functions.txt b/h5py/api_functions.txt
index 06e8eea..23fafa3 100644
--- a/h5py/api_functions.txt
+++ b/h5py/api_functions.txt
@@ -111,6 +111,9 @@ hdf5:
1.9.178 herr_t H5Dflush(hid_t dataset_id)
1.9.178 herr_t H5Drefresh(hid_t dataset_id)
+ # Direct Chunk Writing
+ 1.8.11 herr_t H5DOwrite_chunk(hid_t dset_id, hid_t dxpl_id, uint32_t filters, const hsize_t *offset, size_t data_size, const void *buf)
+
# === H5E - Minimal error-handling interface ================================
@@ -145,6 +148,9 @@ hdf5:
herr_t H5Freset_mdc_hit_rate_stats(hid_t file_id)
herr_t H5Fset_mdc_config(hid_t file_id, H5AC_cache_config_t *config_ptr)
+ # File Image Operations
+ 1.8.9 ssize_t H5Fget_file_image(hid_t file_id, void *buf_ptr, size_t buf_len)
+
# MPI functions
MPI 1.8.9 herr_t H5Fset_mpi_atomicity(hid_t file_id, hbool_t flag)
MPI 1.8.9 herr_t H5Fget_mpi_atomicity(hid_t file_id, hbool_t *flag)
@@ -283,6 +289,7 @@ hdf5:
hid_t H5Pget_driver(hid_t fapl_id)
herr_t H5Pget_mdc_config(hid_t plist_id, H5AC_cache_config_t *config_ptr)
herr_t H5Pset_mdc_config(hid_t plist_id, H5AC_cache_config_t *config_ptr)
+ 1.8.9 herr_t H5Pset_file_image(hid_t plist_id, void *buf_ptr, size_t buf_len)
# Dataset creation
herr_t H5Pset_layout(hid_t plist, int layout)
diff --git a/h5py/api_types_ext.pxd b/h5py/api_types_ext.pxd
index b9b7b55..144b75d 100644
--- a/h5py/api_types_ext.pxd
+++ b/h5py/api_types_ext.pxd
@@ -46,7 +46,7 @@ cdef extern from "stdint.h":
ctypedef signed long int int32_t
ctypedef unsigned long int uint32_t
ctypedef signed long long int int64_t
- ctypedef signed long long int uint64_t
+ ctypedef unsigned long long int uint64_t
# Can't use Cython defs because they keep moving them around
cdef extern from "Python.h":
@@ -68,6 +68,11 @@ cdef extern from "api_compat.h":
size_t h5py_offset_n128_real
size_t h5py_offset_n128_imag
+ IF COMPLEX256_SUPPORT:
+ size_t h5py_size_n256
+ size_t h5py_offset_n256_real
+ size_t h5py_offset_n256_imag
+
cdef extern from "lzf_filter.h":
int H5PY_FILTER_LZF
diff --git a/h5py/api_types_hdf5.pxd b/h5py/api_types_hdf5.pxd
index a21b682..5149c1b 100644
--- a/h5py/api_types_hdf5.pxd
+++ b/h5py/api_types_hdf5.pxd
@@ -532,80 +532,74 @@ cdef extern from "hdf5.h":
# --- Predefined datatypes --------------------------------------------------
- cdef enum:
- H5T_NATIVE_B8
- H5T_NATIVE_CHAR
- H5T_NATIVE_SCHAR
- H5T_NATIVE_UCHAR
- H5T_NATIVE_SHORT
- H5T_NATIVE_USHORT
- H5T_NATIVE_INT
- H5T_NATIVE_UINT
- H5T_NATIVE_LONG
- H5T_NATIVE_ULONG
- H5T_NATIVE_LLONG
- H5T_NATIVE_ULLONG
- H5T_NATIVE_FLOAT
- H5T_NATIVE_DOUBLE
- H5T_NATIVE_LDOUBLE
+ cdef hid_t H5T_NATIVE_B8
+ cdef hid_t H5T_NATIVE_CHAR
+ cdef hid_t H5T_NATIVE_SCHAR
+ cdef hid_t H5T_NATIVE_UCHAR
+ cdef hid_t H5T_NATIVE_SHORT
+ cdef hid_t H5T_NATIVE_USHORT
+ cdef hid_t H5T_NATIVE_INT
+ cdef hid_t H5T_NATIVE_UINT
+ cdef hid_t H5T_NATIVE_LONG
+ cdef hid_t H5T_NATIVE_ULONG
+ cdef hid_t H5T_NATIVE_LLONG
+ cdef hid_t H5T_NATIVE_ULLONG
+ cdef hid_t H5T_NATIVE_FLOAT
+ cdef hid_t H5T_NATIVE_DOUBLE
+ cdef hid_t H5T_NATIVE_LDOUBLE
# "Standard" types
- cdef enum:
- H5T_STD_I8LE
- H5T_STD_I16LE
- H5T_STD_I32LE
- H5T_STD_I64LE
- H5T_STD_U8LE
- H5T_STD_U16LE
- H5T_STD_U32LE
- H5T_STD_U64LE
- H5T_STD_B8LE
- H5T_STD_B16LE
- H5T_STD_B32LE
- H5T_STD_B64LE
- H5T_IEEE_F32LE
- H5T_IEEE_F64LE
- H5T_STD_I8BE
- H5T_STD_I16BE
- H5T_STD_I32BE
- H5T_STD_I64BE
- H5T_STD_U8BE
- H5T_STD_U16BE
- H5T_STD_U32BE
- H5T_STD_U64BE
- H5T_STD_B8BE
- H5T_STD_B16BE
- H5T_STD_B32BE
- H5T_STD_B64BE
- H5T_IEEE_F32BE
- H5T_IEEE_F64BE
-
- cdef enum:
- H5T_NATIVE_INT8
- H5T_NATIVE_UINT8
- H5T_NATIVE_INT16
- H5T_NATIVE_UINT16
- H5T_NATIVE_INT32
- H5T_NATIVE_UINT32
- H5T_NATIVE_INT64
- H5T_NATIVE_UINT64
+ cdef hid_t H5T_STD_I8LE
+ cdef hid_t H5T_STD_I16LE
+ cdef hid_t H5T_STD_I32LE
+ cdef hid_t H5T_STD_I64LE
+ cdef hid_t H5T_STD_U8LE
+ cdef hid_t H5T_STD_U16LE
+ cdef hid_t H5T_STD_U32LE
+ cdef hid_t H5T_STD_U64LE
+ cdef hid_t H5T_STD_B8LE
+ cdef hid_t H5T_STD_B16LE
+ cdef hid_t H5T_STD_B32LE
+ cdef hid_t H5T_STD_B64LE
+ cdef hid_t H5T_IEEE_F32LE
+ cdef hid_t H5T_IEEE_F64LE
+ cdef hid_t H5T_STD_I8BE
+ cdef hid_t H5T_STD_I16BE
+ cdef hid_t H5T_STD_I32BE
+ cdef hid_t H5T_STD_I64BE
+ cdef hid_t H5T_STD_U8BE
+ cdef hid_t H5T_STD_U16BE
+ cdef hid_t H5T_STD_U32BE
+ cdef hid_t H5T_STD_U64BE
+ cdef hid_t H5T_STD_B8BE
+ cdef hid_t H5T_STD_B16BE
+ cdef hid_t H5T_STD_B32BE
+ cdef hid_t H5T_STD_B64BE
+ cdef hid_t H5T_IEEE_F32BE
+ cdef hid_t H5T_IEEE_F64BE
+
+ cdef hid_t H5T_NATIVE_INT8
+ cdef hid_t H5T_NATIVE_UINT8
+ cdef hid_t H5T_NATIVE_INT16
+ cdef hid_t H5T_NATIVE_UINT16
+ cdef hid_t H5T_NATIVE_INT32
+ cdef hid_t H5T_NATIVE_UINT32
+ cdef hid_t H5T_NATIVE_INT64
+ cdef hid_t H5T_NATIVE_UINT64
# Unix time types
- cdef enum:
- H5T_UNIX_D32LE
- H5T_UNIX_D64LE
- H5T_UNIX_D32BE
- H5T_UNIX_D64BE
+ cdef hid_t H5T_UNIX_D32LE
+ cdef hid_t H5T_UNIX_D64LE
+ cdef hid_t H5T_UNIX_D32BE
+ cdef hid_t H5T_UNIX_D64BE
# String types
- cdef enum:
- H5T_FORTRAN_S1
- H5T_C_S1
+ cdef hid_t H5T_FORTRAN_S1
+ cdef hid_t H5T_C_S1
# References
- cdef enum:
- H5T_STD_REF_OBJ
- H5T_STD_REF_DSETREG
+ cdef hid_t H5T_STD_REF_OBJ
+ cdef hid_t H5T_STD_REF_DSETREG
# Type-conversion infrastructure
diff --git a/h5py/h5d.pyx b/h5py/h5d.pyx
index 5be3279..fe76e2e 100644
--- a/h5py/h5d.pyx
+++ b/h5py/h5d.pyx
@@ -395,3 +395,38 @@ cdef class DatasetID(ObjectID):
"""
H5Drefresh(self.id)
+
+ IF HDF5_VERSION >= (1, 8, 11):
+
+ def write_direct_chunk(self, offsets, bytes data, H5Z_filter_t filter_mask=H5Z_FILTER_NONE, PropID dxpl=None):
+ """ (offsets, bytes data, H5Z_filter_t filter_mask=H5Z_FILTER_NONE, PropID dxpl=None)
+
+ Writes data from a bytes array (as provided e.g. by struct.pack) directly
+ to a chunk at position specified by the offsets argument.
+
+ Feature requires: 1.8.11 HDF5
+ """
+
+ cdef hid_t dset_id
+ cdef hid_t dxpl_id
+ cdef hid_t space_id = 0
+ cdef hsize_t *offset = NULL
+ cdef size_t data_size
+ cdef int rank
+
+ dset_id = self.id
+ dxpl_id = pdefault(dxpl)
+ space_id = H5Dget_space(self.id)
+ rank = H5Sget_simple_extent_ndims(space_id)
+
+ if len(offsets) != rank:
+ raise TypeError("offset length (%d) must match dataset rank (%d)" % (len(offsets), rank))
+
+ try:
+ offset = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
+ convert_tuple(offsets, offset, rank)
+ H5DOwrite_chunk(dset_id, dxpl_id, filter_mask, offset, len(data), <char *> data)
+ finally:
+ efree(offset)
+ if space_id:
+ H5Sclose(space_id)
diff --git a/h5py/h5f.pyx b/h5py/h5f.pyx
index f01d64f..a7e4a71 100644
--- a/h5py/h5f.pyx
+++ b/h5py/h5f.pyx
@@ -25,6 +25,8 @@ from h5py import _objects
from ._objects import phil, with_phil
import h5fd
+from cpython.bytes cimport PyBytes_FromStringAndSize, PyBytes_AsString
+
# Initialization
# === Public constants and data structures ====================================
@@ -362,6 +364,25 @@ cdef class FileID(GroupID):
H5Fget_vfd_handle(self.id, H5Fget_access_plist(self.id), <void**>&handle)
return handle[0]
+ IF HDF5_VERSION >= (1, 8, 9):
+
+ @with_phil
+ def get_file_image(self):
+ """ () => BYTES
+
+ Retrieves a copy of the image of an existing, open file.
+
+ Feature requires: 1.8.9
+ """
+
+ cdef ssize_t size
+
+ size = H5Fget_file_image(self.id, NULL, 0)
+ image = PyBytes_FromStringAndSize(NULL, size)
+
+ H5Fget_file_image(self.id, PyBytes_AsString(image), size)
+
+ return image
IF MPI and HDF5_VERSION >= (1, 8, 9):
diff --git a/h5py/h5p.pyx b/h5py/h5p.pyx
index b8b0f2b..6878420 100644
--- a/h5py/h5p.pyx
+++ b/h5py/h5p.pyx
@@ -14,6 +14,10 @@
include "config.pxi"
# Compile-time imports
+from cpython.buffer cimport PyObject_CheckBuffer, \
+ PyObject_GetBuffer, PyBuffer_Release, \
+ PyBUF_SIMPLE
+
from utils cimport require_tuple, convert_dims, convert_tuple, \
emalloc, efree, \
check_numpy_write, check_numpy_read
@@ -1161,6 +1165,32 @@ cdef class PropFAID(PropInstanceID):
"""
H5Pset_alignment(self.id, threshold, alignment)
+ IF HDF5_VERSION >= (1, 8, 9):
+
+ @with_phil
+ def set_file_image(self, image):
+ """
+ Copy a file image into the property list. Passing None releases
+ any image currently loaded. The parameter image must either be
+ None or support the buffer protocol.
+ """
+
+ cdef Py_buffer buf
+
+ if image is None:
+ H5Pset_file_image(self.id, NULL, 0)
+ return
+
+ if not PyObject_CheckBuffer(image):
+ raise TypeError("image must support the buffer protocol")
+
+ PyObject_GetBuffer(image, &buf, PyBUF_SIMPLE)
+
+ try:
+ H5Pset_file_image(self.id, buf.buf, buf.len)
+ finally:
+ PyBuffer_Release(&buf)
+
# Link creation
cdef class PropLCID(PropCreateID):
diff --git a/h5py/h5s.pyx b/h5py/h5s.pyx
index 5ff9e5b..9c6346f 100644
--- a/h5py/h5s.pyx
+++ b/h5py/h5s.pyx
@@ -249,6 +249,9 @@ cdef class SpaceID(ObjectID):
cdef int rank
cdef hsize_t* dims = NULL
+ if self.get_simple_extent_type() == H5S_NULL:
+ return None
+
rank = H5Sget_simple_extent_dims(self.id, NULL, NULL)
dims = <hsize_t*>emalloc(sizeof(hsize_t)*rank)
diff --git a/h5py/h5t.pxd b/h5py/h5t.pxd
index 13e9c8c..16ddd93 100644
--- a/h5py/h5t.pxd
+++ b/h5py/h5t.pxd
@@ -67,7 +67,7 @@ cdef class TypeCompoundID(TypeCompositeID):
cpdef TypeID typewrap(hid_t id_)
cdef hid_t H5PY_OBJ
-cpdef TypeID py_create(object dtype, bint logical=*)
+cpdef TypeID py_create(object dtype, bint logical=*, bint aligned=*)
diff --git a/h5py/h5t.pyx b/h5py/h5t.pyx
index 8931592..70c01db 100644
--- a/h5py/h5t.pyx
+++ b/h5py/h5t.pyx
@@ -16,6 +16,7 @@
"""
# Pyrex compile-time imports
+include "config.pxi"
from _objects cimport pdefault
from numpy cimport dtype, ndarray
@@ -26,6 +27,7 @@ from utils cimport emalloc, efree, \
# Runtime imports
import sys
+import operator
from h5 import get_config
import numpy as np
from ._objects import phil, with_phil
@@ -174,6 +176,7 @@ NATIVE_INT64 = lockid(H5T_NATIVE_INT64)
NATIVE_UINT64 = lockid(H5T_NATIVE_UINT64)
NATIVE_FLOAT = lockid(H5T_NATIVE_FLOAT)
NATIVE_DOUBLE = lockid(H5T_NATIVE_DOUBLE)
+NATIVE_LDOUBLE = lockid(H5T_NATIVE_LDOUBLE)
# Unix time types
UNIX_D32LE = lockid(H5T_UNIX_D32LE)
@@ -216,6 +219,11 @@ PYTHON_OBJECT = lockid(H5PY_OBJ)
cdef dict _order_map = { H5T_ORDER_NONE: '|', H5T_ORDER_LE: '<', H5T_ORDER_BE: '>'}
cdef dict _sign_map = { H5T_SGN_NONE: 'u', H5T_SGN_2: 'i' }
+# Available floating point types
+available_ftypes = dict()
+for ftype in np.typeDict.values():
+ if np.issubdtype(ftype, float):
+ available_ftypes[np.dtype(ftype).itemsize] = np.finfo(ftype)
# === General datatype operations =============================================
@@ -941,19 +949,25 @@ cdef class TypeFloatID(TypeAtomicID):
cdef object py_dtype(self):
# Translation function for floating-point types
- size = self.get_size() # int giving number of bytes
order = _order_map[self.get_order()] # string with '<' or '>'
- if size == 2 and not hasattr(np, 'float16'):
- # This build doesn't have float16; promote to float32
- return dtype(order+"f4")
+ s_offset, e_offset, e_size, m_offset, m_size = self.get_fields()
+ e_bias = self.get_ebias()
+
+ # Handle non-standard exponent and mantissa sizes.
+ for size, finfo in sorted(available_ftypes.items()):
+ nmant = finfo.nmant
+ if nmant == 63 and finfo.nexp == 15:
+ # This is an 80-bit float, correct mantissa size
+ nmant += 1
+ if m_size <= nmant and (2**e_size - e_bias - 1) <= finfo.maxexp and (1 - e_bias) >= finfo.minexp:
+ break
+ else:
+ raise ValueError('Insufficient precision in available types to represent ' + str(self.get_fields()))
+
- if size > 8:
- # The native NumPy longdouble is used for 96 and 128-bit floats
- return dtype(order + "f" + str(np.longdouble(1).dtype.itemsize))
-
- return dtype( _order_map[self.get_order()] + "f" + \
- str(self.get_size()) )
+
+ return dtype(order + "f" + str(size) )
# === Composite types (enums and compound) ====================================
@@ -1006,7 +1020,6 @@ cdef class TypeCompositeID(TypeID):
"""
return H5Tget_member_index(self.id, name)
-
cdef class TypeCompoundID(TypeCompositeID):
"""
@@ -1106,7 +1119,17 @@ cdef class TypeCompoundID(TypeCompositeID):
else:
if sys.version[0] == '3':
field_names = [x.decode('utf8') for x in field_names]
- typeobj = dtype({'names': field_names, 'formats': field_types, 'offsets': field_offsets})
+ if len(field_names) > 0:
+ collated_fields = zip(field_names, field_types, field_offsets)
+ ordered_fields = sorted(
+ collated_fields, key=operator.itemgetter(2))
+ field_names, field_types, field_offsets = \
+ map(list, zip(*ordered_fields))
+ typeobj = dtype({
+ 'names': field_names,
+ 'formats': field_types,
+ 'offsets': field_offsets
+ })
return typeobj
@@ -1251,7 +1274,8 @@ cdef class TypeEnumID(TypeCompositeID):
cdef dict _float_le = {2: H5Tcopy(IEEE_F16LE.id), 4: H5Tcopy(H5T_IEEE_F32LE), 8: H5Tcopy(H5T_IEEE_F64LE)}
cdef dict _float_be = {2: H5Tcopy(IEEE_F16BE.id), 4: H5Tcopy(H5T_IEEE_F32BE), 8: H5Tcopy(H5T_IEEE_F64BE)}
-cdef dict _float_nt = _float_le if ORDER_NATIVE == H5T_ORDER_LE else _float_be
+cdef dict _float_nt = dict(_float_le) if ORDER_NATIVE == H5T_ORDER_LE else dict(_float_be)
+_float_nt[sizeof(long double)] = H5Tcopy(H5T_NATIVE_LDOUBLE)
cdef dict _int_le = {1: H5Tcopy(H5T_STD_I8LE), 2: H5Tcopy(H5T_STD_I16LE), 4: H5Tcopy(H5T_STD_I32LE), 8: H5Tcopy(H5T_STD_I64LE)}
cdef dict _int_be = {1: H5Tcopy(H5T_STD_I8BE), 2: H5Tcopy(H5T_STD_I16BE), 4: H5Tcopy(H5T_STD_I32BE), 8: H5Tcopy(H5T_STD_I64BE)}
@@ -1259,7 +1283,7 @@ cdef dict _int_nt = {1: H5Tcopy(H5T_NATIVE_INT8), 2: H5Tcopy(H5T_NATIVE_INT16),
cdef dict _uint_le = {1: H5Tcopy(H5T_STD_U8LE), 2: H5Tcopy(H5T_STD_U16LE), 4: H5Tcopy(H5T_STD_U32LE), 8: H5Tcopy(H5T_STD_U64LE)}
cdef dict _uint_be = {1: H5Tcopy(H5T_STD_U8BE), 2: H5Tcopy(H5T_STD_U16BE), 4: H5Tcopy(H5T_STD_U32BE), 8: H5Tcopy(H5T_STD_U64BE)}
-cdef dict _uint_nt = {1: H5Tcopy(H5T_NATIVE_UINT8), 2: H5Tcopy(H5T_NATIVE_UINT16), 4: H5Tcopy(H5T_NATIVE_UINT32), 8: H5Tcopy(H5T_NATIVE_UINT64)}
+cdef dict _uint_nt = {1: H5Tcopy(H5T_NATIVE_UINT8), 2: H5Tcopy(H5T_NATIVE_UINT16), 4: H5Tcopy(H5T_NATIVE_UINT32), 8: H5Tcopy(H5T_NATIVE_UINT64)}
cdef TypeFloatID _c_float(dtype dt):
# Floats (single and double)
@@ -1391,6 +1415,15 @@ cdef TypeCompoundID _c_complex(dtype dt):
tid_sub = H5T_IEEE_F64BE
else:
tid_sub = H5T_NATIVE_DOUBLE
+
+ elif length == 32:
+ IF COMPLEX256_SUPPORT:
+ size = h5py_size_n256
+ off_r = h5py_offset_n256_real
+ off_i = h5py_offset_n256_imag
+ tid_sub = H5T_NATIVE_LDOUBLE
+ ELSE:
+ raise TypeError("Illegal length %d for complex dtype" % length)
else:
raise TypeError("Illegal length %d for complex dtype" % length)
@@ -1400,28 +1433,61 @@ cdef TypeCompoundID _c_complex(dtype dt):
return TypeCompoundID(tid)
-cdef TypeCompoundID _c_compound(dtype dt, int logical):
+cdef TypeCompoundID _c_compound(dtype dt, int logical, int aligned):
# Compound datatypes
cdef hid_t tid
cdef TypeID type_tmp
cdef dtype dt_tmp
cdef size_t offset
+ cdef size_t offset_step = 0
- cdef dict fields = dt.fields
cdef tuple names = dt.names
-
- # Initial size MUST be 1 to avoid segfaults (issue 166)
- tid = H5Tcreate(H5T_COMPOUND, 1)
-
- offset = 0
- for name in names:
- ename = name.encode('utf8') if isinstance(name, unicode) else name
- dt_tmp = dt.fields[name][0]
- type_tmp = py_create(dt_tmp, logical=logical)
- H5Tset_size(tid, offset+type_tmp.get_size())
- H5Tinsert(tid, ename, offset, type_tmp.id)
- offset += type_tmp.get_size()
+ cdef dict fields = {}
+ cdef list offsets
+
+ # The challenge with correctly converting a numpy/h5py dtype to a HDF5 type
+ # which is composed of subtypes has three aspects we must consider
+ # 1. numpy/h5py dtypes do not always have the same size and HDF5, even when
+ # equivalent (can result in overlapping elements if not careful)
+ # 2. For correct round-tripping of aligned dtypes, we need to consider how
+ # much padding we need
+ # 3. There is no requirement that the offsets be monotonically increasing
+ #
+ # The code below tries to cover these aspects
+
+ for name, field in dt.fields.items():
+ dt_tmp = field[0]
+ offset = field[1]
+ fields[offset] = {
+ "name": name.encode('utf8') if isinstance(name, unicode) else name,
+ "dtype": dtype(dt_tmp),
+ "size": py_create(dt_tmp, logical=logical).get_size(),
+ }
+
+ offsets = list(sorted(fields))
+ # Set initial size to itemsize or last offset plus itemsize, whichever is
+ # bigger
+ tid = H5Tcreate(H5T_COMPOUND,
+ max(dt.itemsize, offsets[-1] + fields[offsets[-1]]["size"])
+ )
+
+ for i, offset in enumerate(offsets):
+ dt_tmp = fields[offset]["dtype"]
+ type_tmp = py_create(dt_tmp, logical=logical, aligned=aligned)
+ if aligned and type_tmp.get_size() > dt_tmp.itemsize:
+ raise TypeError("Enforced alignment not compatible with HDF5 type")
+ # Increase size if initial too small, which can happen if there are out
+ # of order fields (as determined by offsets)
+ if H5Tget_size(tid) < (offset + offset_step + type_tmp.get_size()):
+ H5Tset_size(tid, offset + offset_step + type_tmp.get_size())
+ H5Tinsert(tid, fields[offset]["name"], offset + offset_step, type_tmp.id)
+
+ if (i + 1 < len(offsets)) and fields[offset]["size"] > offsets[i + 1]:
+ if aligned:
+ raise TypeError("dtype results in overlapping fields")
+ else:
+ offset_step += fields[offset]["size"] - offsets[i + 1]
return TypeCompoundID(tid)
@@ -1447,7 +1513,7 @@ cdef TypeReferenceID _c_ref(object refclass):
raise TypeError("Unrecognized reference code")
-cpdef TypeID py_create(object dtype_in, bint logical=0):
+cpdef TypeID py_create(object dtype_in, bint logical=0, bint aligned=0):
"""(OBJECT dtype_in, BOOL logical=False) => TypeID
Given a Numpy dtype object, generate a byte-for-byte memory-compatible
@@ -1467,6 +1533,8 @@ cpdef TypeID py_create(object dtype_in, bint logical=0):
cdef dtype dt = dtype(dtype_in)
cdef char kind = dt.kind
+ aligned = getattr(dtype_in, "isalignedstruct", aligned)
+
with phil:
# Float
if kind == c'f':
@@ -1489,7 +1557,7 @@ cpdef TypeID py_create(object dtype_in, bint logical=0):
# Compound
elif kind == c'V' and dt.names is not None:
- return _c_compound(dt, logical)
+ return _c_compound(dt, logical, aligned)
# Array or opaque
elif kind == c'V':
diff --git a/h5py/ipy_completer.py b/h5py/ipy_completer.py
index fa098ef..d1e9b93 100644
--- a/h5py/ipy_completer.py
+++ b/h5py/ipy_completer.py
@@ -108,12 +108,16 @@ def h5py_item_completer(context, command):
return []
path, _ = posixpath.split(item)
- if path:
- items = (posixpath.join(path, name) for name in obj[path].iterkeys())
- else:
- items = obj.iterkeys()
- items = list(items)
+ try:
+ if path:
+ items = (posixpath.join(path, name) for name in obj[path].keys())
+ else:
+ items = obj.keys()
+ except AttributeError:
+ return []
+
+ items = list(items)
readline.set_completer_delims(' \t\n`!@#$^&*()=+[{]}\\|;:\'",<>?')
return [i for i in items if i[:len(item)] == item]
diff --git a/h5py/tests/common.py b/h5py/tests/common.py
index 583fd0c..1c2ad53 100644
--- a/h5py/tests/common.py
+++ b/h5py/tests/common.py
@@ -13,6 +13,7 @@ import sys
import os
import shutil
import tempfile
+from contextlib import contextmanager
from six import unichr
@@ -162,3 +163,22 @@ class TestCase(ut.TestCase):
else:
with self.assertRaises(exc):
dset[s]
+
+NUMPY_RELEASE_VERSION = tuple([int(i) for i in np.__version__.split(".")[0:2]])
+
+@contextmanager
+def closed_tempfile(suffix='', text=None):
+ """
+ Context manager which yields the path to a closed temporary file with the
+ suffix `suffix`. The file will be deleted on exiting the context. An
+ additional argument `text` can be provided to have the file contain `text`.
+ """
+ with tempfile.NamedTemporaryFile(
+ 'w+t', suffix=suffix, delete=False
+ ) as test_file:
+ file_name = test_file.name
+ if text is not None:
+ test_file.write(text)
+ test_file.flush()
+ yield file_name
+ shutil.rmtree(file_name, ignore_errors=True)
diff --git a/h5py/tests/hl/test_dataset_getitem.py b/h5py/tests/hl/test_dataset_getitem.py
index cf4b8e0..127e65e 100644
--- a/h5py/tests/hl/test_dataset_getitem.py
+++ b/h5py/tests/hl/test_dataset_getitem.py
@@ -57,6 +57,7 @@ class TestEmpty(TestCase):
tid.set_size(10)
dsid = h5py.h5d.create(self.f.id, b'x', tid, sid)
self.dset = h5py.Dataset(dsid)
+ self.empty_obj = h5py.Empty(np.dtype("S10"))
def test_ndim(self):
""" Verify number of dimensions """
@@ -64,17 +65,15 @@ class TestEmpty(TestCase):
def test_shape(self):
""" Verify shape """
- self.assertEquals(self.dset.shape, tuple())
+ self.assertEquals(self.dset.shape, None)
def test_ellipsis(self):
- """ Ellipsis -> IOError """
- with self.assertRaises(IOError):
- out = self.dset[...]
+ """ Ellipsis -> ValueError """
+ self.assertEquals(self.dset[...], self.empty_obj)
def test_tuple(self):
""" () -> IOError """
- with self.assertRaises(IOError):
- out = self.dset[()]
+ self.assertEquals(self.dset[()], self.empty_obj)
def test_slice(self):
""" slice -> ValueError """
@@ -396,6 +395,15 @@ class Test1DFloat(TestCase):
def test_indexlist_simple(self):
self.assertNumpyBehavior(self.dset, self.data, np.s_[[1,2,5]])
+
+ def test_indexlist_single_index_ellipsis(self):
+ self.assertNumpyBehavior(self.dset, self.data, np.s_[[0], ...])
+
+ def test_indexlist_numpyarray_single_index_ellipsis(self):
+ self.assertNumpyBehavior(self.dset, self.data, np.s_[np.array([0]), ...])
+
+ def test_indexlist_numpyarray_ellipsis(self):
+ self.assertNumpyBehavior(self.dset, self.data, np.s_[np.array([1, 2, 5]), ...])
# Another UnboundLocalError
@ut.expectedFailure
@@ -412,9 +420,6 @@ class Test1DFloat(TestCase):
with self.assertRaises(TypeError):
self.dset[[1,3,2]]
- # This results in IOError as the argument is not properly validated.
- # Suggest IndexError be raised.
- @ut.expectedFailure
def test_indexlist_repeated(self):
""" we forbid repeated index values """
with self.assertRaises(TypeError):
diff --git a/h5py/tests/hl/test_datatype.py b/h5py/tests/hl/test_datatype.py
index e7b5264..d8a9bea 100644
--- a/h5py/tests/hl/test_datatype.py
+++ b/h5py/tests/hl/test_datatype.py
@@ -43,3 +43,55 @@ class TestVlen(TestCase):
self.assertEqual(arr1, arr2)
self.assertEqual(h5py.check_dtype(enum=h5py.check_dtype(vlen=dt1)),
h5py.check_dtype(enum=h5py.check_dtype(vlen=dt2)))
+
+
+class TestOffsets(TestCase):
+ """
+ Check that compound members with aligned or manual offsets are handled
+ correctly.
+ """
+
+ def test_aligned_offsets(self):
+ dt = np.dtype('i2,i8', align=True)
+ ht = h5py.h5t.py_create(dt)
+ self.assertEqual(dt.itemsize, ht.get_size())
+ self.assertEqual(
+ [dt.fields[i][1] for i in dt.names],
+ [ht.get_member_offset(i) for i in range(ht.get_nmembers())]
+ )
+
+
+ def test_aligned_data(self):
+ dt = np.dtype('i2,f8', align=True)
+ data = np.empty(10, dtype=dt)
+
+ data['f0'] = np.array(np.random.randint(-100, 100, size=data.size), dtype='i2')
+ data['f1'] = np.random.rand(data.size)
+
+ fname = self.mktemp()
+
+ with h5py.File(fname, 'w') as f:
+ f['data'] = data
+
+ with h5py.File(fname, 'r') as f:
+ self.assertArrayEqual(f['data'], data)
+
+
+ def test_out_of_order_offsets(self):
+ dt = np.dtype({
+ 'names' : ['f1', 'f2', 'f3'],
+ 'formats' : ['<f4', '<i4', '<f8'],
+ 'offsets' : [0, 16, 8]
+ })
+ data = np.empty(10, dtype=dt)
+ data['f1'] = np.random.rand(data.size)
+ data['f2'] = np.random.random_integers(-10, 10, data.size)
+ data['f3'] = np.random.rand(data.size)*-1
+
+ fname = self.mktemp()
+
+ with h5py.File(fname, 'w') as fd:
+ fd.create_dataset('data', data=data)
+
+ with h5py.File(fname, 'r') as fd:
+ self.assertArrayEqual(fd['data'], data)
diff --git a/h5py/tests/old/__init__.py b/h5py/tests/old/__init__.py
index 379d316..d88e658 100644
--- a/h5py/tests/old/__init__.py
+++ b/h5py/tests/old/__init__.py
@@ -8,6 +8,7 @@ from . import ( test_attrs,
test_datatype,
test_dimension_scales,
test_file,
+ test_file_image,
test_group,
test_h5,
test_h5f,
@@ -16,7 +17,7 @@ from . import ( test_attrs,
test_objects,
test_selections,
test_slicing )
-
+
MODULES = ( test_attrs,
test_attrs_data,
test_base,
@@ -24,6 +25,7 @@ MODULES = ( test_attrs,
test_datatype,
test_dimension_scales,
test_file,
+ test_file_image,
test_group,
test_h5,
test_h5f,
diff --git a/h5py/tests/old/test_attrs_data.py b/h5py/tests/old/test_attrs_data.py
index 1c92d77..5330e18 100644
--- a/h5py/tests/old/test_attrs_data.py
+++ b/h5py/tests/old/test_attrs_data.py
@@ -24,6 +24,7 @@ from .common import TestCase, ut
import h5py
from h5py import h5a, h5s, h5t
from h5py.highlevel import File
+from h5py._hl.base import is_empty_dataspace
class BaseAttrs(TestCase):
@@ -197,31 +198,45 @@ class TestEmpty(BaseAttrs):
tid = h5t.C_S1.copy()
tid.set_size(10)
aid = h5a.create(self.f.id, b'x', tid, sid)
+ self.empty_obj = h5py.Empty(np.dtype("S10"))
def test_read(self):
- with self.assertRaises(IOError):
- self.f.attrs['x']
+ self.assertEqual(
+ self.empty_obj, self.f.attrs['x']
+ )
+
+ def test_write(self):
+ self.f.attrs["y"] = self.empty_obj
+ self.assertTrue(is_empty_dataspace(h5a.open(self.f.id, b'y')))
def test_modify(self):
with self.assertRaises(IOError):
self.f.attrs.modify('x', 1)
def test_values(self):
- with self.assertRaises(IOError):
- # list() is for Py3 where these are iterators
- list(self.f.attrs.values())
+ # list() is for Py3 where these are iterators
+ values = list(self.f.attrs.values())
+ self.assertEqual(
+ [self.empty_obj], values
+ )
def test_items(self):
- with self.assertRaises(IOError):
- list(self.f.attrs.items())
+ items = list(self.f.attrs.items())
+ self.assertEqual(
+ [(six.u("x"), self.empty_obj)], items
+ )
def test_itervalues(self):
- with self.assertRaises(IOError):
- list(six.itervalues(self.f.attrs))
+ values = list(six.itervalues(self.f.attrs))
+ self.assertEqual(
+ [self.empty_obj], values
+ )
def test_iteritems(self):
- with self.assertRaises(IOError):
- list(six.iteritems(self.f.attrs))
+ items = list(six.iteritems(self.f.attrs))
+ self.assertEqual(
+ [(six.u("x"), self.empty_obj)], items
+ )
class TestWriteException(BaseAttrs):
diff --git a/h5py/tests/old/test_base.py b/h5py/tests/old/test_base.py
index d9192b0..08f5a7a 100644
--- a/h5py/tests/old/test_base.py
+++ b/h5py/tests/old/test_base.py
@@ -53,10 +53,10 @@ class TestRepr(BaseTest):
USTRING = six.unichr(0xfc) + six.unichr(0xdf)
def _check_type(self, obj):
- if six.PY3:
- self.assertIsInstance(repr(obj), six.text_type)
- else:
+ if six.PY2:
self.assertIsInstance(repr(obj), bytes)
+ else:
+ self.assertIsInstance(repr(obj), six.text_type)
def test_group(self):
""" Group repr() with unicode """
diff --git a/h5py/tests/old/test_dataset.py b/h5py/tests/old/test_dataset.py
index 26d6e05..05726fc 100644
--- a/h5py/tests/old/test_dataset.py
+++ b/h5py/tests/old/test_dataset.py
@@ -26,6 +26,7 @@ import numpy as np
from .common import ut, TestCase
from h5py.highlevel import File, Group, Dataset
+from h5py._hl.base import is_empty_dataspace
from h5py import h5t
import h5py
@@ -87,6 +88,18 @@ class TestCreateShape(BaseDataset):
with self.assertRaises(TypeError):
self.f.create_dataset('foo')
+ def test_long_double(self):
+ """ Confirm that the default dtype is float """
+ dset = self.f.create_dataset('foo', (63,), dtype=np.longdouble)
+ self.assertEqual(dset.dtype, np.longdouble)
+
+ @ut.skipIf(not hasattr(np, "complex256"), "No support for complex256")
+ def test_complex256(self):
+ """ Confirm that the default dtype is float """
+ dset = self.f.create_dataset('foo', (63,),
+ dtype=np.dtype('complex256'))
+ self.assertEqual(dset.dtype, np.dtype('complex256'))
+
class TestCreateData(BaseDataset):
@@ -130,6 +143,14 @@ class TestCreateData(BaseDataset):
# there was no test here!
self.assertEqual(True, False)
+ def test_empty_create_via_None_shape(self):
+ self.f.create_dataset('foo', dtype='f')
+ self.assertTrue(is_empty_dataspace(self.f['foo'].id))
+
+ def test_empty_create_via_Empty_class(self):
+ self.f.create_dataset('foo', data=h5py.Empty(dtype='f'))
+ self.assertTrue(is_empty_dataspace(self.f['foo'].id))
+
class TestCreateRequire(BaseDataset):
diff --git a/h5py/tests/old/test_file.py b/h5py/tests/old/test_file.py
index ff9d19c..698b6ff 100644
--- a/h5py/tests/old/test_file.py
+++ b/h5py/tests/old/test_file.py
@@ -16,13 +16,22 @@
from __future__ import absolute_import, with_statement
import os, stat
+from sys import platform
+import tempfile
import six
from .common import ut, TestCase, unicode_filenames
+from ..common import closed_tempfile
from h5py.highlevel import File
import h5py
+try:
+ import pathlib
+except ImportError:
+ pathlib = None
+
+
mpi = h5py.get_config().mpi
class TestFileOpen(TestCase):
@@ -42,10 +51,14 @@ class TestFileOpen(TestCase):
# Existing readonly file; open read-only
os.chmod(fname, stat.S_IREAD)
+ # Running as root (e.g. in a docker container) gives 'r+' as the file
+ # mode, even for a read-only file. See
+ # https://github.com/h5py/h5py/issues/696
+ exp_mode = 'r+' if os.stat(fname).st_uid == 0 and platform != "win32" else 'r'
try:
with File(fname) as f:
self.assertTrue(f)
- self.assertEqual(f.mode, 'r')
+ self.assertEqual(f.mode, exp_mode)
finally:
os.chmod(fname, stat.S_IWRITE)
@@ -54,7 +67,7 @@ class TestFileOpen(TestCase):
f.write(b'\x00')
with self.assertRaises(IOError):
File(fname)
-
+
def test_create(self):
""" Mode 'w' opens file in overwrite mode """
fname = self.mktemp()
@@ -245,6 +258,8 @@ class TestDrivers(TestCase):
self.assertEqual(f.driver, 'mpio')
@ut.skipUnless(mpi, "Parallel HDF5 required")
+ @ut.skipIf(h5py.version.hdf5_version_tuple < (1,8,9),
+ "mpio atomic file operations were added in HDF5 1.8.9+")
def test_mpi_atomic(self):
""" Enable atomic mode for MPIO driver """
from mpi4py import MPI
@@ -538,3 +553,25 @@ class TestCloseInvalidatesOpenObjectIDs(TestCase):
self.assertFalse(bool(f1.id))
self.assertFalse(bool(g1.id))
+@ut.skipIf(pathlib is None, "pathlib module not installed")
+class TestPathlibSupport(TestCase):
+
+ """
+ Check that h5py doesn't break on pathlib
+ """
+ def test_pathlib_accepted_file(self):
+ """ Check that pathlib is accepted by h5py.File """
+ with closed_tempfile() as f:
+ path = pathlib.Path(f)
+ with File(path) as f2:
+ self.assertTrue(True)
+
+ def test_pathlib_name_match(self):
+ """ Check that using pathlib does not affect naming """
+ with closed_tempfile() as f:
+ path = pathlib.Path(f)
+ with File(path) as h5f1:
+ pathlib_name = h5f1.filename
+ with File(f) as h5f2:
+ normal_name = h5f2.filename
+ self.assertEqual(pathlib_name, normal_name)
diff --git a/h5py/tests/old/test_file_image.py b/h5py/tests/old/test_file_image.py
new file mode 100644
index 0000000..93b6e9e
--- /dev/null
+++ b/h5py/tests/old/test_file_image.py
@@ -0,0 +1,26 @@
+from __future__ import absolute_import
+
+import h5py
+from h5py import h5f, h5p
+
+from .common import ut, TestCase
+
+@ut.skipUnless(h5py.version.hdf5_version_tuple >= (1, 8, 9), 'file image operations require HDF5 >= 1.8.9')
+class TestFileImage(TestCase):
+ def test_load_from_image(self):
+ from binascii import a2b_base64
+ from zlib import decompress
+
+ compressed_image = 'eJzr9HBx4+WS4mIAAQ4OBhYGAQZk8B8KKjhQ+TD5BCjNCKU7oPQKJpg4I1hOAiouCDUfXV1IkKsrSPV/NACzx4AFQnMwjIKRCDxcHQNAdASUD0ulJ5hQ1ZWkFpeAaFh69KDQXkYGNohZjDA+JCUzMkIEmKHqELQAWKkAByytOoBJViAPJM7ExATWyAE0B8RgZkyAJmlYDoEAIahukJoNU6+HMTA0UOgT6oBgP38XUI6G5UMFZrzKR8EoGAUjGMDKYVgxDSsuAHcfMK8='
+
+ image = decompress(a2b_base64(compressed_image))
+
+ fapl = h5p.create(h5py.h5p.FILE_ACCESS)
+ fapl.set_fapl_core()
+ fapl.set_file_image(image)
+
+ fid = h5f.open(self.mktemp().encode(), h5py.h5f.ACC_RDONLY, fapl=fapl)
+ f = h5py.File(fid)
+
+ self.assertTrue('test' in f)
+
diff --git a/h5py/tests/old/test_group.py b/h5py/tests/old/test_group.py
index 7dedc4b..bfe19c1 100644
--- a/h5py/tests/old/test_group.py
+++ b/h5py/tests/old/test_group.py
@@ -1,3 +1,4 @@
+# -*- coding: utf-8 -*-
# This file is part of h5py, a Python interface to the HDF5 library.
#
# http://www.h5py.org
@@ -21,7 +22,9 @@ from __future__ import absolute_import
import collections
import numpy as np
import os
+import os.path
import sys
+from tempfile import mkdtemp
import six
@@ -30,6 +33,17 @@ import h5py
from h5py.highlevel import File, Group, SoftLink, HardLink, ExternalLink
from h5py.highlevel import Dataset, Datatype
from h5py import h5t
+from h5py._hl.compat import fsencode
+
+# If we can't encode unicode filenames, there's not much point failing tests
+# which must fail
+try:
+ fsencode(u"α")
+except UnicodeEncodeError:
+ NO_FS_UNICODE = True
+else:
+ NO_FS_UNICODE = False
+
class BaseGroup(TestCase):
@@ -418,7 +432,7 @@ class TestPy2Dict(BaseMapping):
self.assertSameElements([x for x in self.f.iteritems()],
[(x, self.f.get(x)) for x in self.groups])
-@ut.skipIf(not six.PY3, "Py3")
+@ut.skipIf(six.PY2, "Py3")
class TestPy3Dict(BaseMapping):
def test_keys(self):
@@ -731,6 +745,30 @@ class TestExternalLinks(TestCase):
f2.close()
self.assertFalse(f2)
+ @ut.skipIf(NO_FS_UNICODE, "No unicode filename support")
+ def test_unicode_encode(self):
+ """
+ Check that external links encode unicode filenames properly
+ Testing issue #732
+ """
+ ext_filename = os.path.join(mkdtemp(), u"α.hdf5")
+ with File(ext_filename, "w") as ext_file:
+ ext_file.create_group('external')
+ self.f['ext'] = ExternalLink(ext_filename, '/external')
+
+ @ut.skipIf(NO_FS_UNICODE, "No unicode filename support")
+ def test_unicode_decode(self):
+ """
+ Check that external links decode unicode filenames properly
+ Testing issue #732
+ """
+ ext_filename = os.path.join(mkdtemp(), u"α.hdf5")
+ with File(ext_filename, "w") as ext_file:
+ ext_file.create_group('external')
+ ext_file["external"].attrs["ext_attr"] = "test"
+ self.f['ext'] = ExternalLink(ext_filename, '/external')
+ self.assertEqual(self.f["ext"].attrs["ext_attr"], "test")
+
class TestExtLinkBugs(TestCase):
"""
diff --git a/h5py/tests/old/test_h5d_direct_chunk_write.py b/h5py/tests/old/test_h5d_direct_chunk_write.py
new file mode 100644
index 0000000..8407f9e
--- /dev/null
+++ b/h5py/tests/old/test_h5d_direct_chunk_write.py
@@ -0,0 +1,34 @@
+from __future__ import absolute_import
+
+import h5py
+import numpy
+
+from .common import ut, TestCase
+
+
+@ut.skipUnless(h5py.version.hdf5_version_tuple >= (1, 8, 11), 'Direct Chunk Writing requires HDF5 >= 1.8.11')
+class TestWriteDirectChunk(TestCase):
+ def test_write_direct_chunk(self):
+
+ filename = self.mktemp().encode()
+ filehandle = h5py.File(filename, "w")
+
+ dataset = filehandle.create_dataset("data", (100, 100, 100),
+ maxshape=(None, 100, 100),
+ chunks=(1, 100, 100),
+ dtype='float32')
+
+ # writing
+ array = numpy.zeros((10, 100, 100))
+ for index in range(10):
+ a = numpy.random.rand(100, 100).astype('float32')
+ dataset.id.write_direct_chunk((index, 0, 0), a.tostring(), filter_mask=1)
+ array[index] = a
+
+ filehandle.close()
+
+ # checking
+ filehandle = h5py.File(filename, "r")
+ for i in range(10):
+ read_data = filehandle["data"][i]
+ self.assertTrue((array[i] == read_data).all())
diff --git a/h5py/tests/old/test_h5t.py b/h5py/tests/old/test_h5t.py
index 2b2a83d..a263b73 100644
--- a/h5py/tests/old/test_h5t.py
+++ b/h5py/tests/old/test_h5t.py
@@ -9,15 +9,17 @@
from __future__ import absolute_import
-try:
- import unittest2 as ut
-except ImportError:
- import unittest as ut
+import sys
import numpy as np
+from six import PY2, text_type
+
import h5py
from h5py import h5t
+from ..common import TestCase, ut
+
+
class TestCompound(ut.TestCase):
"""
@@ -36,3 +38,171 @@ class TestCompound(ut.TestCase):
self.assertEqual(tid.get_member_offset(0), 0)
self.assertEqual(tid.get_member_offset(1), h5t.STD_REF_OBJ.get_size())
+ def test_out_of_order_offsets(self):
+ size = 20
+ type_dict = {
+ 'names' : ['f1', 'f2', 'f3'],
+ 'formats' : ['<f4', '<i4', '<f8'],
+ 'offsets' : [0, 16, 8]
+ }
+
+ expected_dtype = np.dtype(type_dict)
+
+ tid = h5t.create(h5t.COMPOUND, size)
+ for name, offset, dt in zip(
+ type_dict["names"], type_dict["offsets"], type_dict["formats"]
+ ):
+ tid.insert(
+ name.encode("utf8") if isinstance(name, text_type) else name,
+ offset,
+ h5t.py_create(dt)
+ )
+
+ self.assertEqual(tid.dtype, expected_dtype)
+ self.assertEqual(tid.dtype.itemsize, size)
+
+
+class TestTypeFloatID(TestCase):
+ """Test TypeFloatID."""
+
+ def test_custom_float_promotion(self):
+ """Custom floats are correctly promoted to standard floats on read."""
+ test_filename = self.mktemp()
+ dataset = 'DS1'
+ dataset2 = 'DS2'
+ dataset3 = 'DS3'
+ dataset4 = 'DS4'
+ dataset5 = 'DS5'
+
+ # Strings are handled very differently between python2 and python3.
+ if not PY2:
+ test_filename = test_filename.encode()
+ dataset = dataset.encode()
+ dataset2 = dataset2.encode()
+ dataset3 = dataset3.encode()
+ dataset4 = dataset4.encode()
+ dataset5 = dataset5.encode()
+
+ DIM0 = 4
+ DIM1 = 7
+
+ wdata = np.array([[-1.50066626e-09, 1.40062184e-09, 1.81216819e-10,
+ 4.01087163e-10, 4.27917257e-10, -7.04858394e-11,
+ 5.74800652e-10],
+ [-1.50066626e-09, 4.86579665e-10, 3.42879503e-10,
+ 5.12045517e-10, 5.10226528e-10, 2.24190444e-10,
+ 3.93356459e-10],
+ [-1.50066626e-09, 5.24778443e-10, 8.19454726e-10,
+ 1.28966349e-09, 1.68483894e-10, 5.71276360e-11,
+ -1.08684617e-10],
+ [-1.50066626e-09, -1.08343556e-10, -1.58934199e-10,
+ 8.52196536e-10, 6.18456397e-10, 6.16637408e-10,
+ 1.31694833e-09]], dtype=np.float32)
+
+ wdata2 = np.array([[-1.50066626e-09, 5.63886715e-10, -8.74251782e-11,
+ 1.32558853e-10, 1.59161573e-10, 2.29420039e-10,
+ -7.24185156e-11],
+ [-1.50066626e-09, 1.87810656e-10, 7.74889486e-10,
+ 3.95630195e-10, 9.42236511e-10, 8.38554115e-10,
+ -8.71978045e-11],
+ [-1.50066626e-09, 6.20275387e-10, 7.34871719e-10,
+ 6.64840627e-10, 2.64662958e-10, 1.05319486e-09,
+ 1.68256520e-10],
+ [-1.50066626e-09, 1.67347025e-10, 5.12045517e-10,
+ 3.36513040e-10, 1.02545528e-10, 1.28784450e-09,
+ 4.06089384e-10]], dtype=np.float32)
+
+ # Create a new file using the default properties.
+ fid = h5py.h5f.create(test_filename)
+ # Create the dataspace. No maximum size parameter needed.
+ dims = (DIM0, DIM1)
+ space = h5py.h5s.create_simple(dims)
+
+ # create a custom type with larger bias
+ mytype = h5t.IEEE_F16LE
+ mytype = h5t.IEEE_F16LE.copy()
+ mytype.set_fields(14, 9, 5, 0, 9)
+ mytype.set_size(2)
+ mytype.set_ebias(53)
+ mytype.lock()
+
+ dset = h5py.h5d.create(fid, dataset, mytype, space)
+ dset.write(h5py.h5s.ALL, h5py.h5s.ALL, wdata)
+
+ del dset
+
+ # create a custom type with larger exponent
+ mytype2 = h5t.IEEE_F16LE
+ mytype2 = h5t.IEEE_F16LE.copy()
+ mytype2.set_fields(15, 9, 6, 0, 9)
+ mytype2.set_size(2)
+ mytype2.set_ebias(53)
+ mytype2.lock()
+
+ dset = h5py.h5d.create(fid, dataset2, mytype2, space)
+ dset.write(h5py.h5s.ALL, h5py.h5s.ALL, wdata2)
+
+ del dset
+
+ # create a custom type which reimplements 16-bit floats
+ mytype3 = h5t.IEEE_F16LE
+ mytype3 = h5t.IEEE_F16LE.copy()
+ mytype3.set_fields(15, 10, 5, 0, 10)
+ mytype3.set_size(2)
+ mytype3.set_ebias(15)
+ mytype3.lock()
+
+ dset = h5py.h5d.create(fid, dataset3, mytype3, space)
+ dset.write(h5py.h5s.ALL, h5py.h5s.ALL, wdata2)
+
+ del dset
+
+ # create a custom type with larger bias
+ mytype4 = h5t.IEEE_F16LE
+ mytype4 = h5t.IEEE_F16LE.copy()
+ mytype4.set_fields(15, 10, 5, 0, 10)
+ mytype4.set_size(2)
+ mytype4.set_ebias(258)
+ mytype4.lock()
+
+ dset = h5py.h5d.create(fid, dataset4, mytype4, space)
+ dset.write(h5py.h5s.ALL, h5py.h5s.ALL, wdata2)
+
+ del dset
+
+ # create a dataset with long doubles
+ dset = h5py.h5d.create(fid, dataset5, h5t.NATIVE_LDOUBLE, space)
+ dset.write(h5py.h5s.ALL, h5py.h5s.ALL, wdata2)
+
+ # Explicitly close and release resources.
+ del space
+ del dset
+ del fid
+
+ f = h5py.File(test_filename, 'r')
+
+ # ebias promotion to float32
+ values = f[dataset][:]
+ self.assert_(np.all(values == wdata))
+ self.assert_(values.dtype == np.float32)
+
+ # esize promotion to float32
+ values = f[dataset2][:]
+ self.assert_(np.all(values == wdata2))
+ self.assert_(values.dtype == np.float32)
+
+ # regular half floats
+ dset = f[dataset3]
+ try:
+ self.assert_(dset.dtype == np.float16)
+ except AttributeError:
+ self.assert_(dset.dtype == np.float32)
+
+ # ebias promotion to float64
+ dset = f[dataset4]
+ self.assert_(dset.dtype == np.float64)
+
+ # long double floats
+
+ dset = f[dataset5]
+ self.assert_(dset.dtype == np.longdouble)
diff --git a/h5py/tests/old/test_slicing.py b/h5py/tests/old/test_slicing.py
index fba6cec..d36a54e 100644
--- a/h5py/tests/old/test_slicing.py
+++ b/h5py/tests/old/test_slicing.py
@@ -222,27 +222,6 @@ class TestArraySlicing(BaseSlicing):
self.assertTrue(np.all(dset[...] == out))
-class TestEmptySlicing(BaseSlicing):
-
- """
- Empty (H5S_NULL) datasets can't be sliced
- """
-
- def setUp(self):
- BaseSlicing.setUp(self)
- sid = h5s.create(h5s.NULL)
- tid = h5t.C_S1.copy()
- tid.set_size(10)
- dsid = h5d.create(self.f.id, b'x', tid, sid)
- self.dataset = self.f['x']
-
- def test_ellipsis(self):
- with self.assertRaises(IOError):
- self.dataset[...]
-
- def test_empty_tuple(self):
- with self.assertRaises(IOError):
- self.dataset[()]
class TestZeroLengthSlicing(BaseSlicing):
@@ -302,7 +281,7 @@ class TestFieldNames(BaseSlicing):
def test_read(self):
""" Test read with field selections (bytes and unicode) """
- if not six.PY3:
+ if six.PY2:
# Byte strings are only allowed for field names on Py2
self.assertArrayEqual(self.dset[b'a'], self.data['a'])
self.assertArrayEqual(self.dset[six.u('a')], self.data['a'])
diff --git a/setup.py b/setup.py
index 5ed287d..9c5d516 100755
--- a/setup.py
+++ b/setup.py
@@ -2,7 +2,7 @@
"""
This is the main setup script for h5py (http://www.h5py.org).
-
+
Most of the functionality is provided in two separate modules:
setup_configure, which manages compile-time/Cython-time build options
for h5py, and setup_build, which handles the actual compilation process.
@@ -22,9 +22,9 @@ import os.path as op
import setup_build, setup_configure
-VERSION = '2.6.0'
+VERSION = '2.7.0rc2'
-NUMPY_DEP = 'numpy>=1.6.1'
+NUMPY_DEP = 'numpy>=1.7'
# these are required to use h5py
RUN_REQUIRES = [NUMPY_DEP, 'six']
@@ -34,6 +34,13 @@ RUN_REQUIRES = [NUMPY_DEP, 'six']
# RUN_REQUIRES can be removed when setup.py test is removed
SETUP_REQUIRES = RUN_REQUIRES + [NUMPY_DEP, 'Cython>=0.19', 'pkgconfig']
+# Needed to avoid trying to install numpy/cython on pythons which the latest
+# versions don't support
+if "sdist" in sys.argv and "bdist_wheel" not in sys.argv and "install" not in sys.argv:
+ use_setup_requires = False
+else:
+ use_setup_requires = True
+
# --- Custom Distutils commands -----------------------------------------------
@@ -41,7 +48,7 @@ class test(Command):
"""
Custom Distutils command to run the h5py test suite.
-
+
This command will invoke build/build_ext if the project has not
already been built. It then patches in the build directory to
sys.path and runs the test suite directly.
@@ -71,7 +78,7 @@ class test(Command):
buildobj = self.distribution.get_command_obj('build')
buildobj.run()
-
+
oldpath = sys.path
try:
sys.path = [op.abspath(buildobj.build_lib)] + oldpath
@@ -81,8 +88,8 @@ class test(Command):
sys.exit(1)
finally:
sys.path = oldpath
-
-
+
+
CMDCLASS = {'build_ext': setup_build.h5py_build_ext,
'configure': setup_configure.configure,
'test': test, }
@@ -97,7 +104,16 @@ Intended Audience :: Developers
Intended Audience :: Information Technology
Intended Audience :: Science/Research
License :: OSI Approved :: BSD License
+Programming Language :: Cython
Programming Language :: Python
+Programming Language :: Python :: 2
+Programming Language :: Python :: 2.6
+Programming Language :: Python :: 2.7
+Programming Language :: Python :: 3
+Programming Language :: Python :: 3.3
+Programming Language :: Python :: 3.4
+Programming Language :: Python :: 3.5
+Programming Language :: Python :: Implementation :: CPython
Topic :: Scientific/Engineering
Topic :: Database
Topic :: Software Development :: Libraries :: Python Modules
@@ -136,15 +152,15 @@ setup(
long_description = long_desc,
classifiers = [x for x in cls_txt.split("\n") if x],
author = 'Andrew Collette',
- author_email = 'andrew dot collette at gmail dot com',
+ author_email = 'andrew.collette@gmail.com',
maintainer = 'Andrew Collette',
- maintainer_email = 'andrew dot collette at gmail dot com',
+ maintainer_email = 'andrew.collette@gmail.com',
url = 'http://www.h5py.org',
download_url = 'https://pypi.python.org/pypi/h5py',
packages = ['h5py', 'h5py._hl', 'h5py.tests', 'h5py.tests.old', 'h5py.tests.hl'],
package_data = package_data,
ext_modules = [Extension('h5py.x',['x.c'])], # To trick build into running build_ext
install_requires = RUN_REQUIRES,
- setup_requires = SETUP_REQUIRES,
+ setup_requires = SETUP_REQUIRES if use_setup_requires else [],
cmdclass = CMDCLASS,
)
diff --git a/setup_build.py b/setup_build.py
index 6e3468b..ffe19b7 100644
--- a/setup_build.py
+++ b/setup_build.py
@@ -35,20 +35,22 @@ EXTRA_SRC = {'h5z': [ localpath("lzf/lzf_filter.c"),
localpath("lzf/lzf/lzf_c.c"),
localpath("lzf/lzf/lzf_d.c")]}
+COMPILER_SETTINGS = {
+ 'libraries' : ['hdf5', 'hdf5_hl'],
+ 'include_dirs' : [localpath('lzf')],
+ 'library_dirs' : [],
+ 'define_macros' : [('H5_USE_16_API', None)]
+}
if sys.platform.startswith('win'):
- COMPILER_SETTINGS = {
- 'libraries' : ['h5py_hdf5', 'h5py_hdf5_hl'],
- 'include_dirs' : [localpath('lzf'), localpath('windows')],
- 'library_dirs' : [],
- 'define_macros' : [('H5_USE_16_API', None), ('_HDF5USEDLL_', None)] }
-
+ COMPILER_SETTINGS['include_dirs'].append(localpath('windows'))
+ COMPILER_SETTINGS['define_macros'].extend([
+ ('_HDF5USEDLL_', None),
+ ('H5_BUILT_AS_DYNAMIC_LIB', None)
+ ])
else:
- COMPILER_SETTINGS = {
- 'libraries' : ['hdf5', 'hdf5_hl'],
- 'include_dirs' : [localpath('lzf'), '/opt/local/include', '/usr/local/include'],
- 'library_dirs' : ['/opt/local/lib', '/usr/local/lib'],
- 'define_macros' : [('H5_USE_16_API', None)] }
+ COMPILER_SETTINGS['include_dirs'].extend(['/opt/local/include', '/usr/local/include'])
+ COMPILER_SETTINGS['library_dirs'].extend(['/opt/local/lib', '/usr/local/lib'])
class h5py_build_ext(build_ext):
@@ -145,6 +147,7 @@ class h5py_build_ext(build_ext):
""" Distutils calls this method to run the command """
from Cython.Build import cythonize
+ import numpy
# Provides all of our build options
config = self.distribution.get_command_obj('configure')
@@ -176,10 +179,14 @@ DEF MPI4PY_V2 = %(mpi4py_v2)s
DEF HDF5_VERSION = %(version)s
DEF SWMR_MIN_HDF5_VERSION = (1,9,178)
DEF VDS_MIN_HDF5_VERSION = (1,9,233)
+DEF COMPLEX256_SUPPORT = %(complex256_support)s
"""
- s %= {'mpi': bool(config.mpi),
- 'mpi4py_v2': bool(v2),
- 'version': tuple(int(x) for x in config.hdf5_version.split('.'))}
+ s %= {
+ 'mpi': bool(config.mpi),
+ 'mpi4py_v2': bool(v2),
+ 'version': tuple(int(x) for x in config.hdf5_version.split('.')),
+ 'complex256_support': hasattr(numpy, 'complex256')
+ }
s = s.encode('utf-8')
f.write(s)
diff --git a/setup_configure.py b/setup_configure.py
index fc7fb08..5e51d82 100644
--- a/setup_configure.py
+++ b/setup_configure.py
@@ -58,6 +58,7 @@ class EnvironmentOptions(object):
def __init__(self):
self.hdf5 = os.environ.get('HDF5_DIR')
self.hdf5_version = os.environ.get('HDF5_VERSION')
+ self.mpi = os.environ.get('HDF5_MPI') == "ON"
if self.hdf5_version is not None:
validate_version(self.hdf5_version)
@@ -126,6 +127,8 @@ class configure(Command):
dct['env_hdf5_version'] = env.hdf5_version
if self.mpi is not None:
dct['cmd_mpi'] = self.mpi
+ if env.mpi is not None:
+ dct['env_mpi'] = env.mpi
self.rebuild_required = dct.get('rebuild') or dct != oldsettings
@@ -163,9 +166,13 @@ class configure(Command):
if self.mpi is None:
self.mpi = oldsettings.get('cmd_mpi')
+ if self.mpi is None:
+ self.mpi = env.mpi
+ if self.mpi is None:
+ self.mpi = oldsettings.get('env_mpi')
# Step 3: print the resulting configuration to stdout
-
+
print('*' * 80)
print(' ' * 23 + "Summary of the h5py configuration")
print('')
@@ -204,7 +211,7 @@ def autodetect_version(hdf5_dir=None):
libdirs = ['/usr/local/lib', '/opt/local/lib']
try:
if pkgconfig.exists("hdf5"):
- libdirs.append(pkgconfig.parse("hdf5")['library_dirs'])
+ libdirs.extend(pkgconfig.parse("hdf5")['library_dirs'])
except EnvironmentError:
pass
if hdf5_dir is not None:
diff --git a/tox.ini b/tox.ini
index f565c8b..1e87573 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,16 +1,62 @@
[tox]
-envlist = {py26,py27,py32,py33,py34}-{test,pylint}-{nodeps,deps}
+envlist = {py26,py27,py33,py34,py35,pypy}-{test}-{deps,mindeps}
[testenv]
deps =
- deps: numpy>=1.6.1
+ deps: numpy>=1.7
deps: cython>=0.19
- pylint: astroid>=1.3,<1.4
- pylint: pylint>=1.4,<1.5
- py26: unittest2
+ mindeps: numpy==1.7
+ mindeps: cython==0.19
commands =
+ test: python {toxinidir}/ci/fix_paths.py {envsitepackagesdir}
test: python -c "from sys import exit; import h5py; exit(0) if h5py.run_tests().wasSuccessful() else exit(1)"
- pylint: pylint h5py
changedir =
test: {toxworkdir}
- pylint: {toxinidir}
+passenv =
+ HDF5_DIR
+ TOXPYTHON
+basepython =
+ pypy: {env:TOXPYTHON:pypy}
+ py26: {env:TOXPYTHON:python2.6}
+ py27: {env:TOXPYTHON:python2.7}
+ py33: {env:TOXPYTHON:python3.3}
+ py34: {env:TOXPYTHON:python3.4}
+ py35: {env:TOXPYTHON:python3.5}
+
+[testenv:py26-test-deps]
+deps =
+ unittest2
+ numpy>=1.7,<1.11
+ cython>=0.19
+
+[testenv:py33-test-deps]
+deps =
+ numpy>=1.7,<1.12
+ cython>=0.19
+
+[testenv:py26-test-mindeps]
+deps =
+ unittest2
+ numpy==1.7
+ cython==0.19
+
+[testenv:py33-test-mindeps]
+deps =
+ numpy==1.7
+ cython==0.19
+
+[testenv:py34-test-mindeps]
+deps =
+ numpy==1.9
+ cython==0.19
+
+[testenv:py35-test-mindeps]
+deps =
+ numpy==1.10.0.post2
+ cython==0.19
+
+[testenv:py34-test-mpi4py]
+deps =
+ numpy==1.9
+ cython==0.19
+ mpi4py>=1.3.1
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/h5py.git
More information about the debian-science-commits
mailing list