[python-hdf5storage] 60/84: Added workaround for reading MAT files with variables larger than 2 GB when the variables to read aren't given explicitly.
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Mon Feb 29 08:25:04 UTC 2016
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to annotated tag 0.1.10
in repository python-hdf5storage.
commit 95f9727788a5f013a1909b9dcad93cf00e1a9de5
Author: Freja Nordsiek <fnordsie at gmail.com>
Date: Fri Aug 21 17:36:57 2015 -0400
Added workaround for reading MAT files with variables larger than 2 GB when the variables to read aren't given explicitly.
---
hdf5storage/__init__.py | 25 +++++++++++++++----------
1 file changed, 15 insertions(+), 10 deletions(-)
diff --git a/hdf5storage/__init__.py b/hdf5storage/__init__.py
index e722943..f430b65 100644
--- a/hdf5storage/__init__.py
+++ b/hdf5storage/__init__.py
@@ -1360,16 +1360,21 @@ def loadmat(file_name, mdict=None, appendmat=True,
# Read everything if we were instructed.
if variable_names is None:
- # Read everything from the root node.
- data = read(path='/', filename=filename, options=options)
-
- # If we didn't make a dict but instead got a structured
- # ndarray, extract all the fields and make a dict from them.
- if not isinstance(data, dict):
- new_data = dict()
- for field in data.dtype.names:
- new_data[field] = data[field][0]
- data = new_data
+ data = dict()
+ with h5py.File(filename) as f:
+ for k in f:
+                # Read if not group_for_references. Data that
+                # produces errors when read is discarded (the OSError
+                # that would happen if this is not an HDF5 file
+                # would already have happened when opening the
+                # file).
+ if f[k].name != options.group_for_references:
+ try:
+ data[k] = lowlevel.read_data(f, f, k,
+ options)
+ except:
+ pass
+
else:
# Extract the desired fields into a dictionary one by one.
data = dict()
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/python-hdf5storage.git
More information about the debian-science-commits
mailing list