[python-hdf5storage] 115/152: Added write support for numpy.ndarrays that have fields in their dtype as Groups/matlab structures.
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Mon Feb 29 08:24:40 UTC 2016
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to annotated tag 0.1
in repository python-hdf5storage.
commit f381b8d87ffb6f315fc68bec4d4093411dfaf3bb
Author: Freja Nordsiek <fnordsie at gmail.com>
Date: Thu Feb 13 21:17:31 2014 -0500
Added write support for numpy.ndarrays that have fields in their dtype as Groups/matlab structures.
---
doc/source/storage_format.rst | 166 ++++++++++++++++++--------------
hdf5storage/Marshallers.py | 216 +++++++++++++++++++++++++++++++-----------
hdf5storage/__init__.py | 90 ++++++++++++------
3 files changed, 317 insertions(+), 155 deletions(-)
diff --git a/doc/source/storage_format.rst b/doc/source/storage_format.rst
index 5b5bbbb..1cc9419 100644
--- a/doc/source/storage_format.rst
+++ b/doc/source/storage_format.rst
@@ -47,43 +47,45 @@ stored (Group or Dataset), what type/s it is converted to (no conversion
if none are listed), as well as the first version of this package to
support the datatype.
-============= ======= ================================= ================
-Type Version Converted to Group or Dataset
-============= ======= ================================= ================
-bool 0.1 np.bool\_ or np.uint8 [1]_ Dataset
-None 0.1 ``np.float64([])`` Dataset
-int 0.1 np.int64 Dataset
-float 0.1 np.float64 Dataset
-complex 0.1 np.complex128 Dataset
-str 0.1 np.uint32/16 [2]_ Dataset
-bytes 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
-bytearray 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
-list 0.1 np.object\_ Dataset
-tuple 0.1 np.object\_ Dataset
-set 0.1 np.object\_ Dataset
-frozenset 0.1 np.object\_ Dataset
-cl.deque 0.1 np.object\_ Dataset
-dict [4]_ 0.1 Group
-np.bool\_ 0.1 not or np.uint8 [1]_ Dataset
-np.void 0.1 Dataset
-np.uint8 0.1 Dataset
-np.uint16 0.1 Dataset
-np.uint32 0.1 Dataset
-np.uint64 0.1 Dataset
-np.uint8 0.1 Dataset
-np.int16 0.1 Dataset
-np.int32 0.1 Dataset
-np.int64 0.1 Dataset
-np.float16 0.1 Dataset
-np.float32 0.1 Dataset
-np.float64 0.1 Dataset
-np.complex64 0.1 Dataset
-np.complex128 0.1 Dataset
-np.str\_ 0.1 np.uint32/16 [2]_ Dataset
-np.bytes\_ 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
-np.object\_ 0.1 Dataset
-np.chararray 0.1 np.bytes\_ or np.uint16/32 [2,3]_ Dataset
-============= ======= ================================= ================
+============= ======= ==================================== =====================
+Type Version Converted to Group or Dataset
+============= ======= ==================================== =====================
+bool 0.1 np.bool\_ or np.uint8 [1]_ Dataset
+None 0.1 ``np.float64([])`` Dataset
+int 0.1 np.int64 Dataset
+float 0.1 np.float64 Dataset
+complex 0.1 np.complex128 Dataset
+str 0.1 np.uint32/16 [2]_ Dataset
+bytes 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
+bytearray 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
+list 0.1 np.object\_ Dataset
+tuple 0.1 np.object\_ Dataset
+set 0.1 np.object\_ Dataset
+frozenset 0.1 np.object\_ Dataset
+cl.deque 0.1 np.object\_ Dataset
+dict [4]_ 0.1 Group
+np.bool\_ 0.1 not or np.uint8 [1]_ Dataset
+np.void 0.1 Dataset
+np.uint8 0.1 Dataset
+np.uint16 0.1 Dataset
+np.uint32 0.1 Dataset
+np.uint64 0.1 Dataset
+np.uint8 0.1 Dataset
+np.int16 0.1 Dataset
+np.int32 0.1 Dataset
+np.int64 0.1 Dataset
+np.float16 0.1 Dataset
+np.float32 0.1 Dataset
+np.float64 0.1 Dataset
+np.complex64 0.1 Dataset
+np.complex128 0.1 Dataset
+np.str\_ 0.1 np.uint32/16 [2]_ Dataset
+np.bytes\_ 0.1 np.bytes\_ or np.uint16 [3]_ Dataset
+np.object\_ 0.1 Dataset
+np.ndarray 0.1 not or Group of contents [5]_ Dataset or Group [5]_
+np.matrix 0.1 np.ndarray Dataset
+np.chararray 0.1 np.bytes\_ or np.uint16/32 [2]_ [3]_ Dataset
+============= ======= ==================================== =====================
.. [1] Depends on the selected options. Always ``np.uint8`` when
``convert_bools_to_uint8 == True`` (set implicitly when
@@ -102,6 +104,12 @@ np.chararray 0.1 np.bytes\_ or np.uint16/32 [2,3]_ Dataset
``np.uint16`` in UTF-16 encoding. Otherwise, it is just written
as ``np.bytes_``.
.. [4] All keys must be ``str``.
+.. [5] If it doesn't have any fields in its dtype or if
+ :py:attr:`Options.fielded_numpy_ndarray_as_struct` is not set, it
+ is not converted and is written as is as a Dataset. Otherwise, it
+ is written as a Group with the contents of its individual
+ fields written as Datasets within the Group having the fields as
+ names.
Attributes
@@ -131,9 +139,9 @@ None 'builtins.NoneType' 'float64' 'double'
int 'int' 'int64' 'int64'
float 'float' 'float64' 'double'
complex 'complex' 'complex128' 'double'
-str 'str' 'str#' [5]_ 'char' 2
-bytes 'bytes' 'bytes#' [5]_ 'char' 2
-bytearray 'bytearray' 'bytes#' [5]_ 'char' 2
+str 'str' 'str#' [6]_ 'char' 2
+bytes 'bytes' 'bytes#' [6]_ 'char' 2
+bytearray 'bytearray' 'bytes#' [6]_ 'char' 2
list 'list' 'object' 'cell'
tuple 'tuple' 'object' 'cell'
set 'set' 'object' 'cell'
@@ -141,7 +149,7 @@ frozenset 'frozenset' 'object' 'cell'
cl.deque 'collections.deque' 'object' 'cell'
dict 'dict' 'struct'
np.bool\_ 'numpy.bool' 'bool' 'logical' 1
-np.void 'numpy.void' 'void#' [5]_
+np.void 'numpy.void' 'void#' [6]_
np.uint8 'numpy.uint8' 'uint8' 'uint8'
np.uint16 'numpy.uint16' 'uint16' 'uint16'
np.uint32 'numpy.uint32' 'uint32' 'uint32'
@@ -155,22 +163,24 @@ np.float32 'numpy.float32' 'float32' 'single'
np.float64 'numpy.float64' 'float64' 'double'
np.complex64 'numpy.complex64' 'complex64' 'single'
np.complex128 'numpy.complex128' 'complex128' 'double'
-np.str\_ 'numpy.str\_' 'str#' [5]_ 'char' or 'uint32' 2 or 4 [6]_
-np.bytes\_ 'numpy.bytes\_' 'bytes#' [5]_ 'char' 2
+np.str\_ 'numpy.str\_' 'str#' [6]_ 'char' or 'uint32' 2 or 4 [7]_
+np.bytes\_ 'numpy.bytes\_' 'bytes#' [6]_ 'char' 2
np.object\_ 'numpy.object\_' 'object' 'cell'
-np.ndarray 'numpy.ndarray' [7]_ [7]_
-np.matrix 'numpy.matrix' [7]_ [7]_
-np.chararray 'numpy.chararray' [7]_ 'char' [7]_
+np.ndarray 'numpy.ndarray' [8]_ [8]_ [9]_
+np.matrix 'numpy.matrix' [8]_ [8]_
+np.chararray 'numpy.chararray' [8]_ 'char' [8]_
============= =================== =========================== ================== =================
-.. [5] '#' is replaced by the number of bits taken up by the string, or
+.. [6] '#' is replaced by the number of bits taken up by the string, or
each string in the case that it is an array of strings. This is 8
and 32 bits per character for ``np.bytes_`` and ``np.str_``
respectively.
-.. [6] ``2`` if it is stored as ``np.uint16`` or ``4`` if ``np.uint32``.
-
-.. [7] The value that would be put in for a scalar of the same dtype is
+.. [7] ``2`` if it is stored as ``np.uint16`` or ``4`` if ``np.uint32``.
+.. [8] The value that would be put in for a scalar of the same dtype is
used.
+.. [9] If its dtype has fields and
+ :py:attr:`Options.fielded_numpy_ndarray_as_struct` is set, it is
+ set to 'cell' overriding anything else.
Python.Shape
@@ -277,19 +287,20 @@ transfomations are listed below by their option name, other than
`complex_names` and `group_for_references` which were explained in the
previous section.
-============================ ====================
-attribute value
-============================ ====================
-delete_unused_variables ``True``
-make_atleast_2d ``True``
-convert_numpy_bytes_to_utf16 ``True``
-convert_numpy_str_to_utf16 ``True``
-convert_bools_to_uint8 ``True``
-reverse_dimension_order ``True``
-store_shape_for_empty ``True``
-complex_names ``('real', 'imag')``
-group_for_references ``'/#refs#'``
-============================ ====================
+=============================== ====================
+attribute value
+=============================== ====================
+delete_unused_variables ``True``
+fielded_numpy_ndarray_as_struct ``True``
+make_atleast_2d ``True``
+convert_numpy_bytes_to_utf16 ``True``
+convert_numpy_str_to_utf16 ``True``
+convert_bools_to_uint8 ``True``
+reverse_dimension_order ``True``
+store_shape_for_empty ``True``
+complex_names ``('real', 'imag')``
+group_for_references ``'/#refs#'``
+=============================== ====================
delete_unused_variables
@@ -302,6 +313,19 @@ Group (would end up a struct in MATLAB) that currently exist in the file
but are not in the object being stored should be deleted on the file or
not.
+fielded_numpy_ndarray_as_struct
+-------------------------------
+
+``bool``
+
+Whether ``np.ndarray`` types (or things converted to them) should be
+written as structures/Groups if their dtype has fields. A dtype with
+fields looks like ``np.dtype([('a', np.uint16), ('b', np.float32)])``.
+If an array satisfies this criterion and the option is set, rather than
+writing the data as a single Dataset, it is written as a Group with the
+contents of the individual fields written as Datasets within it. This
+option is set to ``True`` implicitly by ``matlab_compatible``.
+
make_atleast_2d
---------------
@@ -373,12 +397,12 @@ This table gives the MATLAB classes that can be read from a MAT file,
the first version of this package that can read them, and the Python
type they are read as if there is no Python metadata attached to them.
-============ ======= ================================
+============ ======= =================================
MATLAB Class Version Python Type
-============ ======= ================================
+============ ======= =================================
logical 0.1 np.bool\_
-single 0.1 np.float32 or np.complex64 [8]_
-double 0.1 np.float64 or np.complex128 [8]_
+single 0.1 np.float32 or np.complex64 [10]_
+double 0.1 np.float64 or np.complex128 [10]_
uint8 0.1 np.uint8
uint16 0.1 np.uint16
uint32 0.1 np.uint32
@@ -387,9 +411,9 @@ int8 0.1 np.int8
int16 0.1 np.int16
int32 0.1 np.int32
int64 0.1 np.int64
-struct 0.1 dict [9]_
+struct 0.1 dict [11]_
cell 0.1 np.object\_
-============ ======= ================================
+============ ======= =================================
-.. [8] Depends on whether there is a complex part or not.
-.. [9] Structure arrays are not supported.
+.. [10] Depends on whether there is a complex part or not.
+.. [11] Structure arrays are not supported.
diff --git a/hdf5storage/Marshallers.py b/hdf5storage/Marshallers.py
index 8390115..540a167 100644
--- a/hdf5storage/Marshallers.py
+++ b/hdf5storage/Marshallers.py
@@ -39,6 +39,65 @@ from hdf5storage import lowlevel
from hdf5storage.lowlevel import write_data, read_data
+def write_object_array(f, data, options):
+ """ Writes an array of objects recursively.
+
+ Writes the elements of the given object array recursively in the
+ HDF5 Group ``options.group_for_references`` and returns an
+ ``h5py.Reference`` array to all the elements.
+
+ Parameters
+ ----------
+ f : h5py.File
+ The HDF5 file handle that is open.
+ data : numpy.ndarray of objects
+ Numpy object array to write the elements of.
+ options : hdf5storage.core.Options
+ hdf5storage options object.
+
+ See Also
+ --------
+ hdf5storage.Options.group_for_references
+ h5py.Reference
+
+ """
+ # We need to grab the special reference dtype and make an empty
+ # array to store all the references in.
+ ref_dtype = h5py.special_dtype(ref=h5py.Reference)
+ data_refs = np.zeros(shape=data.shape, dtype='object')
+
+ # Go through all the elements of data and write them, grabbing their
+ # references and putting them in data_refs. They will be put in
+ # group_for_references, which is also what the H5PATH needs to be
+ # set to if we are doing MATLAB compatibility (otherwise, the
+ # attribute needs to be deleted).
+
+ if options.group_for_references not in f:
+ f.create_group(options.group_for_references)
+
+ grp2 = f[options.group_for_references]
+
+ if not isinstance(grp2, h5py.Group):
+ del f[options.group_for_references]
+ f.create_group(options.group_for_references)
+ grp2 = f[options.group_for_references]
+
+ for index, x in np.ndenumerate(data):
+ data_refs[index] = None
+ name_for_ref = next_unused_name_in_group(grp2, 16)
+ write_data(f, grp2, name_for_ref, x, None, options)
+ data_refs[index] = grp2[name_for_ref].ref
+ if options.matlab_compatible:
+ set_attribute_string(grp2[name_for_ref],
+ 'H5PATH', grp2.name)
+ else:
+ del_attribute(grp2[name_for_ref], 'H5PATH')
+
+ # Now, the dtype needs to be changed to the reference type and the
+ # whole thing copied over to data_to_store.
+ return data_refs.astype(dtype=ref_dtype).copy()
+
+
class TypeMarshaller(object):
""" Base class for marshallers of Python types.
@@ -369,7 +428,8 @@ class NumpyScalarArrayMarshaller(TypeMarshaller):
'single': np.float32,
'double': np.float64,
'char': np.str_,
- 'cell': np.object_}
+ 'cell': np.object_,
+ 'canonical empty': np.float64}
# Set matlab_classes to the supported classes (the values).
@@ -378,9 +438,13 @@ class NumpyScalarArrayMarshaller(TypeMarshaller):
def write(self, f, grp, name, data, type_string, options):
# If we are doing matlab compatibility and the data type is not
# one of those that is supported for matlab, skip writing the
- # data or throw an error if appropriate.
+ # data or throw an error if appropriate. Fielded ndarrays and
+ # recarrays are compatible if the
+ # fielded_numpy_ndarray_as_struct option is set.
if options.matlab_compatible \
- and data.dtype.type not in self.__MATLAB_classes:
+ and not (data.dtype.type in self.__MATLAB_classes \
+ or (data.dtype.fields is not None \
+ and options.fielded_numpy_ndarray_as_struct)):
if options.action_for_matlab_incompatible == 'error':
raise lowlevel.TypeNotMatlabCompatibleError( \
'Data type ' + data.dtype.name
@@ -473,65 +537,103 @@ class NumpyScalarArrayMarshaller(TypeMarshaller):
# (data_to_store is still an object), then we must recursively
# write what each element points to and make an array of the
# references to them.
-
if data_to_store.dtype.name == 'object':
- ref_dtype = h5py.special_dtype(ref=h5py.Reference)
- data_refs = data_to_store.copy()
-
- # Go through all the elements of data and write them,
- # gabbing their references and putting them in
- # data_refs. They will be put in group_for_references, which
- # is also what the H5PATH needs to be set to if we are doing
- # MATLAB compatibility (otherwise, the attribute needs to be
- # deleted).
-
- if options.group_for_references not in f:
- f.create_group(options.group_for_references)
-
- grp2 = f[options.group_for_references]
-
- if not isinstance(grp2, h5py.Group):
- del f[options.group_for_references]
- grp2 = f[options.group_for_references]
-
- for index, x in np.ndenumerate(data_to_store):
- data_refs[index] = None
- name_for_ref = next_unused_name_in_group(grp2, 16)
- write_data(f, grp2, name_for_ref, x, None, options)
- data_refs[index] = grp2[name_for_ref].ref
- if options.matlab_compatible:
- set_attribute_string(grp2[name_for_ref],
- 'H5PATH', grp2.name)
- else:
- del_attribute(grp2[name_for_ref], 'H5PATH')
-
- # Now, the dtype needs to be changed to the reference type
- # and the whole thing copied over to data_to_store.
- data_to_store = data_refs.astype(dtype=ref_dtype)
-
- # The data must first be written. If name is not present yet,
- # then it must be created. If it is present, but not a Dataset,
- # has the wrong dtype, or is the wrong shape; then it must be
- # deleted and then written. Otherwise, it is just overwritten in
- # place (note, this will not change any filters or chunking
- # settings, but will keep the file from growing needlessly).
+ data_to_store = write_object_array(f, data_to_store,
+ options)
- if name not in grp:
- grp.create_dataset(name, data=data_to_store,
- **options.array_options)
- elif not isinstance(grp[name], h5py.Dataset) \
- or grp[name].dtype != data_to_store.dtype \
- or grp[name].shape != data_to_store.shape:
- del grp[name]
- grp.create_dataset(name, data=data_to_store,
- **options.array_options)
+ # If it is an ndarray with fields and we are writing such things as
+ # a Group/struct, that needs to be handled. Otherwise, it is
+ # simply written as is to a Dataset. As HDF5 Reference types do
+ # look like a fielded object array, those have to be excluded
+ # explicitly. Complex types may have been converted so that they
+ # can have different field names as an HDF5 COMPOUND type, so
+ # those have to be excluded too.
+
+ if data_to_store.dtype.fields is not None \
+ and h5py.check_dtype(ref=data_to_store.dtype) \
+ is not h5py.Reference \
+ and not np.iscomplexobj(data) \
+ and options.fielded_numpy_ndarray_as_struct:
+ # If the group doesn't exist, it needs to be created. If it
+ # already exists but is not a group, it needs to be deleted
+ # before being created.
+
+ if name not in grp:
+ grp.create_group(name)
+ elif not isinstance(grp[name], h5py.Group):
+ del grp[name]
+ grp.create_group(name)
+
+ grp2 = grp[name]
+
+ # Write the metadata, and set the MATLAB_class to 'struct'
+ # explicitly.
+ self.write_metadata(f, grp, name, data, type_string,
+ options)
+ if options.matlab_compatible:
+ set_attribute_string(grp[name], 'MATLAB_class',
+ 'struct')
+
+ # Grab the list of fields.
+ field_names = list(data_to_store.dtype.fields.keys())
+
+ # Delete any Datasets/Groups not corresponding to a field
+ # name in data if that option is set.
+
+ if options.delete_unused_variables:
+ for field in {i for i in grp2}.difference( \
+ set(field_names)):
+ del grp2[field]
+
+ # Go field by field making an object array (make an empty
+ # object array and assign element wise) and write it inside
+ # the Group. The H5PATH attribute needs to be set
+ # appropriately, while all other attributes need to be
+ # deleted.
+ for field in field_names:
+ new_data = np.zeros(shape=data_to_store.shape,
+ dtype='object')
+ for index, x in np.ndenumerate(data_to_store):
+ new_data[index] = x[field]
+
+ write_data(f, grp2, field, new_data, None, options)
+
+ if field in grp2:
+ if options.matlab_compatible:
+ set_attribute_string(grp2[field], 'H5PATH',
+ grp2.name)
+ else:
+ del_attribute(grp2[field], 'H5PATH')
+
+ for attribute in (set(grp2[field].attrs.keys()) \
+ - {'H5PATH'}):
+ del_attribute(grp2[field], attribute)
else:
- grp[name][...] = data_to_store
-
- # Write the metadata using the inherited function (good enough).
+ # The data must first be written. If name is not present
+ # yet, then it must be created. If it is present, but not a
+ # Dataset, has the wrong dtype, or is the wrong shape; then
+ # it must be deleted and then written. Otherwise, it is just
+ # overwritten in place (note, this will not change any
+ # filters or chunking settings, but will keep the file from
+ # growing needlessly).
+
+ if name not in grp:
+ grp.create_dataset(name, data=data_to_store,
+ **options.array_options)
+ elif not isinstance(grp[name], h5py.Dataset) \
+ or grp[name].dtype != data_to_store.dtype \
+ or grp[name].shape != data_to_store.shape:
+ del grp[name]
+ grp.create_dataset(name, data=data_to_store,
+ **options.array_options)
+ else:
+ grp[name][...] = data_to_store
- self.write_metadata(f, grp, name, data, type_string, options)
+ # Write the metadata using the inherited function (good
+ # enough).
+ self.write_metadata(f, grp, name, data, type_string,
+ options)
def write_metadata(self, f, grp, name, data, type_string, options):
# First, call the inherited version to do most of the work.
diff --git a/hdf5storage/__init__.py b/hdf5storage/__init__.py
index f73ff53..fdfd19b 100644
--- a/hdf5storage/__init__.py
+++ b/hdf5storage/__init__.py
@@ -59,19 +59,20 @@ class Options(object):
compatible with MATLAB's HDF5 based version 7.3 mat file format. It
overrides several options to the values in the following table.
- ============================ ====================
- attribute value
- ============================ ====================
- delete_unused_variables ``True``
- make_atleast_2d ``True``
- convert_numpy_bytes_to_utf16 ``True``
- convert_numpy_str_to_utf16 ``True``
- convert_bools_to_uint8 ``True``
- reverse_dimension_order ``True``
- store_shape_for_empty ``True``
- complex_names ``('real', 'imag')``
- group_for_references ``'/#refs#'``
- ============================ ====================
+ =============================== ====================
+ attribute value
+ =============================== ====================
+ delete_unused_variables ``True``
+ fielded_numpy_ndarray_as_struct ``True``
+ make_atleast_2d ``True``
+ convert_numpy_bytes_to_utf16 ``True``
+ convert_numpy_str_to_utf16 ``True``
+ convert_bools_to_uint8 ``True``
+ reverse_dimension_order ``True``
+ store_shape_for_empty ``True``
+ complex_names ``('real', 'imag')``
+ group_for_references ``'/#refs#'``
+ =============================== ====================
In addition to setting these options, a specially formatted block of
bytes is put at the front of the file so that MATLAB can recognize
@@ -88,6 +89,8 @@ class Options(object):
'error'.
delete_unused_variables : bool, optional
See Attributes.
+ fielded_numpy_ndarray_as_struct : bool, optional
+ See Attributes.
make_atleast_2d : bool, optional
See Attributes.
convert_numpy_bytes_to_utf16 : bool, optional
@@ -115,6 +118,7 @@ class Options(object):
matlab_compatible : bool
action_for_matlab_incompatible : str
delete_unused_variables : bool
+ fielded_numpy_ndarray_as_struct : bool
make_atleast_2d : bool
convert_numpy_bytes_to_utf16 : bool
convert_numpy_str_to_utf16 : bool
@@ -130,12 +134,13 @@ class Options(object):
``h5py.Group.create_dataset`` options for writing scalars.
marshaller_collection : MarshallerCollection
Collection of marshallers to disk.
-
+
"""
def __init__(self, store_python_metadata=True,
matlab_compatible=True,
action_for_matlab_incompatible='error',
delete_unused_variables=False,
+ fielded_numpy_ndarray_as_struct=False,
make_atleast_2d=False,
convert_numpy_bytes_to_utf16=False,
convert_numpy_str_to_utf16=False,
@@ -151,6 +156,7 @@ class Options(object):
self._store_python_metadata = True
self._action_for_matlab_incompatible = 'error'
self._delete_unused_variables = False
+ self._fielded_numpy_ndarray_as_struct = False
self._make_atleast_2d = False
self._convert_numpy_bytes_to_utf16 = False
self._convert_numpy_str_to_utf16 = False
@@ -170,6 +176,8 @@ class Options(object):
self.action_for_matlab_incompatible = \
action_for_matlab_incompatible
self.delete_unused_variables = delete_unused_variables
+ self.fielded_numpy_ndarray_as_struct = \
+ fielded_numpy_ndarray_as_struct
self.make_atleast_2d = make_atleast_2d
self.convert_numpy_bytes_to_utf16 = convert_numpy_bytes_to_utf16
self.convert_numpy_str_to_utf16 = convert_numpy_str_to_utf16
@@ -232,19 +240,20 @@ class Options(object):
which is HDF5 based. Setting it to ``True`` forces other options
to hold the specific values in the table below.
- ============================ ====================
- attribute value
- ============================ ====================
- delete_unused_variables ``True``
- make_atleast_2d ``True``
- convert_numpy_bytes_to_utf16 ``True``
- convert_numpy_str_to_utf16 ``True``
- convert_bools_to_uint8 ``True``
- reverse_dimension_order ``True``
- store_shape_for_empty ``True``
- complex_names ``('real', 'imag')``
- group_for_references ``'/#refs#'``
- ============================ ====================
+ =============================== ====================
+ attribute value
+ =============================== ====================
+ delete_unused_variables ``True``
+ fielded_numpy_ndarray_as_struct ``True``
+ make_atleast_2d ``True``
+ convert_numpy_bytes_to_utf16 ``True``
+ convert_numpy_str_to_utf16 ``True``
+ convert_bools_to_uint8 ``True``
+ reverse_dimension_order ``True``
+ store_shape_for_empty ``True``
+ complex_names ``('real', 'imag')``
+ group_for_references ``'/#refs#'``
+ =============================== ====================
In addition to setting these options, a specially formatted
block of bytes is put at the front of the file so that MATLAB
@@ -261,6 +270,7 @@ class Options(object):
self._matlab_compatible = value
if value:
self._delete_unused_variables = True
+ self._fielded_numpy_ndarray_as_struct = True
self._make_atleast_2d = True
self._convert_numpy_bytes_to_utf16 = True
self._convert_numpy_str_to_utf16 = True
@@ -322,6 +332,32 @@ class Options(object):
self._matlab_compatible = False
@property
+ def fielded_numpy_ndarray_as_struct(self):
+ """ Whether or not to convert fielded ndarrays to structs.
+
+ bool
+
+ If ``True`` (defaults to ``False`` unless MATLAB compatibility
+ is being done), all ``numpy.ndarray``s with fields (compound
+ dtypes) are written as HDF5 Groups with the fields as Datasets
+ (correspond to struct arrays in MATLAB).
+
+ Must be ``True`` if doing MATLAB compatibility. MATLAB cannot
+ handle the compound types made by writing these types.
+
+ """
+ return self._fielded_numpy_ndarray_as_struct
+
+ @fielded_numpy_ndarray_as_struct.setter
+ def fielded_numpy_ndarray_as_struct(self, value):
+ # Check that it is a bool, and then set it. If it is false, we
+ # are not doing MATLAB compatible formatting.
+ if isinstance(value, bool):
+ self._fielded_numpy_ndarray_as_struct = value
+ if not self._fielded_numpy_ndarray_as_struct:
+ self._matlab_compatible = False
+
+ @property
def make_atleast_2d(self):
""" Whether or not to convert scalar types to 2D arrays.
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/python-hdf5storage.git
More information about the debian-science-commits
mailing list