[hdf-compass] 207/295: h5serv plugin
Ghislain Vaillant
ghisvail-guest at moszumanska.debian.org
Sun May 8 10:35:45 UTC 2016
This is an automated email from the git hooks/post-receive script.
ghisvail-guest pushed a commit to branch debian/master
in repository hdf-compass.
commit c3b23d636228035740e77180d95861e5ceddf839
Author: jreadey <jreadey at hdfgroup.org>
Date: Thu Oct 29 08:28:35 2015 -0700
h5serv plugin
---
hdf_compass/compass_viewer/viewer.py | 6 +
hdf_compass/hdf5rest_model/__init__.py | 18 ++
hdf_compass/hdf5rest_model/hdf5dtype.py | 458 ++++++++++++++++++++++++++++++++
hdf_compass/hdf5rest_model/model.py | 385 +++++++++++++++++++++++++++
hdf_compass/hdf5rest_model/test.py | 24 ++
5 files changed, 891 insertions(+)
diff --git a/hdf_compass/compass_viewer/viewer.py b/hdf_compass/compass_viewer/viewer.py
index 7a3df92..e521419 100644
--- a/hdf_compass/compass_viewer/viewer.py
+++ b/hdf_compass/compass_viewer/viewer.py
@@ -227,6 +227,12 @@ def load_plugins():
% (".".join(str(i) for i in lib.__version__), ".".join(str(i) for i in lib.__dap__)))
except ImportError:
log.warning("Opendap plugin: NOT loaded")
+
+ from hdf_compass import hdf5rest_model
+ try:
+ from hdf_compass import hdf5rest_model
+ except ImportError:
+ log.warning("HDF5 REST: plugin NOT loaded")
def run():
diff --git a/hdf_compass/hdf5rest_model/__init__.py b/hdf_compass/hdf5rest_model/__init__.py
new file mode 100644
index 0000000..e2c7449
--- /dev/null
+++ b/hdf_compass/hdf5rest_model/__init__.py
@@ -0,0 +1,18 @@
+##############################################################################
+# Copyright by The HDF Group. #
+# All rights reserved. #
+# #
+# This file is part of the HDF Compass Viewer. The full HDF Compass #
+# copyright notice, including terms governing use, modification, and        #
+# redistribution, is contained in                                           #
+# the file COPYING, which can be found at the root of the source code #
+# distribution tree. If you do not have access to this file, you may #
+# request a copy from help at hdfgroup.org. #
+##############################################################################
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from .model import HDF5RestStore, HDF5RestGroup, HDF5RestDataset, HDF5RestKV
+
+import logging
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
diff --git a/hdf_compass/hdf5rest_model/hdf5dtype.py b/hdf_compass/hdf5rest_model/hdf5dtype.py
new file mode 100755
index 0000000..4d75a40
--- /dev/null
+++ b/hdf_compass/hdf5rest_model/hdf5dtype.py
@@ -0,0 +1,458 @@
+##############################################################################
+# Copyright by The HDF Group. #
+# All rights reserved. #
+# #
+# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and #
+# Utilities. The full HDF5 REST Server copyright notice, including #
+# terms governing use, modification, and redistribution, is contained in #
+# the file COPYING, which can be found at the root of the source code #
+# distribution tree. If you do not have access to this file, you may #
+# request a copy from help at hdfgroup.org. #
+##############################################################################
+
+"""
+This class is used to map between HDF5 type representations and numpy types
+
+"""
+import numpy as np
+from h5py.h5t import special_dtype
+from h5py.h5t import check_dtype
+from h5py.h5r import Reference
+from h5py.h5r import RegionReference
+
+
+"""
+Convert the given type item to a predefined type string for
+predefined integer and floating point types ("H5T_STD_I64LE", et. al).
+For compound types, recursively iterate through the typeItem and do same
+conversion for fields of the compound type.
+"""
def getTypeResponse(typeItem):
    """Convert a type item to its wire (JSON) representation.

    Committed types collapse to a 'datatypes/<uuid>' reference string;
    predefined integer/float types collapse to {'class', 'base'};
    compound types are converted field-by-field (recursively); any other
    class is echoed minus internal size bookkeeping.
    """
    if 'uuid' in typeItem:
        # committed (named) type: reference by uuid, don't repeat the definition
        return 'datatypes/' + typeItem['uuid']

    type_class = typeItem['class']
    if type_class in ('H5T_INTEGER', 'H5T_FLOAT'):
        # just return the class and base for pre-defined types
        return {'class': type_class, 'base': typeItem['base']}
    if type_class == 'H5T_OPAQUE':
        return {'class': 'H5T_OPAQUE', 'size': typeItem['size']}
    if type_class == 'H5T_REFERENCE':
        return {'class': 'H5T_REFERENCE', 'base': typeItem['base']}
    if type_class == 'H5T_COMPOUND':
        fields = [{'name': f['name'], 'type': getTypeResponse(f['type'])}
                  for f in typeItem['fields']]
        return {'class': 'H5T_COMPOUND', 'fields': fields}

    # otherwise, return the full type, dropping size bookkeeping keys and
    # recursing into a dict-valued 'base'
    response = {}
    for key, value in typeItem.items():
        if key == 'base':
            response[key] = getTypeResponse(value) if isinstance(value, dict) else value
        elif key not in ('size', 'base_size'):
            response[key] = value
    return response
+
+
+"""
+ Return type info.
+ For primitive types, return string with typename
+ For compound types return array of dictionary items
+"""
def getTypeItem(dt):
    """Return a JSON-style type description for numpy dtype *dt*.

    Primitive dtypes yield a single element description; compound dtypes
    yield {'class': 'H5T_COMPOUND', 'fields': [...]} with one entry per
    field in declaration order.
    """
    if len(dt) > 1:
        # compound type: describe each named field
        return {
            'class': 'H5T_COMPOUND',
            'fields': [{'name': name, 'type': getTypeElement(dt[name])}
                       for name in dt.names],
        }
    return getTypeElement(dt)
+
+"""
+ Get element type info - either a complete type or element of a compound type
+ Returns dictionary
+ Note: only getTypeItem should call this!
+"""
+
def getTypeElement(dt):
    """Get element type info - either a complete type or an element of a
    compound type.

    Returns a dictionary describing *dt*; raises TypeError for kinds that
    cannot be mapped.  Note: only getTypeItem should call this!
    """
    if len(dt) > 1:
        raise Exception("unexpected numpy type passed to getTypeElement")

    # Py2/Py3 compatibility: 'unicode' only exists under Python 2.
    # (The original referenced 'unicode' directly, a NameError on Python 3.)
    try:
        unicode_type = unicode
    except NameError:
        unicode_type = str

    type_info = {}

    if dt.kind == 'O':
        # numpy object type - assume this is a h5py variable length extension
        h5t_check = check_dtype(vlen=dt)
        if h5t_check is not None:
            if h5t_check == str:
                # NOTE(review): under Python 3 'str' is unicode, so vlen str
                # dtypes land in this ASCII branch -- confirm desired charset.
                type_info['class'] = 'H5T_STRING'
                type_info['length'] = 'H5T_VARIABLE'
                type_info['charSet'] = 'H5T_CSET_ASCII'
                type_info['strPad'] = 'H5T_STR_NULLTERM'
            elif h5t_check == unicode_type:
                type_info['class'] = 'H5T_STRING'
                type_info['length'] = 'H5T_VARIABLE'
                type_info['charSet'] = 'H5T_CSET_UTF8'
                type_info['strPad'] = 'H5T_STR_NULLTERM'
            elif type(h5t_check) == np.dtype:
                # vlen sequence data (not a string)
                type_info['class'] = 'H5T_VLEN'
                type_info['size'] = 'H5T_VARIABLE'
                type_info['base'] = getBaseType(h5t_check)
            else:
                # unknown vlen type
                # (bug fix: concatenating the type object itself raised a
                # confusing TypeError instead of this message)
                raise TypeError("Unknown h5py vlen type: " + str(h5t_check))
        else:
            # not vlen - check for reference type
            h5t_check = check_dtype(ref=dt)
            if h5t_check is not None:
                type_info['class'] = 'H5T_REFERENCE'
                if h5t_check is Reference:
                    type_info['base'] = 'H5T_STD_REF_OBJ'  # objref
                elif h5t_check is RegionReference:
                    type_info['base'] = 'H5T_STD_REF_DSETREG'  # region ref
                else:
                    raise TypeError("unexpected reference type")
            else:
                raise TypeError("unknown object type")
    elif dt.kind == 'V':
        baseType = getBaseType(dt)
        if dt.shape:
            # array type
            type_info['dims'] = dt.shape
            type_info['class'] = 'H5T_ARRAY'
            type_info['base'] = baseType
        elif baseType['class'] == 'H5T_OPAQUE':
            # expecting this to be an opaque type
            type_info = baseType  # just promote the base type
        else:
            raise TypeError("unexpected Void type")
    elif dt.kind == 'S':
        # fixed-width ASCII string type
        type_info = getBaseType(dt)
    elif dt.kind == 'U':
        # fixed-width Unicode string type
        type_info = getBaseType(dt)
    elif dt.kind == 'i' or dt.kind == 'u':
        baseType = getBaseType(dt)
        # numpy integer type - but check to see if this is the h5py
        # enum extension
        mapping = check_dtype(enum=dt)
        if mapping:
            # yes, this is an enum!
            type_info['class'] = 'H5T_ENUM'
            type_info['mapping'] = mapping
            type_info['base'] = baseType
        else:
            type_info = baseType  # just use base type
    elif dt.kind == 'f':
        # floating point type
        type_info = getBaseType(dt)
    else:
        # unexpected kind
        raise TypeError("unexpected dtype kind: " + dt.kind)

    return type_info
+
+"""
+Get Base type info for given type element.
+"""
def getBaseType(dt):
    """Get type info for the base (scalar) type of dtype *dt*.

    Returns a dict with at least a 'class' key; integer and float types
    additionally get a 'base' key naming the equivalent HDF5 predefined
    type (e.g. 'H5T_STD_I32LE') when one exists for the item size.
    """
    if len(dt) > 1:
        # bug fix: the message used to name getTypeElement, not this function
        raise TypeError("unexpected numpy type passed to getBaseType")

    predefined_int_types = {
        'int8': 'H5T_STD_I8',
        'uint8': 'H5T_STD_U8',
        'int16': 'H5T_STD_I16',
        'uint16': 'H5T_STD_U16',
        'int32': 'H5T_STD_I32',
        'uint32': 'H5T_STD_U32',
        'int64': 'H5T_STD_I64',
        'uint64': 'H5T_STD_U64'
    }
    predefined_float_types = {
        'float32': 'H5T_IEEE_F32',
        'float64': 'H5T_IEEE_F64'
    }
    type_info = {}

    if dt.base.kind == 'S':
        # Fixed length (ASCII) string type
        # NOTE(review): kind 'U' (fixed unicode) is not handled and falls
        # through to the TypeError below -- confirm whether that is intended.
        type_info['class'] = 'H5T_STRING'
        type_info['charSet'] = 'H5T_CSET_ASCII'
        type_info['length'] = dt.base.itemsize
        type_info['strPad'] = 'H5T_STR_NULLPAD'
    elif dt.base.kind == 'V':
        type_info['class'] = 'H5T_OPAQUE'
        type_info['size'] = dt.itemsize
        type_info['tag'] = ''  # todo - determine tag
    elif dt.base.kind == 'i' or dt.base.kind == 'u':
        type_info['class'] = 'H5T_INTEGER'
        byteorder = 'BE' if dt.base.byteorder == '>' else 'LE'
        if dt.base.name in predefined_int_types:
            # maps to one of the HDF5 predefined types; otherwise no 'base'
            # key is emitted
            type_info['base'] = predefined_int_types[dt.base.name] + byteorder
    elif dt.base.kind == 'f':
        type_info['class'] = 'H5T_FLOAT'
        byteorder = 'BE' if dt.base.byteorder == '>' else 'LE'
        if dt.base.name in predefined_float_types:
            # maps to one of the HDF5 predefined types; otherwise no 'base'
            # key is emitted
            type_info['base'] = predefined_float_types[dt.base.name] + byteorder
    elif dt.base.kind == 'O':
        # check for reference type
        # NOTE(review): this checks 'dt', not 'dt.base' -- confirm; the
        # sibling getTypeElement makes the same call on dt.
        h5t_check = check_dtype(ref=dt)
        if h5t_check is not None:
            type_info['class'] = 'H5T_REFERENCE'
            if h5t_check is Reference:
                type_info['base'] = 'H5T_STD_REF_OBJ'  # objref
            elif h5t_check is RegionReference:
                type_info['base'] = 'H5T_STD_REF_DSETREG'  # region ref
            else:
                raise TypeError("unexpected reference type")
        else:
            raise TypeError("unknown object type")
    else:
        # unexpected kind
        raise TypeError("unexpected dtype base kind: " + dt.base.kind)

    return type_info
+
+
def getNumpyTypename(hdf5TypeName, typeClass=None):
    """Map an HDF5 predefined type name to a numpy typename string.

    E.g. 'H5T_STD_I32LE' -> '<i4'.  *typeClass*, when given, restricts
    the match to 'H5T_INTEGER' or 'H5T_FLOAT'.  Raises if no predefined
    type matches.
    """
    int_map = {
        'H5T_STD_I8': 'i1', 'H5T_STD_U8': 'u1',
        'H5T_STD_I16': 'i2', 'H5T_STD_U16': 'u2',
        'H5T_STD_I32': 'i4', 'H5T_STD_U32': 'u4',
        'H5T_STD_I64': 'i8', 'H5T_STD_U64': 'u8',
    }
    float_map = {
        'H5T_IEEE_F32': 'f4',
        'H5T_IEEE_F64': 'f8',
    }

    if len(hdf5TypeName) < 3:
        raise Exception("Type Error: invalid typename: ")

    # an LE/BE suffix selects the byte order; little-endian is the default
    if hdf5TypeName.endswith('BE'):
        endian, key = '>', hdf5TypeName[:-2]
    elif hdf5TypeName.endswith('LE'):
        endian, key = '<', hdf5TypeName[:-2]
    else:
        endian, key = '<', hdf5TypeName

    if typeClass in (None, 'H5T_INTEGER') and key in int_map:
        return endian + int_map[key]
    if typeClass in (None, 'H5T_FLOAT') and key in float_map:
        return endian + float_map[key]
    raise TypeError("Type Error: invalid type")
+
+
def createBaseDataType(typeItem):
    """Create a numpy dtype for a non-compound type description.

    *typeItem* is either a predefined HDF5 type name (e.g.
    'H5T_STD_I32LE') or a dict with a 'class' key describing the type.
    Raises TypeError/KeyError on malformed input.
    """
    # Py2/Py3 compatibility: 'unicode' only exists under Python 2.
    # (The original referenced 'unicode' directly, a NameError on Python 3.)
    try:
        unicode_type = unicode
    except NameError:
        unicode_type = str

    dtRet = None
    if isinstance(typeItem, (str, unicode_type)):
        # should be one of the predefined types
        dtName = getNumpyTypename(typeItem)
        return np.dtype(dtName)  # return predefined type

    if type(typeItem) != dict:
        raise TypeError("Type Error: invalid type")

    if 'class' not in typeItem:
        raise KeyError("'class' not provided")
    typeClass = typeItem['class']

    # build a numpy dims prefix such as '(2, 3)' ('' for scalar)
    dims = ''
    if 'dims' in typeItem:
        if type(typeItem['dims']) == int:
            # bug fix: was (typeItem['dims']) -- parentheses without a comma
            # are not a tuple, so tuple() below raised TypeError for int dims
            dims = (typeItem['dims'],)
        elif type(typeItem['dims']) not in (list, tuple):
            raise TypeError("expected list or integer for dims")
        else:
            dims = typeItem['dims']
        dims = str(tuple(dims))

    if typeClass == 'H5T_INTEGER':
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        baseType = getNumpyTypename(typeItem['base'], typeClass='H5T_INTEGER')
        dtRet = np.dtype(dims + baseType)
    elif typeClass == 'H5T_FLOAT':
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        baseType = getNumpyTypename(typeItem['base'], typeClass='H5T_FLOAT')
        dtRet = np.dtype(dims + baseType)
    elif typeClass == 'H5T_STRING':
        if 'length' not in typeItem:
            raise KeyError("'length' not provided")
        if 'charSet' not in typeItem:
            raise KeyError("'charSet' not provided")

        if typeItem['length'] == 'H5T_VARIABLE':
            if dims:
                raise TypeError("ArrayType is not supported for variable len types")
            if typeItem['charSet'] == 'H5T_CSET_ASCII':
                dtRet = special_dtype(vlen=str)
            elif typeItem['charSet'] == 'H5T_CSET_UTF8':
                dtRet = special_dtype(vlen=unicode_type)
            else:
                raise TypeError("unexpected 'charSet' value")
        else:
            nStrSize = typeItem['length']
            if type(nStrSize) != int:
                raise TypeError("expecting integer value for 'length'")
            if typeItem['charSet'] == 'H5T_CSET_ASCII':
                type_code = 'S'
            elif typeItem['charSet'] == 'H5T_CSET_UTF8':
                raise TypeError("fixed-width unicode strings are not supported")
            else:
                raise TypeError("unexpected 'charSet' value")
            dtRet = np.dtype(dims + type_code + str(nStrSize))  # fixed size string
    elif typeClass == 'H5T_VLEN':
        if dims:
            raise TypeError("ArrayType is not supported for variable len types")
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        baseType = createBaseDataType(typeItem['base'])
        dtRet = special_dtype(vlen=np.dtype(baseType))
    elif typeClass == 'H5T_OPAQUE':
        if dims:
            raise TypeError("Opaque Type is not supported for variable len types")
        if 'size' not in typeItem:
            raise KeyError("'size' not provided")
        nSize = int(typeItem['size'])
        if nSize <= 0:
            # bug fix: message said "non-negative" but zero is rejected too
            raise TypeError("'size' must be positive")
        dtRet = np.dtype('V' + str(nSize))
    elif typeClass == 'H5T_ARRAY':
        if not dims:
            raise KeyError("'dims' must be provided for array types")
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        arrayBaseType = typeItem['base']
        if type(arrayBaseType) is dict:
            if "class" not in arrayBaseType:
                raise KeyError("'class' not provided for array base type")
            if arrayBaseType["class"] not in ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_STRING'):
                raise TypeError("Array Type base type must be integer, float, or string")
        baseType = createDataType(arrayBaseType)
        return np.dtype(dims + baseType.str)
    elif typeClass == 'H5T_REFERENCE':
        if 'base' not in typeItem:
            raise KeyError("'base' not provided")
        if typeItem['base'] == 'H5T_STD_REF_OBJ':
            dtRet = special_dtype(ref=Reference)
        elif typeItem['base'] == 'H5T_STD_REF_DSETREG':
            dtRet = special_dtype(ref=RegionReference)
        else:
            raise TypeError("Invalid base type for reference type")
    else:
        raise TypeError("Invalid type class")

    return dtRet
+
def createDataType(typeItem):
    """Create a numpy dtype from a JSON-style type description.

    Compound types are handled here; everything else is delegated to
    createBaseDataType().  Raises TypeError/KeyError on malformed input.
    """
    # Py2/Py3 compatibility: 'unicode' only exists under Python 2.
    # (The original referenced 'unicode' directly, a NameError on Python 3.)
    try:
        unicode_type = unicode
        _py2 = True
    except NameError:
        unicode_type = str
        _py2 = False

    if isinstance(typeItem, (str, unicode_type)):
        # should be one of the predefined types
        return np.dtype(getNumpyTypename(typeItem))

    if type(typeItem) != dict:
        raise TypeError("invalid type")

    if 'class' not in typeItem:
        raise KeyError("'class' not provided")
    typeClass = typeItem['class']

    if typeClass != 'H5T_COMPOUND':
        return createBaseDataType(typeItem)  # create non-compound dt

    if 'fields' not in typeItem:
        raise KeyError("'fields' not provided for compound type")
    fields = typeItem['fields']
    if type(fields) is not list:
        raise TypeError("Type Error: expected list type for 'fields'")
    if not fields:
        raise KeyError("no 'field' elements provided")

    subtypes = []
    for field in fields:
        if type(field) != dict:
            raise TypeError("Expected dictionary type for field")
        if 'name' not in field:
            raise KeyError("'name' missing from field")
        if 'type' not in field:
            raise KeyError("'type' missing from field")
        field_name = field['name']
        if isinstance(field_name, unicode_type):
            # numpy under Python 2 requires ascii byte-string field names.
            # bug fix: the original compared the encoded bytes back against
            # the unicode original -- on Py2 encode() raises
            # UnicodeEncodeError first (so the intended TypeError was never
            # raised), and on Py3 'unicode' was a NameError.
            try:
                ascii_name = field_name.encode('ascii')
            except UnicodeEncodeError:
                raise TypeError("non-ascii field name not allowed")
            if _py2:
                field['name'] = ascii_name  # preserve original Py2 behaviour
        dt = createDataType(field['type'])  # recursive call
        if dt is None:
            raise Exception("unexpected error")
        subtypes.append((field['name'], dt))  # append (name, dtype) tuple
    return np.dtype(subtypes)
+
+
+
+
+
+
diff --git a/hdf_compass/hdf5rest_model/model.py b/hdf_compass/hdf5rest_model/model.py
new file mode 100644
index 0000000..5e6a092
--- /dev/null
+++ b/hdf_compass/hdf5rest_model/model.py
@@ -0,0 +1,385 @@
+##############################################################################
+# Copyright by The HDF Group. #
+# All rights reserved. #
+# #
+# This file is part of the HDF Compass Viewer. The full HDF Compass #
+# copyright notice, including terms governing use, modification, and        #
+# redistribution, is contained in                                           #
+# the file COPYING, which can be found at the root of the source code #
+# distribution tree. If you do not have access to this file, you may #
+# request a copy from help at hdfgroup.org. #
+##############################################################################
+
+"""
+Implementation of compass_model classes for HDF5 files.
+"""
+from __future__ import absolute_import, division, print_function, unicode_literals
+
+from itertools import groupby
+import sys
+import os.path as op
+import posixpath as pp
+import json
+import requests
+
+import logging
+log = logging.getLogger(__name__)
+log.addHandler(logging.NullHandler())
+
+# Py2App can't successfully import otherwise
+from hdf_compass import compass_model
+from hdf_compass.utils import url2path
+
+from . import hdf5dtype
+
def get_json(endpoint, domain=None, uri=None):
    """GET *endpoint* (+ optional *uri*) and return the decoded JSON body.

    *domain*, when given, is sent as the HTTP Host header (the h5serv
    server uses it to select the data domain).  Raises IOError on any
    non-200 response.
    """
    req = endpoint
    if uri is not None:
        req += uri

    headers = {}
    if domain is not None:
        headers['host'] = domain

    # bug fix: replaced bare print() debugging with the module logger; the
    # original also did print("status_code: %d", code), which printed the
    # format string verbatim instead of interpolating.
    log.info("GET: %s", req)
    rsp = requests.get(req, headers=headers)
    log.info("RSP: %d: %s", rsp.status_code, rsp.text)
    if rsp.status_code != 200:
        log.error("status_code: %d", rsp.status_code)
        raise IOError(rsp.reason)
    return json.loads(rsp.text)
+
def sort_key(name):
    """ Sorting key for names in an HDF5 group.

    We provide "natural" sort order; e.g. "7" comes before "12".
    """
    # Group consecutive digit / non-digit runs; digit runs compare as ints.
    # bug fix: the original used key=unicode.isdigit, which is a NameError
    # on Python 3; calling isdigit() on each character works on both 2 and 3.
    return [(int(''.join(g)) if k else ''.join(g))
            for k, g in groupby(name, key=lambda ch: ch.isdigit())]
+
+
class HDF5RestStore(compass_model.Store):
    """
    Data store implementation backed by an h5serv (HDF5 REST API) endpoint.

    Keys are the full POSIX-style names of objects in the file; self.f maps
    each known key to its server URI (e.g. "/groups/<uuid>").
    """

    def __contains__(self, key):
        return key in self.f

    @property
    def url(self):
        return self._url

    @property
    def display_name(self):
        # prefer the domain (data file) name when one was supplied
        if self.domain:
            return self.domain
        else:
            return self.endpoint

    @property
    def root(self):
        return self['/']

    @property
    def valid(self):
        # bug fix: was testing self._keystore, which is never assigned;
        # self.f is the key -> URI map built in __init__
        return '/' in self.f

    @property
    def keystore(self):
        # bug fix: was returning self._keystore, which is never assigned
        return self.f

    @staticmethod
    def can_handle(url):
        """Return True if *url* answers like a h5serv domain endpoint."""
        try:
            rsp_json = get_json(url)
            # a h5serv domain response carries all of these keys
            flag = all(k in rsp_json
                       for k in ("root", "created", "hrefs", "lastModified"))
            log.debug("able to handle %s? %r" % (url, flag))
            return flag
        except Exception:
            # any connection/parse failure means we can't handle this url
            # (the original also had an unreachable "return True" here)
            log.debug("able to handle %s? no" % url)
            return False

    def __init__(self, url):
        if not self.can_handle(url):
            raise ValueError(url)
        # bug fix: the url property reads self._url, which was never set
        self._url = url

        # extract domain if there's a "host" query param
        queryParam = "host="
        nindex = url.find('?' + queryParam)
        if nindex < 0:
            nindex = url.find('&' + queryParam)
        if nindex > 0:
            domain = url[(nindex + len(queryParam) + 1):]
            # trim any additional query params
            # bug fix: was domain.find['&'] -- subscripting the method
            # object raised TypeError instead of searching
            nindex = domain.find('&')
            if nindex > 0:
                domain = domain[:nindex]
            self._domain = domain
        else:
            self._domain = None

        # the endpoint is the url with any query string stripped
        nindex = url.find('?')
        if nindex < 0:
            self._endpoint = url
        else:
            self._endpoint = url[:nindex]

        rsp = self.get('/')

        # key -> server URI map; the root group is always present
        self.f = {}
        self.f['/'] = "/groups/" + rsp['root']

    @property
    def endpoint(self):
        return self._endpoint

    @property
    def domain(self):
        return self._domain

    @property
    def objid(self):
        # NOTE(review): self._objid is never assigned anywhere in this
        # module, so accessing this property raises AttributeError --
        # confirm whether any caller relies on it.
        return self._objid

    def get(self, uri):
        """GET *uri* from the endpoint, returning the decoded JSON body."""
        rsp = get_json(self.endpoint, domain=self.domain, uri=uri)
        return rsp

    def close(self):
        # nothing to release; the store is stateless HTTP
        pass

    def get_parent(self, key):
        # HDFCompass requires the parent of the root container be None
        if key == "" or key == "/":
            return None

        pkey = pp.dirname(key)
        if pkey == "":
            pkey = "/"

        # bug fix: was testing self._keystore, which is never assigned
        if pkey not in self.f:
            # is it possible to get to a key without traversing the parents?
            # if so, we should query the server for the given path
            raise IOError("parent not found")
        return self[pkey]
+
+
class HDF5RestGroup(compass_model.Container):
    """ Represents an HDF5 group, to be displayed in the browser view. """

    class_kind = "HDF5 Group"

    @staticmethod
    def can_handle(store, key):
        return key in store and store.f[key].startswith("/groups/")

    def __init__(self, store, key):
        self._store = store
        self._key = key
        self._uri = store.f[key]
        self._xnames = None  # lazily-populated list of member link names
        rsp = store.get(self._uri)
        self._count = rsp["linkCount"]
        self.get_names()

    def get_names(self):
        """Return the member names of this group, fetching them (and
        registering each hard link's URI in the store) on first call."""
        # Lazily build the list of names; this helps when browsing big files
        if self._xnames is None:
            rsp = self.store.get(self._uri + "/links")
            self._xnames = []
            links = rsp["links"]
            for link in links:
                name = link["title"]
                self._xnames.append(name)
                link_key = pp.join(self.key, name)
                if link_key not in self.store.f:
                    if link["class"] == "H5L_TYPE_HARD":
                        self.store.f[link_key] = '/' + link["collection"] + '/' + link["id"]
                    else:
                        pass  # todo support soft/external links

            # Natural sort is expensive
            if len(self._xnames) < 1000:
                self._xnames.sort(key=sort_key)

        return self._xnames

    @property
    def key(self):
        return self._key

    @property
    def store(self):
        return self._store

    @property
    def display_name(self):
        name = pp.basename(self.key)
        if name == "":
            name = '/'
        return name

    @property
    def display_title(self):
        return "%s %s" % (self.store.display_name, self.key)

    @property
    def description(self):
        return 'Group "%s" (%d members)' % (self.display_name, len(self))

    def __len__(self):
        return self._count

    def __iter__(self):
        # bug fix: was iterating self._names, which is never assigned
        # (the cached attribute is self._xnames, via get_names())
        for name in self.get_names():
            yield self.store[pp.join(self.key, name)]

    def __getitem__(self, idx):
        # bug fix: was indexing self._names, which is never assigned
        name = self.get_names()[idx]
        return self.store[pp.join(self.key, name)]
+
+
class HDF5RestDataset(compass_model.Array):
    """ Represents an HDF5 dataset. """

    class_kind = "HDF5 Dataset"

    @staticmethod
    def can_handle(store, key):
        return key in store and store.f[key].startswith("/datasets/")

    def __init__(self, store, key):
        self._store = store
        self._key = key
        self._uri = store.f[key]
        rsp = store.get(self._uri)
        # the server reports the dataspace as a "shape" object
        shape_json = rsp["shape"]
        if shape_json["class"] == "H5S_SCALAR":
            self._shape = ()
        elif shape_json["class"] == "H5S_SIMPLE":
            self._shape = shape_json["dims"]
        else:
            raise IOError("Unexpected shape class: " + shape_json["class"])
        # map the server's JSON type description to a numpy dtype
        self._dtype = hdf5dtype.createDataType(rsp["type"])

    @property
    def key(self):
        return self._key

    @property
    def store(self):
        return self._store

    @property
    def display_name(self):
        return pp.basename(self.key)

    @property
    def description(self):
        return 'Dataset "%s"' % (self.display_name,)

    @property
    def shape(self):
        return self._shape

    @property
    def dtype(self):
        return self._dtype

    def __getitem__(self, args):
        # TODO: fetch the selected data from the server (<uri>/value);
        # data retrieval is not implemented yet.  The original contained an
        # unreachable "return self._dset[args]" (self._dset is never
        # assigned), removed here.
        log.debug("getitem: %s", args)
        return None

    def is_plottable(self):
        if self.dtype.kind == 'S':
            log.debug("Not plottable since ASCII String (characters: %d)" % self.dtype.itemsize)
            return False
        if self.dtype.kind == 'U':
            log.debug("Not plottable since Unicode String (characters: %d)" % self.dtype.itemsize)
            return False
        return True
+
+
class HDF5RestKV(compass_model.KeyValue):
    """ A KeyValue node used for HDF5 attributes. """

    class_kind = "HDF5 Attributes"

    @staticmethod
    def can_handle(store, key):
        # attributes are exposed on groups, datasets and committed datatypes
        canhandle = False
        if key in store:
            uri = store.f[key]
            if uri.startswith("/groups/"):
                canhandle = True
            elif uri.startswith("/datasets/"):
                canhandle = True
            elif uri.startswith("/datatypes/"):
                canhandle = True
        return canhandle

    def __init__(self, store, key):
        self._store = store
        self._key = key
        self._uri = store.f[key]

        # fetch the attribute listing for this object; only the names are
        # kept -- values are fetched on demand in __getitem__
        rsp = store.get(self._uri + "/attributes")
        self._names = [attr["name"] for attr in rsp["attributes"]]

    @property
    def key(self):
        return self._key

    @property
    def store(self):
        return self._store

    @property
    def display_name(self):
        n = pp.basename(self.key)
        return n if n != '' else '/'

    @property
    def description(self):
        return self.display_name

    @property
    def keys(self):
        # return a copy so callers can't mutate our cache
        return self._names[:]

    def __getitem__(self, name):
        # bug fix: the original returned self._obj.attrs[name], but
        # self._obj is never assigned anywhere -- fetch the attribute from
        # the server instead.
        # NOTE(review): assumes the h5serv per-attribute resource
        # (GET <uri>/attributes/<name>) returns a "value" field -- confirm
        # against the server's REST API.
        rsp = self.store.get(self._uri + "/attributes/" + name)
        return rsp["value"]
+
+
+
+
# Register the node handlers with the store class.  Presumably the viewer
# consults the registered handlers' can_handle() to resolve each key --
# confirm the resolution order against compass_model.Store.push.
HDF5RestStore.push(HDF5RestKV)
HDF5RestStore.push(HDF5RestDataset)
#HDF5RestStore.push(HDF5Text)
HDF5RestStore.push(HDF5RestGroup)
#HDF5RestStore.push(HDF5Image)

# make the REST store itself known to the top-level model registry
compass_model.push(HDF5RestStore)
diff --git a/hdf_compass/hdf5rest_model/test.py b/hdf_compass/hdf5rest_model/test.py
new file mode 100644
index 0000000..3fd21a7
--- /dev/null
+++ b/hdf_compass/hdf5rest_model/test.py
@@ -0,0 +1,24 @@
+##############################################################################
+# Copyright by The HDF Group. #
+# All rights reserved. #
+# #
+# This file is part of the HDF Compass Viewer. The full HDF Compass #
+# copyright notice, including terms governing use, modification, and        #
+# redistribution, is contained in                                           #
+# the file COPYING, which can be found at the root of the source code #
+# distribution tree. If you do not have access to this file, you may #
+# request a copy from help at hdfgroup.org. #
+##############################################################################
+from __future__ import absolute_import, division, print_function
+
+from hdf_compass.compass_model.test import container, store
+from hdf_compass.hdf5rest_model import HDF5RestGroup, HDF5RestStore
+from hdf_compass.utils import data_url
+
import os  # NOTE(review): unused in this script

# endpoint of a locally running h5serv instance; the smoke tests below
# require the server to be up and answering at this address
url = "http://127.0.0.1:5000"

# exercise the generic compass_model test harness against the REST plugin:
# 'store' checks Store behaviour, 'container' checks group traversal
# starting from the root key
s = store(HDF5RestStore, url)
c = container(HDF5RestStore, url, HDF5RestGroup, "/")
+
--
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/hdf-compass.git
More information about the debian-science-commits
mailing list