From 339f32ca888688c6458903302a6fd76525b9b4b3 Mon Sep 17 00:00:00 2001 From: Aleksandar Jelenak Date: Sat, 11 Feb 2017 11:54:46 -0500 Subject: [PATCH 1/2] Add production rules for dataset byte streams --- docs/bnf/dataset.rst | 16 +++++++++++++++- 1 file changed, 15 insertions(+), 1 deletion(-) diff --git a/docs/bnf/dataset.rst b/docs/bnf/dataset.rst index d801d23..0a7881d 100644 --- a/docs/bnf/dataset.rst +++ b/docs/bnf/dataset.rst @@ -8,7 +8,8 @@ Dataset : "type" ":" `datatype` | `datatype_object_ref` "," : "shape" ":" `dataspace` "," : "value" ":" `json_value` "," - : "creationProperties" ":" `dcpl` + : "creationProperties" ":" `dcpl` "," + : "byteStreams" ":" `byte_stream_array` : "}" json_value: `json_string` :| `json_number` @@ -53,3 +54,16 @@ Dataset : "offset" ":" `non_negative_integer` : "size" ":" `positive_integer` : "}" + byte_stream_array: "[" `byte_stream_list` "]" + byte_stream_list: `byte_stream`, ("," `byte_stream`)* + byte_stream: "{" + : "offset" ":" `non_negative_integer` "," + : "size" ":" `non_negative_integer` "," + : "uuid" ":" `uuid` "," + : "cksum" ":" `checksum` "," + : "dspace_anchor" ":" `dims_array` + : "}" + checksum: "{" + : "type" ":" `identifier` "," + : "value" ":" `ascii_string_wo_slash` + : "}" From 0f95e8427329c880c46fe06fe96912964f2ee90b Mon Sep 17 00:00:00 2001 From: Aleksandar Jelenak Date: Fri, 3 Jun 2022 13:52:16 +0200 Subject: [PATCH 2/2] Bring bytestreams branch up to date --- .gitattributes | 1 + .gitignore | 6 +- .readthedocs.yaml | 15 + .travis.yml | 21 +- COPYING | 2 +- Docker/README.md | 16 +- MANIFEST.in | 2 + README.rst | 63 +- data/hdf5/scalar_array_dset.h5 | Bin 0 -> 7228 bytes data/json/array_dset.json | 154 +- data/json/bool_attr.json | 45 + data/json/bool_dset.json | 62 + data/json/comp_complex.json | 408 ++ data/json/compound_array.json | 85 +- data/json/compound_array_vlen_string.json | 81 +- data/json/enum_attr.json | 124 +- data/json/enum_dset.json | 114 +- data/json/h5ex_d_sofloat.json | 4138 
++++++++--------- data/json/h5ex_d_soint.json | 4138 ++++++++--------- data/json/h5ex_d_unlimod.json | 1 - data/json/nullspace_dset.json | 21 +- data/json/scalar_array_dset.json | 61 + data/json/tall_with_udlink.json | 652 +-- data/json/tstr.json | 2440 ++++++++++ data/json/vlen_string_dset_utc.json | 2340 ++++++++++ docs/Installation/index.rst | 129 - docs/Utilities.rst | 90 - docs/bnf/attribute_collection.rst | 2 +- docs/bnf/dataset.rst | 2 +- docs/bnf/datatype.rst | 8 +- docs/bnf/index.rst | 6 +- docs/bp/index.rst | 2 - docs/conf.py | 181 +- docs/examples/array_dset.json | 2 +- docs/examples/array_dset.rst | 2 +- docs/examples/classic.rst | 6 +- docs/examples/compound.json | 1 + docs/examples/compound.rst | 2 +- docs/examples/datatype_object.json | 2 +- docs/examples/datatype_object.rst | 2 +- docs/examples/empty.json | 2 +- docs/examples/empty.rst | 2 +- docs/examples/enum_attr.json | 4 +- docs/examples/enum_attr.rst | 2 +- docs/examples/fixed_string_dset.json | 2 +- docs/examples/fixed_string_dset.rst | 2 +- docs/examples/index.rst | 2 + docs/examples/null_objref_dset.json | 2 +- docs/examples/null_objref_dset.rst | 2 +- docs/examples/nullspace_dset.json | 2 +- docs/examples/nullspace_dset.rst | 2 +- docs/examples/objref_attr.json | 2 +- docs/examples/objref_attr.rst | 2 +- docs/examples/objref_dset.json | 16 +- docs/examples/regionref_attr.json | 2 +- docs/examples/regionref_attr.rst | 2 +- docs/examples/resizable.json | 6 +- docs/examples/resizable.rst | 2 +- docs/examples/sample.json | 58 +- docs/examples/scalar.json | 2 +- docs/examples/scalar.rst | 2 +- docs/examples/tall.json | 2 +- docs/examples/tall.rst | 2 +- docs/examples/tgroup.json | 92 +- docs/examples/tgroup.rst | 2 +- docs/examples/vlen_dset.json | 2 +- docs/examples/vlen_dset.rst | 2 +- docs/examples/vlen_string_attr.json | 8 +- docs/examples/vlen_string_attr.rst | 2 +- docs/index.rst | 12 +- docs/requirements.txt | 2 + docs/schema/attribute.rst | 5 + docs/schema/dataset.rst | 5 + 
docs/schema/dataspaces.rst | 5 + docs/schema/datatypes.rst | 5 + docs/schema/file.rst | 7 + docs/schema/filters.rst | 5 + docs/schema/group.rst | 5 + docs/schema/index.rst | 15 + docs/specs.rst | 8 + docs/tools/h5json.rst | 129 + h5json/__init__.py | 8 +- h5json/_version.py | 657 +++ h5json/apiversion.py | 15 + h5json/h5tojson/__init__.py | 11 + {h5tojson => h5json/h5tojson}/h5tojson.py | 146 +- h5json/hdf5db.py | 1631 ++++--- h5json/hdf5dtype.py | 562 +-- h5json/jsontoh5/__init__.py | 13 + {jsontoh5 => h5json/jsontoh5}/jsontoh5.py | 100 +- .../sample.json => h5json/schema/__init__.py | 0 h5json/schema/attribute.schema.json | 66 + h5json/schema/dataset.schema.json | 205 + h5json/schema/dataspaces.schema.json | 86 + h5json/schema/datatypes.schema.json | 611 +++ h5json/schema/filters.schema.json | 197 + h5json/schema/group.schema.json | 255 + h5json/schema/hdf5.schema.json | 246 + h5json/validator/__init__.py | 0 h5json/validator/validator.py | 98 + setup.cfg | 8 + setup.py | 138 +- test/__init__.py | 2 +- test/integ/__init__.py | 2 +- test/integ/h5tojson_test.py | 34 +- test/integ/jsontoh5_test.py | 58 +- test/unit/__init__.py | 2 +- test/unit/hdf5dbTest.py | 1299 +++--- test/unit/hdf5dtypeTest.py | 687 +-- testall.py | 49 +- versioneer.py | 2123 +++++++++ 111 files changed, 17775 insertions(+), 7394 deletions(-) create mode 100644 .gitattributes create mode 100644 .readthedocs.yaml create mode 100644 MANIFEST.in create mode 100644 data/hdf5/scalar_array_dset.h5 create mode 100644 data/json/bool_attr.json create mode 100644 data/json/bool_dset.json create mode 100644 data/json/comp_complex.json delete mode 100644 data/json/h5ex_d_unlimod.json create mode 100644 data/json/scalar_array_dset.json create mode 100644 data/json/vlen_string_dset_utc.json delete mode 100755 docs/Installation/index.rst delete mode 100755 docs/Utilities.rst delete mode 100644 docs/bp/index.rst create mode 100644 docs/requirements.txt create mode 100644 docs/schema/attribute.rst create mode 
100644 docs/schema/dataset.rst create mode 100644 docs/schema/dataspaces.rst create mode 100644 docs/schema/datatypes.rst create mode 100644 docs/schema/file.rst create mode 100644 docs/schema/filters.rst create mode 100644 docs/schema/group.rst create mode 100644 docs/schema/index.rst create mode 100644 docs/specs.rst create mode 100755 docs/tools/h5json.rst create mode 100644 h5json/_version.py create mode 100644 h5json/apiversion.py create mode 100644 h5json/h5tojson/__init__.py rename {h5tojson => h5json/h5tojson}/h5tojson.py (62%) create mode 100644 h5json/jsontoh5/__init__.py rename {jsontoh5 => h5json/jsontoh5}/jsontoh5.py (79%) rename data/json/sample.json => h5json/schema/__init__.py (100%) create mode 100644 h5json/schema/attribute.schema.json create mode 100644 h5json/schema/dataset.schema.json create mode 100644 h5json/schema/dataspaces.schema.json create mode 100644 h5json/schema/datatypes.schema.json create mode 100644 h5json/schema/filters.schema.json create mode 100644 h5json/schema/group.schema.json create mode 100644 h5json/schema/hdf5.schema.json create mode 100644 h5json/validator/__init__.py create mode 100644 h5json/validator/validator.py create mode 100644 versioneer.py diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..971411f --- /dev/null +++ b/.gitattributes @@ -0,0 +1 @@ +h5json/_version.py export-subst diff --git a/.gitignore b/.gitignore index fe8e48d..9a9a23d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,7 @@ +# Misc. +.vscode +.DS_Store + # Byte-compiled / optimized / DLL files __pycache__/ *.pyc @@ -19,7 +23,7 @@ sdist/ var/ *.egg-info/ .installed.cfg -*.egg +*.egg? 
# PyInstaller # Usually these files are written by a python script from a template diff --git a/.readthedocs.yaml b/.readthedocs.yaml new file mode 100644 index 0000000..68b30d6 --- /dev/null +++ b/.readthedocs.yaml @@ -0,0 +1,15 @@ +version: 2 + +build: + os: ubuntu-20.04 + tools: + python: "3.9" + +python: + install: + - requirements: docs/requirements.txt + +sphinx: + builder: html + configuration: docs/conf.py + fail_on_warning: true diff --git a/.travis.yml b/.travis.yml index 49522be..0f3189a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -4,18 +4,17 @@ notifications: email: false python: - - "2.7" - - "3.3" - - "3.4" - - "3.5" + - "3.7" + - "3.8" + - "3.9" install: - - sudo apt-get update -qq - - sudo apt-get install -qq libhdf5-serial-dev - - pip uninstall numpy -y - - pip install numpy>=1.10.4 - - pip install h5py + - sudo apt-get update -qq + - sudo apt-get install -qq libhdf5-serial-dev + - pip uninstall numpy -y + - pip install numpy>=1.10.4 + - pip install h5py script: - - python setup.py install - - python testall.py + - python setup.py install + - python testall.py diff --git a/COPYING b/COPYING index a5a811a..e6f3ab3 100755 --- a/COPYING +++ b/COPYING @@ -4,7 +4,7 @@ h5serv Software Service, Libraries and Utilities ----------------------------------------------------------------------------- h5serv (HDF5 REST Server) Service, Libraries and Utilities -Copyright 2014-2016 by The HDF Group. +Copyright 2014-2017 by The HDF Group. All rights reserved. diff --git a/Docker/README.md b/Docker/README.md index 8b49cd3..f12bf48 100644 --- a/Docker/README.md +++ b/Docker/README.md @@ -1,24 +1,26 @@ -## Dockerfile Image for hdf5-json Python Package +# Docker Images for HDF5/JSON + +## hdf5-json Python Package + +Ingredients: -## Ingredients: - * Python 3.5 * HDF5 1.8.16 * h5py 2.6.0 * PyTables 3.2.2 * hdf5-json package -## Instructions +### Instructions -Run with the docker '-it' flag, and the data volume to use. 
+Build Docker image from [`Dockerfile`](./Dockerfile). Run with the docker `-it` flag, and a data volume to use. Example: -```$docker run -it -v :/data hdfgroup/hdf5-json /bin/bash``` + $ docker run -it -v :/data hdfgroup/hdf5-json /bin/bash Where "mydata" is the path to a folder on the host that (presumably) holds some HDF5 files to use with hdf5-json. - + Also sample HDF5 and JSON files can be found on /usr/local/src/hdf5-json/data/. ## See Also: diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..efa22bb --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,2 @@ +include versioneer.py +include h5json/_version.py diff --git a/README.rst b/README.rst index fdfc4fb..2210f38 100644 --- a/README.rst +++ b/README.rst @@ -1,66 +1,39 @@ -hdf5-json -========= +h5json +====== .. image:: https://travis-ci.org/HDFGroup/hdf5-json.svg?branch=develop - :target: https://travis-ci.org/HDFGroup/hdf5-json + :target: https://travis-ci.org/HDFGroup/hdf5-json -Specification and tools for representing HDF5 in JSON +.. image:: https://readthedocs.org/projects/hdf5-json/badge/?version=latest + :target: https://hdf5-json.readthedocs.io/en/latest/?badge=latest + :alt: Documentation Status + +Specification and tools for representing HDF5 in JSON. Introduction ------------ -This repository contains a specification, library, and utilities for describing HDF5 content in JSON. -The utilities can be used to convert any HDF5 file to JSON or from a JSON file (using the convention +This repository contains a specification, library, and utilities for describing HDF5 content in JSON. +The utilities can be used to convert any HDF5 file to JSON or from a JSON file (using the convention described here to HDF5). -The library is useful for any Python application that needs to translate between HDF5 objects and JSON -serializations. 
In addition to the utilities provided in this repository, the library is used by HDF -Server (a RESTful web service for HDF5), and HDF Product Designer (an application for creating product -designs). - -This respository also include utilities to generate code in Fortran or Python based on a JSON file. +The library is useful for any Python application that needs to translate between HDF5 objects and JSON +serializations. In addition to the utilities provided in this repository, the library is used by `HDF +Server `_ (a RESTful web service for HDF5), and `HDF Product Designer `_ (an application for creating product +designs). -See :doc:`Utilities` for a description of the included utilties. Websites -------- * Main website: http://www.hdfgroup.org * Source code: https://github.com/HDFGroup/hdf5-json -* Mailing list: hdf-forum@lists.hdfgroup.org -* Documentation: http://hdf5-json.readthedocs.org - -Related Projects ----------------- -* HDF Server: https://www.hdfgroup.org/projects/hdfserver/ -* Product Designer: https://wiki.earthdata.nasa.gov/display/HPD/HDF+Product+Designer - -Installing ------------ - -Via pip:: - - pip install h5json - -From a release tarball or Git checkout:: +* HDF Forum: https://forum.hdfgroup.org/c/hsds +* Documentation: https://hdf5-json.readthedocs.org - python setup.py install - python testall.py # optional - -The hd5tojson.py and jsontoh5.py convertors will be added to the path. -See docs/Installation.rst for step by step instructions. - - -Uninstalling ------------- - -Just remove the install directory and all contents to uninstall. - - Reporting bugs (and general feedback) ------------------------------------- -Create new issues at http://github.com/HDFGroup/hdf5-json/issues for any problems you find. - -For general questions/feedback, please use the list (hdf-forum@lists.hdfgroup.org). +Create new issues at http://github.com/HDFGroup/hdf5-json/issues for any problems you find. 
+For general questions/feedback, please post on the `HDF Forum `_. diff --git a/data/hdf5/scalar_array_dset.h5 b/data/hdf5/scalar_array_dset.h5 new file mode 100644 index 0000000000000000000000000000000000000000..04001b21c6a319daaa16f54dba8142e7a801af55 GIT binary patch literal 7228 zcmeD5aB<`1lHy_j0S*oZ76t(@6Gr@p0vj2K2#gPtPk=HQp>zk7Ucm%mFfxE31A_!q zTo7tLy1I}cS62q0N|^aD8mf(9Gc$yR>N=S0C_UUmz{Axs0OZFAs7GPx=>#;L!TcW| zpOO?G&%odkYzP*DrAr5x0upI|50@Y&h1E)jBam^005BaMXNQ<&3^kIA36#=Bg@Abo zBN>C#{|1VKKpcc(U=s!_6<}5Bt3WE zvF>0$e-}`uf@K8-X{dpt6iFfA@9g2tW5vM2%)rSY0*bVv{QQ#8P)`>INk&NMfg1Bl+|=4%&>Y9WEBasqRef;-i!a`glspaPb7Hp+F%!tM0`l>YIoq&PITcyR*L!xC#I!OAvrm2au`w4Efa+k) z6i^ic0>qT45S0+JfghPS${r1Y(GVC7fzc2c4S~@R7!85Z5Eu=C(GVC7fzc2c4S~@R zpk@ew$Cnhpt3$^BVKf6HBXE!g)Td`wWni=d(xCYWCeR!eNKjBg9yEvp1PTn|91P<1 iKo%=-c7U0S12~e!%m^Fyg-v2GtOqL|O2jvZO_Kq|lfYO2 literal 0 HcmV?d00001 diff --git a/data/json/array_dset.json b/data/json/array_dset.json index 8aae0df..937c844 100644 --- a/data/json/array_dset.json +++ b/data/json/array_dset.json @@ -1,144 +1,144 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "9b3519cf-83f8-11e5-8e8c-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 4 ] - }, + }, "type": { "base": { - "base": "H5T_STD_I64LE", + "base": "H5T_STD_I64LE", "class": "H5T_INTEGER" - }, - "class": "H5T_ARRAY", + }, + "class": "H5T_ARRAY", "dims": [ - 3, + 3, 5 ] - }, + }, "value": [ [ [ - 0, - 0, - 0, - 0, + 0, + 0, + 0, + 0, 0 - ], + ], [ - 0, - -1, - -2, - -3, + 0, + -1, + -2, + -3, -4 - ], + ], [ - 0, - -2, - -4, - -6, + 0, + -2, + -4, + -6, -8 ] - ], + ], [ [ - 0, - 1, - 2, - 3, + 0, + 1, + 2, + 3, 4 - ], + ], [ - 1, - 1, - 1, - 1, + 1, + 1, + 1, + 1, 1 - ], + ], [ - 2, - 1, - 0, 
- -1, + 2, + 1, + 0, + -1, -2 ] - ], + ], [ [ - 0, - 2, - 4, - 6, + 0, + 2, + 4, + 6, 8 - ], + ], [ - 2, - 3, - 4, - 5, + 2, + 3, + 4, + 5, 6 - ], + ], [ - 4, - 4, - 4, - 4, + 4, + 4, + 4, + 4, 4 ] - ], + ], [ [ - 0, - 3, - 6, - 9, + 0, + 3, + 6, + 9, 12 - ], + ], [ - 3, - 5, - 7, - 9, + 3, + 5, + 7, + 9, 11 - ], + ], [ - 6, - 7, - 8, - 9, + 6, + 7, + 8, + 9, 10 ] ] ] } - }, + }, "groups": { "9b338cbd-83f8-11e5-9763-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "9b3519cf-83f8-11e5-8e8c-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "9b3519cf-83f8-11e5-8e8c-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": "9b338cbd-83f8-11e5-9763-3c15c2da029e" } diff --git a/data/json/bool_attr.json b/data/json/bool_attr.json new file mode 100644 index 0000000..ff092b9 --- /dev/null +++ b/data/json/bool_attr.json @@ -0,0 +1,45 @@ +{ + "apiVersion": "1.1.0", + "groups": { + "a58ed768-0370-11e7-926f-3c15c2da029e": { + "alias": [ + "/" + ], + "attributes": [ + { + "name": "attr1", + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 4 + ] + }, + "type": { + "base": { + "base": "H5T_STD_I8LE", + "class": "H5T_INTEGER" + }, + "class": "H5T_ENUM", + "members": [ + { + "name": "FALSE", + "value": 0 + }, + { + "name": "TRUE", + "value": 1 + } + ] + }, + "value": [ + false, + true, + false, + true + ] + } + ] + } + }, + "root": "a58ed768-0370-11e7-926f-3c15c2da029e" +} diff --git a/data/json/bool_dset.json b/data/json/bool_dset.json new file mode 100644 index 0000000..29e46d8 --- /dev/null +++ b/data/json/bool_dset.json @@ -0,0 +1,62 @@ +{ + "apiVersion": "1.1.0", + "datasets": { + "b59c0308-0370-11e7-a673-3c15c2da029e": { + "alias": [ + "/DS1" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 4 + ] + }, + "type": { + "base": { 
+ "base": "H5T_STD_I8LE", + "class": "H5T_INTEGER" + }, + "class": "H5T_ENUM", + "members": [ + { + "name": "FALSE", + "value": 0 + }, + { + "name": "TRUE", + "value": 1 + } + ] + }, + "value": [ + false, + true, + false, + true + ] + } + }, + "groups": { + "b59a29f0-0370-11e7-86c9-3c15c2da029e": { + "alias": [ + "/" + ], + "links": [ + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "b59c0308-0370-11e7-a673-3c15c2da029e", + "title": "DS1" + } + ] + } + }, + "root": "b59a29f0-0370-11e7-86c9-3c15c2da029e" +} diff --git a/data/json/comp_complex.json b/data/json/comp_complex.json new file mode 100644 index 0000000..9fdcb58 --- /dev/null +++ b/data/json/comp_complex.json @@ -0,0 +1,408 @@ +{ + "apiVersion": "1.1.1", + "datasets": { + "a4afc53a-9b72-11ec-b0a3-8c8590747994": { + "alias": [ + "/n" + ], + "attributes": [ + { + "name": "CLASS", + "shape": { + "class": "H5S_SCALAR" + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 16, + "strPad": "H5T_STR_NULLPAD" + }, + "value": "DIMENSION_SCALE" + }, + { + "name": "NAME", + "shape": { + "class": "H5S_SCALAR" + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 64, + "strPad": "H5T_STR_NULLPAD" + }, + "value": "This is a netCDF dimension but not a netCDF variable. 
2" + }, + { + "name": "REFERENCE_LIST", + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 1 + ] + }, + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "dataset", + "type": { + "base": "H5T_STD_REF_OBJ", + "class": "H5T_REFERENCE" + } + }, + { + "name": "dimension", + "type": { + "base": "H5T_STD_I32LE", + "class": "H5T_INTEGER" + } + } + ] + }, + "value": [ + [ + "datasets/a4afebb4-9b72-11ec-b0a3-8c8590747994", + 0 + ] + ] + } + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 2 + ] + }, + "type": { + "base": "H5T_IEEE_F32BE", + "class": "H5T_FLOAT" + }, + "value": [ + 0.0, + 0.0 + ] + }, + "a4afebb4-9b72-11ec-b0a3-8c8590747994": { + "alias": [ + "/phony_compound_var" + ], + "attributes": [ + { + "name": "DIMENSION_LIST", + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 1 + ] + }, + "type": { + "base": { + "base": "H5T_STD_REF_OBJ", + "class": "H5T_REFERENCE" + }, + "class": "H5T_VLEN" + }, + "value": [ + [ + "datasets/a4afc53a-9b72-11ec-b0a3-8c8590747994" + ] + ] + } + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 2 + ] + }, + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "yy", + "type": { + "base": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "x", + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "i", + "type": { + "base": "H5T_STD_I16LE", + "class": "H5T_INTEGER" + } + }, + { + "name": "j", + "type": { + "base": "H5T_STD_I32LE", + "class": "H5T_INTEGER" + } + } + ] + } + }, + { + "name": "y", + "type": { + "base": { + "base": "H5T_IEEE_F64LE", + "class": "H5T_FLOAT" + }, + "class": "H5T_ARRAY", + "dims": [ + 2 + ] + } + } + ] + }, + "class": "H5T_ARRAY", + "dims": [ + 2 + ] + } + 
} + ] + }, + "value": [ + [ + [ + [ + [ + 1, + 200000 + ], + [ + -100000.285657, + 3.1415926 + ] + ], + [ + [ + 2, + 400000 + ], + [ + 200000.151617, + 273.15 + ] + ] + ] + ], + [ + [ + [ + [ + 3, + 600000 + ], + [ + -200000.285657, + 6.1415926 + ] + ], + [ + [ + 4, + 800000 + ], + [ + 400000.151617, + 476.15 + ] + ] + ] + ] + ] + } + }, + "datatypes": { + "a4af6bc6-9b72-11ec-b0a3-8c8590747994": { + "alias": [ + "/cmp1" + ], + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "i", + "type": { + "base": "H5T_STD_I16LE", + "class": "H5T_INTEGER" + } + }, + { + "name": "j", + "type": { + "base": "H5T_STD_I32LE", + "class": "H5T_INTEGER" + } + } + ] + } + }, + "a4af85b6-9b72-11ec-b0a3-8c8590747994": { + "alias": [ + "/cmp2" + ], + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "x", + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "i", + "type": { + "base": "H5T_STD_I16LE", + "class": "H5T_INTEGER" + } + }, + { + "name": "j", + "type": { + "base": "H5T_STD_I32LE", + "class": "H5T_INTEGER" + } + } + ] + } + }, + { + "name": "y", + "type": { + "base": { + "base": "H5T_IEEE_F64LE", + "class": "H5T_FLOAT" + }, + "class": "H5T_ARRAY", + "dims": [ + 2 + ] + } + } + ] + } + }, + "a4af9fd8-9b72-11ec-b0a3-8c8590747994": { + "alias": [ + "/cmp3" + ], + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "yy", + "type": { + "base": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "x", + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "i", + "type": { + "base": "H5T_STD_I16LE", + "class": "H5T_INTEGER" + } + }, + { + "name": "j", + "type": { + "base": "H5T_STD_I32LE", + "class": "H5T_INTEGER" + } + } + ] + } + }, + { + "name": "y", + "type": { + "base": { + "base": "H5T_IEEE_F64LE", + "class": "H5T_FLOAT" + }, + "class": "H5T_ARRAY", + "dims": [ + 2 + ] + } + } + ] + }, + "class": "H5T_ARRAY", + "dims": [ + 2 + ] + } + } + ] + } + } + }, + "groups": { + "a4af19d2-9b72-11ec-b0a3-8c8590747994": { 
+ "alias": [ + "/" + ], + "links": [ + { + "class": "H5L_TYPE_HARD", + "collection": "datatypes", + "id": "a4af6bc6-9b72-11ec-b0a3-8c8590747994", + "title": "cmp1" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datatypes", + "id": "a4af85b6-9b72-11ec-b0a3-8c8590747994", + "title": "cmp2" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datatypes", + "id": "a4af9fd8-9b72-11ec-b0a3-8c8590747994", + "title": "cmp3" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a4afc53a-9b72-11ec-b0a3-8c8590747994", + "title": "n" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a4afebb4-9b72-11ec-b0a3-8c8590747994", + "title": "phony_compound_var" + } + ] + } + }, + "root": "a4af19d2-9b72-11ec-b0a3-8c8590747994" +} diff --git a/data/json/compound_array.json b/data/json/compound_array.json index 3858a62..98eb0af 100644 --- a/data/json/compound_array.json +++ b/data/json/compound_array.json @@ -1,34 +1,81 @@ { + "apiVersion": "1.1.0", "datasets": { "4c97c75c-2553-42b7-a733-1c18887001c2": { - "type": { - "fields": [ - {"type": {"class": "H5T_FLOAT", "base": "H5T_IEEE_F64LE"}, "name": "Time"}, - {"type": {"class": "H5T_ARRAY", "dims": [4], "base": - {"class": "H5T_FLOAT", "base": "H5T_IEEE_F32LE"} }, "name": "Concentration"}, - {"type": {"class": "H5T_ARRAY", "dims": [8], "base": - {"class": "H5T_INTEGER", "base": "H5T_STD_I8LE"}}, "name": "Species"}], - "class": "H5T_COMPOUND"}, - "creationProperties": {"layout": {"class": "H5D_CONTIGUOUS"}}, "description": "", - "shape": {"maxims": [15], "class": "H5S_SIMPLE", "dims": [15]}, - "attributes": []}}, + "alias": [ + "/dset" + ], + "type": { + "fields": [ + { + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F64LE" + }, + "name": "Time" + }, + { + "type": { + "class": "H5T_ARRAY", + "dims": [ + 4 + ], + "base": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE" + } + }, + "name": "Concentration" + }, + { + "type": { + "class": "H5T_ARRAY", + "dims": [ + 8 + ], + "base": 
{ + "class": "H5T_INTEGER", + "base": "H5T_STD_I8LE" + } + }, + "name": "Species" + } + ], + "class": "H5T_COMPOUND" + }, + "creationProperties": { + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "description": "", + "shape": { + "maxdims": [ + 15 + ], + "class": "H5S_SIMPLE", + "dims": [ + 15 + ] + }, + "attributes": [] + } + }, "groups": { "31e6e63a-41da-11e5-a8c1-3c15c2da029e": { "alias": [ "/" - ], - "attributes": [ - - ], + ], + "attributes": [], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "4c97c75c-2553-42b7-a733-1c18887001c2", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "4c97c75c-2553-42b7-a733-1c18887001c2", "title": "dset" } ] } - }, + }, "root": "31e6e63a-41da-11e5-a8c1-3c15c2da029e" } diff --git a/data/json/compound_array_vlen_string.json b/data/json/compound_array_vlen_string.json index cb5eff3..c06fbf0 100644 --- a/data/json/compound_array_vlen_string.json +++ b/data/json/compound_array_vlen_string.json @@ -1,34 +1,77 @@ { + "apiVersion": "1.1.0", "datasets": { "ce2cfb40-cf76-11e5-ac33-3c15c2da029e": { - "type": { - "fields": [ - {"type": {"class": "H5T_INTEGER", "base": "H5T_STD_U64BE"}, "name": "VALUE1"}, - {"type": {"class": "H5T_FLOAT", "base": "H5T_IEEE_F64BE"}, "name": "VALUE2"}, - {"type": {"class": "H5T_ARRAY", "dims": [8], "base": - {"class": "H5T_STRING", "charSet": "H5T_CSET_ASCII", - "strPad": "H5T_STR_NULLTERM", "length": "H5T_VARIABLE"}}, "name": "VALUE3"}], - "class": "H5T_COMPOUND"}, - "creationProperties": {"layout": {"class": "H5D_CONTIGUOUS"}}, "description": "", - "shape": {"maxims": [15], "class": "H5S_SIMPLE", "dims": [10]}, - "attributes": []}}, + "alias": [ + "/dset" + ], + "type": { + "fields": [ + { + "type": { + "class": "H5T_INTEGER", + "base": "H5T_STD_U64BE" + }, + "name": "VALUE1" + }, + { + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F64BE" + }, + "name": "VALUE2" + }, + { + "type": { + "class": "H5T_ARRAY", + "dims": [ + 8 + ], + "base": { + "class": 
"H5T_STRING", + "charSet": "H5T_CSET_ASCII", + "strPad": "H5T_STR_NULLTERM", + "length": "H5T_VARIABLE" + } + }, + "name": "VALUE3" + } + ], + "class": "H5T_COMPOUND" + }, + "creationProperties": { + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "description": "", + "shape": { + "maxdims": [ + 15 + ], + "class": "H5S_SIMPLE", + "dims": [ + 10 + ] + }, + "attributes": [] + } + }, "groups": { "ee0b8f12-cf76-11e5-ae05-3c15c2da029e": { "alias": [ "/" - ], - "attributes": [ - - ], + ], + "attributes": [], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "ce2cfb40-cf76-11e5-ac33-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "ce2cfb40-cf76-11e5-ac33-3c15c2da029e", "title": "dset" } ] } - }, + }, "root": "ee0b8f12-cf76-11e5-ae05-3c15c2da029e" } diff --git a/data/json/enum_attr.json b/data/json/enum_attr.json index 95709bf..9e9d94a 100644 --- a/data/json/enum_attr.json +++ b/data/json/enum_attr.json @@ -1,104 +1,116 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "9f8b66ee-83f8-11e5-828a-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "attributes": [ { - "name": "A1", + "name": "A1", "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 4, + 4, 7 ] - }, + }, "type": { "base": { - "base": "H5T_STD_I16BE", + "base": "H5T_STD_I16BE", "class": "H5T_INTEGER" - }, - "class": "H5T_ENUM", - "mapping": { - "GAS": 2, - "LIQUID": 1, - "PLASMA": 3, - "SOLID": 0 - } - }, + }, + "class": "H5T_ENUM", + "members": [ + { + "name": "GAS", + "value": 2 + }, + { + "name": "LIQUID", + "value": 1 + }, + { + "name": "PLASMA", + "value": 3 + }, + { + "name": "SOLID", + "value": 0 + } + ] + }, "value": [ [ - 0, - 0, - 0, - 0, - 0, - 0, + 0, + 0, + 0, + 0, + 0, + 0, 0 - ], + ], [ - 0, - 1, - 2, - 3, - 0, - 1, + 0, + 1, + 2, + 3, + 0, + 1, 2 - ], + ], [ - 0, - 2, - 0, - 2, - 0, - 2, + 0, + 2, + 0, + 2, + 0, + 2, 0 - ], + ], [ - 0, - 3, - 2, - 1, - 0, - 3, + 0, + 3, + 2, + 1, + 0, + 3, 2 ] ] } - ], + 
], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { "class": "H5S_NULL" - }, + }, "type": { - "base": "H5T_STD_I32LE", + "base": "H5T_STD_I32LE", "class": "H5T_INTEGER" - }, + }, "value": null } - }, + }, "groups": { "9f8a11e6-83f8-11e5-93c7-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "9f8b66ee-83f8-11e5-828a-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "9f8b66ee-83f8-11e5-828a-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": "9f8a11e6-83f8-11e5-93c7-3c15c2da029e" } diff --git a/data/json/enum_dset.json b/data/json/enum_dset.json index 73945e0..d2afcd4 100644 --- a/data/json/enum_dset.json +++ b/data/json/enum_dset.json @@ -1,91 +1,103 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "9fa5814c-83f8-11e5-8820-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 4, + 4, 7 ] - }, + }, "type": { "base": { - "base": "H5T_STD_I16BE", + "base": "H5T_STD_I16BE", "class": "H5T_INTEGER" - }, - "class": "H5T_ENUM", - "mapping": { - "GAS": 2, - "LIQUID": 1, - "PLASMA": 3, - "SOLID": 0 - } - }, + }, + "class": "H5T_ENUM", + "members": [ + { + "name": "GAS", + "value": 2 + }, + { + "name": "LIQUID", + "value": 1 + }, + { + "name": "PLASMA", + "value": 3 + }, + { + "name": "SOLID", + "value": 0 + } + ] + }, "value": [ [ - 0, - 0, - 0, - 0, - 0, - 0, + 0, + 0, + 0, + 0, + 0, + 0, 0 - ], + ], [ - 0, - 1, - 2, - 3, - 0, - 1, + 0, + 1, + 2, + 3, + 0, + 1, 2 - ], + ], [ - 0, - 2, - 0, - 
2, - 0, - 2, + 0, + 2, + 0, + 2, + 0, + 2, 0 - ], + ], [ - 0, - 3, - 2, - 1, - 0, - 3, + 0, + 3, + 2, + 1, + 0, + 3, 2 ] ] } - }, + }, "groups": { "9fa476bd-83f8-11e5-99f7-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "9fa5814c-83f8-11e5-8820-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "9fa5814c-83f8-11e5-8820-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": "9fa476bd-83f8-11e5-99f7-3c15c2da029e" } diff --git a/data/json/h5ex_d_sofloat.json b/data/json/h5ex_d_sofloat.json index af0190a..b585a67 100644 --- a/data/json/h5ex_d_sofloat.json +++ b/data/json/h5ex_d_sofloat.json @@ -1,2170 +1,2170 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "a121fd68-83f8-11e5-badf-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_INCR", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_INCR", + "fillTime": "H5D_FILL_TIME_IFSET", "filters": [ { - "class": "H5Z_FILTER_SCALEOFFSET", - "id": 6, - "name": "scaleoffset", + "class": "H5Z_FILTER_SCALEOFFSET", + "id": 6, + "scaleOffset": 2, "scaleType": "H5Z_SO_FLOAT_DSCALE" } - ], + ], "layout": { - "class": "H5D_CHUNKED", + "class": "H5D_CHUNKED", "dims": [ - 4, + 4, 8 ] } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 32, + 32, 64 ] - }, + }, "type": { - "base": "H5T_IEEE_F64LE", + "base": "H5T_IEEE_F64LE", "class": "H5T_FLOAT" - }, + }, "value": [ [ - 3.329230769230769, - 1.7692307692307692, - 2.439230769230769, - 3.2992307692307694, - 4.229230769230769, - 5.189230769230769, - 6.159230769230769, - 7.139230769230769, - 8.120481927710843, - 9.110481927710843, - 10.100481927710844, - 11.090481927710844, - 12.080481927710842, - 13.070481927710844, - 14.070481927710844, - 15.060481927710843, - 16.061349693251532, - 17.061349693251532, - 18.05134969325153, - 19.05134969325153, - 20.05134969325153, - 
21.05134969325153, - 22.041349693251533, - 23.041349693251533, - 24.041152263374485, - 25.041152263374485, - 26.041152263374485, - 27.041152263374485, - 28.031152263374487, - 29.031152263374487, - 30.031152263374487, - 31.031152263374487, - 32.030959752321984, - 33.030959752321984, - 34.030959752321984, - 35.030959752321984, - 36.030959752321984, - 37.030959752321984, - 38.030959752321984, - 39.020959752321986, - 40.02481389578164, - 41.02481389578164, - 42.02481389578164, - 43.02481389578164, - 44.02481389578164, - 45.02481389578164, - 46.02481389578164, - 47.02481389578164, - 48.02070393374741, - 49.02070393374741, - 50.02070393374741, - 51.02070393374741, - 52.02070393374741, - 53.02070393374741, - 54.02070393374741, - 55.02070393374741, - 56.01776198934281, - 57.01776198934281, - 58.01776198934281, - 59.01776198934281, - 60.01776198934281, - 61.01776198934281, - 62.01776198934281, + 3.329230769230769, + 1.7692307692307692, + 2.439230769230769, + 3.2992307692307694, + 4.229230769230769, + 5.189230769230769, + 6.159230769230769, + 7.139230769230769, + 8.120481927710843, + 9.110481927710843, + 10.100481927710844, + 11.090481927710844, + 12.080481927710842, + 13.070481927710844, + 14.070481927710844, + 15.060481927710843, + 16.061349693251532, + 17.061349693251532, + 18.05134969325153, + 19.05134969325153, + 20.05134969325153, + 21.05134969325153, + 22.041349693251533, + 23.041349693251533, + 24.041152263374485, + 25.041152263374485, + 26.041152263374485, + 27.041152263374485, + 28.031152263374487, + 29.031152263374487, + 30.031152263374487, + 31.031152263374487, + 32.030959752321984, + 33.030959752321984, + 34.030959752321984, + 35.030959752321984, + 36.030959752321984, + 37.030959752321984, + 38.030959752321984, + 39.020959752321986, + 40.02481389578164, + 41.02481389578164, + 42.02481389578164, + 43.02481389578164, + 44.02481389578164, + 45.02481389578164, + 46.02481389578164, + 47.02481389578164, + 48.02070393374741, + 49.02070393374741, + 50.02070393374741, + 
51.02070393374741, + 52.02070393374741, + 53.02070393374741, + 54.02070393374741, + 55.02070393374741, + 56.01776198934281, + 57.01776198934281, + 58.01776198934281, + 59.01776198934281, + 60.01776198934281, + 61.01776198934281, + 62.01776198934281, 63.01776198934281 - ], + ], [ - 6.6692307692307695, - 2.539230769230769, - 2.8692307692307693, - 3.609230769230769, - 4.469230769230769, - 5.3792307692307695, - 6.319230769230769, - 7.269230769230769, - 8.240481927710842, - 9.210481927710843, - 10.190481927710843, - 11.180481927710844, - 12.160481927710844, - 13.150481927710842, - 14.140481927710843, - 15.130481927710843, - 16.12134969325153, - 17.111349693251533, - 18.111349693251533, - 19.10134969325153, - 20.10134969325153, - 21.091349693251534, - 22.091349693251534, - 23.081349693251532, - 24.081152263374484, - 25.081152263374484, - 26.071152263374486, - 27.071152263374486, - 28.071152263374486, - 29.071152263374486, - 30.061152263374485, - 31.061152263374485, - 32.060959752321985, - 33.060959752321985, - 34.060959752321985, - 35.060959752321985, - 36.05095975232199, - 37.05095975232199, - 38.05095975232199, - 39.05095975232199, - 40.044813895781644, - 41.044813895781644, - 42.044813895781644, - 43.044813895781644, - 44.044813895781644, - 45.044813895781644, - 46.044813895781644, - 47.044813895781644, - 48.040703933747416, - 49.040703933747416, - 50.040703933747416, - 51.040703933747416, - 52.04070393374741, - 53.04070393374741, - 54.04070393374741, - 55.04070393374741, - 56.03776198934281, - 57.03776198934281, - 58.03776198934281, - 59.03776198934281, - 60.03776198934281, - 61.02776198934281, - 62.02776198934281, + 6.6692307692307695, + 2.539230769230769, + 2.8692307692307693, + 3.609230769230769, + 4.469230769230769, + 5.3792307692307695, + 6.319230769230769, + 7.269230769230769, + 8.240481927710842, + 9.210481927710843, + 10.190481927710843, + 11.180481927710844, + 12.160481927710844, + 13.150481927710842, + 14.140481927710843, + 15.130481927710843, + 
16.12134969325153, + 17.111349693251533, + 18.111349693251533, + 19.10134969325153, + 20.10134969325153, + 21.091349693251534, + 22.091349693251534, + 23.081349693251532, + 24.081152263374484, + 25.081152263374484, + 26.071152263374486, + 27.071152263374486, + 28.071152263374486, + 29.071152263374486, + 30.061152263374485, + 31.061152263374485, + 32.060959752321985, + 33.060959752321985, + 34.060959752321985, + 35.060959752321985, + 36.05095975232199, + 37.05095975232199, + 38.05095975232199, + 39.05095975232199, + 40.044813895781644, + 41.044813895781644, + 42.044813895781644, + 43.044813895781644, + 44.044813895781644, + 45.044813895781644, + 46.044813895781644, + 47.044813895781644, + 48.040703933747416, + 49.040703933747416, + 50.040703933747416, + 51.040703933747416, + 52.04070393374741, + 53.04070393374741, + 54.04070393374741, + 55.04070393374741, + 56.03776198934281, + 57.03776198934281, + 58.03776198934281, + 59.03776198934281, + 60.03776198934281, + 61.02776198934281, + 62.02776198934281, 63.02776198934281 - ], + ], [ - 9.99923076923077, - 3.309230769230769, - 3.309230769230769, - 3.9092307692307693, - 4.69923076923077, - 5.569230769230769, - 6.479230769230769, - 7.409230769230769, - 8.360481927710843, - 9.320481927710842, - 10.290481927710843, - 11.270481927710843, - 12.240481927710842, - 13.230481927710844, - 14.210481927710843, - 15.200481927710843, - 16.181349693251533, - 17.171349693251532, - 18.161349693251534, - 19.151349693251532, - 20.151349693251532, - 21.141349693251534, - 22.131349693251533, - 23.131349693251533, - 24.121152263374483, - 25.121152263374483, - 26.111152263374485, - 27.111152263374485, - 28.101152263374484, - 29.101152263374484, - 30.101152263374484, - 31.091152263374486, - 32.090959752321986, - 33.090959752321986, - 34.090959752321986, - 35.08095975232198, - 36.08095975232198, - 37.08095975232198, - 38.08095975232198, - 39.08095975232198, - 40.07481389578164, - 41.07481389578164, - 42.07481389578164, - 43.06481389578164, - 
44.06481389578164, - 45.06481389578164, - 46.06481389578164, - 47.06481389578164, - 48.06070393374741, - 49.06070393374741, - 50.06070393374741, - 51.06070393374741, - 52.06070393374741, - 53.06070393374741, - 54.050703933747414, - 55.050703933747414, - 56.05776198934281, - 57.04776198934281, - 58.04776198934281, - 59.04776198934281, - 60.04776198934281, - 61.04776198934281, - 62.04776198934281, + 9.99923076923077, + 3.309230769230769, + 3.309230769230769, + 3.9092307692307693, + 4.69923076923077, + 5.569230769230769, + 6.479230769230769, + 7.409230769230769, + 8.360481927710843, + 9.320481927710842, + 10.290481927710843, + 11.270481927710843, + 12.240481927710842, + 13.230481927710844, + 14.210481927710843, + 15.200481927710843, + 16.181349693251533, + 17.171349693251532, + 18.161349693251534, + 19.151349693251532, + 20.151349693251532, + 21.141349693251534, + 22.131349693251533, + 23.131349693251533, + 24.121152263374483, + 25.121152263374483, + 26.111152263374485, + 27.111152263374485, + 28.101152263374484, + 29.101152263374484, + 30.101152263374484, + 31.091152263374486, + 32.090959752321986, + 33.090959752321986, + 34.090959752321986, + 35.08095975232198, + 36.08095975232198, + 37.08095975232198, + 38.08095975232198, + 39.08095975232198, + 40.07481389578164, + 41.07481389578164, + 42.07481389578164, + 43.06481389578164, + 44.06481389578164, + 45.06481389578164, + 46.06481389578164, + 47.06481389578164, + 48.06070393374741, + 49.06070393374741, + 50.06070393374741, + 51.06070393374741, + 52.06070393374741, + 53.06070393374741, + 54.050703933747414, + 55.050703933747414, + 56.05776198934281, + 57.04776198934281, + 58.04776198934281, + 59.04776198934281, + 60.04776198934281, + 61.04776198934281, + 62.04776198934281, 63.04776198934281 - ], + ], [ - 13.329230769230769, - 4.079230769230769, - 3.739230769230769, - 4.2092307692307696, - 4.929230769230769, - 5.759230769230769, - 6.639230769230769, - 7.549230769230769, - 8.480481927710843, - 9.430481927710844, - 
10.390481927710843, - 11.350481927710844, - 12.320481927710844, - 13.300481927710843, - 14.280481927710843, - 15.260481927710842, - 16.241349693251532, - 17.23134969325153, - 18.221349693251533, - 19.21134969325153, - 20.201349693251533, - 21.19134969325153, - 22.181349693251533, - 23.171349693251532, - 24.161152263374486, - 25.161152263374486, - 26.151152263374485, - 27.151152263374485, - 28.141152263374487, - 29.141152263374487, - 30.131152263374485, - 31.131152263374485, - 32.12095975232199, - 33.12095975232199, - 34.12095975232198, - 35.11095975232198, - 36.11095975232198, - 37.11095975232198, - 38.100959752321984, - 39.100959752321984, - 40.09481389578164, - 41.09481389578164, - 42.09481389578164, - 43.09481389578164, - 44.09481389578164, - 45.08481389578164, - 46.08481389578164, - 47.08481389578164, - 48.080703933747415, - 49.080703933747415, - 50.080703933747415, - 51.080703933747415, - 52.080703933747415, - 53.07070393374741, - 54.07070393374741, - 55.07070393374741, - 56.067761989342806, - 57.067761989342806, - 58.067761989342806, - 59.067761989342806, - 60.067761989342806, - 61.067761989342806, - 62.067761989342806, + 13.329230769230769, + 4.079230769230769, + 3.739230769230769, + 4.2092307692307696, + 4.929230769230769, + 5.759230769230769, + 6.639230769230769, + 7.549230769230769, + 8.480481927710843, + 9.430481927710844, + 10.390481927710843, + 11.350481927710844, + 12.320481927710844, + 13.300481927710843, + 14.280481927710843, + 15.260481927710842, + 16.241349693251532, + 17.23134969325153, + 18.221349693251533, + 19.21134969325153, + 20.201349693251533, + 21.19134969325153, + 22.181349693251533, + 23.171349693251532, + 24.161152263374486, + 25.161152263374486, + 26.151152263374485, + 27.151152263374485, + 28.141152263374487, + 29.141152263374487, + 30.131152263374485, + 31.131152263374485, + 32.12095975232199, + 33.12095975232199, + 34.12095975232198, + 35.11095975232198, + 36.11095975232198, + 37.11095975232198, + 38.100959752321984, + 
39.100959752321984, + 40.09481389578164, + 41.09481389578164, + 42.09481389578164, + 43.09481389578164, + 44.09481389578164, + 45.08481389578164, + 46.08481389578164, + 47.08481389578164, + 48.080703933747415, + 49.080703933747415, + 50.080703933747415, + 51.080703933747415, + 52.080703933747415, + 53.07070393374741, + 54.07070393374741, + 55.07070393374741, + 56.067761989342806, + 57.067761989342806, + 58.067761989342806, + 59.067761989342806, + 60.067761989342806, + 61.067761989342806, + 62.067761989342806, 63.067761989342806 - ], + ], [ - 16.66391304347826, - 4.843913043478261, - 4.173913043478261, - 4.513913043478261, - 5.163913043478261, - 5.943913043478261, - 6.793913043478261, - 7.6839130434782605, - 8.602409638554217, - 9.542409638554217, - 10.482409638554216, - 11.442409638554217, - 12.402409638554218, - 13.372409638554217, - 14.352409638554217, - 15.322409638554216, - 16.30674846625767, - 17.28674846625767, - 18.27674846625767, - 19.25674846625767, - 20.24674846625767, - 21.23674846625767, - 22.226748466257668, - 23.21674846625767, - 24.20576131687243, - 25.195761316872428, - 26.18576131687243, - 27.18576131687243, - 28.17576131687243, - 29.16576131687243, - 30.16576131687243, - 31.15576131687243, - 32.154798761609904, - 33.154798761609904, - 34.144798761609906, - 35.144798761609906, - 36.1347987616099, - 37.1347987616099, - 38.1347987616099, - 39.1247987616099, - 40.12406947890819, - 41.12406947890819, - 42.11406947890819, - 43.11406947890819, - 44.11406947890819, - 45.11406947890819, - 46.10406947890819, - 47.10406947890819, - 48.10351966873706, - 49.10351966873706, - 50.10351966873706, - 51.09351966873706, - 52.09351966873706, - 53.09351966873706, - 54.09351966873706, - 55.09351966873706, - 56.08880994671403, - 57.08880994671403, - 58.08880994671403, - 59.08880994671403, - 60.07880994671403, - 61.07880994671403, - 62.07880994671403, + 16.66391304347826, + 4.843913043478261, + 4.173913043478261, + 4.513913043478261, + 5.163913043478261, + 
5.943913043478261, + 6.793913043478261, + 7.6839130434782605, + 8.602409638554217, + 9.542409638554217, + 10.482409638554216, + 11.442409638554217, + 12.402409638554218, + 13.372409638554217, + 14.352409638554217, + 15.322409638554216, + 16.30674846625767, + 17.28674846625767, + 18.27674846625767, + 19.25674846625767, + 20.24674846625767, + 21.23674846625767, + 22.226748466257668, + 23.21674846625767, + 24.20576131687243, + 25.195761316872428, + 26.18576131687243, + 27.18576131687243, + 28.17576131687243, + 29.16576131687243, + 30.16576131687243, + 31.15576131687243, + 32.154798761609904, + 33.154798761609904, + 34.144798761609906, + 35.144798761609906, + 36.1347987616099, + 37.1347987616099, + 38.1347987616099, + 39.1247987616099, + 40.12406947890819, + 41.12406947890819, + 42.11406947890819, + 43.11406947890819, + 44.11406947890819, + 45.11406947890819, + 46.10406947890819, + 47.10406947890819, + 48.10351966873706, + 49.10351966873706, + 50.10351966873706, + 51.09351966873706, + 52.09351966873706, + 53.09351966873706, + 54.09351966873706, + 55.09351966873706, + 56.08880994671403, + 57.08880994671403, + 58.08880994671403, + 59.08880994671403, + 60.07880994671403, + 61.07880994671403, + 62.07880994671403, 63.07880994671403 - ], + ], [ - 20.00391304347826, - 5.613913043478261, - 4.6039130434782605, - 4.81391304347826, - 5.3939130434782605, - 6.133913043478261, - 6.953913043478261, - 7.82391304347826, - 8.722409638554216, - 9.642409638554216, - 10.582409638554218, - 11.532409638554217, - 12.492409638554218, - 13.452409638554217, - 14.422409638554218, - 15.392409638554216, - 16.36674846625767, - 17.34674846625767, - 18.32674846625767, - 19.30674846625767, - 20.296748466257668, - 21.27674846625767, - 22.26674846625767, - 23.25674846625767, - 24.24576131687243, - 25.23576131687243, - 26.22576131687243, - 27.21576131687243, - 28.21576131687243, - 29.20576131687243, - 30.195761316872428, - 31.195761316872428, - 32.184798761609905, - 33.184798761609905, - 
34.17479876160991, - 35.17479876160991, - 36.1647987616099, - 37.1647987616099, - 38.154798761609904, - 39.154798761609904, - 40.14406947890819, - 41.14406947890819, - 42.14406947890819, - 43.13406947890819, - 44.13406947890819, - 45.13406947890819, - 46.13406947890819, - 47.12406947890819, - 48.12351966873706, - 49.12351966873706, - 50.12351966873706, - 51.113519668737055, - 52.113519668737055, - 53.113519668737055, - 54.113519668737055, - 55.10351966873706, - 56.10880994671403, - 57.10880994671403, - 58.09880994671403, - 59.09880994671403, - 60.09880994671403, - 61.09880994671403, - 62.09880994671403, + 20.00391304347826, + 5.613913043478261, + 4.6039130434782605, + 4.81391304347826, + 5.3939130434782605, + 6.133913043478261, + 6.953913043478261, + 7.82391304347826, + 8.722409638554216, + 9.642409638554216, + 10.582409638554218, + 11.532409638554217, + 12.492409638554218, + 13.452409638554217, + 14.422409638554218, + 15.392409638554216, + 16.36674846625767, + 17.34674846625767, + 18.32674846625767, + 19.30674846625767, + 20.296748466257668, + 21.27674846625767, + 22.26674846625767, + 23.25674846625767, + 24.24576131687243, + 25.23576131687243, + 26.22576131687243, + 27.21576131687243, + 28.21576131687243, + 29.20576131687243, + 30.195761316872428, + 31.195761316872428, + 32.184798761609905, + 33.184798761609905, + 34.17479876160991, + 35.17479876160991, + 36.1647987616099, + 37.1647987616099, + 38.154798761609904, + 39.154798761609904, + 40.14406947890819, + 41.14406947890819, + 42.14406947890819, + 43.13406947890819, + 44.13406947890819, + 45.13406947890819, + 46.13406947890819, + 47.12406947890819, + 48.12351966873706, + 49.12351966873706, + 50.12351966873706, + 51.113519668737055, + 52.113519668737055, + 53.113519668737055, + 54.113519668737055, + 55.10351966873706, + 56.10880994671403, + 57.10880994671403, + 58.09880994671403, + 59.09880994671403, + 60.09880994671403, + 61.09880994671403, + 62.09880994671403, 63.09880994671403 - ], + ], [ - 
23.333913043478262, - 6.383913043478261, - 5.043913043478261, - 5.123913043478261, - 5.623913043478261, - 6.32391304347826, - 7.113913043478261, - 7.953913043478261, - 8.842409638554217, - 9.752409638554218, - 10.682409638554217, - 11.622409638554217, - 12.572409638554218, - 13.522409638554217, - 14.492409638554218, - 15.462409638554217, - 16.42674846625767, - 17.40674846625767, - 18.38674846625767, - 19.36674846625767, - 20.34674846625767, - 21.32674846625767, - 22.31674846625767, - 23.296748466257668, - 24.285761316872428, - 25.27576131687243, - 26.265761316872428, - 27.25576131687243, - 28.24576131687243, - 29.23576131687243, - 30.23576131687243, - 31.22576131687243, - 32.214798761609906, - 33.214798761609906, - 34.2047987616099, - 35.1947987616099, - 36.1947987616099, - 37.184798761609905, - 38.184798761609905, - 39.17479876160991, - 40.17406947890819, - 41.17406947890819, - 42.16406947890819, - 43.16406947890819, - 44.15406947890819, - 45.15406947890819, - 46.15406947890819, - 47.14406947890819, - 48.143519668737056, - 49.143519668737056, - 50.143519668737056, - 51.13351966873706, - 52.13351966873706, - 53.13351966873706, - 54.13351966873706, - 55.12351966873706, - 56.12880994671403, - 57.11880994671403, - 58.11880994671403, - 59.11880994671403, - 60.11880994671403, - 61.11880994671403, - 62.10880994671403, + 23.333913043478262, + 6.383913043478261, + 5.043913043478261, + 5.123913043478261, + 5.623913043478261, + 6.32391304347826, + 7.113913043478261, + 7.953913043478261, + 8.842409638554217, + 9.752409638554218, + 10.682409638554217, + 11.622409638554217, + 12.572409638554218, + 13.522409638554217, + 14.492409638554218, + 15.462409638554217, + 16.42674846625767, + 17.40674846625767, + 18.38674846625767, + 19.36674846625767, + 20.34674846625767, + 21.32674846625767, + 22.31674846625767, + 23.296748466257668, + 24.285761316872428, + 25.27576131687243, + 26.265761316872428, + 27.25576131687243, + 28.24576131687243, + 29.23576131687243, + 30.23576131687243, + 
31.22576131687243, + 32.214798761609906, + 33.214798761609906, + 34.2047987616099, + 35.1947987616099, + 36.1947987616099, + 37.184798761609905, + 38.184798761609905, + 39.17479876160991, + 40.17406947890819, + 41.17406947890819, + 42.16406947890819, + 43.16406947890819, + 44.15406947890819, + 45.15406947890819, + 46.15406947890819, + 47.14406947890819, + 48.143519668737056, + 49.143519668737056, + 50.143519668737056, + 51.13351966873706, + 52.13351966873706, + 53.13351966873706, + 54.13351966873706, + 55.12351966873706, + 56.12880994671403, + 57.11880994671403, + 58.11880994671403, + 59.11880994671403, + 60.11880994671403, + 61.11880994671403, + 62.10880994671403, 63.10880994671403 - ], + ], [ - 26.66391304347826, - 7.15391304347826, - 5.473913043478261, - 5.423913043478261, - 5.863913043478261, - 6.513913043478261, - 7.273913043478261, - 8.09391304347826, - 8.962409638554217, - 9.862409638554217, - 10.772409638554217, - 11.712409638554217, - 12.652409638554218, - 13.602409638554217, - 14.562409638554218, - 15.522409638554217, - 16.48674846625767, - 17.46674846625767, - 18.43674846625767, - 19.41674846625767, - 20.39674846625767, - 21.37674846625767, - 22.35674846625767, - 23.34674846625767, - 24.32576131687243, - 25.31576131687243, - 26.30576131687243, - 27.29576131687243, - 28.28576131687243, - 29.27576131687243, - 30.265761316872428, - 31.25576131687243, - 32.24479876160991, - 33.24479876160991, - 34.2347987616099, - 35.224798761609904, - 36.224798761609904, - 37.214798761609906, - 38.2047987616099, - 39.2047987616099, - 40.19406947890819, - 41.19406947890819, - 42.19406947890819, - 43.18406947890819, - 44.18406947890819, - 45.17406947890819, - 46.17406947890819, - 47.17406947890819, - 48.16351966873706, - 49.16351966873706, - 50.16351966873706, - 51.153519668737054, - 52.153519668737054, - 53.153519668737054, - 54.143519668737056, - 55.143519668737056, - 56.13880994671403, - 57.13880994671403, - 58.13880994671403, - 59.13880994671403, - 60.12880994671403, - 
61.12880994671403, - 62.12880994671403, + 26.66391304347826, + 7.15391304347826, + 5.473913043478261, + 5.423913043478261, + 5.863913043478261, + 6.513913043478261, + 7.273913043478261, + 8.09391304347826, + 8.962409638554217, + 9.862409638554217, + 10.772409638554217, + 11.712409638554217, + 12.652409638554218, + 13.602409638554217, + 14.562409638554218, + 15.522409638554217, + 16.48674846625767, + 17.46674846625767, + 18.43674846625767, + 19.41674846625767, + 20.39674846625767, + 21.37674846625767, + 22.35674846625767, + 23.34674846625767, + 24.32576131687243, + 25.31576131687243, + 26.30576131687243, + 27.29576131687243, + 28.28576131687243, + 29.27576131687243, + 30.265761316872428, + 31.25576131687243, + 32.24479876160991, + 33.24479876160991, + 34.2347987616099, + 35.224798761609904, + 36.224798761609904, + 37.214798761609906, + 38.2047987616099, + 39.2047987616099, + 40.19406947890819, + 41.19406947890819, + 42.19406947890819, + 43.18406947890819, + 44.18406947890819, + 45.17406947890819, + 46.17406947890819, + 47.17406947890819, + 48.16351966873706, + 49.16351966873706, + 50.16351966873706, + 51.153519668737054, + 52.153519668737054, + 53.153519668737054, + 54.143519668737056, + 55.143519668737056, + 56.13880994671403, + 57.13880994671403, + 58.13880994671403, + 59.13880994671403, + 60.12880994671403, + 61.12880994671403, + 62.12880994671403, 63.12880994671403 - ], + ], [ - 29.997272727272726, - 7.927272727272728, - 5.917272727272728, - 5.7272727272727275, - 6.097272727272728, - 6.697272727272727, - 7.427272727272728, - 8.237272727272728, - 9.08433734939759, - 9.96433734939759, - 10.874337349397589, - 11.79433734939759, - 12.73433734939759, - 13.67433734939759, - 14.63433734939759, - 15.58433734939759, - 16.552147239263803, - 17.522147239263802, - 18.492147239263804, - 19.462147239263803, - 20.442147239263804, - 21.422147239263804, - 22.402147239263805, - 23.3821472392638, - 24.37037037037037, - 25.36037037037037, - 26.34037037037037, - 27.33037037037037, - 
28.32037037037037, - 29.31037037037037, - 30.30037037037037, - 31.29037037037037, - 32.27863777089783, - 33.26863777089783, - 34.25863777089783, - 35.25863777089783, - 36.24863777089783, - 37.23863777089783, - 38.23863777089783, - 39.22863777089783, - 40.22332506203474, - 41.21332506203474, - 42.21332506203474, - 43.203325062034736, - 44.203325062034736, - 45.203325062034736, - 46.19332506203474, - 47.19332506203474, - 48.18633540372671, - 49.18633540372671, - 50.17633540372671, - 51.17633540372671, - 52.17633540372671, - 53.166335403726706, - 54.166335403726706, - 55.166335403726706, - 56.15985790408526, - 57.15985790408526, - 58.14985790408526, - 59.14985790408526, - 60.14985790408526, - 61.14985790408526, - 62.13985790408526, + 29.997272727272726, + 7.927272727272728, + 5.917272727272728, + 5.7272727272727275, + 6.097272727272728, + 6.697272727272727, + 7.427272727272728, + 8.237272727272728, + 9.08433734939759, + 9.96433734939759, + 10.874337349397589, + 11.79433734939759, + 12.73433734939759, + 13.67433734939759, + 14.63433734939759, + 15.58433734939759, + 16.552147239263803, + 17.522147239263802, + 18.492147239263804, + 19.462147239263803, + 20.442147239263804, + 21.422147239263804, + 22.402147239263805, + 23.3821472392638, + 24.37037037037037, + 25.36037037037037, + 26.34037037037037, + 27.33037037037037, + 28.32037037037037, + 29.31037037037037, + 30.30037037037037, + 31.29037037037037, + 32.27863777089783, + 33.26863777089783, + 34.25863777089783, + 35.25863777089783, + 36.24863777089783, + 37.23863777089783, + 38.23863777089783, + 39.22863777089783, + 40.22332506203474, + 41.21332506203474, + 42.21332506203474, + 43.203325062034736, + 44.203325062034736, + 45.203325062034736, + 46.19332506203474, + 47.19332506203474, + 48.18633540372671, + 49.18633540372671, + 50.17633540372671, + 51.17633540372671, + 52.17633540372671, + 53.166335403726706, + 54.166335403726706, + 55.166335403726706, + 56.15985790408526, + 57.15985790408526, + 58.14985790408526, + 
59.14985790408526, + 60.14985790408526, + 61.14985790408526, + 62.13985790408526, 63.13985790408526 - ], + ], [ - 33.337272727272726, - 8.697272727272727, - 6.347272727272728, - 6.027272727272727, - 6.327272727272727, - 6.887272727272728, - 7.587272727272728, - 8.367272727272727, - 9.204337349397589, - 10.07433734939759, - 10.97433734939759, - 11.88433734939759, - 12.81433734939759, - 13.75433734939759, - 14.694337349397589, - 15.65433734939759, - 16.612147239263802, - 17.582147239263804, - 18.5421472392638, - 19.522147239263802, - 20.492147239263804, - 21.472147239263805, - 22.4521472392638, - 23.432147239263802, - 24.41037037037037, - 25.39037037037037, - 26.380370370370372, - 27.37037037037037, - 28.35037037037037, - 29.34037037037037, - 30.33037037037037, - 31.32037037037037, - 32.30863777089783, - 33.298637770897834, - 34.28863777089783, - 35.27863777089783, - 36.27863777089783, - 37.26863777089783, - 38.258637770897835, - 39.258637770897835, - 40.24332506203474, - 41.24332506203474, - 42.23332506203474, - 43.23332506203474, - 44.22332506203474, - 45.22332506203474, - 46.21332506203474, - 47.21332506203474, - 48.20633540372671, - 49.20633540372671, - 50.19633540372671, - 51.19633540372671, - 52.18633540372671, - 53.18633540372671, - 54.18633540372671, - 55.17633540372671, - 56.17985790408526, - 57.169857904085255, - 58.169857904085255, - 59.169857904085255, - 60.169857904085255, - 61.15985790408526, - 62.15985790408526, + 33.337272727272726, + 8.697272727272727, + 6.347272727272728, + 6.027272727272727, + 6.327272727272727, + 6.887272727272728, + 7.587272727272728, + 8.367272727272727, + 9.204337349397589, + 10.07433734939759, + 10.97433734939759, + 11.88433734939759, + 12.81433734939759, + 13.75433734939759, + 14.694337349397589, + 15.65433734939759, + 16.612147239263802, + 17.582147239263804, + 18.5421472392638, + 19.522147239263802, + 20.492147239263804, + 21.472147239263805, + 22.4521472392638, + 23.432147239263802, + 24.41037037037037, + 
25.39037037037037, + 26.380370370370372, + 27.37037037037037, + 28.35037037037037, + 29.34037037037037, + 30.33037037037037, + 31.32037037037037, + 32.30863777089783, + 33.298637770897834, + 34.28863777089783, + 35.27863777089783, + 36.27863777089783, + 37.26863777089783, + 38.258637770897835, + 39.258637770897835, + 40.24332506203474, + 41.24332506203474, + 42.23332506203474, + 43.23332506203474, + 44.22332506203474, + 45.22332506203474, + 46.21332506203474, + 47.21332506203474, + 48.20633540372671, + 49.20633540372671, + 50.19633540372671, + 51.19633540372671, + 52.18633540372671, + 53.18633540372671, + 54.18633540372671, + 55.17633540372671, + 56.17985790408526, + 57.169857904085255, + 58.169857904085255, + 59.169857904085255, + 60.169857904085255, + 61.15985790408526, + 62.15985790408526, 63.15985790408526 - ], + ], [ - 36.66727272727273, - 9.457272727272727, - 6.787272727272727, - 6.337272727272728, - 6.557272727272728, - 7.077272727272728, - 7.747272727272728, - 8.507272727272728, - 9.32433734939759, - 10.18433734939759, - 11.06433734939759, - 11.97433734939759, - 12.89433734939759, - 13.82433734939759, - 14.76433734939759, - 15.714337349397589, - 16.672147239263804, - 17.6321472392638, - 18.602147239263804, - 19.572147239263803, - 20.542147239263805, - 21.512147239263804, - 22.492147239263804, - 23.472147239263805, - 24.45037037037037, - 25.43037037037037, - 26.42037037037037, - 27.40037037037037, - 28.39037037037037, - 29.380370370370372, - 30.36037037037037, - 31.35037037037037, - 32.33863777089783, - 33.32863777089783, - 34.31863777089783, - 35.30863777089783, - 36.29863777089783, - 37.29863777089783, - 38.28863777089783, - 39.27863777089783, - 40.273325062034736, - 41.26332506203474, - 42.26332506203474, - 43.25332506203474, - 44.24332506203474, - 45.24332506203474, - 46.23332506203474, - 47.23332506203474, - 48.22633540372671, - 49.22633540372671, - 50.21633540372671, - 51.21633540372671, - 52.20633540372671, - 53.20633540372671, - 54.20633540372671, - 
55.19633540372671, - 56.199857904085256, - 57.18985790408526, - 58.18985790408526, - 59.18985790408526, - 60.17985790408525, - 61.17985790408525, - 62.17985790408525, + 36.66727272727273, + 9.457272727272727, + 6.787272727272727, + 6.337272727272728, + 6.557272727272728, + 7.077272727272728, + 7.747272727272728, + 8.507272727272728, + 9.32433734939759, + 10.18433734939759, + 11.06433734939759, + 11.97433734939759, + 12.89433734939759, + 13.82433734939759, + 14.76433734939759, + 15.714337349397589, + 16.672147239263804, + 17.6321472392638, + 18.602147239263804, + 19.572147239263803, + 20.542147239263805, + 21.512147239263804, + 22.492147239263804, + 23.472147239263805, + 24.45037037037037, + 25.43037037037037, + 26.42037037037037, + 27.40037037037037, + 28.39037037037037, + 29.380370370370372, + 30.36037037037037, + 31.35037037037037, + 32.33863777089783, + 33.32863777089783, + 34.31863777089783, + 35.30863777089783, + 36.29863777089783, + 37.29863777089783, + 38.28863777089783, + 39.27863777089783, + 40.273325062034736, + 41.26332506203474, + 42.26332506203474, + 43.25332506203474, + 44.24332506203474, + 45.24332506203474, + 46.23332506203474, + 47.23332506203474, + 48.22633540372671, + 49.22633540372671, + 50.21633540372671, + 51.21633540372671, + 52.20633540372671, + 53.20633540372671, + 54.20633540372671, + 55.19633540372671, + 56.199857904085256, + 57.18985790408526, + 58.18985790408526, + 59.18985790408526, + 60.17985790408525, + 61.17985790408525, + 62.17985790408525, 63.169857904085255 - ], + ], [ - 39.99727272727273, - 10.227272727272727, - 7.217272727272728, - 6.637272727272728, - 6.787272727272727, - 7.2672727272727276, - 7.907272727272728, - 8.647272727272728, - 9.444337349397589, - 10.29433734939759, - 11.16433734939759, - 12.06433734939759, - 12.97433734939759, - 13.90433734939759, - 14.83433734939759, - 15.784337349397589, - 16.732147239263803, - 17.692147239263804, - 18.652147239263805, - 19.622147239263803, - 20.592147239263802, - 21.5621472392638, 
- 22.542147239263805, - 23.512147239263804, - 24.49037037037037, - 25.47037037037037, - 26.46037037037037, - 27.44037037037037, - 28.42037037037037, - 29.41037037037037, - 30.40037037037037, - 31.380370370370372, - 32.368637770897834, - 33.35863777089783, - 34.34863777089783, - 35.33863777089783, - 36.32863777089783, - 37.31863777089783, - 38.30863777089783, - 39.30863777089783, - 40.29332506203474, - 41.29332506203474, - 42.28332506203474, - 43.273325062034736, - 44.273325062034736, - 45.26332506203474, - 46.26332506203474, - 47.25332506203474, - 48.24633540372671, - 49.24633540372671, - 50.236335403726706, - 51.236335403726706, - 52.22633540372671, - 53.22633540372671, - 54.21633540372671, - 55.21633540372671, - 56.209857904085254, - 57.209857904085254, - 58.209857904085254, - 59.199857904085256, - 60.199857904085256, - 61.199857904085256, - 62.18985790408526, + 39.99727272727273, + 10.227272727272727, + 7.217272727272728, + 6.637272727272728, + 6.787272727272727, + 7.2672727272727276, + 7.907272727272728, + 8.647272727272728, + 9.444337349397589, + 10.29433734939759, + 11.16433734939759, + 12.06433734939759, + 12.97433734939759, + 13.90433734939759, + 14.83433734939759, + 15.784337349397589, + 16.732147239263803, + 17.692147239263804, + 18.652147239263805, + 19.622147239263803, + 20.592147239263802, + 21.5621472392638, + 22.542147239263805, + 23.512147239263804, + 24.49037037037037, + 25.47037037037037, + 26.46037037037037, + 27.44037037037037, + 28.42037037037037, + 29.41037037037037, + 30.40037037037037, + 31.380370370370372, + 32.368637770897834, + 33.35863777089783, + 34.34863777089783, + 35.33863777089783, + 36.32863777089783, + 37.31863777089783, + 38.30863777089783, + 39.30863777089783, + 40.29332506203474, + 41.29332506203474, + 42.28332506203474, + 43.273325062034736, + 44.273325062034736, + 45.26332506203474, + 46.26332506203474, + 47.25332506203474, + 48.24633540372671, + 49.24633540372671, + 50.236335403726706, + 51.236335403726706, + 
52.22633540372671, + 53.22633540372671, + 54.21633540372671, + 55.21633540372671, + 56.209857904085254, + 57.209857904085254, + 58.209857904085254, + 59.199857904085256, + 60.199857904085256, + 61.199857904085256, + 62.18985790408526, 63.18985790408526 - ], + ], [ - 43.32939393939394, - 10.99939393939394, - 7.649393939393939, - 6.9393939393939394, - 7.0193939393939395, - 7.449393939393939, - 8.059393939393939, - 8.77939393939394, - 9.566265060240964, - 10.396265060240964, - 11.266265060240963, - 12.146265060240964, - 13.056265060240964, - 13.976265060240964, - 14.906265060240964, - 15.846265060240963, - 16.79754601226994, - 17.74754601226994, - 18.70754601226994, - 19.67754601226994, - 20.63754601226994, - 21.60754601226994, - 22.58754601226994, - 23.55754601226994, - 24.53497942386831, - 25.51497942386831, - 26.494979423868312, - 27.474979423868312, - 28.45497942386831, - 29.44497942386831, - 30.42497942386831, - 31.41497942386831, - 32.40247678018576, - 33.39247678018576, - 34.382476780185755, - 35.37247678018576, - 36.36247678018576, - 37.35247678018576, - 38.342476780185756, - 39.33247678018576, - 40.32258064516129, - 41.31258064516129, - 42.302580645161285, - 43.302580645161285, - 44.29258064516129, - 45.28258064516129, - 46.28258064516129, - 47.27258064516129, - 48.26915113871635, - 49.259151138716355, - 50.259151138716355, - 51.24915113871635, - 52.24915113871635, - 53.23915113871635, - 54.23915113871635, - 55.23915113871635, - 56.230905861456485, - 57.230905861456485, - 58.22090586145649, - 59.22090586145649, - 60.21090586145648, - 61.21090586145648, - 62.21090586145648, + 43.32939393939394, + 10.99939393939394, + 7.649393939393939, + 6.9393939393939394, + 7.0193939393939395, + 7.449393939393939, + 8.059393939393939, + 8.77939393939394, + 9.566265060240964, + 10.396265060240964, + 11.266265060240963, + 12.146265060240964, + 13.056265060240964, + 13.976265060240964, + 14.906265060240964, + 15.846265060240963, + 16.79754601226994, + 17.74754601226994, + 
18.70754601226994, + 19.67754601226994, + 20.63754601226994, + 21.60754601226994, + 22.58754601226994, + 23.55754601226994, + 24.53497942386831, + 25.51497942386831, + 26.494979423868312, + 27.474979423868312, + 28.45497942386831, + 29.44497942386831, + 30.42497942386831, + 31.41497942386831, + 32.40247678018576, + 33.39247678018576, + 34.382476780185755, + 35.37247678018576, + 36.36247678018576, + 37.35247678018576, + 38.342476780185756, + 39.33247678018576, + 40.32258064516129, + 41.31258064516129, + 42.302580645161285, + 43.302580645161285, + 44.29258064516129, + 45.28258064516129, + 46.28258064516129, + 47.27258064516129, + 48.26915113871635, + 49.259151138716355, + 50.259151138716355, + 51.24915113871635, + 52.24915113871635, + 53.23915113871635, + 54.23915113871635, + 55.23915113871635, + 56.230905861456485, + 57.230905861456485, + 58.22090586145649, + 59.22090586145649, + 60.21090586145648, + 61.21090586145648, + 62.21090586145648, 63.200905861456484 - ], + ], [ - 46.669393939393935, - 11.76939393939394, - 8.08939393939394, - 7.239393939393939, - 7.25939393939394, - 7.63939393939394, - 8.219393939393939, - 8.91939393939394, - 9.686265060240963, - 10.506265060240963, - 11.356265060240965, - 12.236265060240964, - 13.136265060240964, - 14.056265060240964, - 14.976265060240964, - 15.916265060240963, - 16.85754601226994, - 17.807546012269942, - 18.76754601226994, - 19.72754601226994, - 20.68754601226994, - 21.65754601226994, - 22.62754601226994, - 23.59754601226994, - 24.57497942386831, - 25.55497942386831, - 26.53497942386831, - 27.51497942386831, - 28.494979423868312, - 29.474979423868312, - 30.46497942386831, - 31.44497942386831, - 32.43247678018576, - 33.42247678018576, - 34.412476780185756, - 35.39247678018576, - 36.382476780185755, - 37.37247678018576, - 38.36247678018576, - 39.35247678018576, - 40.34258064516129, - 41.34258064516129, - 42.332580645161286, - 43.32258064516129, - 44.31258064516129, - 45.31258064516129, - 46.302580645161285, - 
47.29258064516129, - 48.289151138716356, - 49.27915113871635, - 50.27915113871635, - 51.26915113871635, - 52.26915113871635, - 53.259151138716355, - 54.259151138716355, - 55.24915113871636, - 56.25090586145649, - 57.24090586145648, - 58.24090586145648, - 59.24090586145648, - 60.230905861456485, - 61.230905861456485, - 62.22090586145649, + 46.669393939393935, + 11.76939393939394, + 8.08939393939394, + 7.239393939393939, + 7.25939393939394, + 7.63939393939394, + 8.219393939393939, + 8.91939393939394, + 9.686265060240963, + 10.506265060240963, + 11.356265060240965, + 12.236265060240964, + 13.136265060240964, + 14.056265060240964, + 14.976265060240964, + 15.916265060240963, + 16.85754601226994, + 17.807546012269942, + 18.76754601226994, + 19.72754601226994, + 20.68754601226994, + 21.65754601226994, + 22.62754601226994, + 23.59754601226994, + 24.57497942386831, + 25.55497942386831, + 26.53497942386831, + 27.51497942386831, + 28.494979423868312, + 29.474979423868312, + 30.46497942386831, + 31.44497942386831, + 32.43247678018576, + 33.42247678018576, + 34.412476780185756, + 35.39247678018576, + 36.382476780185755, + 37.37247678018576, + 38.36247678018576, + 39.35247678018576, + 40.34258064516129, + 41.34258064516129, + 42.332580645161286, + 43.32258064516129, + 44.31258064516129, + 45.31258064516129, + 46.302580645161285, + 47.29258064516129, + 48.289151138716356, + 49.27915113871635, + 50.27915113871635, + 51.26915113871635, + 52.26915113871635, + 53.259151138716355, + 54.259151138716355, + 55.24915113871636, + 56.25090586145649, + 57.24090586145648, + 58.24090586145648, + 59.24090586145648, + 60.230905861456485, + 61.230905861456485, + 62.22090586145649, 63.22090586145649 - ], + ], [ - 49.99939393939394, - 12.539393939393939, - 8.51939393939394, - 7.54939393939394, - 7.489393939393939, - 7.829393939393939, - 8.379393939393939, - 9.059393939393939, - 9.806265060240964, - 10.616265060240965, - 11.456265060240964, - 12.326265060240964, - 13.216265060240964, - 
14.126265060240964, - 15.046265060240964, - 15.976265060240964, - 16.91754601226994, - 17.86754601226994, - 18.81754601226994, - 19.77754601226994, - 20.73754601226994, - 21.70754601226994, - 22.67754601226994, - 23.64754601226994, - 24.61497942386831, - 25.59497942386831, - 26.57497942386831, - 27.544979423868313, - 28.53497942386831, - 29.51497942386831, - 30.494979423868312, - 31.474979423868312, - 32.46247678018576, - 33.452476780185755, - 34.43247678018576, - 35.42247678018576, - 36.412476780185756, - 37.40247678018576, - 38.39247678018576, - 39.382476780185755, - 40.372580645161285, - 41.36258064516129, - 42.35258064516129, - 43.34258064516129, - 44.34258064516129, - 45.332580645161286, - 46.32258064516129, - 47.31258064516129, - 48.30915113871635, - 49.30915113871635, - 50.299151138716354, - 51.289151138716356, - 52.28915113871635, - 53.27915113871635, - 54.27915113871635, - 55.26915113871635, - 56.270905861456484, - 57.260905861456486, - 58.260905861456486, - 59.25090586145649, - 60.25090586145649, - 61.24090586145648, - 62.24090586145648, + 49.99939393939394, + 12.539393939393939, + 8.51939393939394, + 7.54939393939394, + 7.489393939393939, + 7.829393939393939, + 8.379393939393939, + 9.059393939393939, + 9.806265060240964, + 10.616265060240965, + 11.456265060240964, + 12.326265060240964, + 13.216265060240964, + 14.126265060240964, + 15.046265060240964, + 15.976265060240964, + 16.91754601226994, + 17.86754601226994, + 18.81754601226994, + 19.77754601226994, + 20.73754601226994, + 21.70754601226994, + 22.67754601226994, + 23.64754601226994, + 24.61497942386831, + 25.59497942386831, + 26.57497942386831, + 27.544979423868313, + 28.53497942386831, + 29.51497942386831, + 30.494979423868312, + 31.474979423868312, + 32.46247678018576, + 33.452476780185755, + 34.43247678018576, + 35.42247678018576, + 36.412476780185756, + 37.40247678018576, + 38.39247678018576, + 39.382476780185755, + 40.372580645161285, + 41.36258064516129, + 42.35258064516129, + 
43.34258064516129, + 44.34258064516129, + 45.332580645161286, + 46.32258064516129, + 47.31258064516129, + 48.30915113871635, + 49.30915113871635, + 50.299151138716354, + 51.289151138716356, + 52.28915113871635, + 53.27915113871635, + 54.27915113871635, + 55.26915113871635, + 56.270905861456484, + 57.260905861456486, + 58.260905861456486, + 59.25090586145649, + 60.25090586145649, + 61.24090586145648, + 62.24090586145648, 63.24090586145648 - ], + ], [ - 53.32939393939394, - 13.309393939393939, - 8.959393939393939, - 7.84939393939394, - 7.71939393939394, - 8.01939393939394, - 8.539393939393939, - 9.18939393939394, - 9.926265060240963, - 10.716265060240964, - 11.556265060240964, - 12.416265060240963, - 13.296265060240964, - 14.206265060240963, - 15.116265060240963, - 16.046265060240962, - 16.97754601226994, - 17.92754601226994, - 18.87754601226994, - 19.82754601226994, - 20.787546012269942, - 21.74754601226994, - 22.717546012269942, - 23.68754601226994, - 24.654979423868312, - 25.634979423868312, - 26.60497942386831, - 27.58497942386831, - 28.564979423868312, - 29.544979423868313, - 30.52497942386831, - 31.51497942386831, - 32.49247678018576, - 33.482476780185756, - 34.46247678018576, - 35.452476780185755, - 36.44247678018576, - 37.43247678018576, - 38.42247678018576, - 39.40247678018576, - 40.39258064516129, - 41.38258064516129, - 42.38258064516129, - 43.372580645161285, - 44.36258064516129, - 45.35258064516129, - 46.34258064516129, - 47.34258064516129, - 48.329151138716355, - 49.329151138716355, - 50.31915113871635, - 51.30915113871635, - 52.30915113871635, - 53.299151138716354, - 54.299151138716354, - 55.28915113871635, - 56.28090586145648, - 57.28090586145648, - 58.270905861456484, - 59.270905861456484, - 60.260905861456486, - 61.260905861456486, - 62.260905861456486, + 53.32939393939394, + 13.309393939393939, + 8.959393939393939, + 7.84939393939394, + 7.71939393939394, + 8.01939393939394, + 8.539393939393939, + 9.18939393939394, + 9.926265060240963, + 
10.716265060240964, + 11.556265060240964, + 12.416265060240963, + 13.296265060240964, + 14.206265060240963, + 15.116265060240963, + 16.046265060240962, + 16.97754601226994, + 17.92754601226994, + 18.87754601226994, + 19.82754601226994, + 20.787546012269942, + 21.74754601226994, + 22.717546012269942, + 23.68754601226994, + 24.654979423868312, + 25.634979423868312, + 26.60497942386831, + 27.58497942386831, + 28.564979423868312, + 29.544979423868313, + 30.52497942386831, + 31.51497942386831, + 32.49247678018576, + 33.482476780185756, + 34.46247678018576, + 35.452476780185755, + 36.44247678018576, + 37.43247678018576, + 38.42247678018576, + 39.40247678018576, + 40.39258064516129, + 41.38258064516129, + 42.38258064516129, + 43.372580645161285, + 44.36258064516129, + 45.35258064516129, + 46.34258064516129, + 47.34258064516129, + 48.329151138716355, + 49.329151138716355, + 50.31915113871635, + 51.30915113871635, + 52.30915113871635, + 53.299151138716354, + 54.299151138716354, + 55.28915113871635, + 56.28090586145648, + 57.28090586145648, + 58.270905861456484, + 59.270905861456484, + 60.260905861456486, + 61.260905861456486, + 62.260905861456486, 63.25090586145649 - ], + ], [ - 56.66348837209303, - 14.073488372093024, - 9.393488372093023, - 8.153488372093022, - 7.953488372093023, - 8.203488372093023, - 8.693488372093023, - 9.333488372093022, - 10.048192771084338, - 10.828192771084337, - 11.648192771084338, - 12.508192771084339, - 13.378192771084338, - 14.278192771084338, - 15.188192771084339, - 16.108192771084337, - 17.042944785276074, - 17.982944785276075, - 18.932944785276074, - 19.882944785276074, - 20.832944785276073, - 21.802944785276075, - 22.762944785276073, - 23.732944785276075, - 24.699588477366255, - 25.669588477366254, - 26.649588477366255, - 27.619588477366257, - 28.599588477366254, - 29.579588477366254, - 30.559588477366255, - 31.539588477366255, - 32.526315789473685, - 33.50631578947368, - 34.496315789473684, - 35.486315789473686, - 36.46631578947368, - 
37.456315789473685, - 38.44631578947369, - 39.43631578947368, - 40.421836228287845, - 41.41183622828785, - 42.40183622828784, - 43.39183622828784, - 44.381836228287845, - 45.37183622828785, - 46.37183622828785, - 47.36183622828784, - 48.351966873706004, - 49.341966873706006, - 50.341966873706006, - 51.331966873706, - 52.321966873706, - 53.321966873706, - 54.311966873706005, - 55.311966873706005, - 56.301953818827705, - 57.29195381882771, - 58.29195381882771, - 59.2819538188277, - 60.2819538188277, - 61.2819538188277, - 62.271953818827704, + 56.66348837209303, + 14.073488372093024, + 9.393488372093023, + 8.153488372093022, + 7.953488372093023, + 8.203488372093023, + 8.693488372093023, + 9.333488372093022, + 10.048192771084338, + 10.828192771084337, + 11.648192771084338, + 12.508192771084339, + 13.378192771084338, + 14.278192771084338, + 15.188192771084339, + 16.108192771084337, + 17.042944785276074, + 17.982944785276075, + 18.932944785276074, + 19.882944785276074, + 20.832944785276073, + 21.802944785276075, + 22.762944785276073, + 23.732944785276075, + 24.699588477366255, + 25.669588477366254, + 26.649588477366255, + 27.619588477366257, + 28.599588477366254, + 29.579588477366254, + 30.559588477366255, + 31.539588477366255, + 32.526315789473685, + 33.50631578947368, + 34.496315789473684, + 35.486315789473686, + 36.46631578947368, + 37.456315789473685, + 38.44631578947369, + 39.43631578947368, + 40.421836228287845, + 41.41183622828785, + 42.40183622828784, + 43.39183622828784, + 44.381836228287845, + 45.37183622828785, + 46.37183622828785, + 47.36183622828784, + 48.351966873706004, + 49.341966873706006, + 50.341966873706006, + 51.331966873706, + 52.321966873706, + 53.321966873706, + 54.311966873706005, + 55.311966873706005, + 56.301953818827705, + 57.29195381882771, + 58.29195381882771, + 59.2819538188277, + 60.2819538188277, + 61.2819538188277, + 62.271953818827704, 63.271953818827704 - ], + ], [ - 60.00348837209302, - 14.843488372093024, - 9.823488372093024, - 
8.453488372093023, - 8.183488372093024, - 8.393488372093023, - 8.853488372093024, - 9.463488372093023, - 10.168192771084337, - 10.938192771084339, - 11.748192771084337, - 12.588192771084337, - 13.468192771084338, - 14.358192771084337, - 15.258192771084339, - 16.178192771084337, - 17.102944785276073, - 18.042944785276074, - 18.982944785276075, - 19.932944785276074, - 20.882944785276074, - 21.842944785276075, - 22.802944785276075, - 23.772944785276074, - 24.739588477366254, - 25.709588477366257, - 26.679588477366256, - 27.659588477366256, - 28.639588477366257, - 29.609588477366255, - 30.589588477366256, - 31.579588477366254, - 32.556315789473686, - 33.53631578947368, - 34.526315789473685, - 35.50631578947368, - 36.496315789473684, - 37.486315789473686, - 38.46631578947368, - 39.456315789473685, - 40.44183622828785, - 41.43183622828784, - 42.421836228287845, - 43.41183622828785, - 44.40183622828784, - 45.40183622828785, - 46.39183622828784, - 47.381836228287845, - 48.37196687370601, - 49.361966873706, - 50.361966873706, - 51.351966873706004, - 52.341966873706006, - 53.341966873706006, - 54.33196687370601, - 55.321966873706, - 56.32195381882771, - 57.3119538188277, - 58.3119538188277, - 59.301953818827705, - 60.301953818827705, - 61.29195381882771, - 62.29195381882771, + 60.00348837209302, + 14.843488372093024, + 9.823488372093024, + 8.453488372093023, + 8.183488372093024, + 8.393488372093023, + 8.853488372093024, + 9.463488372093023, + 10.168192771084337, + 10.938192771084339, + 11.748192771084337, + 12.588192771084337, + 13.468192771084338, + 14.358192771084337, + 15.258192771084339, + 16.178192771084337, + 17.102944785276073, + 18.042944785276074, + 18.982944785276075, + 19.932944785276074, + 20.882944785276074, + 21.842944785276075, + 22.802944785276075, + 23.772944785276074, + 24.739588477366254, + 25.709588477366257, + 26.679588477366256, + 27.659588477366256, + 28.639588477366257, + 29.609588477366255, + 30.589588477366256, + 31.579588477366254, + 
32.556315789473686, + 33.53631578947368, + 34.526315789473685, + 35.50631578947368, + 36.496315789473684, + 37.486315789473686, + 38.46631578947368, + 39.456315789473685, + 40.44183622828785, + 41.43183622828784, + 42.421836228287845, + 43.41183622828785, + 44.40183622828784, + 45.40183622828785, + 46.39183622828784, + 47.381836228287845, + 48.37196687370601, + 49.361966873706, + 50.361966873706, + 51.351966873706004, + 52.341966873706006, + 53.341966873706006, + 54.33196687370601, + 55.321966873706, + 56.32195381882771, + 57.3119538188277, + 58.3119538188277, + 59.301953818827705, + 60.301953818827705, + 61.29195381882771, + 62.29195381882771, 63.2819538188277 - ], + ], [ - 63.33348837209303, - 15.613488372093023, - 10.263488372093024, - 8.753488372093024, - 8.423488372093024, - 8.583488372093024, - 9.013488372093024, - 9.603488372093024, - 10.288192771084338, - 11.038192771084338, - 11.848192771084339, - 12.678192771084337, - 13.548192771084338, - 14.428192771084337, - 15.328192771084339, - 16.23819277108434, - 17.162944785276075, - 18.102944785276073, - 19.042944785276074, - 19.982944785276075, - 20.932944785276074, - 21.89294478527607, - 22.852944785276073, - 23.812944785276073, - 24.779588477366254, - 25.749588477366256, - 26.719588477366255, - 27.699588477366255, - 28.669588477366254, - 29.649588477366255, - 30.629588477366255, - 31.609588477366255, - 32.58631578947369, - 33.566315789473684, - 34.556315789473686, - 35.53631578947368, - 36.526315789473685, - 37.50631578947369, - 38.496315789473684, - 39.486315789473686, - 40.47183622828784, - 41.461836228287844, - 42.451836228287846, - 43.44183622828785, - 44.43183622828784, - 45.421836228287845, - 46.41183622828785, - 47.40183622828785, - 48.391966873706004, - 49.381966873706006, - 50.381966873706006, - 51.37196687370601, - 52.361966873706, - 53.351966873706004, - 54.351966873706004, - 55.341966873706006, - 56.341953818827704, - 57.33195381882771, - 58.32195381882771, - 59.32195381882771, - 60.3119538188277, 
- 61.3119538188277, - 62.301953818827705, + 63.33348837209303, + 15.613488372093023, + 10.263488372093024, + 8.753488372093024, + 8.423488372093024, + 8.583488372093024, + 9.013488372093024, + 9.603488372093024, + 10.288192771084338, + 11.038192771084338, + 11.848192771084339, + 12.678192771084337, + 13.548192771084338, + 14.428192771084337, + 15.328192771084339, + 16.23819277108434, + 17.162944785276075, + 18.102944785276073, + 19.042944785276074, + 19.982944785276075, + 20.932944785276074, + 21.89294478527607, + 22.852944785276073, + 23.812944785276073, + 24.779588477366254, + 25.749588477366256, + 26.719588477366255, + 27.699588477366255, + 28.669588477366254, + 29.649588477366255, + 30.629588477366255, + 31.609588477366255, + 32.58631578947369, + 33.566315789473684, + 34.556315789473686, + 35.53631578947368, + 36.526315789473685, + 37.50631578947369, + 38.496315789473684, + 39.486315789473686, + 40.47183622828784, + 41.461836228287844, + 42.451836228287846, + 43.44183622828785, + 44.43183622828784, + 45.421836228287845, + 46.41183622828785, + 47.40183622828785, + 48.391966873706004, + 49.381966873706006, + 50.381966873706006, + 51.37196687370601, + 52.361966873706, + 53.351966873706004, + 54.351966873706004, + 55.341966873706006, + 56.341953818827704, + 57.33195381882771, + 58.32195381882771, + 59.32195381882771, + 60.3119538188277, + 61.3119538188277, + 62.301953818827705, 63.301953818827705 - ], + ], [ - 66.66348837209303, - 16.383488372093023, - 10.693488372093023, - 9.063488372093023, - 8.653488372093022, - 8.773488372093023, - 9.173488372093024, - 9.743488372093022, - 10.408192771084337, - 11.148192771084338, - 11.938192771084339, - 12.768192771084339, - 13.628192771084338, - 14.508192771084339, - 15.398192771084338, - 16.308192771084336, - 17.222944785276074, - 18.152944785276073, - 19.092944785276075, - 20.032944785276072, - 20.982944785276075, - 21.942944785276076, - 22.89294478527607, - 23.862944785276074, - 24.819588477366256, - 25.789588477366255, - 
26.759588477366254, - 27.729588477366256, - 28.709588477366253, - 29.679588477366256, - 30.659588477366256, - 31.639588477366257, - 32.61631578947369, - 33.596315789473685, - 34.58631578947369, - 35.566315789473684, - 36.54631578947368, - 37.53631578947368, - 38.526315789473685, - 39.50631578947369, - 40.491836228287845, - 41.48183622828785, - 42.47183622828784, - 43.461836228287844, - 44.451836228287846, - 45.44183622828784, - 46.43183622828784, - 47.421836228287845, - 48.41196687370601, - 49.401966873706, - 50.401966873706, - 51.391966873706004, - 52.381966873706006, - 53.371966873706, - 54.371966873706, - 55.361966873706, - 56.3519538188277, - 57.3519538188277, - 58.341953818827704, - 59.341953818827704, - 60.33195381882771, - 61.32195381882771, - 62.32195381882771, + 66.66348837209303, + 16.383488372093023, + 10.693488372093023, + 9.063488372093023, + 8.653488372093022, + 8.773488372093023, + 9.173488372093024, + 9.743488372093022, + 10.408192771084337, + 11.148192771084338, + 11.938192771084339, + 12.768192771084339, + 13.628192771084338, + 14.508192771084339, + 15.398192771084338, + 16.308192771084336, + 17.222944785276074, + 18.152944785276073, + 19.092944785276075, + 20.032944785276072, + 20.982944785276075, + 21.942944785276076, + 22.89294478527607, + 23.862944785276074, + 24.819588477366256, + 25.789588477366255, + 26.759588477366254, + 27.729588477366256, + 28.709588477366253, + 29.679588477366256, + 30.659588477366256, + 31.639588477366257, + 32.61631578947369, + 33.596315789473685, + 34.58631578947369, + 35.566315789473684, + 36.54631578947368, + 37.53631578947368, + 38.526315789473685, + 39.50631578947369, + 40.491836228287845, + 41.48183622828785, + 42.47183622828784, + 43.461836228287844, + 44.451836228287846, + 45.44183622828784, + 46.43183622828784, + 47.421836228287845, + 48.41196687370601, + 49.401966873706, + 50.401966873706, + 51.391966873706004, + 52.381966873706006, + 53.371966873706, + 54.371966873706, + 55.361966873706, + 56.3519538188277, 
+ 57.3519538188277, + 58.341953818827704, + 59.341953818827704, + 60.33195381882771, + 61.32195381882771, + 62.32195381882771, 63.3119538188277 - ], + ], [ - 70.00372093023256, - 17.15372093023256, - 11.133720930232558, - 9.363720930232558, - 8.883720930232558, - 8.963720930232558, - 9.333720930232557, - 9.873720930232558, - 10.53012048192771, - 11.26012048192771, - 12.04012048192771, - 12.86012048192771, - 13.71012048192771, - 14.580120481927711, - 15.470120481927712, - 16.37012048192771, - 17.288343558282207, - 18.218343558282207, - 19.148343558282207, - 20.088343558282208, - 21.038343558282207, - 21.988343558282207, - 22.938343558282206, - 23.898343558282207, - 24.864197530864196, - 25.834197530864195, - 26.794197530864196, - 27.774197530864196, - 28.744197530864195, - 29.714197530864197, - 30.694197530864194, - 31.674197530864195, - 32.65015479876161, - 33.63015479876161, - 34.61015479876161, - 35.59015479876161, - 36.58015479876161, - 37.560154798761616, - 38.55015479876161, - 39.530154798761615, - 40.521091811414394, - 41.511091811414396, - 42.50109181141439, - 43.481091811414394, - 44.4710918114144, - 45.46109181141439, - 46.45109181141439, - 47.441091811414395, - 48.43478260869565, - 49.42478260869565, - 50.414782608695646, - 51.40478260869565, - 52.40478260869565, - 53.39478260869565, - 54.38478260869565, - 55.37478260869565, - 56.37300177619893, - 57.363001776198935, - 58.363001776198935, - 59.35300177619893, - 60.35300177619893, - 61.34300177619893, - 62.333001776198934, + 70.00372093023256, + 17.15372093023256, + 11.133720930232558, + 9.363720930232558, + 8.883720930232558, + 8.963720930232558, + 9.333720930232557, + 9.873720930232558, + 10.53012048192771, + 11.26012048192771, + 12.04012048192771, + 12.86012048192771, + 13.71012048192771, + 14.580120481927711, + 15.470120481927712, + 16.37012048192771, + 17.288343558282207, + 18.218343558282207, + 19.148343558282207, + 20.088343558282208, + 21.038343558282207, + 21.988343558282207, + 22.938343558282206, 
+ 23.898343558282207, + 24.864197530864196, + 25.834197530864195, + 26.794197530864196, + 27.774197530864196, + 28.744197530864195, + 29.714197530864197, + 30.694197530864194, + 31.674197530864195, + 32.65015479876161, + 33.63015479876161, + 34.61015479876161, + 35.59015479876161, + 36.58015479876161, + 37.560154798761616, + 38.55015479876161, + 39.530154798761615, + 40.521091811414394, + 41.511091811414396, + 42.50109181141439, + 43.481091811414394, + 44.4710918114144, + 45.46109181141439, + 46.45109181141439, + 47.441091811414395, + 48.43478260869565, + 49.42478260869565, + 50.414782608695646, + 51.40478260869565, + 52.40478260869565, + 53.39478260869565, + 54.38478260869565, + 55.37478260869565, + 56.37300177619893, + 57.363001776198935, + 58.363001776198935, + 59.35300177619893, + 60.35300177619893, + 61.34300177619893, + 62.333001776198934, 63.333001776198934 - ], + ], [ - 73.33372093023256, - 17.923720930232555, - 11.563720930232558, - 9.663720930232557, - 9.113720930232558, - 9.153720930232558, - 9.493720930232557, - 10.013720930232559, - 10.65012048192771, - 11.37012048192771, - 12.14012048192771, - 12.95012048192771, - 13.79012048192771, - 14.650120481927711, - 15.54012048192771, - 16.44012048192771, - 17.348343558282206, - 18.268343558282208, - 19.198343558282208, - 20.13834355828221, - 21.088343558282208, - 22.02834355828221, - 22.988343558282207, - 23.948343558282208, - 24.904197530864195, - 25.874197530864198, - 26.834197530864195, - 27.804197530864197, - 28.774197530864196, - 29.754197530864197, - 30.724197530864195, - 31.704197530864196, - 32.68015479876161, - 33.66015479876161, - 34.640154798761614, - 35.62015479876161, - 36.61015479876161, - 37.59015479876161, - 38.570154798761614, - 39.560154798761616, - 40.5410918114144, - 41.53109181141439, - 42.521091811414394, - 43.511091811414396, - 44.50109181141439, - 45.481091811414394, - 46.4710918114144, - 47.46109181141439, - 48.45478260869565, - 49.44478260869565, - 50.43478260869565, - 
51.42478260869565, - 52.42478260869565, - 53.414782608695646, - 54.40478260869565, - 55.39478260869565, - 56.393001776198936, - 57.38300177619893, - 58.37300177619893, - 59.37300177619893, - 60.363001776198935, - 61.363001776198935, - 62.35300177619894, + 73.33372093023256, + 17.923720930232555, + 11.563720930232558, + 9.663720930232557, + 9.113720930232558, + 9.153720930232558, + 9.493720930232557, + 10.013720930232559, + 10.65012048192771, + 11.37012048192771, + 12.14012048192771, + 12.95012048192771, + 13.79012048192771, + 14.650120481927711, + 15.54012048192771, + 16.44012048192771, + 17.348343558282206, + 18.268343558282208, + 19.198343558282208, + 20.13834355828221, + 21.088343558282208, + 22.02834355828221, + 22.988343558282207, + 23.948343558282208, + 24.904197530864195, + 25.874197530864198, + 26.834197530864195, + 27.804197530864197, + 28.774197530864196, + 29.754197530864197, + 30.724197530864195, + 31.704197530864196, + 32.68015479876161, + 33.66015479876161, + 34.640154798761614, + 35.62015479876161, + 36.61015479876161, + 37.59015479876161, + 38.570154798761614, + 39.560154798761616, + 40.5410918114144, + 41.53109181141439, + 42.521091811414394, + 43.511091811414396, + 44.50109181141439, + 45.481091811414394, + 46.4710918114144, + 47.46109181141439, + 48.45478260869565, + 49.44478260869565, + 50.43478260869565, + 51.42478260869565, + 52.42478260869565, + 53.414782608695646, + 54.40478260869565, + 55.39478260869565, + 56.393001776198936, + 57.38300177619893, + 58.37300177619893, + 59.37300177619893, + 60.363001776198935, + 61.363001776198935, + 62.35300177619894, 63.34300177619893 - ], + ], [ - 76.66372093023256, - 18.69372093023256, - 12.003720930232557, - 9.973720930232558, - 9.353720930232559, - 9.343720930232559, - 9.653720930232558, - 10.153720930232558, - 10.77012048192771, - 11.47012048192771, - 12.23012048192771, - 13.04012048192771, - 13.87012048192771, - 14.73012048192771, - 15.61012048192771, - 16.50012048192771, - 17.40834355828221, - 
18.328343558282207, - 19.258343558282206, - 20.188343558282206, - 21.128343558282207, - 22.078343558282207, - 23.02834355828221, - 23.988343558282207, - 24.944197530864194, - 25.904197530864195, - 26.874197530864194, - 27.844197530864196, - 28.814197530864195, - 29.784197530864198, - 30.754197530864197, - 31.734197530864197, - 32.710154798761614, - 33.69015479876161, - 34.670154798761615, - 35.65015479876161, - 36.63015479876161, - 37.62015479876161, - 38.600154798761615, - 39.59015479876161, - 40.57109181141439, - 41.56109181141439, - 42.5410918114144, - 43.53109181141439, - 44.521091811414394, - 45.511091811414396, - 46.5010918114144, - 47.49109181141439, - 48.47478260869565, - 49.46478260869565, - 50.45478260869565, - 51.44478260869565, - 52.43478260869565, - 53.43478260869565, - 54.42478260869565, - 55.414782608695646, - 56.41300177619893, - 57.403001776198934, - 58.393001776198936, - 59.38300177619893, - 60.38300177619893, - 61.37300177619893, - 62.37300177619893, + 76.66372093023256, + 18.69372093023256, + 12.003720930232557, + 9.973720930232558, + 9.353720930232559, + 9.343720930232559, + 9.653720930232558, + 10.153720930232558, + 10.77012048192771, + 11.47012048192771, + 12.23012048192771, + 13.04012048192771, + 13.87012048192771, + 14.73012048192771, + 15.61012048192771, + 16.50012048192771, + 17.40834355828221, + 18.328343558282207, + 19.258343558282206, + 20.188343558282206, + 21.128343558282207, + 22.078343558282207, + 23.02834355828221, + 23.988343558282207, + 24.944197530864194, + 25.904197530864195, + 26.874197530864194, + 27.844197530864196, + 28.814197530864195, + 29.784197530864198, + 30.754197530864197, + 31.734197530864197, + 32.710154798761614, + 33.69015479876161, + 34.670154798761615, + 35.65015479876161, + 36.63015479876161, + 37.62015479876161, + 38.600154798761615, + 39.59015479876161, + 40.57109181141439, + 41.56109181141439, + 42.5410918114144, + 43.53109181141439, + 44.521091811414394, + 45.511091811414396, + 46.5010918114144, + 
47.49109181141439, + 48.47478260869565, + 49.46478260869565, + 50.45478260869565, + 51.44478260869565, + 52.43478260869565, + 53.43478260869565, + 54.42478260869565, + 55.414782608695646, + 56.41300177619893, + 57.403001776198934, + 58.393001776198936, + 59.38300177619893, + 60.38300177619893, + 61.37300177619893, + 62.37300177619893, 63.363001776198935 - ], + ], [ - 80.00372093023256, - 19.463720930232558, - 12.433720930232557, - 10.273720930232559, - 9.583720930232557, - 9.523720930232559, - 9.813720930232558, - 10.283720930232558, - 10.89012048192771, - 11.580120481927711, - 12.330120481927711, - 13.12012048192771, - 13.95012048192771, - 14.80012048192771, - 15.68012048192771, - 16.57012048192771, - 17.468343558282207, - 18.38834355828221, - 19.308343558282207, - 20.248343558282208, - 21.178343558282208, - 22.128343558282207, - 23.078343558282207, - 24.02834355828221, - 24.984197530864197, - 25.944197530864194, - 26.914197530864197, - 27.874197530864194, - 28.844197530864196, - 29.814197530864195, - 30.794197530864196, - 31.764197530864195, - 32.740154798761615, - 33.72015479876161, - 34.70015479876161, - 35.68015479876161, - 36.66015479876161, - 37.640154798761614, - 38.63015479876161, - 39.61015479876161, - 40.591091811414394, - 41.581091811414396, - 42.57109181141439, - 43.551091811414395, - 44.54109181141439, - 45.53109181141439, - 46.521091811414394, - 47.511091811414396, - 48.49478260869565, - 49.484782608695646, - 50.47478260869565, - 51.46478260869565, - 52.45478260869565, - 53.45478260869565, - 54.44478260869565, - 55.43478260869565, - 56.42300177619893, - 57.42300177619893, - 58.41300177619893, - 59.403001776198934, - 60.403001776198934, - 61.39300177619893, - 62.38300177619893, + 80.00372093023256, + 19.463720930232558, + 12.433720930232557, + 10.273720930232559, + 9.583720930232557, + 9.523720930232559, + 9.813720930232558, + 10.283720930232558, + 10.89012048192771, + 11.580120481927711, + 12.330120481927711, + 13.12012048192771, + 13.95012048192771, 
+ 14.80012048192771, + 15.68012048192771, + 16.57012048192771, + 17.468343558282207, + 18.38834355828221, + 19.308343558282207, + 20.248343558282208, + 21.178343558282208, + 22.128343558282207, + 23.078343558282207, + 24.02834355828221, + 24.984197530864197, + 25.944197530864194, + 26.914197530864197, + 27.874197530864194, + 28.844197530864196, + 29.814197530864195, + 30.794197530864196, + 31.764197530864195, + 32.740154798761615, + 33.72015479876161, + 34.70015479876161, + 35.68015479876161, + 36.66015479876161, + 37.640154798761614, + 38.63015479876161, + 39.61015479876161, + 40.591091811414394, + 41.581091811414396, + 42.57109181141439, + 43.551091811414395, + 44.54109181141439, + 45.53109181141439, + 46.521091811414394, + 47.511091811414396, + 48.49478260869565, + 49.484782608695646, + 50.47478260869565, + 51.46478260869565, + 52.45478260869565, + 53.45478260869565, + 54.44478260869565, + 55.43478260869565, + 56.42300177619893, + 57.42300177619893, + 58.41300177619893, + 59.403001776198934, + 60.403001776198934, + 61.39300177619893, + 62.38300177619893, 63.38300177619893 - ], + ], [ - 83.33698113207548, - 20.226981132075473, - 12.866981132075471, - 10.57698113207547, - 9.81698113207547, - 9.716981132075471, - 9.966981132075471, - 10.426981132075472, - 11.012048192771084, - 11.692048192771084, - 12.432048192771084, - 13.212048192771086, - 14.032048192771084, - 14.882048192771084, - 15.752048192771085, - 16.632048192771084, - 17.533742331288344, - 18.443742331288345, - 19.363742331288343, - 20.293742331288342, - 21.233742331288344, - 22.173742331288345, - 23.123742331288344, - 24.073742331288344, - 25.02880658436214, - 25.98880658436214, - 26.94880658436214, - 27.91880658436214, - 28.87880658436214, - 29.84880658436214, - 30.82880658436214, - 31.79880658436214, - 32.77399380804953, - 33.75399380804953, - 34.723993808049535, - 35.70399380804953, - 36.683993808049536, - 37.67399380804953, - 38.653993808049535, - 39.63399380804953, - 40.62034739454094, - 
41.60034739454094, - 42.59034739454094, - 43.580347394540944, - 44.56034739454094, - 45.55034739454094, - 46.540347394540944, - 47.53034739454094, - 48.5175983436853, - 49.5075983436853, - 50.4975983436853, - 51.4875983436853, - 52.4775983436853, - 53.4675983436853, - 54.4575983436853, - 55.4475983436853, - 56.44404973357016, - 57.43404973357016, - 58.42404973357016, - 59.42404973357016, - 60.41404973357016, - 61.40404973357016, - 62.40404973357016, + 83.33698113207548, + 20.226981132075473, + 12.866981132075471, + 10.57698113207547, + 9.81698113207547, + 9.716981132075471, + 9.966981132075471, + 10.426981132075472, + 11.012048192771084, + 11.692048192771084, + 12.432048192771084, + 13.212048192771086, + 14.032048192771084, + 14.882048192771084, + 15.752048192771085, + 16.632048192771084, + 17.533742331288344, + 18.443742331288345, + 19.363742331288343, + 20.293742331288342, + 21.233742331288344, + 22.173742331288345, + 23.123742331288344, + 24.073742331288344, + 25.02880658436214, + 25.98880658436214, + 26.94880658436214, + 27.91880658436214, + 28.87880658436214, + 29.84880658436214, + 30.82880658436214, + 31.79880658436214, + 32.77399380804953, + 33.75399380804953, + 34.723993808049535, + 35.70399380804953, + 36.683993808049536, + 37.67399380804953, + 38.653993808049535, + 39.63399380804953, + 40.62034739454094, + 41.60034739454094, + 42.59034739454094, + 43.580347394540944, + 44.56034739454094, + 45.55034739454094, + 46.540347394540944, + 47.53034739454094, + 48.5175983436853, + 49.5075983436853, + 50.4975983436853, + 51.4875983436853, + 52.4775983436853, + 53.4675983436853, + 54.4575983436853, + 55.4475983436853, + 56.44404973357016, + 57.43404973357016, + 58.42404973357016, + 59.42404973357016, + 60.41404973357016, + 61.40404973357016, + 62.40404973357016, 63.394049733570164 - ], + ], [ - 86.66698113207548, - 20.99698113207547, - 13.306981132075471, - 10.876981132075471, - 10.046981132075471, - 9.90698113207547, - 10.126981132075471, - 10.556981132075471, - 
11.132048192771084, - 11.792048192771084, - 12.522048192771084, - 13.302048192771085, - 14.112048192771084, - 14.952048192771084, - 15.822048192771085, - 16.702048192771084, - 17.593742331288343, - 18.503742331288343, - 19.423742331288345, - 20.343742331288343, - 21.283742331288344, - 22.223742331288346, - 23.163742331288343, - 24.113742331288343, - 25.06880658436214, - 26.02880658436214, - 26.98880658436214, - 27.94880658436214, - 28.91880658436214, - 29.88880658436214, - 30.858806584362142, - 31.82880658436214, - 32.80399380804953, - 33.78399380804953, - 34.75399380804953, - 35.73399380804953, - 36.71399380804953, - 37.693993808049534, - 38.67399380804953, - 39.66399380804953, - 40.640347394540946, - 41.63034739454094, - 42.610347394540945, - 43.60034739454094, - 44.59034739454094, - 45.570347394540946, - 46.56034739454094, - 47.55034739454094, - 48.5375983436853, - 49.5275983436853, - 50.5175983436853, - 51.5075983436853, - 52.4975983436853, - 53.4875983436853, - 54.4775983436853, - 55.4675983436853, - 56.464049733570164, - 57.45404973357016, - 58.44404973357016, - 59.43404973357016, - 60.43404973357016, - 61.42404973357016, - 62.41404973357016, + 86.66698113207548, + 20.99698113207547, + 13.306981132075471, + 10.876981132075471, + 10.046981132075471, + 9.90698113207547, + 10.126981132075471, + 10.556981132075471, + 11.132048192771084, + 11.792048192771084, + 12.522048192771084, + 13.302048192771085, + 14.112048192771084, + 14.952048192771084, + 15.822048192771085, + 16.702048192771084, + 17.593742331288343, + 18.503742331288343, + 19.423742331288345, + 20.343742331288343, + 21.283742331288344, + 22.223742331288346, + 23.163742331288343, + 24.113742331288343, + 25.06880658436214, + 26.02880658436214, + 26.98880658436214, + 27.94880658436214, + 28.91880658436214, + 29.88880658436214, + 30.858806584362142, + 31.82880658436214, + 32.80399380804953, + 33.78399380804953, + 34.75399380804953, + 35.73399380804953, + 36.71399380804953, + 37.693993808049534, + 
38.67399380804953, + 39.66399380804953, + 40.640347394540946, + 41.63034739454094, + 42.610347394540945, + 43.60034739454094, + 44.59034739454094, + 45.570347394540946, + 46.56034739454094, + 47.55034739454094, + 48.5375983436853, + 49.5275983436853, + 50.5175983436853, + 51.5075983436853, + 52.4975983436853, + 53.4875983436853, + 54.4775983436853, + 55.4675983436853, + 56.464049733570164, + 57.45404973357016, + 58.44404973357016, + 59.43404973357016, + 60.43404973357016, + 61.42404973357016, + 62.41404973357016, 63.41404973357016 - ], + ], [ - 89.99698113207548, - 21.766981132075472, - 13.73698113207547, - 11.176981132075472, - 10.276981132075472, - 10.096981132075472, - 10.286981132075471, - 10.696981132075472, - 11.252048192771085, - 11.902048192771085, - 12.622048192771084, - 13.392048192771085, - 14.192048192771084, - 15.032048192771084, - 15.892048192771085, - 16.762048192771083, - 17.653742331288345, - 18.563742331288346, - 19.473742331288346, - 20.403742331288345, - 21.333742331288345, - 22.263742331288345, - 23.213742331288344, - 24.163742331288343, - 25.10880658436214, - 26.06880658436214, - 27.02880658436214, - 27.98880658436214, - 28.95880658436214, - 29.91880658436214, - 30.88880658436214, - 31.858806584362142, - 32.833993808049534, - 33.81399380804953, - 34.78399380804953, - 35.763993808049534, - 36.74399380804953, - 37.723993808049535, - 38.70399380804953, - 39.683993808049536, - 40.67034739454094, - 41.650347394540944, - 42.640347394540946, - 43.62034739454094, - 44.610347394540945, - 45.60034739454095, - 46.580347394540944, - 47.570347394540946, - 48.5575983436853, - 49.5475983436853, - 50.5375983436853, - 51.5275983436853, - 52.5175983436853, - 53.5075983436853, - 54.4975983436853, - 55.4875983436853, - 56.48404973357016, - 57.47404973357016, - 58.464049733570164, - 59.45404973357016, - 60.44404973357016, - 61.44404973357016, - 62.43404973357016, + 89.99698113207548, + 21.766981132075472, + 13.73698113207547, + 11.176981132075472, + 
10.276981132075472, + 10.096981132075472, + 10.286981132075471, + 10.696981132075472, + 11.252048192771085, + 11.902048192771085, + 12.622048192771084, + 13.392048192771085, + 14.192048192771084, + 15.032048192771084, + 15.892048192771085, + 16.762048192771083, + 17.653742331288345, + 18.563742331288346, + 19.473742331288346, + 20.403742331288345, + 21.333742331288345, + 22.263742331288345, + 23.213742331288344, + 24.163742331288343, + 25.10880658436214, + 26.06880658436214, + 27.02880658436214, + 27.98880658436214, + 28.95880658436214, + 29.91880658436214, + 30.88880658436214, + 31.858806584362142, + 32.833993808049534, + 33.81399380804953, + 34.78399380804953, + 35.763993808049534, + 36.74399380804953, + 37.723993808049535, + 38.70399380804953, + 39.683993808049536, + 40.67034739454094, + 41.650347394540944, + 42.640347394540946, + 43.62034739454094, + 44.610347394540945, + 45.60034739454095, + 46.580347394540944, + 47.570347394540946, + 48.5575983436853, + 49.5475983436853, + 50.5375983436853, + 51.5275983436853, + 52.5175983436853, + 53.5075983436853, + 54.4975983436853, + 55.4875983436853, + 56.48404973357016, + 57.47404973357016, + 58.464049733570164, + 59.45404973357016, + 60.44404973357016, + 61.44404973357016, + 62.43404973357016, 63.42404973357016 - ], + ], [ - 93.33698113207548, - 22.53698113207547, - 14.176981132075472, - 11.48698113207547, - 10.50698113207547, - 10.286981132075471, - 10.446981132075472, - 10.836981132075472, - 11.372048192771084, - 12.012048192771084, - 12.722048192771084, - 13.482048192771085, - 14.272048192771084, - 15.102048192771084, - 15.962048192771086, - 16.832048192771083, - 17.713742331288344, - 18.613742331288343, - 19.533742331288344, - 20.453742331288346, - 21.383742331288346, - 22.313742331288346, - 23.253742331288343, - 24.203742331288346, - 25.14880658436214, - 26.108806584362142, - 27.06880658436214, - 28.02880658436214, - 28.98880658436214, - 29.95880658436214, - 30.928806584362142, - 31.89880658436214, - 
32.863993808049536, - 33.84399380804953, - 34.81399380804953, - 35.793993808049535, - 36.77399380804953, - 37.75399380804953, - 38.73399380804953, - 39.71399380804953, - 40.69034739454094, - 41.680347394540945, - 42.66034739454094, - 43.650347394540944, - 44.63034739454094, - 45.62034739454094, - 46.60034739454095, - 47.59034739454094, - 48.5775983436853, - 49.5675983436853, - 50.5575983436853, - 51.5475983436853, - 52.5375983436853, - 53.5275983436853, - 54.5175983436853, - 55.5075983436853, - 56.49404973357016, - 57.48404973357016, - 58.48404973357016, - 59.47404973357016, - 60.464049733570164, - 61.45404973357016, - 62.45404973357016, + 93.33698113207548, + 22.53698113207547, + 14.176981132075472, + 11.48698113207547, + 10.50698113207547, + 10.286981132075471, + 10.446981132075472, + 10.836981132075472, + 11.372048192771084, + 12.012048192771084, + 12.722048192771084, + 13.482048192771085, + 14.272048192771084, + 15.102048192771084, + 15.962048192771086, + 16.832048192771083, + 17.713742331288344, + 18.613742331288343, + 19.533742331288344, + 20.453742331288346, + 21.383742331288346, + 22.313742331288346, + 23.253742331288343, + 24.203742331288346, + 25.14880658436214, + 26.108806584362142, + 27.06880658436214, + 28.02880658436214, + 28.98880658436214, + 29.95880658436214, + 30.928806584362142, + 31.89880658436214, + 32.863993808049536, + 33.84399380804953, + 34.81399380804953, + 35.793993808049535, + 36.77399380804953, + 37.75399380804953, + 38.73399380804953, + 39.71399380804953, + 40.69034739454094, + 41.680347394540945, + 42.66034739454094, + 43.650347394540944, + 44.63034739454094, + 45.62034739454094, + 46.60034739454095, + 47.59034739454094, + 48.5775983436853, + 49.5675983436853, + 50.5575983436853, + 51.5475983436853, + 52.5375983436853, + 53.5275983436853, + 54.5175983436853, + 55.5075983436853, + 56.49404973357016, + 57.48404973357016, + 58.48404973357016, + 59.47404973357016, + 60.464049733570164, + 61.45404973357016, + 62.45404973357016, 
63.44404973357016 - ], + ], [ - 96.66169811320755, - 23.311698113207548, - 14.611698113207545, - 11.791698113207547, - 10.741698113207546, - 10.471698113207546, - 10.601698113207547, - 10.971698113207546, - 11.493975903614459, - 12.113975903614458, - 12.813975903614459, - 13.563975903614459, - 14.353975903614458, - 15.183975903614458, - 16.02397590361446, - 16.89397590361446, - 17.779141104294478, - 18.679141104294477, - 19.589141104294477, - 20.499141104294477, - 21.429141104294477, - 22.35914110429448, - 23.299141104294478, - 24.249141104294477, - 25.19341563786008, - 26.14341563786008, - 27.10341563786008, - 28.063415637860082, - 29.023415637860083, - 29.993415637860082, - 30.953415637860083, - 31.92341563786008, - 32.89783281733746, - 33.86783281733746, - 34.84783281733746, - 35.81783281733746, - 36.79783281733746, - 37.77783281733746, - 38.75783281733746, - 39.73783281733746, - 40.71960297766749, - 41.69960297766749, - 42.68960297766749, - 43.669602977667495, - 44.65960297766749, - 45.639602977667494, - 46.62960297766749, - 47.60960297766749, - 48.60041407867495, - 49.590414078674954, - 50.58041407867495, - 51.56041407867495, - 52.550414078674955, - 53.54041407867495, - 54.53041407867495, - 55.52041407867495, - 56.51509769094139, - 57.50509769094139, - 58.495097690941385, - 59.48509769094139, - 60.48509769094139, - 61.47509769094139, - 62.46509769094139, + 96.66169811320755, + 23.311698113207548, + 14.611698113207545, + 11.791698113207547, + 10.741698113207546, + 10.471698113207546, + 10.601698113207547, + 10.971698113207546, + 11.493975903614459, + 12.113975903614458, + 12.813975903614459, + 13.563975903614459, + 14.353975903614458, + 15.183975903614458, + 16.02397590361446, + 16.89397590361446, + 17.779141104294478, + 18.679141104294477, + 19.589141104294477, + 20.499141104294477, + 21.429141104294477, + 22.35914110429448, + 23.299141104294478, + 24.249141104294477, + 25.19341563786008, + 26.14341563786008, + 27.10341563786008, + 28.063415637860082, + 
29.023415637860083, + 29.993415637860082, + 30.953415637860083, + 31.92341563786008, + 32.89783281733746, + 33.86783281733746, + 34.84783281733746, + 35.81783281733746, + 36.79783281733746, + 37.77783281733746, + 38.75783281733746, + 39.73783281733746, + 40.71960297766749, + 41.69960297766749, + 42.68960297766749, + 43.669602977667495, + 44.65960297766749, + 45.639602977667494, + 46.62960297766749, + 47.60960297766749, + 48.60041407867495, + 49.590414078674954, + 50.58041407867495, + 51.56041407867495, + 52.550414078674955, + 53.54041407867495, + 54.53041407867495, + 55.52041407867495, + 56.51509769094139, + 57.50509769094139, + 58.495097690941385, + 59.48509769094139, + 60.48509769094139, + 61.47509769094139, + 62.46509769094139, 63.455097690941386 - ], + ], [ - 100.00169811320755, - 24.081698113207544, - 15.041698113207547, - 12.091698113207546, - 10.981698113207546, - 10.661698113207546, - 10.761698113207546, - 11.111698113207547, - 11.613975903614458, - 12.223975903614459, - 12.913975903614459, - 13.653975903614459, - 14.443975903614458, - 15.253975903614458, - 16.093975903614457, - 16.963975903614458, - 17.839141104294477, - 18.729141104294477, - 19.639141104294477, - 20.55914110429448, - 21.479141104294477, - 22.409141104294477, - 23.34914110429448, - 24.28914110429448, - 25.23341563786008, - 26.18341563786008, - 27.14341563786008, - 28.10341563786008, - 29.063415637860082, - 30.023415637860083, - 30.993415637860082, - 31.96341563786008, - 32.92783281733746, - 33.89783281733746, - 34.877832817337456, - 35.84783281733746, - 36.82783281733746, - 37.807832817337456, - 38.78783281733746, - 39.76783281733746, - 40.739602977667495, - 41.72960297766749, - 42.709602977667494, - 43.68960297766749, - 44.67960297766749, - 45.65960297766749, - 46.64960297766749, - 47.62960297766749, - 48.620414078674955, - 49.61041407867495, - 50.60041407867495, - 51.58041407867495, - 52.57041407867495, - 53.56041407867495, - 54.550414078674955, - 55.54041407867495, - 56.53509769094139, 
- 57.52509769094139, - 58.51509769094139, - 59.50509769094139, - 60.495097690941385, - 61.48509769094139, - 62.48509769094139, + 100.00169811320755, + 24.081698113207544, + 15.041698113207547, + 12.091698113207546, + 10.981698113207546, + 10.661698113207546, + 10.761698113207546, + 11.111698113207547, + 11.613975903614458, + 12.223975903614459, + 12.913975903614459, + 13.653975903614459, + 14.443975903614458, + 15.253975903614458, + 16.093975903614457, + 16.963975903614458, + 17.839141104294477, + 18.729141104294477, + 19.639141104294477, + 20.55914110429448, + 21.479141104294477, + 22.409141104294477, + 23.34914110429448, + 24.28914110429448, + 25.23341563786008, + 26.18341563786008, + 27.14341563786008, + 28.10341563786008, + 29.063415637860082, + 30.023415637860083, + 30.993415637860082, + 31.96341563786008, + 32.92783281733746, + 33.89783281733746, + 34.877832817337456, + 35.84783281733746, + 36.82783281733746, + 37.807832817337456, + 38.78783281733746, + 39.76783281733746, + 40.739602977667495, + 41.72960297766749, + 42.709602977667494, + 43.68960297766749, + 44.67960297766749, + 45.65960297766749, + 46.64960297766749, + 47.62960297766749, + 48.620414078674955, + 49.61041407867495, + 50.60041407867495, + 51.58041407867495, + 52.57041407867495, + 53.56041407867495, + 54.550414078674955, + 55.54041407867495, + 56.53509769094139, + 57.52509769094139, + 58.51509769094139, + 59.50509769094139, + 60.495097690941385, + 61.48509769094139, + 62.48509769094139, 63.47509769094139 - ], + ], [ - 103.33169811320755, - 24.841698113207546, - 15.481698113207546, - 12.391698113207546, - 11.211698113207547, - 10.851698113207547, - 10.921698113207546, - 11.241698113207546, - 11.733975903614459, - 12.333975903614459, - 13.013975903614458, - 13.743975903614459, - 14.523975903614458, - 15.333975903614459, - 16.163975903614457, - 17.02397590361446, - 17.89914110429448, - 18.78914110429448, - 19.689141104294478, - 20.60914110429448, - 21.529141104294478, - 22.459141104294478, - 
23.389141104294477, - 24.32914110429448, - 25.27341563786008, - 26.223415637860082, - 27.18341563786008, - 28.133415637860082, - 29.09341563786008, - 30.05341563786008, - 31.023415637860083, - 31.993415637860082, - 32.95783281733746, - 33.92783281733746, - 34.90783281733746, - 35.877832817337456, - 36.85783281733746, - 37.82783281733746, - 38.807832817337456, - 39.78783281733746, - 40.76960297766749, - 41.74960297766749, - 42.72960297766749, - 43.71960297766749, - 44.69960297766749, - 45.67960297766749, - 46.669602977667495, - 47.65960297766749, - 48.64041407867495, - 49.63041407867495, - 50.620414078674955, - 51.60041407867495, - 52.590414078674954, - 53.58041407867495, - 54.57041407867495, - 55.56041407867495, - 56.55509769094139, - 57.54509769094139, - 58.53509769094139, - 59.52509769094139, - 60.51509769094139, - 61.50509769094139, - 62.49509769094139, + 103.33169811320755, + 24.841698113207546, + 15.481698113207546, + 12.391698113207546, + 11.211698113207547, + 10.851698113207547, + 10.921698113207546, + 11.241698113207546, + 11.733975903614459, + 12.333975903614459, + 13.013975903614458, + 13.743975903614459, + 14.523975903614458, + 15.333975903614459, + 16.163975903614457, + 17.02397590361446, + 17.89914110429448, + 18.78914110429448, + 19.689141104294478, + 20.60914110429448, + 21.529141104294478, + 22.459141104294478, + 23.389141104294477, + 24.32914110429448, + 25.27341563786008, + 26.223415637860082, + 27.18341563786008, + 28.133415637860082, + 29.09341563786008, + 30.05341563786008, + 31.023415637860083, + 31.993415637860082, + 32.95783281733746, + 33.92783281733746, + 34.90783281733746, + 35.877832817337456, + 36.85783281733746, + 37.82783281733746, + 38.807832817337456, + 39.78783281733746, + 40.76960297766749, + 41.74960297766749, + 42.72960297766749, + 43.71960297766749, + 44.69960297766749, + 45.67960297766749, + 46.669602977667495, + 47.65960297766749, + 48.64041407867495, + 49.63041407867495, + 50.620414078674955, + 51.60041407867495, + 
52.590414078674954, + 53.58041407867495, + 54.57041407867495, + 55.56041407867495, + 56.55509769094139, + 57.54509769094139, + 58.53509769094139, + 59.52509769094139, + 60.51509769094139, + 61.50509769094139, + 62.49509769094139, 63.48509769094139 - ], + ], [ - 106.66169811320755, - 25.611698113207545, - 15.911698113207546, - 12.701698113207547, - 11.441698113207547, - 11.041698113207547, - 11.081698113207546, - 11.381698113207547, - 11.853975903614458, - 12.443975903614458, - 13.103975903614458, - 13.833975903614459, - 14.603975903614458, - 15.403975903614459, - 16.233975903614457, - 17.093975903614457, - 17.959141104294478, - 18.84914110429448, - 19.749141104294477, - 20.659141104294477, - 21.57914110429448, - 22.499141104294477, - 23.439141104294478, - 24.369141104294478, - 25.313415637860082, - 26.26341563786008, - 27.21341563786008, - 28.17341563786008, - 29.133415637860082, - 30.093415637860083, - 31.05341563786008, - 32.02341563786008, - 32.98783281733746, - 33.95783281733746, - 34.93783281733746, - 35.90783281733746, - 36.877832817337456, - 37.85783281733746, - 38.83783281733746, - 39.81783281733746, - 40.78960297766749, - 41.779602977667494, - 42.75960297766749, - 43.739602977667495, - 44.71960297766749, - 45.709602977667494, - 46.68960297766749, - 47.67960297766749, - 48.660414078674954, - 49.65041407867495, - 50.64041407867495, - 51.620414078674955, - 52.61041407867495, - 53.60041407867495, - 54.590414078674954, - 55.58041407867495, - 56.565097690941386, - 57.55509769094139, - 58.54509769094139, - 59.53509769094139, - 60.535097690941384, - 61.52509769094139, - 62.51509769094139, + 106.66169811320755, + 25.611698113207545, + 15.911698113207546, + 12.701698113207547, + 11.441698113207547, + 11.041698113207547, + 11.081698113207546, + 11.381698113207547, + 11.853975903614458, + 12.443975903614458, + 13.103975903614458, + 13.833975903614459, + 14.603975903614458, + 15.403975903614459, + 16.233975903614457, + 17.093975903614457, + 17.959141104294478, + 
18.84914110429448, + 19.749141104294477, + 20.659141104294477, + 21.57914110429448, + 22.499141104294477, + 23.439141104294478, + 24.369141104294478, + 25.313415637860082, + 26.26341563786008, + 27.21341563786008, + 28.17341563786008, + 29.133415637860082, + 30.093415637860083, + 31.05341563786008, + 32.02341563786008, + 32.98783281733746, + 33.95783281733746, + 34.93783281733746, + 35.90783281733746, + 36.877832817337456, + 37.85783281733746, + 38.83783281733746, + 39.81783281733746, + 40.78960297766749, + 41.779602977667494, + 42.75960297766749, + 43.739602977667495, + 44.71960297766749, + 45.709602977667494, + 46.68960297766749, + 47.67960297766749, + 48.660414078674954, + 49.65041407867495, + 50.64041407867495, + 51.620414078674955, + 52.61041407867495, + 53.60041407867495, + 54.590414078674954, + 55.58041407867495, + 56.565097690941386, + 57.55509769094139, + 58.54509769094139, + 59.53509769094139, + 60.535097690941384, + 61.52509769094139, + 62.51509769094139, 63.50509769094139 ] ] } - }, + }, "groups": { "a120e517-83f8-11e5-a662-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a121fd68-83f8-11e5-badf-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a121fd68-83f8-11e5-badf-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": "a120e517-83f8-11e5-a662-3c15c2da029e" } diff --git a/data/json/h5ex_d_soint.json b/data/json/h5ex_d_soint.json index 5b388cf..b5bad63 100644 --- a/data/json/h5ex_d_soint.json +++ b/data/json/h5ex_d_soint.json @@ -1,2170 +1,2170 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "a13c90b0-83f8-11e5-9ace-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_INCR", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_INCR", + "fillTime": "H5D_FILL_TIME_IFSET", "filters": [ { - "class": "H5Z_FILTER_SCALEOFFSET", - "id": 6, - "name": "scaleoffset", + "class": 
"H5Z_FILTER_SCALEOFFSET", + "id": 6, + "scaleOffset": 4, "scaleType": "H5Z_SO_INT" } - ], + ], "layout": { - "class": "H5D_CHUNKED", + "class": "H5D_CHUNKED", "dims": [ - 4, + 4, 8 ] } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 32, + 32, 64 ] - }, + }, "type": { - "base": "H5T_STD_I32LE", + "base": "H5T_STD_I32LE", "class": "H5T_INTEGER" - }, + }, "value": [ [ - 0, - -1, - -2, - -3, - -4, - -5, - -6, - -7, - -8, - -9, - -10, - -11, - -12, - -13, - -14, - -15, - -16, - -17, - -18, - -19, - -20, - -21, - -22, - -23, - -24, - -25, - -26, - -27, - -28, - -29, - -30, - -31, - -32, - -33, - -34, - -35, - -36, - -37, - -38, - -39, - -40, - -41, - -42, - -43, - -44, - -45, - -46, - -47, - -48, - -49, - -50, - -51, - -52, - -53, - -54, - -55, - -56, - -57, - -58, - -59, - -60, - -61, - -62, + 0, + -1, + -2, + -3, + -4, + -5, + -6, + -7, + -8, + -9, + -10, + -11, + -12, + -13, + -14, + -15, + -16, + -17, + -18, + -19, + -20, + -21, + -22, + -23, + -24, + -25, + -26, + -27, + -28, + -29, + -30, + -31, + -32, + -33, + -34, + -35, + -36, + -37, + -38, + -39, + -40, + -41, + -42, + -43, + -44, + -45, + -46, + -47, + -48, + -49, + -50, + -51, + -52, + -53, + -54, + -55, + -56, + -57, + -58, + -59, + -60, + -61, + -62, -63 - ], + ], [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, 0 - ], + ], [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 
14, - 15, - 16, - 17, - 18, - 19, - 20, - 21, - 22, - 23, - 24, - 25, - 26, - 27, - 28, - 29, - 30, - 31, - 32, - 33, - 34, - 35, - 36, - 37, - 38, - 39, - 40, - 41, - 42, - 43, - 44, - 45, - 46, - 47, - 48, - 49, - 50, - 51, - 52, - 53, - 54, - 55, - 56, - 57, - 58, - 59, - 60, - 61, - 62, + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, + 19, + 20, + 21, + 22, + 23, + 24, + 25, + 26, + 27, + 28, + 29, + 30, + 31, + 32, + 33, + 34, + 35, + 36, + 37, + 38, + 39, + 40, + 41, + 42, + 43, + 44, + 45, + 46, + 47, + 48, + 49, + 50, + 51, + 52, + 53, + 54, + 55, + 56, + 57, + 58, + 59, + 60, + 61, + 62, 63 - ], + ], [ - 0, - 2, - 4, - 6, - 8, - 10, - 12, - 14, - 16, - 18, - 20, - 22, - 24, - 26, - 28, - 30, - 32, - 34, - 36, - 38, - 40, - 42, - 44, - 46, - 48, - 50, - 52, - 54, - 56, - 58, - 60, - 62, - 64, - 66, - 68, - 70, - 72, - 74, - 76, - 78, - 80, - 82, - 84, - 86, - 88, - 90, - 92, - 94, - 96, - 98, - 100, - 102, - 104, - 106, - 108, - 110, - 112, - 114, - 116, - 118, - 120, - 122, - 124, + 0, + 2, + 4, + 6, + 8, + 10, + 12, + 14, + 16, + 18, + 20, + 22, + 24, + 26, + 28, + 30, + 32, + 34, + 36, + 38, + 40, + 42, + 44, + 46, + 48, + 50, + 52, + 54, + 56, + 58, + 60, + 62, + 64, + 66, + 68, + 70, + 72, + 74, + 76, + 78, + 80, + 82, + 84, + 86, + 88, + 90, + 92, + 94, + 96, + 98, + 100, + 102, + 104, + 106, + 108, + 110, + 112, + 114, + 116, + 118, + 120, + 122, + 124, 126 - ], + ], [ - 0, - 3, - 6, - 9, - 12, - 15, - 18, - 21, - 24, - 27, - 30, - 33, - 36, - 39, - 42, - 45, - 48, - 51, - 54, - 57, - 60, - 63, - 66, - 69, - 72, - 75, - 78, - 81, - 84, - 87, - 90, - 93, - 96, - 99, - 102, - 105, - 108, - 111, - 114, - 117, - 120, - 123, - 126, - 129, - 132, - 135, - 138, - 141, - 144, - 147, - 150, - 153, - 156, - 159, - 162, - 165, - 168, - 171, - 174, - 177, - 180, - 183, - 186, + 0, + 3, + 6, + 9, + 12, + 15, + 18, + 21, + 24, + 27, + 30, + 33, + 36, + 39, + 42, + 45, + 48, + 51, + 54, + 57, + 60, + 63, + 66, 
+ 69, + 72, + 75, + 78, + 81, + 84, + 87, + 90, + 93, + 96, + 99, + 102, + 105, + 108, + 111, + 114, + 117, + 120, + 123, + 126, + 129, + 132, + 135, + 138, + 141, + 144, + 147, + 150, + 153, + 156, + 159, + 162, + 165, + 168, + 171, + 174, + 177, + 180, + 183, + 186, 189 - ], + ], [ - 0, - 4, - 8, - 12, - 16, - 20, - 24, - 28, - 32, - 36, - 40, - 44, - 48, - 52, - 56, - 60, - 64, - 68, - 72, - 76, - 80, - 84, - 88, - 92, - 96, - 100, - 104, - 108, - 112, - 116, - 120, - 124, - 128, - 132, - 136, - 140, - 144, - 148, - 152, - 156, - 160, - 164, - 168, - 172, - 176, - 180, - 184, - 188, - 192, - 196, - 200, - 204, - 208, - 212, - 216, - 220, - 224, - 228, - 232, - 236, - 240, - 244, - 248, + 0, + 4, + 8, + 12, + 16, + 20, + 24, + 28, + 32, + 36, + 40, + 44, + 48, + 52, + 56, + 60, + 64, + 68, + 72, + 76, + 80, + 84, + 88, + 92, + 96, + 100, + 104, + 108, + 112, + 116, + 120, + 124, + 128, + 132, + 136, + 140, + 144, + 148, + 152, + 156, + 160, + 164, + 168, + 172, + 176, + 180, + 184, + 188, + 192, + 196, + 200, + 204, + 208, + 212, + 216, + 220, + 224, + 228, + 232, + 236, + 240, + 244, + 248, 252 - ], + ], [ - 0, - 5, - 10, - 15, - 20, - 25, - 30, - 35, - 40, - 45, - 50, - 55, - 60, - 65, - 70, - 75, - 80, - 85, - 90, - 95, - 100, - 105, - 110, - 115, - 120, - 125, - 130, - 135, - 140, - 145, - 150, - 155, - 160, - 165, - 170, - 175, - 180, - 185, - 190, - 195, - 200, - 205, - 210, - 215, - 220, - 225, - 230, - 235, - 240, - 245, - 250, - 255, - 260, - 265, - 270, - 275, - 280, - 285, - 290, - 295, - 300, - 305, - 310, + 0, + 5, + 10, + 15, + 20, + 25, + 30, + 35, + 40, + 45, + 50, + 55, + 60, + 65, + 70, + 75, + 80, + 85, + 90, + 95, + 100, + 105, + 110, + 115, + 120, + 125, + 130, + 135, + 140, + 145, + 150, + 155, + 160, + 165, + 170, + 175, + 180, + 185, + 190, + 195, + 200, + 205, + 210, + 215, + 220, + 225, + 230, + 235, + 240, + 245, + 250, + 255, + 260, + 265, + 270, + 275, + 280, + 285, + 290, + 295, + 300, + 305, + 310, 315 - ], + ], [ - 0, - 6, - 12, - 
18, - 24, - 30, - 36, - 42, - 48, - 54, - 60, - 66, - 72, - 78, - 84, - 90, - 96, - 102, - 108, - 114, - 120, - 126, - 132, - 138, - 144, - 150, - 156, - 162, - 168, - 174, - 180, - 186, - 192, - 198, - 204, - 210, - 216, - 222, - 228, - 234, - 240, - 246, - 252, - 258, - 264, - 270, - 276, - 282, - 288, - 294, - 300, - 306, - 312, - 318, - 324, - 330, - 336, - 342, - 348, - 354, - 360, - 366, - 372, + 0, + 6, + 12, + 18, + 24, + 30, + 36, + 42, + 48, + 54, + 60, + 66, + 72, + 78, + 84, + 90, + 96, + 102, + 108, + 114, + 120, + 126, + 132, + 138, + 144, + 150, + 156, + 162, + 168, + 174, + 180, + 186, + 192, + 198, + 204, + 210, + 216, + 222, + 228, + 234, + 240, + 246, + 252, + 258, + 264, + 270, + 276, + 282, + 288, + 294, + 300, + 306, + 312, + 318, + 324, + 330, + 336, + 342, + 348, + 354, + 360, + 366, + 372, 378 - ], + ], [ - 0, - 7, - 14, - 21, - 28, - 35, - 42, - 49, - 56, - 63, - 70, - 77, - 84, - 91, - 98, - 105, - 112, - 119, - 126, - 133, - 140, - 147, - 154, - 161, - 168, - 175, - 182, - 189, - 196, - 203, - 210, - 217, - 224, - 231, - 238, - 245, - 252, - 259, - 266, - 273, - 280, - 287, - 294, - 301, - 308, - 315, - 322, - 329, - 336, - 343, - 350, - 357, - 364, - 371, - 378, - 385, - 392, - 399, - 406, - 413, - 420, - 427, - 434, + 0, + 7, + 14, + 21, + 28, + 35, + 42, + 49, + 56, + 63, + 70, + 77, + 84, + 91, + 98, + 105, + 112, + 119, + 126, + 133, + 140, + 147, + 154, + 161, + 168, + 175, + 182, + 189, + 196, + 203, + 210, + 217, + 224, + 231, + 238, + 245, + 252, + 259, + 266, + 273, + 280, + 287, + 294, + 301, + 308, + 315, + 322, + 329, + 336, + 343, + 350, + 357, + 364, + 371, + 378, + 385, + 392, + 399, + 406, + 413, + 420, + 427, + 434, 441 - ], + ], [ - 0, - 8, - 16, - 24, - 32, - 40, - 48, - 56, - 64, - 72, - 80, - 88, - 96, - 104, - 112, - 120, - 128, - 136, - 144, - 152, - 160, - 168, - 176, - 184, - 192, - 200, - 208, - 216, - 224, - 232, - 240, - 248, - 256, - 264, - 272, - 280, - 288, - 296, - 304, - 312, - 320, - 328, - 336, - 344, 
- 352, - 360, - 368, - 376, - 384, - 392, - 400, - 408, - 416, - 424, - 432, - 440, - 448, - 456, - 464, - 472, - 480, - 488, - 496, + 0, + 8, + 16, + 24, + 32, + 40, + 48, + 56, + 64, + 72, + 80, + 88, + 96, + 104, + 112, + 120, + 128, + 136, + 144, + 152, + 160, + 168, + 176, + 184, + 192, + 200, + 208, + 216, + 224, + 232, + 240, + 248, + 256, + 264, + 272, + 280, + 288, + 296, + 304, + 312, + 320, + 328, + 336, + 344, + 352, + 360, + 368, + 376, + 384, + 392, + 400, + 408, + 416, + 424, + 432, + 440, + 448, + 456, + 464, + 472, + 480, + 488, + 496, 504 - ], + ], [ - 0, - 9, - 18, - 27, - 36, - 45, - 54, - 63, - 72, - 81, - 90, - 99, - 108, - 117, - 126, - 135, - 144, - 153, - 162, - 171, - 180, - 189, - 198, - 207, - 216, - 225, - 234, - 243, - 252, - 261, - 270, - 279, - 288, - 297, - 306, - 315, - 324, - 333, - 342, - 351, - 360, - 369, - 378, - 387, - 396, - 405, - 414, - 423, - 432, - 441, - 450, - 459, - 468, - 477, - 486, - 495, - 504, - 513, - 522, - 531, - 540, - 549, - 558, + 0, + 9, + 18, + 27, + 36, + 45, + 54, + 63, + 72, + 81, + 90, + 99, + 108, + 117, + 126, + 135, + 144, + 153, + 162, + 171, + 180, + 189, + 198, + 207, + 216, + 225, + 234, + 243, + 252, + 261, + 270, + 279, + 288, + 297, + 306, + 315, + 324, + 333, + 342, + 351, + 360, + 369, + 378, + 387, + 396, + 405, + 414, + 423, + 432, + 441, + 450, + 459, + 468, + 477, + 486, + 495, + 504, + 513, + 522, + 531, + 540, + 549, + 558, 567 - ], + ], [ - 0, - 10, - 20, - 30, - 40, - 50, - 60, - 70, - 80, - 90, - 100, - 110, - 120, - 130, - 140, - 150, - 160, - 170, - 180, - 190, - 200, - 210, - 220, - 230, - 240, - 250, - 260, - 270, - 280, - 290, - 300, - 310, - 320, - 330, - 340, - 350, - 360, - 370, - 380, - 390, - 400, - 410, - 420, - 430, - 440, - 450, - 460, - 470, - 480, - 490, - 500, - 510, - 520, - 530, - 540, - 550, - 560, - 570, - 580, - 590, - 600, - 610, - 620, + 0, + 10, + 20, + 30, + 40, + 50, + 60, + 70, + 80, + 90, + 100, + 110, + 120, + 130, + 140, + 150, + 160, + 170, + 180, + 
190, + 200, + 210, + 220, + 230, + 240, + 250, + 260, + 270, + 280, + 290, + 300, + 310, + 320, + 330, + 340, + 350, + 360, + 370, + 380, + 390, + 400, + 410, + 420, + 430, + 440, + 450, + 460, + 470, + 480, + 490, + 500, + 510, + 520, + 530, + 540, + 550, + 560, + 570, + 580, + 590, + 600, + 610, + 620, 630 - ], + ], [ - 0, - 11, - 22, - 33, - 44, - 55, - 66, - 77, - 88, - 99, - 110, - 121, - 132, - 143, - 154, - 165, - 176, - 187, - 198, - 209, - 220, - 231, - 242, - 253, - 264, - 275, - 286, - 297, - 308, - 319, - 330, - 341, - 352, - 363, - 374, - 385, - 396, - 407, - 418, - 429, - 440, - 451, - 462, - 473, - 484, - 495, - 506, - 517, - 528, - 539, - 550, - 561, - 572, - 583, - 594, - 605, - 616, - 627, - 638, - 649, - 660, - 671, - 682, + 0, + 11, + 22, + 33, + 44, + 55, + 66, + 77, + 88, + 99, + 110, + 121, + 132, + 143, + 154, + 165, + 176, + 187, + 198, + 209, + 220, + 231, + 242, + 253, + 264, + 275, + 286, + 297, + 308, + 319, + 330, + 341, + 352, + 363, + 374, + 385, + 396, + 407, + 418, + 429, + 440, + 451, + 462, + 473, + 484, + 495, + 506, + 517, + 528, + 539, + 550, + 561, + 572, + 583, + 594, + 605, + 616, + 627, + 638, + 649, + 660, + 671, + 682, 693 - ], + ], [ - 0, - 12, - 24, - 36, - 48, - 60, - 72, - 84, - 96, - 108, - 120, - 132, - 144, - 156, - 168, - 180, - 192, - 204, - 216, - 228, - 240, - 252, - 264, - 276, - 288, - 300, - 312, - 324, - 336, - 348, - 360, - 372, - 384, - 396, - 408, - 420, - 432, - 444, - 456, - 468, - 480, - 492, - 504, - 516, - 528, - 540, - 552, - 564, - 576, - 588, - 600, - 612, - 624, - 636, - 648, - 660, - 672, - 684, - 696, - 708, - 720, - 732, - 744, + 0, + 12, + 24, + 36, + 48, + 60, + 72, + 84, + 96, + 108, + 120, + 132, + 144, + 156, + 168, + 180, + 192, + 204, + 216, + 228, + 240, + 252, + 264, + 276, + 288, + 300, + 312, + 324, + 336, + 348, + 360, + 372, + 384, + 396, + 408, + 420, + 432, + 444, + 456, + 468, + 480, + 492, + 504, + 516, + 528, + 540, + 552, + 564, + 576, + 588, + 600, + 612, + 624, + 636, + 
648, + 660, + 672, + 684, + 696, + 708, + 720, + 732, + 744, 756 - ], + ], [ - 0, - 13, - 26, - 39, - 52, - 65, - 78, - 91, - 104, - 117, - 130, - 143, - 156, - 169, - 182, - 195, - 208, - 221, - 234, - 247, - 260, - 273, - 286, - 299, - 312, - 325, - 338, - 351, - 364, - 377, - 390, - 403, - 416, - 429, - 442, - 455, - 468, - 481, - 494, - 507, - 520, - 533, - 546, - 559, - 572, - 585, - 598, - 611, - 624, - 637, - 650, - 663, - 676, - 689, - 702, - 715, - 728, - 741, - 754, - 767, - 780, - 793, - 806, + 0, + 13, + 26, + 39, + 52, + 65, + 78, + 91, + 104, + 117, + 130, + 143, + 156, + 169, + 182, + 195, + 208, + 221, + 234, + 247, + 260, + 273, + 286, + 299, + 312, + 325, + 338, + 351, + 364, + 377, + 390, + 403, + 416, + 429, + 442, + 455, + 468, + 481, + 494, + 507, + 520, + 533, + 546, + 559, + 572, + 585, + 598, + 611, + 624, + 637, + 650, + 663, + 676, + 689, + 702, + 715, + 728, + 741, + 754, + 767, + 780, + 793, + 806, 819 - ], + ], [ - 0, - 14, - 28, - 42, - 56, - 70, - 84, - 98, - 112, - 126, - 140, - 154, - 168, - 182, - 196, - 210, - 224, - 238, - 252, - 266, - 280, - 294, - 308, - 322, - 336, - 350, - 364, - 378, - 392, - 406, - 420, - 434, - 448, - 462, - 476, - 490, - 504, - 518, - 532, - 546, - 560, - 574, - 588, - 602, - 616, - 630, - 644, - 658, - 672, - 686, - 700, - 714, - 728, - 742, - 756, - 770, - 784, - 798, - 812, - 826, - 840, - 854, - 868, + 0, + 14, + 28, + 42, + 56, + 70, + 84, + 98, + 112, + 126, + 140, + 154, + 168, + 182, + 196, + 210, + 224, + 238, + 252, + 266, + 280, + 294, + 308, + 322, + 336, + 350, + 364, + 378, + 392, + 406, + 420, + 434, + 448, + 462, + 476, + 490, + 504, + 518, + 532, + 546, + 560, + 574, + 588, + 602, + 616, + 630, + 644, + 658, + 672, + 686, + 700, + 714, + 728, + 742, + 756, + 770, + 784, + 798, + 812, + 826, + 840, + 854, + 868, 882 - ], + ], [ - 0, - 15, - 30, - 45, - 60, - 75, - 90, - 105, - 120, - 135, - 150, - 165, - 180, - 195, - 210, - 225, - 240, - 255, - 270, - 285, - 300, - 315, - 330, - 345, - 
360, - 375, - 390, - 405, - 420, - 435, - 450, - 465, - 480, - 495, - 510, - 525, - 540, - 555, - 570, - 585, - 600, - 615, - 630, - 645, - 660, - 675, - 690, - 705, - 720, - 735, - 750, - 765, - 780, - 795, - 810, - 825, - 840, - 855, - 870, - 885, - 900, - 915, - 930, + 0, + 15, + 30, + 45, + 60, + 75, + 90, + 105, + 120, + 135, + 150, + 165, + 180, + 195, + 210, + 225, + 240, + 255, + 270, + 285, + 300, + 315, + 330, + 345, + 360, + 375, + 390, + 405, + 420, + 435, + 450, + 465, + 480, + 495, + 510, + 525, + 540, + 555, + 570, + 585, + 600, + 615, + 630, + 645, + 660, + 675, + 690, + 705, + 720, + 735, + 750, + 765, + 780, + 795, + 810, + 825, + 840, + 855, + 870, + 885, + 900, + 915, + 930, 945 - ], + ], [ - 0, - 16, - 32, - 48, - 64, - 80, - 96, - 112, - 128, - 144, - 160, - 176, - 192, - 208, - 224, - 240, - 256, - 272, - 288, - 304, - 320, - 336, - 352, - 368, - 384, - 400, - 416, - 432, - 448, - 464, - 480, - 496, - 512, - 528, - 544, - 560, - 576, - 592, - 608, - 624, - 640, - 656, - 672, - 688, - 704, - 720, - 736, - 752, - 768, - 784, - 800, - 816, - 832, - 848, - 864, - 880, - 896, - 912, - 928, - 944, - 960, - 976, - 992, + 0, + 16, + 32, + 48, + 64, + 80, + 96, + 112, + 128, + 144, + 160, + 176, + 192, + 208, + 224, + 240, + 256, + 272, + 288, + 304, + 320, + 336, + 352, + 368, + 384, + 400, + 416, + 432, + 448, + 464, + 480, + 496, + 512, + 528, + 544, + 560, + 576, + 592, + 608, + 624, + 640, + 656, + 672, + 688, + 704, + 720, + 736, + 752, + 768, + 784, + 800, + 816, + 832, + 848, + 864, + 880, + 896, + 912, + 928, + 944, + 960, + 976, + 992, 1008 - ], + ], [ - 0, - 17, - 34, - 51, - 68, - 85, - 102, - 119, - 136, - 153, - 170, - 187, - 204, - 221, - 238, - 255, - 272, - 289, - 306, - 323, - 340, - 357, - 374, - 391, - 408, - 425, - 442, - 459, - 476, - 493, - 510, - 527, - 544, - 561, - 578, - 595, - 612, - 629, - 646, - 663, - 680, - 697, - 714, - 731, - 748, - 765, - 782, - 799, - 816, - 833, - 850, - 867, - 884, - 901, - 918, - 935, - 952, - 
969, - 986, - 1003, - 1020, - 1037, - 1054, + 0, + 17, + 34, + 51, + 68, + 85, + 102, + 119, + 136, + 153, + 170, + 187, + 204, + 221, + 238, + 255, + 272, + 289, + 306, + 323, + 340, + 357, + 374, + 391, + 408, + 425, + 442, + 459, + 476, + 493, + 510, + 527, + 544, + 561, + 578, + 595, + 612, + 629, + 646, + 663, + 680, + 697, + 714, + 731, + 748, + 765, + 782, + 799, + 816, + 833, + 850, + 867, + 884, + 901, + 918, + 935, + 952, + 969, + 986, + 1003, + 1020, + 1037, + 1054, 1071 - ], + ], [ - 0, - 18, - 36, - 54, - 72, - 90, - 108, - 126, - 144, - 162, - 180, - 198, - 216, - 234, - 252, - 270, - 288, - 306, - 324, - 342, - 360, - 378, - 396, - 414, - 432, - 450, - 468, - 486, - 504, - 522, - 540, - 558, - 576, - 594, - 612, - 630, - 648, - 666, - 684, - 702, - 720, - 738, - 756, - 774, - 792, - 810, - 828, - 846, - 864, - 882, - 900, - 918, - 936, - 954, - 972, - 990, - 1008, - 1026, - 1044, - 1062, - 1080, - 1098, - 1116, + 0, + 18, + 36, + 54, + 72, + 90, + 108, + 126, + 144, + 162, + 180, + 198, + 216, + 234, + 252, + 270, + 288, + 306, + 324, + 342, + 360, + 378, + 396, + 414, + 432, + 450, + 468, + 486, + 504, + 522, + 540, + 558, + 576, + 594, + 612, + 630, + 648, + 666, + 684, + 702, + 720, + 738, + 756, + 774, + 792, + 810, + 828, + 846, + 864, + 882, + 900, + 918, + 936, + 954, + 972, + 990, + 1008, + 1026, + 1044, + 1062, + 1080, + 1098, + 1116, 1134 - ], + ], [ - 0, - 19, - 38, - 57, - 76, - 95, - 114, - 133, - 152, - 171, - 190, - 209, - 228, - 247, - 266, - 285, - 304, - 323, - 342, - 361, - 380, - 399, - 418, - 437, - 456, - 475, - 494, - 513, - 532, - 551, - 570, - 589, - 608, - 627, - 646, - 665, - 684, - 703, - 722, - 741, - 760, - 779, - 798, - 817, - 836, - 855, - 874, - 893, - 912, - 931, - 950, - 969, - 988, - 1007, - 1026, - 1045, - 1064, - 1083, - 1102, - 1121, - 1140, - 1159, - 1178, + 0, + 19, + 38, + 57, + 76, + 95, + 114, + 133, + 152, + 171, + 190, + 209, + 228, + 247, + 266, + 285, + 304, + 323, + 342, + 361, + 380, + 399, + 418, + 
437, + 456, + 475, + 494, + 513, + 532, + 551, + 570, + 589, + 608, + 627, + 646, + 665, + 684, + 703, + 722, + 741, + 760, + 779, + 798, + 817, + 836, + 855, + 874, + 893, + 912, + 931, + 950, + 969, + 988, + 1007, + 1026, + 1045, + 1064, + 1083, + 1102, + 1121, + 1140, + 1159, + 1178, 1197 - ], + ], [ - 0, - 20, - 40, - 60, - 80, - 100, - 120, - 140, - 160, - 180, - 200, - 220, - 240, - 260, - 280, - 300, - 320, - 340, - 360, - 380, - 400, - 420, - 440, - 460, - 480, - 500, - 520, - 540, - 560, - 580, - 600, - 620, - 640, - 660, - 680, - 700, - 720, - 740, - 760, - 780, - 800, - 820, - 840, - 860, - 880, - 900, - 920, - 940, - 960, - 980, - 1000, - 1020, - 1040, - 1060, - 1080, - 1100, - 1120, - 1140, - 1160, - 1180, - 1200, - 1220, - 1240, + 0, + 20, + 40, + 60, + 80, + 100, + 120, + 140, + 160, + 180, + 200, + 220, + 240, + 260, + 280, + 300, + 320, + 340, + 360, + 380, + 400, + 420, + 440, + 460, + 480, + 500, + 520, + 540, + 560, + 580, + 600, + 620, + 640, + 660, + 680, + 700, + 720, + 740, + 760, + 780, + 800, + 820, + 840, + 860, + 880, + 900, + 920, + 940, + 960, + 980, + 1000, + 1020, + 1040, + 1060, + 1080, + 1100, + 1120, + 1140, + 1160, + 1180, + 1200, + 1220, + 1240, 1260 - ], + ], [ - 0, - 21, - 42, - 63, - 84, - 105, - 126, - 147, - 168, - 189, - 210, - 231, - 252, - 273, - 294, - 315, - 336, - 357, - 378, - 399, - 420, - 441, - 462, - 483, - 504, - 525, - 546, - 567, - 588, - 609, - 630, - 651, - 672, - 693, - 714, - 735, - 756, - 777, - 798, - 819, - 840, - 861, - 882, - 903, - 924, - 945, - 966, - 987, - 1008, - 1029, - 1050, - 1071, - 1092, - 1113, - 1134, - 1155, - 1176, - 1197, - 1218, - 1239, - 1260, - 1281, - 1302, + 0, + 21, + 42, + 63, + 84, + 105, + 126, + 147, + 168, + 189, + 210, + 231, + 252, + 273, + 294, + 315, + 336, + 357, + 378, + 399, + 420, + 441, + 462, + 483, + 504, + 525, + 546, + 567, + 588, + 609, + 630, + 651, + 672, + 693, + 714, + 735, + 756, + 777, + 798, + 819, + 840, + 861, + 882, + 903, + 924, + 945, + 966, + 987, + 
1008, + 1029, + 1050, + 1071, + 1092, + 1113, + 1134, + 1155, + 1176, + 1197, + 1218, + 1239, + 1260, + 1281, + 1302, 1323 - ], + ], [ - 0, - 22, - 44, - 66, - 88, - 110, - 132, - 154, - 176, - 198, - 220, - 242, - 264, - 286, - 308, - 330, - 352, - 374, - 396, - 418, - 440, - 462, - 484, - 506, - 528, - 550, - 572, - 594, - 616, - 638, - 660, - 682, - 704, - 726, - 748, - 770, - 792, - 814, - 836, - 858, - 880, - 902, - 924, - 946, - 968, - 990, - 1012, - 1034, - 1056, - 1078, - 1100, - 1122, - 1144, - 1166, - 1188, - 1210, - 1232, - 1254, - 1276, - 1298, - 1320, - 1342, - 1364, + 0, + 22, + 44, + 66, + 88, + 110, + 132, + 154, + 176, + 198, + 220, + 242, + 264, + 286, + 308, + 330, + 352, + 374, + 396, + 418, + 440, + 462, + 484, + 506, + 528, + 550, + 572, + 594, + 616, + 638, + 660, + 682, + 704, + 726, + 748, + 770, + 792, + 814, + 836, + 858, + 880, + 902, + 924, + 946, + 968, + 990, + 1012, + 1034, + 1056, + 1078, + 1100, + 1122, + 1144, + 1166, + 1188, + 1210, + 1232, + 1254, + 1276, + 1298, + 1320, + 1342, + 1364, 1386 - ], + ], [ - 0, - 23, - 46, - 69, - 92, - 115, - 138, - 161, - 184, - 207, - 230, - 253, - 276, - 299, - 322, - 345, - 368, - 391, - 414, - 437, - 460, - 483, - 506, - 529, - 552, - 575, - 598, - 621, - 644, - 667, - 690, - 713, - 736, - 759, - 782, - 805, - 828, - 851, - 874, - 897, - 920, - 943, - 966, - 989, - 1012, - 1035, - 1058, - 1081, - 1104, - 1127, - 1150, - 1173, - 1196, - 1219, - 1242, - 1265, - 1288, - 1311, - 1334, - 1357, - 1380, - 1403, - 1426, + 0, + 23, + 46, + 69, + 92, + 115, + 138, + 161, + 184, + 207, + 230, + 253, + 276, + 299, + 322, + 345, + 368, + 391, + 414, + 437, + 460, + 483, + 506, + 529, + 552, + 575, + 598, + 621, + 644, + 667, + 690, + 713, + 736, + 759, + 782, + 805, + 828, + 851, + 874, + 897, + 920, + 943, + 966, + 989, + 1012, + 1035, + 1058, + 1081, + 1104, + 1127, + 1150, + 1173, + 1196, + 1219, + 1242, + 1265, + 1288, + 1311, + 1334, + 1357, + 1380, + 1403, + 1426, 1449 - ], + ], [ - 0, - 24, - 48, - 
72, - 96, - 120, - 144, - 168, - 192, - 216, - 240, - 264, - 288, - 312, - 336, - 360, - 384, - 408, - 432, - 456, - 480, - 504, - 528, - 552, - 576, - 600, - 624, - 648, - 672, - 696, - 720, - 744, - 768, - 792, - 816, - 840, - 864, - 888, - 912, - 936, - 960, - 984, - 1008, - 1032, - 1056, - 1080, - 1104, - 1128, - 1152, - 1176, - 1200, - 1224, - 1248, - 1272, - 1296, - 1320, - 1344, - 1368, - 1392, - 1416, - 1440, - 1464, - 1488, + 0, + 24, + 48, + 72, + 96, + 120, + 144, + 168, + 192, + 216, + 240, + 264, + 288, + 312, + 336, + 360, + 384, + 408, + 432, + 456, + 480, + 504, + 528, + 552, + 576, + 600, + 624, + 648, + 672, + 696, + 720, + 744, + 768, + 792, + 816, + 840, + 864, + 888, + 912, + 936, + 960, + 984, + 1008, + 1032, + 1056, + 1080, + 1104, + 1128, + 1152, + 1176, + 1200, + 1224, + 1248, + 1272, + 1296, + 1320, + 1344, + 1368, + 1392, + 1416, + 1440, + 1464, + 1488, 1512 - ], + ], [ - 0, - 25, - 50, - 75, - 100, - 125, - 150, - 175, - 200, - 225, - 250, - 275, - 300, - 325, - 350, - 375, - 400, - 425, - 450, - 475, - 500, - 525, - 550, - 575, - 600, - 625, - 650, - 675, - 700, - 725, - 750, - 775, - 800, - 825, - 850, - 875, - 900, - 925, - 950, - 975, - 1000, - 1025, - 1050, - 1075, - 1100, - 1125, - 1150, - 1175, - 1200, - 1225, - 1250, - 1275, - 1300, - 1325, - 1350, - 1375, - 1400, - 1425, - 1450, - 1475, - 1500, - 1525, - 1550, + 0, + 25, + 50, + 75, + 100, + 125, + 150, + 175, + 200, + 225, + 250, + 275, + 300, + 325, + 350, + 375, + 400, + 425, + 450, + 475, + 500, + 525, + 550, + 575, + 600, + 625, + 650, + 675, + 700, + 725, + 750, + 775, + 800, + 825, + 850, + 875, + 900, + 925, + 950, + 975, + 1000, + 1025, + 1050, + 1075, + 1100, + 1125, + 1150, + 1175, + 1200, + 1225, + 1250, + 1275, + 1300, + 1325, + 1350, + 1375, + 1400, + 1425, + 1450, + 1475, + 1500, + 1525, + 1550, 1575 - ], + ], [ - 0, - 26, - 52, - 78, - 104, - 130, - 156, - 182, - 208, - 234, - 260, - 286, - 312, - 338, - 364, - 390, - 416, - 442, - 468, - 494, - 520, - 546, - 
572, - 598, - 624, - 650, - 676, - 702, - 728, - 754, - 780, - 806, - 832, - 858, - 884, - 910, - 936, - 962, - 988, - 1014, - 1040, - 1066, - 1092, - 1118, - 1144, - 1170, - 1196, - 1222, - 1248, - 1274, - 1300, - 1326, - 1352, - 1378, - 1404, - 1430, - 1456, - 1482, - 1508, - 1534, - 1560, - 1586, - 1612, + 0, + 26, + 52, + 78, + 104, + 130, + 156, + 182, + 208, + 234, + 260, + 286, + 312, + 338, + 364, + 390, + 416, + 442, + 468, + 494, + 520, + 546, + 572, + 598, + 624, + 650, + 676, + 702, + 728, + 754, + 780, + 806, + 832, + 858, + 884, + 910, + 936, + 962, + 988, + 1014, + 1040, + 1066, + 1092, + 1118, + 1144, + 1170, + 1196, + 1222, + 1248, + 1274, + 1300, + 1326, + 1352, + 1378, + 1404, + 1430, + 1456, + 1482, + 1508, + 1534, + 1560, + 1586, + 1612, 1638 - ], + ], [ - 0, - 27, - 54, - 81, - 108, - 135, - 162, - 189, - 216, - 243, - 270, - 297, - 324, - 351, - 378, - 405, - 432, - 459, - 486, - 513, - 540, - 567, - 594, - 621, - 648, - 675, - 702, - 729, - 756, - 783, - 810, - 837, - 864, - 891, - 918, - 945, - 972, - 999, - 1026, - 1053, - 1080, - 1107, - 1134, - 1161, - 1188, - 1215, - 1242, - 1269, - 1296, - 1323, - 1350, - 1377, - 1404, - 1431, - 1458, - 1485, - 1512, - 1539, - 1566, - 1593, - 1620, - 1647, - 1674, + 0, + 27, + 54, + 81, + 108, + 135, + 162, + 189, + 216, + 243, + 270, + 297, + 324, + 351, + 378, + 405, + 432, + 459, + 486, + 513, + 540, + 567, + 594, + 621, + 648, + 675, + 702, + 729, + 756, + 783, + 810, + 837, + 864, + 891, + 918, + 945, + 972, + 999, + 1026, + 1053, + 1080, + 1107, + 1134, + 1161, + 1188, + 1215, + 1242, + 1269, + 1296, + 1323, + 1350, + 1377, + 1404, + 1431, + 1458, + 1485, + 1512, + 1539, + 1566, + 1593, + 1620, + 1647, + 1674, 1701 - ], + ], [ - 0, - 28, - 56, - 84, - 112, - 140, - 168, - 196, - 224, - 252, - 280, - 308, - 336, - 364, - 392, - 420, - 448, - 476, - 504, - 532, - 560, - 588, - 616, - 644, - 672, - 700, - 728, - 756, - 784, - 812, - 840, - 868, - 896, - 924, - 952, - 980, - 1008, - 1036, - 1064, - 
1092, - 1120, - 1148, - 1176, - 1204, - 1232, - 1260, - 1288, - 1316, - 1344, - 1372, - 1400, - 1428, - 1456, - 1484, - 1512, - 1540, - 1568, - 1596, - 1624, - 1652, - 1680, - 1708, - 1736, + 0, + 28, + 56, + 84, + 112, + 140, + 168, + 196, + 224, + 252, + 280, + 308, + 336, + 364, + 392, + 420, + 448, + 476, + 504, + 532, + 560, + 588, + 616, + 644, + 672, + 700, + 728, + 756, + 784, + 812, + 840, + 868, + 896, + 924, + 952, + 980, + 1008, + 1036, + 1064, + 1092, + 1120, + 1148, + 1176, + 1204, + 1232, + 1260, + 1288, + 1316, + 1344, + 1372, + 1400, + 1428, + 1456, + 1484, + 1512, + 1540, + 1568, + 1596, + 1624, + 1652, + 1680, + 1708, + 1736, 1764 - ], + ], [ - 0, - 29, - 58, - 87, - 116, - 145, - 174, - 203, - 232, - 261, - 290, - 319, - 348, - 377, - 406, - 435, - 464, - 493, - 522, - 551, - 580, - 609, - 638, - 667, - 696, - 725, - 754, - 783, - 812, - 841, - 870, - 899, - 928, - 957, - 986, - 1015, - 1044, - 1073, - 1102, - 1131, - 1160, - 1189, - 1218, - 1247, - 1276, - 1305, - 1334, - 1363, - 1392, - 1421, - 1450, - 1479, - 1508, - 1537, - 1566, - 1595, - 1624, - 1653, - 1682, - 1711, - 1740, - 1769, - 1798, + 0, + 29, + 58, + 87, + 116, + 145, + 174, + 203, + 232, + 261, + 290, + 319, + 348, + 377, + 406, + 435, + 464, + 493, + 522, + 551, + 580, + 609, + 638, + 667, + 696, + 725, + 754, + 783, + 812, + 841, + 870, + 899, + 928, + 957, + 986, + 1015, + 1044, + 1073, + 1102, + 1131, + 1160, + 1189, + 1218, + 1247, + 1276, + 1305, + 1334, + 1363, + 1392, + 1421, + 1450, + 1479, + 1508, + 1537, + 1566, + 1595, + 1624, + 1653, + 1682, + 1711, + 1740, + 1769, + 1798, 1827 - ], + ], [ - 0, - 30, - 60, - 90, - 120, - 150, - 180, - 210, - 240, - 270, - 300, - 330, - 360, - 390, - 420, - 450, - 480, - 510, - 540, - 570, - 600, - 630, - 660, - 690, - 720, - 750, - 780, - 810, - 840, - 870, - 900, - 930, - 960, - 990, - 1020, - 1050, - 1080, - 1110, - 1140, - 1170, - 1200, - 1230, - 1260, - 1290, - 1320, - 1350, - 1380, - 1410, - 1440, - 1470, - 1500, - 1530, - 1560, 
- 1590, - 1620, - 1650, - 1680, - 1710, - 1740, - 1770, - 1800, - 1830, - 1860, + 0, + 30, + 60, + 90, + 120, + 150, + 180, + 210, + 240, + 270, + 300, + 330, + 360, + 390, + 420, + 450, + 480, + 510, + 540, + 570, + 600, + 630, + 660, + 690, + 720, + 750, + 780, + 810, + 840, + 870, + 900, + 930, + 960, + 990, + 1020, + 1050, + 1080, + 1110, + 1140, + 1170, + 1200, + 1230, + 1260, + 1290, + 1320, + 1350, + 1380, + 1410, + 1440, + 1470, + 1500, + 1530, + 1560, + 1590, + 1620, + 1650, + 1680, + 1710, + 1740, + 1770, + 1800, + 1830, + 1860, 1890 ] ] } - }, + }, "groups": { "a13b7f35-83f8-11e5-a111-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a13c90b0-83f8-11e5-9ace-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a13c90b0-83f8-11e5-9ace-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": "a13b7f35-83f8-11e5-a111-3c15c2da029e" } diff --git a/data/json/h5ex_d_unlimod.json b/data/json/h5ex_d_unlimod.json deleted file mode 100644 index 49d2dbf..0000000 --- a/data/json/h5ex_d_unlimod.json +++ /dev/null @@ -1 +0,0 @@ -Cannot find file: ../data/hdf5/h5ex_d_unlimod.h5 diff --git a/data/json/nullspace_dset.json b/data/json/nullspace_dset.json index d71b110..8808f21 100644 --- a/data/json/nullspace_dset.json +++ b/data/json/nullspace_dset.json @@ -1,33 +1,34 @@ { + "apiVersion": "1.1.0", "datasets": { "23d3e919-7b53-11e4-961d-3c15c2da029e": { "alias": [ "/DS1" - ], + ], "shape": { "class": "H5S_NULL" - }, + }, "type": { - "base": "H5T_STD_I32LE", + "base": "H5T_STD_I32LE", "class": "H5T_INTEGER" - }, + }, "value": null } - }, + }, "groups": { "23d2e06b-7b53-11e4-9910-3c15c2da029e": { "alias": [ "/" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "23d3e919-7b53-11e4-961d-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "23d3e919-7b53-11e4-961d-3c15c2da029e", "title": "DS1" } ] } - }, + }, "root": 
"23d2e06b-7b53-11e4-9910-3c15c2da029e" } diff --git a/data/json/scalar_array_dset.json b/data/json/scalar_array_dset.json new file mode 100644 index 0000000..de90b73 --- /dev/null +++ b/data/json/scalar_array_dset.json @@ -0,0 +1,61 @@ +{ + "apiVersion": "1.1.0", + "datasets": { + "8752a454-05e2-11e7-900a-3c15c2da029e": { + "alias": [ + "/DS1" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SCALAR" + }, + "type": { + "base": { + "base": "H5T_IEEE_F32LE", + "class": "H5T_FLOAT" + }, + "class": "H5T_ARRAY", + "dims": [ + 3, + 2 + ] + }, + "value": [ + [ + 0.0, + 0.0 + ], + [ + 0.0, + 0.0 + ], + [ + 0.0, + 0.0 + ] + ] + } + }, + "groups": { + "8750b178-05e2-11e7-b6e4-3c15c2da029e": { + "alias": [ + "/" + ], + "links": [ + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "8752a454-05e2-11e7-900a-3c15c2da029e", + "title": "DS1" + } + ] + } + }, + "root": "8750b178-05e2-11e7-b6e4-3c15c2da029e" +} diff --git a/data/json/tall_with_udlink.json b/data/json/tall_with_udlink.json index 9374b9c..2dd08dd 100644 --- a/data/json/tall_with_udlink.json +++ b/data/json/tall_with_udlink.json @@ -1,525 +1,535 @@ { - "apiVersion": "1.0.0", + "apiVersion": "1.0.0", "datasets": { "a33bc46e-83f8-11e5-940c-3c15c2da029e": { "alias": [ "/g1/g1.1/dset1.1.1" - ], + ], "attributes": [ { - "name": "attr1", + "name": "attr1", "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 27 ] - }, + }, "type": { - "base": "H5T_STD_I8LE", + "base": "H5T_STD_I8LE", "class": "H5T_INTEGER" - }, + }, "value": [ - 49, - 115, - 116, - 32, - 97, - 116, - 116, - 114, - 105, - 98, - 117, - 116, - 101, - 32, - 111, - 102, - 32, - 100, - 115, - 101, - 116, - 49, - 46, - 49, - 46, - 49, + 49, + 115, + 116, + 32, + 97, + 116, + 116, + 114, + 105, + 98, + 117, + 116, + 101, + 32, + 111, + 102, + 32, + 100, + 115, + 101, + 116, + 49, + 46, + 49, + 46, + 
49, 0 ] - }, + }, { - "name": "attr2", + "name": "attr2", "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 27 ] - }, + }, "type": { - "base": "H5T_STD_I8LE", + "base": "H5T_STD_I8LE", "class": "H5T_INTEGER" - }, + }, "value": [ - 50, - 110, - 100, - 32, - 97, - 116, - 116, - 114, - 105, - 98, - 117, - 116, - 101, - 32, - 111, - 102, - 32, - 100, - 115, - 101, - 116, - 49, - 46, - 49, - 46, - 49, + 50, + 110, + 100, + 32, + 97, + 116, + 116, + 114, + 105, + 98, + 117, + 116, + 101, + 32, + 111, + 102, + 32, + 100, + 115, + 101, + 116, + 49, + 46, + 49, + 46, + 49, 0 ] } - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 10, + 10, 10 ] - }, + }, "type": { - "base": "H5T_STD_I32BE", + "base": "H5T_STD_I32BE", "class": "H5T_INTEGER" - }, + }, "value": [ [ - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, - 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, + 0, 0 - ], + ], [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, 9 - ], + ], [ - 0, - 2, - 4, - 6, - 8, - 10, - 12, - 14, - 16, + 0, + 2, + 4, + 6, + 8, + 10, + 12, + 14, + 16, 18 - ], + ], [ - 0, - 3, - 6, - 9, - 12, - 15, - 18, - 21, - 24, + 0, + 3, + 6, + 9, + 12, + 15, + 18, + 21, + 24, 27 - ], + ], [ - 0, - 4, - 8, - 12, - 16, - 20, - 24, - 28, - 32, + 0, + 4, + 8, + 12, + 16, + 20, + 24, + 28, + 32, 36 - ], + ], [ - 0, - 5, - 10, - 15, - 20, - 25, - 30, - 35, - 40, + 0, + 5, + 10, + 15, + 20, + 25, + 30, + 35, + 40, 45 - ], + ], [ - 0, - 6, - 12, - 18, - 24, - 30, - 36, - 42, - 48, + 0, + 6, + 12, + 18, + 24, + 30, + 36, + 42, + 48, 54 - ], + ], [ - 0, - 7, - 14, - 21, - 28, - 35, - 42, - 49, - 56, + 0, + 7, + 14, + 21, + 28, + 35, + 42, + 49, + 56, 63 - ], + ], [ - 0, - 8, - 16, - 24, - 32, - 40, - 48, - 56, - 
64, + 0, + 8, + 16, + 24, + 32, + 40, + 48, + 56, + 64, 72 - ], + ], [ - 0, - 9, - 18, - 27, - 36, - 45, - 54, - 63, - 72, + 0, + 9, + 18, + 27, + 36, + 45, + 54, + 63, + 72, 81 ] ] - }, + }, "a33bd7c2-83f8-11e5-b354-3c15c2da029e": { "alias": [ "/g1/g1.1/dset1.1.2" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 20 ] - }, + }, "type": { - "base": "H5T_STD_I32BE", + "base": "H5T_STD_I32BE", "class": "H5T_INTEGER" - }, + }, "value": [ - 0, - 1, - 2, - 3, - 4, - 5, - 6, - 7, - 8, - 9, - 10, - 11, - 12, - 13, - 14, - 15, - 16, - 17, - 18, + 0, + 1, + 2, + 3, + 4, + 5, + 6, + 7, + 8, + 9, + 10, + 11, + 12, + 13, + 14, + 15, + 16, + 17, + 18, 19 ] - }, + }, "a33c216e-83f8-11e5-bc79-3c15c2da029e": { "alias": [ "/g2/dset2.1" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 10 ] - }, + }, "type": { - "base": "H5T_IEEE_F32BE", + "base": "H5T_IEEE_F32BE", "class": "H5T_FLOAT" - }, + }, "value": [ - 1.0, - 1.100000023841858, - 1.2000000476837158, - 1.2999999523162842, - 1.399999976158142, - 1.5, - 1.600000023841858, - 1.7000000476837158, - 1.7999999523162842, + 1.0, + 1.100000023841858, + 1.2000000476837158, + 1.2999999523162842, + 1.399999976158142, + 1.5, + 1.600000023841858, + 1.7000000476837158, + 1.7999999523162842, 1.899999976158142 ] - }, + }, "a33c34c2-83f8-11e5-bb70-3c15c2da029e": { "alias": [ "/g2/dset2.2" - ], + ], "creationProperties": { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": 
"H5D_FILL_TIME_IFSET", "layout": { "class": "H5D_CONTIGUOUS" } - }, + }, "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 3, + 3, 5 ] - }, + }, "type": { - "base": "H5T_IEEE_F32BE", + "base": "H5T_IEEE_F32BE", "class": "H5T_FLOAT" - }, + }, "value": [ [ - 0.0, - 0.10000000149011612, - 0.20000000298023224, - 0.30000001192092896, + 0.0, + 0.10000000149011612, + 0.20000000298023224, + 0.30000001192092896, 0.4000000059604645 - ], + ], [ - 0.0, - 0.20000000298023224, - 0.4000000059604645, - 0.6000000238418579, + 0.0, + 0.20000000298023224, + 0.4000000059604645, + 0.6000000238418579, 0.800000011920929 - ], + ], [ - 0.0, - 0.30000001192092896, - 0.6000000238418579, - 0.8999999761581421, + 0.0, + 0.30000001192092896, + 0.6000000238418579, + 0.8999999761581421, 1.2000000476837158 ] ] } - }, + }, "groups": { "a33a8ab5-83f8-11e5-9d21-3c15c2da029e": { "alias": [ "/" - ], + ], "attributes": [ { - "name": "attr1", + "name": "attr1", "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ 10 ] - }, + }, "type": { - "base": "H5T_STD_I8LE", + "base": "H5T_STD_I8LE", "class": "H5T_INTEGER" - }, + }, "value": [ - 97, - 98, - 99, - 100, - 101, - 102, - 103, - 104, - 105, + 97, + 98, + 99, + 100, + 101, + 102, + 103, + 104, + 105, 0 ] - }, + }, { - "name": "attr2", + "name": "attr2", "shape": { - "class": "H5S_SIMPLE", + "class": "H5S_SIMPLE", "dims": [ - 2, + 2, 2 ] - }, + }, "type": { - "base": "H5T_STD_I32BE", + "base": "H5T_STD_I32BE", "class": "H5T_INTEGER" - }, + }, "value": [ [ - 0, + 0, 1 - ], + ], [ - 2, + 2, 3 ] ] } - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "groups", - "id": "a33b9a94-83f8-11e5-b011-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "groups", + "id": "a33b9a94-83f8-11e5-b011-3c15c2da029e", "title": "g1" - }, + }, { - "class": "H5L_TYPE_HARD", - "collection": "groups", - "id": "a33c0eae-83f8-11e5-a8e7-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "groups", + "id": 
"a33c0eae-83f8-11e5-a8e7-3c15c2da029e", "title": "g2" } ] - }, + }, "a33b9a94-83f8-11e5-b011-3c15c2da029e": { "alias": [ "/g1" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "groups", - "id": "a33baf17-83f8-11e5-95ed-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "groups", + "id": "a33baf17-83f8-11e5-95ed-3c15c2da029e", "title": "g1.1" - }, + }, { - "class": "H5L_TYPE_HARD", - "collection": "groups", - "id": "a33be99c-83f8-11e5-a6da-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "groups", + "id": "a33be99c-83f8-11e5-a6da-3c15c2da029e", "title": "g1.2" } ] - }, + }, "a33baf17-83f8-11e5-95ed-3c15c2da029e": { "alias": [ "/g1/g1.1" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a33bc46e-83f8-11e5-940c-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a33bc46e-83f8-11e5-940c-3c15c2da029e", "title": "dset1.1.1" - }, + }, { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a33bd7c2-83f8-11e5-b354-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a33bd7c2-83f8-11e5-b354-3c15c2da029e", "title": "dset1.1.2" } ] - }, + }, "a33be99c-83f8-11e5-a6da-3c15c2da029e": { "alias": [ "/g1/g1.2" - ], + ], "links": [ { - "class": "H5L_TYPE_HARD", - "collection": "groups", - "id": "a33bfaeb-83f8-11e5-8012-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "groups", + "id": "a33bfaeb-83f8-11e5-8012-3c15c2da029e", "title": "g1.2.1" - }, + }, { - "class": "H5L_TYPE_EXTERNAL", - "file": "somefile", - "h5path": "somepath", + "class": "H5L_TYPE_EXTERNAL", + "file": "somefile", + "h5path": "somepath", "title": "extlink" } ] - }, + }, "a33bfaeb-83f8-11e5-8012-3c15c2da029e": { "alias": [ "/g1/g1.2/g1.2.1" - ], + ], "links": [ { - "class": "H5L_TYPE_SOFT", - "h5path": "somevalue", + "class": "H5L_TYPE_SOFT", + "h5path": "somevalue", "title": "slink" } ] - }, + }, "a33c0eae-83f8-11e5-a8e7-3c15c2da029e": { "alias": [ "/g2" - ], + ], 
"links": [ { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a33c216e-83f8-11e5-bc79-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a33c216e-83f8-11e5-bc79-3c15c2da029e", "title": "dset2.1" - }, + }, { - "class": "H5L_TYPE_HARD", - "collection": "datasets", - "id": "a33c34c2-83f8-11e5-bb70-3c15c2da029e", + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "a33c34c2-83f8-11e5-bb70-3c15c2da029e", "title": "dset2.2" - }, + }, { - "class": "H5L_TYPE_USER_DEFINED", - "title": "udlink" + "class": "H5L_TYPE_USER_DEFINED", + "title": "udlink", + "target": [ + 1, + 4, + 19, + 67, + 15, + 254, + 36, + 118 + ] } ] } - }, + }, "root": "a33a8ab5-83f8-11e5-9d21-3c15c2da029e" } diff --git a/data/json/tstr.json b/data/json/tstr.json index e69de29..1e4d217 100644 --- a/data/json/tstr.json +++ b/data/json/tstr.json @@ -0,0 +1,2440 @@ +{ + "apiVersion": "1.1.1", + "datasets": { + "c074a970-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/comp1" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 3, + 6 + ] + }, + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "int_array", + "type": { + "base": { + "base": "H5T_STD_I32BE", + "class": "H5T_INTEGER" + }, + "class": "H5T_ARRAY", + "dims": [ + 8, + 10 + ] + } + }, + { + "name": "string", + "type": { + "base": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 32, + "strPad": "H5T_STR_NULLPAD" + }, + "class": "H5T_ARRAY", + "dims": [ + 3, + 4 + ] + } + } + ] + }, + "value": [ + [ + [ + [ + [ + 0, + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81 + ], + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + ], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 
81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + ], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, 
+ 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + 
"abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ], + [ + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400, + 441 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + 
"abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ] + ], + [ + [ + [ + [ + 0, + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81 + ], + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + ], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + ], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + 
"abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 
121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, 
+ 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ], + [ + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400, + 441 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ] + ], + [ + [ + [ + [ + 0, + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81 + ], + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + ], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 1, + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100 + 
], + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 4, + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121 + ], + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + 
"abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 9, + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144 + ], + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 16, + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169 + ], + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + 
], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ], + [ + [ + [ + 25, + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196 + ], + [ + 36, + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225 + ], + [ + 49, + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256 + ], + [ + 64, + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289 + ], + [ + 81, + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324 + ], + [ + 100, + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361 + ], + [ + 121, + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400 + ], + [ + 144, + 169, + 196, + 225, + 256, + 289, + 324, + 361, + 400, + 441 + ] + ], + [ + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ], + [ + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678", + "abcdefgh12345678abcdefgh12345678" + ] + ] + ] + ] + ] + }, + "c074d06c-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/string1" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 3, + 4 + ] + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 5, + "strPad": "H5T_STR_NULLPAD" + }, + "value": [ + [ + "s1", + "s2", + "s3", + "s4" + ], + [ + "s5", + "s6", + "s7", + "s8" + ], + [ + "s9", + "s0", + "s1", + "s2" + 
] + ] + }, + "c074f484-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/string2" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 20 + ] + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 11, + "strPad": "H5T_STR_NULLPAD" + }, + "value": [ + "ab cd ef1", + "ab cd ef2", + "ab cd ef3", + "ab cd ef4", + "ab cd ef5", + "ab cd ef6", + "ab cd ef7", + "ab cd ef8", + "ab cd ef9", + "ab cd ef0", + "ab cd ef1", + "ab cd ef2", + "ab cd ef3", + "ab cd ef4", + "ab cd ef5", + "ab cd ef6", + "ab cd ef7", + "ab cd ef8", + "ab cd ef9", + "ab cd ef0" + ] + }, + "c07518e2-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/string3" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 27 + ] + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 8, + "strPad": "H5T_STR_NULLPAD" + }, + "value": [ + "abcd0", + "abcd1", + "abcd2", + "abcd3", + "abcd4", + "abcd5", + "abcd6", + "abcd7", + "abcd8", + "abcd9", + "abcd0", + "abcd1", + "abcd2", + "abcd3", + "abcd4", + "abcd5", + "abcd6", + "abcd7", + "abcd8", + "abcd9", + "abcd0", + "abcd1", + "abcd2", + "abcd3", + "abcd4", + "abcd5", + "abcd6" + ] + }, + "c0753e1c-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/string4" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 3 + ] + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 168, + "strPad": "H5T_STR_NULLPAD" + }, + "value": [ + "s1234567890123456789", + "s1234567890123456789", + "s1234567890123456789" + ] + } + }, + "groups": { + 
"c07445ac-a3c0-11eb-a2f5-8c8590747994": { + "alias": [ + "/" + ], + "links": [ + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "c074a970-a3c0-11eb-a2f5-8c8590747994", + "title": "comp1" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "c074d06c-a3c0-11eb-a2f5-8c8590747994", + "title": "string1" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "c074f484-a3c0-11eb-a2f5-8c8590747994", + "title": "string2" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "c07518e2-a3c0-11eb-a2f5-8c8590747994", + "title": "string3" + }, + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "c0753e1c-a3c0-11eb-a2f5-8c8590747994", + "title": "string4" + } + ] + } + }, + "root": "c07445ac-a3c0-11eb-a2f5-8c8590747994" +} diff --git a/data/json/vlen_string_dset_utc.json b/data/json/vlen_string_dset_utc.json new file mode 100644 index 0000000..6907b0c --- /dev/null +++ b/data/json/vlen_string_dset_utc.json @@ -0,0 +1,2340 @@ +{ + "apiVersion": "1.1.1", + "datasets": { + "e09ffa90-98c2-11ec-aa25-8c8590747994": { + "alias": [ + "/ds1" + ], + "creationProperties": { + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_ALLOC", + "layout": { + "class": "H5D_CONTIGUOUS" + } + }, + "shape": { + "class": "H5S_SIMPLE", + "dims": [ + 2293 + ] + }, + "type": { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": "H5T_VARIABLE", + "strPad": "H5T_STR_NULLTERM" + }, + "value": [ + "2009-12-20T10:16:18.662409Z", + "2009-12-20T10:16:45.711273Z", + "2009-12-20T10:17:13.262140Z", + "2009-12-20T10:17:40.310054Z", + "2009-12-20T10:18:07.862647Z", + "2009-12-20T10:18:34.908689Z", + "2009-12-20T10:19:02.462767Z", + "2009-12-20T10:19:29.508808Z", + "2009-12-20T10:19:57.055611Z", + "2009-12-20T10:20:24.106798Z", + "2009-12-20T10:20:51.662587Z", + "2009-12-20T10:21:18.706612Z", + "2009-12-20T10:21:46.252853Z", + "2009-12-20T10:22:13.306918Z", + "2009-12-20T10:22:40.858835Z", + 
"2009-12-20T10:23:07.907426Z", + "2009-12-20T10:23:35.452704Z", + "2009-12-20T10:24:02.512198Z", + "2009-12-20T10:24:30.050822Z", + "2009-12-20T10:24:57.107695Z", + "2009-12-20T10:25:24.651501Z", + "2009-12-20T10:25:51.712623Z", + "2009-12-20T10:26:19.253621Z", + "2009-12-20T10:26:46.305640Z", + "2009-12-20T10:27:13.852314Z", + "2009-12-20T10:27:40.911980Z", + "2009-12-20T10:28:08.451829Z", + "2009-12-20T10:28:35.501271Z", + "2009-12-20T10:29:03.051689Z", + "2009-12-20T10:29:30.110248Z", + "2009-12-20T10:29:57.650020Z", + "2009-12-20T10:30:24.706320Z", + "2009-12-20T10:30:52.249177Z", + "2009-12-20T10:31:19.304422Z", + "2009-12-20T10:31:46.850336Z", + "2009-12-20T10:32:13.903564Z", + "2009-12-20T10:32:41.449933Z", + "2009-12-20T10:33:08.499821Z", + "2009-12-20T10:33:36.050068Z", + "2009-12-20T10:34:03.108023Z", + "2009-12-20T10:34:30.648993Z", + "2009-12-20T10:34:57.703131Z", + "2009-12-20T10:35:25.249330Z", + "2009-12-20T10:35:52.307687Z", + "2009-12-20T10:36:19.848239Z", + "2009-12-20T10:36:46.901152Z", + "2009-12-20T10:37:14.448762Z", + "2009-12-20T10:37:41.498680Z", + "2009-12-20T10:38:09.047805Z", + "2009-12-20T10:38:36.104192Z", + "2009-12-20T10:39:03.650608Z", + "2009-12-20T10:39:30.701819Z", + "2009-12-20T10:39:58.243266Z", + "2009-12-20T10:40:25.302296Z", + "2009-12-20T10:40:52.849031Z", + "2009-12-20T10:41:19.903918Z", + "2009-12-20T10:41:47.443893Z", + "2009-12-20T10:42:14.498293Z", + "2009-12-20T10:42:42.049750Z", + "2009-12-20T10:43:09.099520Z", + "2009-12-20T10:43:36.644626Z", + "2009-12-20T10:44:03.699012Z", + "2009-12-20T10:44:31.244994Z", + "2009-12-20T10:44:58.299462Z", + "2009-12-20T10:45:25.846167Z", + "2009-12-20T10:45:52.900419Z", + "2009-12-20T10:46:20.449140Z", + "2009-12-20T10:46:47.503392Z", + "2009-12-20T10:47:15.045055Z", + "2009-12-20T10:47:42.104348Z", + "2009-12-20T10:48:09.643405Z", + "2009-12-20T10:48:36.699674Z", + "2009-12-20T10:59:59.439028Z", + "2009-12-20T11:00:26.496675Z", + "2009-12-20T11:00:54.040385Z", + 
"2009-12-20T11:01:21.094838Z", + "2009-12-20T11:01:48.639884Z", + "2009-12-20T11:02:15.695951Z", + "2009-12-20T11:02:43.245076Z", + "2009-12-20T11:03:10.295899Z", + "2009-12-20T11:03:37.842775Z", + "2009-12-20T11:04:04.894539Z", + "2009-12-20T11:04:32.438349Z", + "2009-12-20T11:04:59.496633Z", + "2009-12-20T11:05:27.043323Z", + "2009-12-20T11:05:54.098411Z", + "2009-12-20T11:06:21.638850Z", + "2009-12-20T11:06:48.698634Z", + "2009-12-20T11:07:16.242025Z", + "2009-12-20T11:07:43.285475Z", + "2009-12-20T11:08:10.837955Z", + "2009-12-20T11:08:37.892482Z", + "2009-12-20T11:09:05.440913Z", + "2009-12-20T11:09:32.495238Z", + "2009-12-20T11:10:00.039781Z", + "2009-12-20T11:10:27.092119Z", + "2009-12-20T11:10:54.637397Z", + "2009-12-20T11:11:21.696980Z", + "2009-12-20T11:11:49.237532Z", + "2009-12-20T11:12:16.296293Z", + "2009-12-20T11:12:43.838287Z", + "2009-12-20T11:13:10.890827Z", + "2009-12-20T11:13:38.437429Z", + "2009-12-20T11:14:05.491494Z", + "2009-12-20T11:14:33.041798Z", + "2009-12-20T11:15:00.091023Z", + "2009-12-20T11:15:27.635727Z", + "2009-12-20T11:15:54.692182Z", + "2009-12-20T11:16:22.236839Z", + "2009-12-20T11:16:49.290703Z", + "2009-12-20T11:17:16.841628Z", + "2009-12-20T11:17:43.891614Z", + "2009-12-20T11:18:11.436648Z", + "2009-12-20T11:18:38.490429Z", + "2009-12-20T11:19:06.036669Z", + "2009-12-20T11:19:33.092535Z", + "2009-12-20T11:20:00.635438Z", + "2009-12-20T11:20:27.687457Z", + "2009-12-20T11:20:55.234348Z", + "2009-12-20T11:21:22.289406Z", + "2009-12-20T11:21:49.833056Z", + "2009-12-20T11:22:16.889525Z", + "2009-12-20T11:22:44.434819Z", + "2009-12-20T11:23:11.494656Z", + "2009-12-20T11:23:39.033527Z", + "2009-12-20T11:24:06.087919Z", + "2009-12-20T11:24:33.635073Z", + "2009-12-20T11:25:00.688084Z", + "2009-12-20T11:25:28.237024Z", + "2009-12-20T11:25:55.287010Z", + "2009-12-20T11:26:22.833296Z", + "2009-12-20T11:26:49.887547Z", + "2009-12-20T11:27:17.433028Z", + "2009-12-20T11:27:44.488691Z", + "2009-12-20T11:28:12.036777Z", + 
"2009-12-20T11:28:39.089647Z", + "2009-12-20T11:29:06.632940Z", + "2009-12-20T11:29:33.684292Z", + "2009-12-20T11:30:01.232393Z", + "2009-12-20T11:30:28.286427Z", + "2009-12-20T11:30:55.832109Z", + "2009-12-20T11:31:22.887742Z", + "2009-12-20T11:31:50.432243Z", + "2009-12-20T11:32:17.487489Z", + "2009-12-20T11:55:03.729976Z", + "2009-12-20T11:55:30.778483Z", + "2009-12-20T11:55:58.325644Z", + "2009-12-20T11:56:25.379409Z", + "2009-12-20T11:56:52.924770Z", + "2009-12-20T11:57:19.976850Z", + "2009-12-20T11:57:47.528565Z", + "2009-12-20T11:58:14.583779Z", + "2009-12-20T11:58:42.126063Z", + "2009-12-20T11:59:09.179783Z", + "2009-12-20T11:59:36.723546Z", + "2009-12-20T12:00:03.776893Z", + "2009-12-20T12:00:31.321060Z", + "2009-12-20T12:00:58.378104Z", + "2009-12-20T12:01:25.922777Z", + "2009-12-20T12:01:52.981337Z", + "2009-12-20T12:02:20.521903Z", + "2009-12-20T12:02:47.577365Z", + "2009-12-20T12:03:15.121387Z", + "2009-12-20T12:03:42.177732Z", + "2009-12-20T12:04:09.721553Z", + "2009-12-20T12:04:36.774842Z", + "2009-12-20T12:05:04.326900Z", + "2009-12-20T12:05:31.381212Z", + "2009-12-20T12:05:58.926274Z", + "2009-12-20T12:06:25.976104Z", + "2009-12-20T12:06:53.520934Z", + "2009-12-20T12:07:20.575451Z", + "2009-12-20T12:07:48.120030Z", + "2009-12-20T12:08:15.181369Z", + "2009-12-20T12:08:42.718767Z", + "2009-12-20T12:09:09.773796Z", + "2009-12-20T12:09:37.320283Z", + "2009-12-20T12:10:04.374333Z", + "2009-12-20T12:10:31.920403Z", + "2009-12-20T12:10:58.975895Z", + "2009-12-20T12:11:26.521979Z", + "2009-12-20T12:11:53.572259Z", + "2009-12-20T12:12:21.118903Z", + "2009-12-20T12:12:48.173543Z", + "2009-12-20T12:13:15.716789Z", + "2009-12-20T12:13:42.772960Z", + "2009-12-20T12:14:10.319964Z", + "2009-12-20T12:14:37.379056Z", + "2009-12-20T12:15:04.921092Z", + "2009-12-20T12:15:31.973357Z", + "2009-12-20T12:15:59.516216Z", + "2009-12-20T12:16:26.572686Z", + "2009-12-20T12:16:54.116955Z", + "2009-12-20T12:17:21.176450Z", + "2009-12-20T12:17:48.720378Z", + 
"2009-12-20T12:18:15.778027Z", + "2009-12-20T12:18:43.317427Z", + "2009-12-20T12:19:10.372919Z", + "2009-12-20T12:19:37.916165Z", + "2009-12-20T12:20:04.976497Z", + "2009-12-20T12:20:32.521418Z", + "2009-12-20T12:20:59.574817Z", + "2009-12-20T12:21:27.116496Z", + "2009-12-20T12:21:54.174393Z", + "2009-12-20T12:22:21.714443Z", + "2009-12-20T12:22:48.774341Z", + "2009-12-20T12:23:16.318021Z", + "2009-12-20T12:23:43.374537Z", + "2009-12-20T12:24:10.915162Z", + "2009-12-20T12:24:37.971305Z", + "2009-12-20T12:25:05.515357Z", + "2009-12-20T12:25:32.573409Z", + "2009-12-20T12:26:00.117136Z", + "2009-12-20T12:26:27.170208Z", + "2009-12-20T12:26:54.714664Z", + "2009-12-20T12:27:21.768947Z", + "2009-12-20T12:38:44.516254Z", + "2009-12-20T12:39:11.566472Z", + "2009-12-20T12:39:39.114775Z", + "2009-12-20T12:40:06.171694Z", + "2009-12-20T12:40:33.708316Z", + "2009-12-20T12:41:00.766818Z", + "2009-12-20T12:41:28.311118Z", + "2009-12-20T12:41:55.371792Z", + "2009-12-20T12:42:22.910865Z", + "2009-12-20T12:42:49.971585Z", + "2009-12-20T12:43:17.509432Z", + "2009-12-20T12:43:44.571579Z", + "2009-12-20T12:44:12.109476Z", + "2009-12-20T12:44:39.170892Z", + "2009-12-20T12:45:06.708663Z", + "2009-12-20T12:45:33.766807Z", + "2009-12-20T12:46:01.313264Z", + "2009-12-20T12:46:28.365934Z", + "2009-12-20T12:46:55.907194Z", + "2009-12-20T12:47:22.966099Z", + "2009-12-20T12:47:50.507158Z", + "2009-12-20T12:48:17.566652Z", + "2009-12-20T12:48:45.106299Z", + "2009-12-20T12:49:12.165794Z", + "2009-12-20T12:49:39.706263Z", + "2009-12-20T12:50:06.767789Z", + "2009-12-20T12:50:34.310461Z", + "2009-12-20T12:51:01.363720Z", + "2009-12-20T12:51:28.910021Z", + "2009-12-20T12:51:55.965282Z", + "2009-12-20T12:52:23.511568Z", + "2009-12-20T12:52:50.566037Z", + "2009-12-20T12:53:18.107297Z", + "2009-12-20T12:53:45.168622Z", + "2009-12-20T12:54:12.708455Z", + "2009-12-20T12:54:39.764118Z", + "2009-12-20T12:55:07.305767Z", + "2009-12-20T12:55:34.365090Z", + "2009-12-20T12:56:01.907716Z", + 
"2009-12-20T12:56:28.964076Z", + "2009-12-20T12:56:56.508300Z", + "2009-12-20T12:57:23.566786Z", + "2009-12-20T12:57:51.106620Z", + "2009-12-20T12:58:18.163509Z", + "2009-12-20T12:58:45.706165Z", + "2009-12-20T12:59:12.763240Z", + "2009-12-20T12:59:40.305554Z", + "2009-12-20T13:00:07.362785Z", + "2009-12-20T13:00:34.907675Z", + "2009-12-20T13:01:01.961524Z", + "2009-12-20T13:01:29.505234Z", + "2009-12-20T13:01:56.561643Z", + "2009-12-20T13:02:24.110627Z", + "2009-12-20T13:02:51.160922Z", + "2009-12-20T13:03:18.704682Z", + "2009-12-20T13:03:45.763975Z", + "2009-12-20T13:04:13.303980Z", + "2009-12-20T13:04:40.358634Z", + "2009-12-20T13:05:07.904206Z", + "2009-12-20T13:05:34.960675Z", + "2009-12-20T13:06:02.504557Z", + "2009-12-20T13:06:29.560050Z", + "2009-12-20T13:06:57.102147Z", + "2009-12-20T13:07:24.158153Z", + "2009-12-20T13:07:51.702795Z", + "2009-12-20T13:08:18.762999Z", + "2009-12-20T13:08:46.302355Z", + "2009-12-20T13:09:13.361029Z", + "2009-12-20T13:09:40.903498Z", + "2009-12-20T13:10:07.957218Z", + "2009-12-20T13:10:35.503215Z", + "2009-12-20T13:11:02.559095Z", + "2009-12-20T13:33:48.796504Z", + "2009-12-20T13:34:15.850987Z", + "2009-12-20T13:34:43.395304Z", + "2009-12-20T13:35:10.451412Z", + "2009-12-20T13:35:37.995609Z", + "2009-12-20T13:36:05.056432Z", + "2009-12-20T13:36:32.592704Z", + "2009-12-20T13:36:59.651049Z", + "2009-12-20T13:37:27.200159Z", + "2009-12-20T13:37:54.250420Z", + "2009-12-20T13:38:21.793655Z", + "2009-12-20T13:38:48.849938Z", + "2009-12-20T13:39:16.392982Z", + "2009-12-20T13:39:43.450274Z", + "2009-12-20T13:40:11.000004Z", + "2009-12-20T13:40:38.053910Z", + "2009-12-20T13:41:05.594074Z", + "2009-12-20T13:41:32.657415Z", + "2009-12-20T13:42:00.192641Z", + "2009-12-20T13:42:27.252725Z", + "2009-12-20T13:42:54.793582Z", + "2009-12-20T13:43:21.850456Z", + "2009-12-20T13:43:49.398945Z", + "2009-12-20T13:44:16.456656Z", + "2009-12-20T13:44:43.991417Z", + "2009-12-20T13:45:11.048522Z", + "2009-12-20T13:45:38.597586Z", + 
"2009-12-20T13:46:05.647881Z", + "2009-12-20T13:46:33.195332Z", + "2009-12-20T13:47:00.248620Z", + "2009-12-20T13:47:27.791292Z", + "2009-12-20T13:47:54.846366Z", + "2009-12-20T13:48:22.391598Z", + "2009-12-20T13:48:49.451139Z", + "2009-12-20T13:49:16.996216Z", + "2009-12-20T13:49:44.047690Z", + "2009-12-20T13:50:11.590517Z", + "2009-12-20T13:50:38.656868Z", + "2009-12-20T13:51:06.189892Z", + "2009-12-20T13:51:33.251201Z", + "2009-12-20T13:52:00.791034Z", + "2009-12-20T13:52:27.846496Z", + "2009-12-20T13:52:55.389602Z", + "2009-12-20T13:53:22.446878Z", + "2009-12-20T13:53:49.990155Z", + "2009-12-20T13:54:17.046020Z", + "2009-12-20T13:54:44.588289Z", + "2009-12-20T13:55:11.646668Z", + "2009-12-20T13:55:39.191696Z", + "2009-12-20T13:56:06.245399Z", + "2009-12-20T13:56:33.791971Z", + "2009-12-20T13:57:00.843891Z", + "2009-12-20T13:57:28.388088Z", + "2009-12-20T13:57:55.442168Z", + "2009-12-20T13:58:22.989291Z", + "2009-12-20T13:58:50.045358Z", + "2009-12-20T13:59:17.591242Z", + "2009-12-20T13:59:44.644185Z", + "2009-12-20T14:00:12.187359Z", + "2009-12-20T14:00:39.246770Z", + "2009-12-20T14:01:06.788608Z", + "2009-12-20T14:01:33.843910Z", + "2009-12-20T14:02:01.387115Z", + "2009-12-20T14:02:28.442851Z", + "2009-12-20T14:02:55.990521Z", + "2009-12-20T14:03:23.042659Z", + "2009-12-20T14:03:50.586281Z", + "2009-12-20T14:04:17.643150Z", + "2009-12-20T14:04:45.187237Z", + "2009-12-20T14:05:12.246402Z", + "2009-12-20T14:05:39.786612Z", + "2009-12-20T14:06:06.842476Z", + "2009-12-20T14:17:29.583700Z", + "2009-12-20T14:17:56.640992Z", + "2009-12-20T14:18:24.188317Z", + "2009-12-20T14:18:51.241866Z", + "2009-12-20T14:19:18.784651Z", + "2009-12-20T14:19:45.838933Z", + "2009-12-20T14:20:13.385250Z", + "2009-12-20T14:20:40.439186Z", + "2009-12-20T14:21:07.987634Z", + "2009-12-20T14:21:35.044289Z", + "2009-12-20T14:22:02.584976Z", + "2009-12-20T14:22:29.639909Z", + "2009-12-20T14:22:57.183341Z", + "2009-12-20T14:23:24.238659Z", + "2009-12-20T14:23:51.781288Z", + 
"2009-12-20T14:24:18.838466Z", + "2009-12-20T14:24:46.384277Z", + "2009-12-20T14:25:13.438529Z", + "2009-12-20T14:25:40.983046Z", + "2009-12-20T14:26:08.041129Z", + "2009-12-20T14:26:35.583366Z", + "2009-12-20T14:27:02.637664Z", + "2009-12-20T14:27:30.183159Z", + "2009-12-20T14:27:57.244469Z", + "2009-12-20T14:28:24.781913Z", + "2009-12-20T14:28:51.836367Z", + "2009-12-20T14:29:19.383505Z", + "2009-12-20T14:29:46.436201Z", + "2009-12-20T14:30:13.982677Z", + "2009-12-20T14:30:41.037721Z", + "2009-12-20T14:31:08.581773Z", + "2009-12-20T14:31:35.638909Z", + "2009-12-20T14:32:03.180356Z", + "2009-12-20T14:32:30.237461Z", + "2009-12-20T14:32:57.785765Z", + "2009-12-20T14:33:24.839007Z", + "2009-12-20T14:33:52.385092Z", + "2009-12-20T14:34:19.446280Z", + "2009-12-20T14:34:46.983054Z", + "2009-12-20T14:35:14.037337Z", + "2009-12-20T14:35:41.580149Z", + "2009-12-20T14:36:08.636216Z", + "2009-12-20T14:36:36.178640Z", + "2009-12-20T14:37:03.239360Z", + "2009-12-20T14:37:30.783242Z", + "2009-12-20T14:37:57.837912Z", + "2009-12-20T14:38:25.380398Z", + "2009-12-20T14:38:52.439675Z", + "2009-12-20T14:39:19.978732Z", + "2009-12-20T14:39:47.040662Z", + "2009-12-20T14:40:14.581908Z", + "2009-12-20T14:40:41.641604Z", + "2009-12-20T14:41:09.178226Z", + "2009-12-20T14:41:36.231889Z", + "2009-12-20T14:42:03.777151Z", + "2009-12-20T14:42:30.832643Z", + "2009-12-20T14:42:58.379333Z", + "2009-12-20T14:43:25.433150Z", + "2009-12-20T14:43:52.980073Z", + "2009-12-20T14:44:20.034758Z", + "2009-12-20T14:44:47.575988Z", + "2009-12-20T14:45:14.633249Z", + "2009-12-20T14:45:42.179783Z", + "2009-12-20T14:46:09.233680Z", + "2009-12-20T14:46:36.775512Z", + "2009-12-20T14:47:03.834588Z", + "2009-12-20T14:47:31.375414Z", + "2009-12-20T14:47:58.433714Z", + "2009-12-20T14:48:25.978993Z", + "2009-12-20T14:48:53.032266Z", + "2009-12-20T14:49:20.575110Z", + "2009-12-20T14:49:47.633657Z", + "2009-12-20T15:12:33.865141Z", + "2009-12-20T15:13:00.926699Z", + "2009-12-20T15:13:28.470968Z", + 
"2009-12-20T15:13:55.526012Z", + "2009-12-20T15:14:23.069490Z", + "2009-12-20T15:14:50.125557Z", + "2009-12-20T15:15:17.669624Z", + "2009-12-20T15:15:44.726296Z", + "2009-12-20T15:16:12.270037Z", + "2009-12-20T15:16:39.323996Z", + "2009-12-20T15:17:06.870297Z", + "2009-12-20T15:17:33.926379Z", + "2009-12-20T15:18:01.468043Z", + "2009-12-20T15:18:28.524700Z", + "2009-12-20T15:18:56.068147Z", + "2009-12-20T15:19:23.121034Z", + "2009-12-20T15:19:50.667289Z", + "2009-12-20T15:20:17.722735Z", + "2009-12-20T15:20:45.266012Z", + "2009-12-20T15:21:12.322079Z", + "2009-12-20T15:21:39.866147Z", + "2009-12-20T15:22:06.924230Z", + "2009-12-20T15:22:34.466498Z", + "2009-12-20T15:23:01.523558Z", + "2009-12-20T15:23:29.065625Z", + "2009-12-20T15:23:56.125322Z", + "2009-12-20T15:24:23.666163Z", + "2009-12-20T15:24:50.720818Z", + "2009-12-20T15:25:18.267523Z", + "2009-12-20T15:25:45.327825Z", + "2009-12-20T15:26:12.865268Z", + "2009-12-20T15:26:39.923397Z", + "2009-12-20T15:27:07.464814Z", + "2009-12-20T15:27:34.521314Z", + "2009-12-20T15:28:02.063879Z", + "2009-12-20T15:28:29.122069Z", + "2009-12-20T15:28:56.666928Z", + "2009-12-20T15:29:23.722824Z", + "2009-12-20T15:29:51.264876Z", + "2009-12-20T15:30:18.325870Z", + "2009-12-20T15:30:45.865211Z", + "2009-12-20T15:31:12.921511Z", + "2009-12-20T15:31:40.462120Z", + "2009-12-20T15:32:07.525678Z", + "2009-12-20T15:32:35.066939Z", + "2009-12-20T15:33:02.118181Z", + "2009-12-20T15:33:29.663646Z", + "2009-12-20T15:33:56.718921Z", + "2009-12-20T15:34:24.269271Z", + "2009-12-20T15:34:51.325710Z", + "2009-12-20T15:35:18.864550Z", + "2009-12-20T15:35:45.919856Z", + "2009-12-20T15:36:13.463506Z", + "2009-12-20T15:36:40.518378Z", + "2009-12-20T15:37:08.062244Z", + "2009-12-20T15:37:35.118714Z", + "2009-12-20T15:38:02.662999Z", + "2009-12-20T15:38:29.718895Z", + "2009-12-20T15:38:57.261985Z", + "2009-12-20T15:39:24.319246Z", + "2009-12-20T15:39:51.861918Z", + "2009-12-20T15:40:18.916060Z", + "2009-12-20T15:40:46.462285Z", + 
"2009-12-20T15:41:13.515544Z", + "2009-12-20T15:41:41.061644Z", + "2009-12-20T15:42:08.116329Z", + "2009-12-20T15:42:35.660956Z", + "2009-12-20T15:43:02.718496Z", + "2009-12-20T15:43:30.265821Z", + "2009-12-20T15:43:57.316413Z", + "2009-12-20T15:44:24.862729Z", + "2009-12-20T15:44:51.917106Z", + "2009-12-20T15:56:14.657169Z", + "2009-12-20T15:56:41.713468Z", + "2009-12-20T15:57:09.260545Z", + "2009-12-20T15:57:36.312869Z", + "2009-12-20T15:58:03.860406Z", + "2009-12-20T15:58:30.913612Z", + "2009-12-20T15:58:58.461915Z", + "2009-12-20T15:59:25.513141Z", + "2009-12-20T15:59:53.058636Z", + "2009-12-20T16:00:20.113029Z", + "2009-12-20T16:00:47.662401Z", + "2009-12-20T16:01:14.713613Z", + "2009-12-20T16:01:42.256099Z", + "2009-12-20T16:02:09.312816Z", + "2009-12-20T16:02:36.855861Z", + "2009-12-20T16:03:03.913153Z", + "2009-12-20T16:03:31.455390Z", + "2009-12-20T16:03:58.511331Z", + "2009-12-20T16:04:26.056625Z", + "2009-12-20T16:04:53.112071Z", + "2009-12-20T16:05:20.655565Z", + "2009-12-20T16:05:47.711849Z", + "2009-12-20T16:06:15.255126Z", + "2009-12-20T16:06:42.312945Z", + "2009-12-20T16:07:09.858098Z", + "2009-12-20T16:07:36.911543Z", + "2009-12-20T16:08:04.454200Z", + "2009-12-20T16:08:31.511104Z", + "2009-12-20T16:08:59.055187Z", + "2009-12-20T16:09:26.111626Z", + "2009-12-20T16:09:53.653491Z", + "2009-12-20T16:10:20.710193Z", + "2009-12-20T16:10:48.255503Z", + "2009-12-20T16:11:15.310576Z", + "2009-12-20T16:11:42.852799Z", + "2009-12-20T16:12:09.912309Z", + "2009-12-20T16:12:37.452731Z", + "2009-12-20T16:13:04.509217Z", + "2009-12-20T16:13:32.053905Z", + "2009-12-20T16:13:59.109972Z", + "2009-12-20T16:14:26.652411Z", + "2009-12-20T16:14:53.708539Z", + "2009-12-20T16:15:21.250778Z", + "2009-12-20T16:15:48.309047Z", + "2009-12-20T16:16:15.851532Z", + "2009-12-20T16:16:42.905971Z", + "2009-12-20T16:17:10.452489Z", + "2009-12-20T16:17:37.507144Z", + "2009-12-20T16:18:05.051973Z", + "2009-12-20T16:18:32.108055Z", + "2009-12-20T16:18:59.650370Z", + 
"2009-12-20T16:19:26.710842Z", + "2009-12-20T16:19:54.252086Z", + "2009-12-20T16:20:21.305950Z", + "2009-12-20T16:20:48.850406Z", + "2009-12-20T16:21:15.912135Z", + "2009-12-20T16:21:43.447425Z", + "2009-12-20T16:22:10.506018Z", + "2009-12-20T16:22:38.048458Z", + "2009-12-20T16:23:05.106233Z", + "2009-12-20T16:23:32.649554Z", + "2009-12-20T16:23:59.704785Z", + "2009-12-20T16:24:27.250280Z", + "2009-12-20T16:24:54.304516Z", + "2009-12-20T16:25:21.850212Z", + "2009-12-20T16:25:48.905659Z", + "2009-12-20T16:26:16.449153Z", + "2009-12-20T16:26:43.506817Z", + "2009-12-20T16:27:11.048434Z", + "2009-12-20T16:27:38.104810Z", + "2009-12-20T16:28:05.648802Z", + "2009-12-20T16:28:32.703240Z", + "2009-12-20T16:51:50.142807Z", + "2009-12-20T16:52:17.199074Z", + "2009-12-20T16:52:44.742119Z", + "2009-12-20T16:53:11.797380Z", + "2009-12-20T16:53:39.344472Z", + "2009-12-20T16:54:06.396722Z", + "2009-12-20T16:54:33.941612Z", + "2009-12-20T16:55:00.996503Z", + "2009-12-20T16:55:28.545502Z", + "2009-12-20T16:55:55.595565Z", + "2009-12-20T16:56:23.141821Z", + "2009-12-20T16:56:50.195468Z", + "2009-12-20T16:57:17.742763Z", + "2009-12-20T16:57:44.795820Z", + "2009-12-20T16:58:12.346078Z", + "2009-12-20T16:58:39.400748Z", + "2009-12-20T16:59:06.939387Z", + "2009-12-20T16:59:33.996260Z", + "2009-12-20T17:00:01.539321Z", + "2009-12-20T17:00:28.600847Z", + "2009-12-20T17:00:56.138230Z", + "2009-12-20T17:01:23.194327Z", + "2009-12-20T17:01:50.738814Z", + "2009-12-20T17:02:17.799891Z", + "2009-12-20T17:02:45.337708Z", + "2009-12-20T17:03:12.395993Z", + "2009-12-20T17:03:39.941054Z", + "2009-12-20T17:04:06.994514Z", + "2009-12-20T17:04:34.538381Z", + "2009-12-20T17:05:01.590026Z", + "2009-12-20T17:05:29.136515Z", + "2009-12-20T17:05:56.200617Z", + "2009-12-20T17:06:23.736462Z", + "2009-12-20T17:06:50.799169Z", + "2009-12-20T17:07:18.335977Z", + "2009-12-20T17:07:45.399551Z", + "2009-12-20T17:08:12.936576Z", + "2009-12-20T17:08:39.997855Z", + "2009-12-20T17:09:07.537084Z", + 
"2009-12-20T17:09:34.593352Z", + "2009-12-20T17:10:02.135388Z", + "2009-12-20T17:10:29.191888Z", + "2009-12-20T17:10:56.735196Z", + "2009-12-20T17:11:23.790814Z", + "2009-12-20T17:11:51.336354Z", + "2009-12-20T17:12:18.394034Z", + "2009-12-20T17:12:45.936645Z", + "2009-12-20T17:13:12.994603Z", + "2009-12-20T17:13:40.538005Z", + "2009-12-20T17:14:07.591511Z", + "2009-12-20T17:14:35.134788Z", + "2009-12-20T17:15:02.195306Z", + "2009-12-20T17:15:29.737575Z", + "2009-12-20T17:15:56.793455Z", + "2009-12-20T17:16:24.331473Z", + "2009-12-20T17:16:51.390363Z", + "2009-12-20T17:17:18.933407Z", + "2009-12-20T17:17:45.994748Z", + "2009-12-20T17:18:13.536784Z", + "2009-12-20T17:18:40.586646Z", + "2009-12-20T17:19:08.134328Z", + "2009-12-20T17:19:35.190208Z", + "2009-12-20T17:20:02.740253Z", + "2009-12-20T17:20:29.789567Z", + "2009-12-20T17:20:57.337078Z", + "2009-12-20T17:21:24.392353Z", + "2009-12-20T17:21:51.939431Z", + "2009-12-20T17:22:18.988874Z", + "2009-12-20T17:22:46.530538Z", + "2009-12-20T17:23:13.588636Z", + "2009-12-20T17:23:41.133697Z", + "2009-12-20T17:24:08.194618Z", + "2009-12-20T17:35:30.932036Z", + "2009-12-20T17:35:57.983568Z", + "2009-12-20T17:36:25.538638Z", + "2009-12-20T17:36:52.583485Z", + "2009-12-20T17:37:20.131918Z", + "2009-12-20T17:37:47.183666Z", + "2009-12-20T17:38:14.730790Z", + "2009-12-20T17:38:41.784033Z", + "2009-12-20T17:39:09.336901Z", + "2009-12-20T17:39:36.392854Z", + "2009-12-20T17:40:03.930195Z", + "2009-12-20T17:40:30.984550Z", + "2009-12-20T17:40:58.530620Z", + "2009-12-20T17:41:25.581892Z", + "2009-12-20T17:41:53.131189Z", + "2009-12-20T17:42:20.181034Z", + "2009-12-20T17:42:47.728949Z", + "2009-12-20T17:43:14.781401Z", + "2009-12-20T17:43:42.328292Z", + "2009-12-20T17:44:09.383210Z", + "2009-12-20T17:44:36.930573Z", + "2009-12-20T17:45:03.982881Z", + "2009-12-20T17:45:31.530041Z", + "2009-12-20T17:45:58.580843Z", + "2009-12-20T17:46:26.127951Z", + "2009-12-20T17:46:53.186266Z", + "2009-12-20T17:47:20.728257Z", + 
"2009-12-20T17:47:47.786774Z", + "2009-12-20T17:48:15.325632Z", + "2009-12-20T17:48:42.381883Z", + "2009-12-20T17:49:09.927439Z", + "2009-12-20T17:49:36.984699Z", + "2009-12-20T17:50:04.527889Z", + "2009-12-20T17:50:31.585424Z", + "2009-12-20T17:50:59.126611Z", + "2009-12-20T17:51:26.187561Z", + "2009-12-20T17:51:53.733862Z", + "2009-12-20T17:52:20.785523Z", + "2009-12-20T17:52:48.092245Z", + "2009-12-20T17:53:15.381221Z", + "2009-12-20T17:53:42.922781Z", + "2009-12-20T17:54:09.980564Z", + "2009-12-20T17:54:37.526592Z", + "2009-12-20T17:55:04.585538Z", + "2009-12-20T17:55:32.127518Z", + "2009-12-20T17:55:59.185890Z", + "2009-12-20T17:56:26.725217Z", + "2009-12-20T17:56:53.780783Z", + "2009-12-20T17:57:21.325352Z", + "2009-12-20T17:57:48.388968Z", + "2009-12-20T17:58:15.928713Z", + "2009-12-20T17:58:42.980245Z", + "2009-12-20T17:59:10.525188Z", + "2009-12-20T17:59:37.583591Z", + "2009-12-20T18:00:05.124619Z", + "2009-12-20T18:00:32.176668Z", + "2009-12-20T18:00:59.732948Z", + "2009-12-20T18:01:26.779207Z", + "2009-12-20T18:01:54.328228Z", + "2009-12-20T18:02:21.377885Z", + "2009-12-20T18:02:48.928219Z", + "2009-12-20T18:03:15.978050Z", + "2009-12-20T18:03:43.522620Z", + "2009-12-20T18:04:10.582591Z", + "2009-12-20T18:04:38.126152Z", + "2009-12-20T18:05:05.181298Z", + "2009-12-20T18:05:32.722140Z", + "2009-12-20T18:05:59.775649Z", + "2009-12-20T18:06:27.326308Z", + "2009-12-20T18:06:54.379148Z", + "2009-12-20T18:07:21.921526Z", + "2009-12-20T18:07:48.976988Z", + "2009-12-20T18:30:35.020912Z", + "2009-12-20T18:31:02.068769Z", + "2009-12-20T18:31:29.609911Z", + "2009-12-20T18:31:56.678118Z", + "2009-12-20T18:32:24.215706Z", + "2009-12-20T18:32:51.269411Z", + "2009-12-20T18:33:18.813407Z", + "2009-12-20T18:33:45.874184Z", + "2009-12-20T18:34:13.414318Z", + "2009-12-20T18:34:40.470286Z", + "2009-12-20T18:35:08.015677Z", + "2009-12-20T18:35:35.071413Z", + "2009-12-20T18:36:02.613361Z", + "2009-12-20T18:36:29.673751Z", + "2009-12-20T18:36:57.214117Z", + 
"2009-12-20T18:37:24.265370Z", + "2009-12-20T18:37:51.815027Z", + "2009-12-20T18:38:18.866670Z", + "2009-12-20T18:38:46.411201Z", + "2009-12-20T18:39:13.467455Z", + "2009-12-20T18:39:41.012413Z", + "2009-12-20T18:40:08.068895Z", + "2009-12-20T18:40:35.612316Z", + "2009-12-20T18:41:02.672766Z", + "2009-12-20T18:41:30.211210Z", + "2009-12-20T18:41:57.268806Z", + "2009-12-20T18:42:24.811964Z", + "2009-12-20T18:42:51.868276Z", + "2009-12-20T18:43:19.412285Z", + "2009-12-20T18:43:46.473094Z", + "2009-12-20T18:44:14.011801Z", + "2009-12-20T18:44:41.064325Z", + "2009-12-20T18:45:08.611128Z", + "2009-12-20T18:45:35.663250Z", + "2009-12-20T18:46:03.211077Z", + "2009-12-20T18:46:30.264176Z", + "2009-12-20T18:46:57.813616Z", + "2009-12-20T18:47:24.864124Z", + "2009-12-20T18:47:52.416186Z", + "2009-12-20T18:48:19.468711Z", + "2009-12-20T18:48:47.008471Z", + "2009-12-20T18:49:14.064657Z", + "2009-12-20T18:49:41.608203Z", + "2009-12-20T18:50:08.666964Z", + "2009-12-20T18:50:36.208121Z", + "2009-12-20T18:51:03.267703Z", + "2009-12-20T18:51:30.808412Z", + "2009-12-20T18:51:57.869280Z", + "2009-12-20T18:52:25.407614Z", + "2009-12-20T18:52:52.460558Z", + "2009-12-20T18:53:20.010339Z", + "2009-12-20T18:53:47.061469Z", + "2009-12-20T18:54:14.609698Z", + "2009-12-20T18:54:41.660796Z", + "2009-12-20T18:55:09.208422Z", + "2009-12-20T18:55:36.262016Z", + "2009-12-20T18:56:03.809021Z", + "2009-12-20T18:56:30.862973Z", + "2009-12-20T18:56:58.409978Z", + "2009-12-20T18:57:25.461277Z", + "2009-12-20T18:57:53.005071Z", + "2009-12-20T18:58:20.067291Z", + "2009-12-20T18:58:47.610464Z", + "2009-12-20T18:59:14.662011Z", + "2009-12-20T18:59:42.205184Z", + "2009-12-20T19:00:09.262130Z", + "2009-12-20T19:00:36.810748Z", + "2009-12-20T19:01:03.861676Z", + "2009-12-20T19:01:31.406462Z", + "2009-12-20T19:01:58.463020Z", + "2009-12-20T19:02:26.006038Z", + "2009-12-20T19:02:53.065187Z", + "2009-12-20T19:14:15.808748Z", + "2009-12-20T19:14:42.852990Z", + "2009-12-20T19:15:10.404634Z", + 
"2009-12-20T19:15:37.456569Z", + "2009-12-20T19:16:05.003029Z", + "2009-12-20T19:16:32.060767Z", + "2009-12-20T19:16:59.606143Z", + "2009-12-20T19:17:26.659304Z", + "2009-12-20T19:17:54.202942Z", + "2009-12-20T19:18:21.255607Z", + "2009-12-20T19:18:48.806862Z", + "2009-12-20T19:19:15.856161Z", + "2009-12-20T19:19:43.406593Z", + "2009-12-20T19:20:10.458327Z", + "2009-12-20T19:20:38.007535Z", + "2009-12-20T19:21:05.055034Z", + "2009-12-20T19:21:32.600038Z", + "2009-12-20T19:21:59.654766Z", + "2009-12-20T19:22:27.202794Z", + "2009-12-20T19:22:54.256528Z", + "2009-12-20T19:23:21.799298Z", + "2009-12-20T19:23:48.853654Z", + "2009-12-20T19:24:16.401511Z", + "2009-12-20T19:24:43.454393Z", + "2009-12-20T19:25:10.999180Z", + "2009-12-20T19:25:38.055163Z", + "2009-12-20T19:26:05.607457Z", + "2009-12-20T19:26:32.653469Z", + "2009-12-20T19:27:00.198860Z", + "2009-12-20T19:27:27.257869Z", + "2009-12-20T19:27:54.798389Z", + "2009-12-20T19:28:21.853567Z", + "2009-12-20T19:28:49.397950Z", + "2009-12-20T19:29:16.454104Z", + "2009-12-20T19:29:43.998085Z", + "2009-12-20T19:30:11.054471Z", + "2009-12-20T19:30:38.597196Z", + "2009-12-20T19:31:05.655786Z", + "2009-12-20T19:31:33.197361Z", + "2009-12-20T19:32:00.257736Z", + "2009-12-20T19:32:27.797682Z", + "2009-12-20T19:32:54.855263Z", + "2009-12-20T19:33:22.399042Z", + "2009-12-20T19:33:49.457168Z", + "2009-12-20T19:34:16.998542Z", + "2009-12-20T19:34:44.058729Z", + "2009-12-20T19:35:11.597250Z", + "2009-12-20T19:35:38.657670Z", + "2009-12-20T19:36:06.200828Z", + "2009-12-20T19:36:33.255943Z", + "2009-12-20T19:37:00.795503Z", + "2009-12-20T19:37:27.856079Z", + "2009-12-20T19:37:55.395420Z", + "2009-12-20T19:38:22.452785Z", + "2009-12-20T19:38:50.001589Z", + "2009-12-20T19:39:17.051493Z", + "2009-12-20T19:39:44.595039Z", + "2009-12-20T19:40:11.646385Z", + "2009-12-20T19:40:39.193173Z", + "2009-12-20T19:41:06.256153Z", + "2009-12-20T19:41:33.798117Z", + "2009-12-20T19:42:00.854845Z", + "2009-12-20T19:42:28.393396Z", + 
"2009-12-20T19:42:55.456609Z", + "2009-12-20T19:43:22.993531Z", + "2009-12-20T19:43:50.056093Z", + "2009-12-20T19:44:17.596861Z", + "2009-12-20T19:44:44.652411Z", + "2009-12-20T19:45:12.193569Z", + "2009-12-20T19:45:39.247893Z", + "2009-12-20T19:46:06.792881Z", + "2009-12-20T19:46:33.848480Z", + "2009-12-20T20:09:19.897442Z", + "2009-12-20T20:09:46.946554Z", + "2009-12-20T20:10:14.488301Z", + "2009-12-20T20:10:41.539741Z", + "2009-12-20T20:11:09.088839Z", + "2009-12-20T20:11:36.141349Z", + "2009-12-20T20:12:03.691347Z", + "2009-12-20T20:12:30.744493Z", + "2009-12-20T20:12:58.292319Z", + "2009-12-20T20:13:25.340998Z", + "2009-12-20T20:13:52.884978Z", + "2009-12-20T20:14:19.940915Z", + "2009-12-20T20:14:47.484477Z", + "2009-12-20T20:15:14.544588Z", + "2009-12-20T20:15:42.087248Z", + "2009-12-20T20:16:09.139340Z", + "2009-12-20T20:16:36.687151Z", + "2009-12-20T20:17:03.740281Z", + "2009-12-20T20:17:31.285471Z", + "2009-12-20T20:17:58.343410Z", + "2009-12-20T20:18:25.886614Z", + "2009-12-20T20:18:52.941574Z", + "2009-12-20T20:19:20.484796Z", + "2009-12-20T20:19:47.539460Z", + "2009-12-20T20:20:15.082851Z", + "2009-12-20T20:20:42.141395Z", + "2009-12-20T20:21:09.691162Z", + "2009-12-20T20:21:36.741887Z", + "2009-12-20T20:22:04.282842Z", + "2009-12-20T20:22:31.344224Z", + "2009-12-20T20:22:58.882775Z", + "2009-12-20T20:23:25.944763Z", + "2009-12-20T20:23:53.484911Z", + "2009-12-20T20:24:20.533403Z", + "2009-12-20T20:24:48.081602Z", + "2009-12-20T20:25:15.133553Z", + "2009-12-20T20:25:42.682962Z", + "2009-12-20T20:26:09.734061Z", + "2009-12-20T20:26:37.285533Z", + "2009-12-20T20:27:04.334614Z", + "2009-12-20T20:27:31.883247Z", + "2009-12-20T20:27:58.935324Z", + "2009-12-20T20:28:26.481769Z", + "2009-12-20T20:28:53.534899Z", + "2009-12-20T20:29:21.088341Z", + "2009-12-20T20:29:48.135048Z", + "2009-12-20T20:30:15.680657Z", + "2009-12-20T20:30:42.734795Z", + "2009-12-20T20:31:10.279768Z", + "2009-12-20T20:31:37.336201Z", + "2009-12-20T20:32:04.880988Z", + 
"2009-12-20T20:32:31.935096Z", + "2009-12-20T20:32:59.479650Z", + "2009-12-20T20:33:26.535402Z", + "2009-12-20T20:33:54.080823Z", + "2009-12-20T20:34:21.134604Z", + "2009-12-20T20:34:48.685440Z", + "2009-12-20T20:35:15.733731Z", + "2009-12-20T20:35:43.279107Z", + "2009-12-20T20:36:10.333432Z", + "2009-12-20T20:36:37.877084Z", + "2009-12-20T20:37:04.933102Z", + "2009-12-20T20:37:32.485225Z", + "2009-12-20T20:37:59.533188Z", + "2009-12-20T20:38:27.076966Z", + "2009-12-20T20:38:54.132702Z", + "2009-12-20T20:39:21.677333Z", + "2009-12-20T20:39:48.733317Z", + "2009-12-20T20:40:16.280538Z", + "2009-12-20T20:40:43.332443Z", + "2009-12-20T20:41:10.875848Z", + "2009-12-20T20:41:37.932406Z", + "2009-12-20T20:53:00.680759Z", + "2009-12-20T20:53:27.728863Z", + "2009-12-20T20:53:55.275092Z", + "2009-12-20T20:54:22.328222Z", + "2009-12-20T20:54:49.873815Z", + "2009-12-20T20:55:16.930483Z", + "2009-12-20T20:55:44.471591Z", + "2009-12-20T20:56:11.528522Z", + "2009-12-20T20:56:39.078753Z", + "2009-12-20T20:57:06.127447Z", + "2009-12-20T20:57:33.671815Z", + "2009-12-20T20:58:00.727799Z", + "2009-12-20T20:58:28.270165Z", + "2009-12-20T20:58:55.326940Z", + "2009-12-20T20:59:22.878210Z", + "2009-12-20T20:59:49.925306Z", + "2009-12-20T21:00:17.470279Z", + "2009-12-20T21:00:44.525612Z", + "2009-12-20T21:01:12.069912Z", + "2009-12-20T21:01:39.126801Z", + "2009-12-20T21:02:06.670780Z", + "2009-12-20T21:03:01.272141Z", + "2009-12-20T21:03:28.326480Z", + "2009-12-20T21:03:55.874524Z", + "2009-12-20T21:04:22.927049Z", + "2009-12-20T21:04:50.473665Z", + "2009-12-20T21:05:17.526376Z", + "2009-12-20T21:05:45.074870Z", + "2009-12-20T21:06:12.126325Z", + "2009-12-20T21:06:39.667513Z", + "2009-12-20T21:07:06.728073Z", + "2009-12-20T21:07:34.267161Z", + "2009-12-20T21:08:01.325803Z", + "2009-12-20T21:08:28.867596Z", + "2009-12-20T21:08:55.925379Z", + "2009-12-20T21:09:23.467384Z", + "2009-12-20T21:09:50.533767Z", + "2009-12-20T21:10:18.066500Z", + "2009-12-20T21:10:45.126486Z", + 
"2009-12-20T21:11:12.380142Z", + "2009-12-20T21:11:39.731445Z", + "2009-12-20T21:12:07.266469Z", + "2009-12-20T21:12:34.326119Z", + "2009-12-20T21:13:01.867380Z", + "2009-12-20T21:13:28.925106Z", + "2009-12-20T21:13:56.466046Z", + "2009-12-20T21:14:23.525784Z", + "2009-12-20T21:14:51.066926Z", + "2009-12-20T21:15:18.124367Z", + "2009-12-20T21:15:45.664588Z", + "2009-12-20T21:16:12.724083Z", + "2009-12-20T21:16:40.265731Z", + "2009-12-20T21:17:07.324419Z", + "2009-12-20T21:17:34.867981Z", + "2009-12-20T21:18:01.923344Z", + "2009-12-20T21:18:29.468534Z", + "2009-12-20T21:18:56.522456Z", + "2009-12-20T21:19:24.066047Z", + "2009-12-20T21:19:51.122575Z", + "2009-12-20T21:20:18.663043Z", + "2009-12-20T21:20:45.723330Z", + "2009-12-20T21:21:13.262946Z", + "2009-12-20T21:21:40.321433Z", + "2009-12-20T21:22:07.864404Z", + "2009-12-20T21:22:34.920962Z", + "2009-12-20T21:23:02.464022Z", + "2009-12-20T21:23:29.528326Z", + "2009-12-20T21:23:57.061319Z", + "2009-12-20T21:24:24.126646Z", + "2009-12-20T21:24:51.662028Z", + "2009-12-20T21:25:18.719879Z", + "2009-12-20T21:48:04.755901Z", + "2009-12-20T21:48:31.812991Z", + "2009-12-20T21:48:59.359593Z", + "2009-12-20T21:49:26.413701Z", + "2009-12-20T21:49:53.955260Z", + "2009-12-20T21:50:21.017450Z", + "2009-12-20T21:50:48.553855Z", + "2009-12-20T21:51:15.614141Z", + "2009-12-20T21:51:43.153804Z", + "2009-12-20T21:52:10.211997Z", + "2009-12-20T21:52:37.751069Z", + "2009-12-20T21:53:04.812566Z", + "2009-12-20T21:53:32.352213Z", + "2009-12-20T21:53:59.411103Z", + "2009-12-20T21:54:26.958180Z", + "2009-12-20T21:54:54.010803Z", + "2009-12-20T21:55:21.559106Z", + "2009-12-20T21:55:48.611356Z", + "2009-12-20T21:56:16.152167Z", + "2009-12-20T21:56:43.208234Z", + "2009-12-20T21:57:10.752086Z", + "2009-12-20T21:57:37.815831Z", + "2009-12-20T21:58:05.352034Z", + "2009-12-20T21:58:32.410909Z", + "2009-12-20T21:58:59.950338Z", + "2009-12-20T21:59:27.018317Z", + "2009-12-20T21:59:54.550846Z", + "2009-12-20T22:00:21.612574Z", + 
"2009-12-20T22:00:49.151817Z", + "2009-12-20T22:01:16.209669Z", + "2009-12-20T22:01:43.751146Z", + "2009-12-20T22:02:10.808738Z", + "2009-12-20T22:02:38.351295Z", + "2009-12-20T22:03:05.405517Z", + "2009-12-20T22:03:32.950646Z", + "2009-12-20T22:04:00.006382Z", + "2009-12-20T22:04:27.553491Z", + "2009-12-20T22:04:54.608146Z", + "2009-12-20T22:05:22.154432Z", + "2009-12-20T22:05:49.207085Z", + "2009-12-20T22:06:16.749510Z", + "2009-12-20T22:06:43.805173Z", + "2009-12-20T22:07:11.349272Z", + "2009-12-20T22:07:38.404718Z", + "2009-12-20T22:08:05.949764Z", + "2009-12-20T22:08:33.006249Z", + "2009-12-20T22:09:00.545881Z", + "2009-12-20T22:09:27.607780Z", + "2009-12-20T22:09:55.146232Z", + "2009-12-20T22:10:22.207402Z", + "2009-12-20T22:10:49.748183Z", + "2009-12-20T22:11:16.805878Z", + "2009-12-20T22:11:44.347526Z", + "2009-12-20T22:12:11.406617Z", + "2009-12-20T22:12:38.945427Z", + "2009-12-20T22:13:06.006535Z", + "2009-12-20T22:13:33.552619Z", + "2009-12-20T22:14:00.604668Z", + "2009-12-20T22:14:28.146922Z", + "2009-12-20T22:14:55.206634Z", + "2009-12-20T22:15:22.742418Z", + "2009-12-20T22:15:49.806582Z", + "2009-12-20T22:16:17.345251Z", + "2009-12-20T22:16:44.407382Z", + "2009-12-20T22:17:11.945930Z", + "2009-12-20T22:17:39.004649Z", + "2009-12-20T22:18:06.544684Z", + "2009-12-20T22:18:33.604411Z", + "2009-12-20T22:19:01.145236Z", + "2009-12-20T22:19:28.208376Z", + "2009-12-20T22:19:55.745448Z", + "2009-12-20T22:20:22.805703Z", + "2009-12-20T22:31:45.542140Z", + "2009-12-20T22:32:12.601045Z", + "2009-12-20T22:32:40.144062Z", + "2009-12-20T22:33:07.200373Z", + "2009-12-20T22:33:34.742398Z", + "2009-12-20T22:34:01.801143Z", + "2009-12-20T22:34:29.341612Z", + "2009-12-20T22:34:56.399463Z", + "2009-12-20T22:35:23.942233Z", + "2009-12-20T22:35:51.000435Z", + "2009-12-20T22:36:18.541695Z", + "2009-12-20T22:36:45.600011Z", + "2009-12-20T22:37:13.142442Z", + "2009-12-20T22:37:40.199867Z", + "2009-12-20T22:38:07.741721Z", + "2009-12-20T22:38:34.799039Z", + 
"2009-12-20T22:39:02.340801Z", + "2009-12-20T22:39:29.399175Z", + "2009-12-20T22:41:18.598652Z", + "2009-12-20T22:41:46.145761Z", + "2009-12-20T22:42:13.199190Z", + "2009-12-20T22:42:40.741242Z", + "2009-12-20T22:43:07.804382Z", + "2009-12-20T22:43:35.338351Z", + "2009-12-20T22:44:02.395861Z", + "2009-12-20T22:44:29.942477Z", + "2009-12-20T22:44:57.001410Z", + "2009-12-20T22:45:24.538420Z", + "2009-12-20T22:45:51.595929Z", + "2009-12-20T22:46:19.142112Z", + "2009-12-20T22:46:46.196265Z", + "2009-12-20T22:47:13.737639Z", + "2009-12-20T22:47:40.796803Z", + "2009-12-20T22:48:08.340985Z", + "2009-12-20T22:48:35.396333Z", + "2009-12-20T22:49:02.944749Z", + "2009-12-20T22:49:29.995444Z", + "2009-12-20T22:49:57.536197Z", + "2009-12-20T22:50:24.595407Z", + "2009-12-20T22:50:52.136953Z", + "2009-12-20T22:51:19.195497Z", + "2009-12-20T22:51:46.736513Z", + "2009-12-20T22:52:13.797043Z", + "2009-12-20T22:52:41.337005Z", + "2009-12-20T22:53:08.396588Z", + "2009-12-20T22:53:35.936220Z", + "2009-12-20T22:54:02.997747Z", + "2009-12-20T22:54:30.539926Z", + "2009-12-20T22:54:57.597074Z", + "2009-12-20T22:55:25.135564Z", + "2009-12-20T22:55:52.191300Z", + "2009-12-20T22:56:19.736148Z", + "2009-12-20T22:56:46.798957Z", + "2009-12-20T22:57:14.335879Z", + "2009-12-20T22:57:41.400070Z", + "2009-12-20T22:58:08.935782Z", + "2009-12-20T22:58:35.990944Z", + "2009-12-20T22:59:03.533570Z", + "2009-12-20T22:59:30.592273Z", + "2009-12-20T22:59:58.133921Z", + "2009-12-20T23:00:25.192004Z", + "2009-12-20T23:00:52.734041Z", + "2009-12-20T23:01:19.790667Z", + "2009-12-20T23:01:47.333152Z", + "2009-12-20T23:02:14.390429Z", + "2009-12-20T23:02:41.933303Z", + "2009-12-20T23:03:08.992983Z", + "2009-12-20T23:03:36.532496Z", + "2009-12-20T23:04:03.590930Z", + "2009-12-20T23:27:22.125046Z", + "2009-12-20T23:27:49.182943Z", + "2009-12-20T23:28:16.726737Z", + "2009-12-20T23:28:43.787252Z", + "2009-12-20T23:29:11.325704Z", + "2009-12-20T23:29:38.381599Z", + "2009-12-20T23:30:05.924846Z", + 
"2009-12-20T23:30:32.982944Z", + "2009-12-20T23:31:00.524188Z", + "2009-12-20T23:31:27.584677Z", + "2009-12-20T23:31:55.126945Z", + "2009-12-20T23:32:22.182205Z", + "2009-12-20T23:32:49.724584Z", + "2009-12-20T23:33:16.779502Z", + "2009-12-20T23:33:44.323771Z", + "2009-12-20T23:34:11.380039Z", + "2009-12-20T23:34:38.925504Z", + "2009-12-20T23:35:05.981787Z", + "2009-12-20T23:35:33.524227Z", + "2009-12-20T23:36:00.580914Z", + "2009-12-20T23:36:28.123168Z", + "2009-12-20T23:36:55.185485Z", + "2009-12-20T23:37:22.728530Z", + "2009-12-20T23:37:49.782564Z", + "2009-12-20T23:38:17.323877Z", + "2009-12-20T23:38:44.385165Z", + "2009-12-20T23:39:11.928912Z", + "2009-12-20T23:39:38.981639Z", + "2009-12-20T23:40:06.524425Z", + "2009-12-20T23:40:33.584597Z", + "2009-12-20T23:41:01.124575Z", + "2009-12-20T23:41:28.177100Z", + "2009-12-20T23:41:55.722564Z", + "2009-12-20T23:42:22.784308Z", + "2009-12-20T23:42:50.322497Z", + "2009-12-20T23:43:17.378998Z", + "2009-12-20T23:43:44.924245Z", + "2009-12-20T23:44:11.978527Z", + "2009-12-20T23:44:39.523021Z", + "2009-12-20T23:45:06.578072Z", + "2009-12-20T23:45:34.125554Z", + "2009-12-20T23:46:01.183420Z", + "2009-12-20T23:46:28.719794Z", + "2009-12-20T23:46:55.773860Z", + "2009-12-20T23:47:23.325720Z", + "2009-12-20T23:47:50.374196Z", + "2009-12-20T23:48:17.919692Z", + "2009-12-20T23:48:44.977169Z", + "2009-12-20T23:49:12.520260Z", + "2009-12-20T23:49:39.576729Z", + "2009-12-20T23:50:07.120896Z", + "2009-12-20T23:50:34.176260Z", + "2009-12-20T23:51:01.719521Z", + "2009-12-20T23:51:28.775789Z", + "2009-12-20T23:51:56.330188Z", + "2009-12-20T23:52:23.379615Z", + "2009-12-20T23:52:50.919934Z", + "2009-12-20T23:53:17.976306Z", + "2009-12-20T23:53:45.523527Z", + "2009-12-20T23:54:12.578876Z", + "2009-12-20T23:54:40.118322Z", + "2009-12-20T23:55:07.174807Z", + "2009-12-20T23:55:34.721899Z", + "2009-12-20T23:56:01.773964Z", + "2009-12-20T23:56:29.317100Z", + "2009-12-20T23:56:56.374502Z", + "2009-12-20T23:57:23.917922Z", + 
"2009-12-20T23:57:50.974279Z", + "2009-12-20T23:58:18.520089Z", + "2009-12-20T23:58:45.573219Z", + "2009-12-20T23:59:13.117058Z", + "2009-12-20T23:59:40.179201Z", + "2009-12-21T00:11:02.918024Z", + "2009-12-21T00:11:29.973253Z", + "2009-12-21T00:11:57.515434Z", + "2009-12-21T00:12:24.570595Z", + "2009-12-21T00:12:52.117429Z", + "2009-12-21T00:13:19.177028Z", + "2009-12-21T00:13:46.713778Z", + "2009-12-21T00:14:13.776960Z", + "2009-12-21T00:14:41.316391Z", + "2009-12-21T00:15:08.378336Z", + "2009-12-21T00:15:35.913861Z", + "2009-12-21T00:16:02.968202Z", + "2009-12-21T00:16:30.513981Z", + "2009-12-21T00:16:57.568956Z", + "2009-12-21T00:17:25.113773Z", + "2009-12-21T00:17:52.169696Z", + "2009-12-21T00:18:19.716902Z", + "2009-12-21T00:18:46.769846Z", + "2009-12-21T00:19:14.311510Z", + "2009-12-21T00:19:41.371423Z", + "2009-12-21T00:20:08.912408Z", + "2009-12-21T00:20:35.968331Z", + "2009-12-21T00:21:03.510866Z", + "2009-12-21T00:21:30.568497Z", + "2009-12-21T00:22:25.168772Z", + "2009-12-21T00:22:52.709412Z", + "2009-12-21T00:23:19.767495Z", + "2009-12-21T00:23:47.310885Z", + "2009-12-21T00:24:14.367691Z", + "2009-12-21T00:24:41.906687Z", + "2009-12-21T00:25:08.968183Z", + "2009-12-21T00:25:36.505943Z", + "2009-12-21T00:26:03.566519Z", + "2009-12-21T00:26:31.109392Z", + "2009-12-21T00:26:58.167041Z", + "2009-12-21T00:27:25.710721Z", + "2009-12-21T00:27:52.768200Z", + "2009-12-21T00:28:20.309429Z", + "2009-12-21T00:28:47.365108Z", + "2009-12-21T00:29:14.910572Z", + "2009-12-21T00:29:41.965040Z", + "2009-12-21T00:30:09.507697Z", + "2009-12-21T00:30:36.567611Z", + "2009-12-21T00:31:04.110671Z", + "2009-12-21T00:31:31.164503Z", + "2009-12-21T00:31:58.707750Z", + "2009-12-21T00:32:25.769680Z", + "2009-12-21T00:32:53.308086Z", + "2009-12-21T00:33:20.364431Z", + "2009-12-21T00:33:47.907631Z", + "2009-12-21T00:34:14.963077Z", + "2009-12-21T00:34:42.511381Z", + "2009-12-21T00:35:09.561816Z", + "2009-12-21T00:35:37.107916Z", + "2009-12-21T00:36:04.159763Z", + 
"2009-12-21T00:36:31.705445Z", + "2009-12-21T00:36:58.767793Z", + "2009-12-21T00:37:26.306526Z", + "2009-12-21T00:37:53.365462Z", + "2009-12-21T00:38:20.905048Z", + "2009-12-21T00:38:47.963752Z", + "2009-12-21T00:39:15.508037Z", + "2009-12-21T00:39:42.561481Z", + "2009-12-21T00:40:10.105147Z", + "2009-12-21T00:40:37.162407Z", + "2009-12-21T00:41:04.702226Z", + "2009-12-21T00:41:31.763072Z", + "2009-12-21T00:41:59.303167Z", + "2009-12-21T00:42:26.361019Z", + "2009-12-21T00:42:53.902868Z", + "2009-12-21T00:43:20.964783Z", + "2009-12-21T01:06:07.095789Z", + "2009-12-21T01:06:34.154633Z", + "2009-12-21T01:07:01.695660Z", + "2009-12-21T01:07:28.753401Z", + "2009-12-21T01:07:56.293779Z", + "2009-12-21T01:08:23.353676Z", + "2009-12-21T01:08:50.902383Z", + "2009-12-21T01:09:17.954975Z", + "2009-12-21T01:09:45.496811Z", + "2009-12-21T01:10:12.554738Z", + "2009-12-21T01:10:40.095925Z", + "2009-12-21T01:11:07.153586Z", + "2009-12-21T01:11:34.695623Z", + "2009-12-21T01:12:01.759428Z", + "2009-12-21T01:12:29.295012Z", + "2009-12-21T01:12:56.352102Z", + "2009-12-21T01:13:23.894355Z", + "2009-12-21T01:13:50.951213Z", + "2009-12-21T01:14:18.496491Z", + "2009-12-21T01:14:45.558188Z", + "2009-12-21T01:15:13.097215Z", + "2009-12-21T01:15:40.155702Z", + "2009-12-21T01:16:07.693721Z", + "2009-12-21T01:16:34.752580Z", + "2009-12-21T01:17:02.292645Z", + "2009-12-21T01:17:29.353117Z", + "2009-12-21T01:17:56.893959Z", + "2009-12-21T01:18:23.950415Z", + "2009-12-21T01:18:51.491690Z", + "2009-12-21T01:19:18.549757Z", + "2009-12-21T01:19:46.090816Z", + "2009-12-21T01:20:13.150729Z", + "2009-12-21T01:20:40.690502Z", + "2009-12-21T01:21:07.749437Z", + "2009-12-21T01:21:35.290264Z", + "2009-12-21T01:22:02.348751Z", + "2009-12-21T01:22:29.896448Z", + "2009-12-21T01:22:56.949475Z", + "2009-12-21T01:23:24.494582Z", + "2009-12-21T01:23:51.548213Z", + "2009-12-21T01:24:19.091071Z", + "2009-12-21T01:24:46.148766Z", + "2009-12-21T01:25:13.692618Z", + "2009-12-21T01:25:40.747474Z", + 
"2009-12-21T01:26:08.292969Z", + "2009-12-21T01:26:35.347240Z", + "2009-12-21T01:27:02.890063Z", + "2009-12-21T01:27:29.947775Z", + "2009-12-21T01:27:57.489780Z", + "2009-12-21T01:28:24.547041Z", + "2009-12-21T01:28:52.090318Z", + "2009-12-21T01:29:19.143718Z", + "2009-12-21T01:29:46.690095Z", + "2009-12-21T01:30:13.746923Z", + "2009-12-21T01:30:41.289207Z", + "2009-12-21T01:31:08.345832Z", + "2009-12-21T01:31:35.889558Z", + "2009-12-21T01:32:02.951675Z", + "2009-12-21T01:32:30.488887Z", + "2009-12-21T01:32:57.545992Z", + "2009-12-21T01:33:25.091674Z", + "2009-12-21T01:33:52.145320Z", + "2009-12-21T01:34:19.690180Z", + "2009-12-21T01:34:46.744245Z", + "2009-12-21T01:35:14.293773Z", + "2009-12-21T01:35:41.352693Z", + "2009-12-21T01:36:08.886276Z", + "2009-12-21T01:36:35.945165Z", + "2009-12-21T01:37:03.483400Z", + "2009-12-21T01:37:30.543903Z", + "2009-12-21T01:37:58.087413Z", + "2009-12-21T01:38:25.144659Z", + "2009-12-21T01:49:47.884086Z", + "2009-12-21T01:50:14.941564Z", + "2009-12-21T01:50:42.491138Z", + "2009-12-21T01:51:09.547003Z", + "2009-12-21T01:51:37.090870Z", + "2009-12-21T01:52:04.141739Z", + "2009-12-21T01:52:31.683154Z", + "2009-12-21T01:52:58.738631Z", + "2009-12-21T01:53:26.283739Z", + "2009-12-21T01:53:53.340844Z", + "2009-12-21T01:54:20.883485Z", + "2009-12-21T01:54:47.940389Z", + "2009-12-21T01:55:15.482440Z", + "2009-12-21T01:55:42.540942Z", + "2009-12-21T01:56:10.081814Z", + "2009-12-21T01:56:37.144334Z", + "2009-12-21T01:57:04.682105Z", + "2009-12-21T01:57:31.739241Z", + "2009-12-21T01:57:59.282271Z", + "2009-12-21T01:58:26.339205Z", + "2009-12-21T01:58:53.881257Z", + "2009-12-21T01:59:20.938936Z", + "2009-12-21T01:59:48.481421Z", + "2009-12-21T02:00:15.537055Z", + "2009-12-21T02:00:43.080780Z", + "2009-12-21T02:01:10.145316Z", + "2009-12-21T02:01:37.680094Z", + "2009-12-21T02:02:04.745280Z", + "2009-12-21T02:02:32.280677Z", + "2009-12-21T02:02:59.343817Z", + "2009-12-21T02:03:26.879407Z", + "2009-12-21T02:03:53.940075Z", + 
"2009-12-21T02:04:21.480574Z", + "2009-12-21T02:04:48.545715Z", + "2009-12-21T02:05:16.080043Z", + "2009-12-21T02:05:43.144594Z", + "2009-12-21T02:06:10.683419Z", + "2009-12-21T02:06:37.734954Z", + "2009-12-21T02:07:05.279148Z", + "2009-12-21T02:07:32.339837Z", + "2009-12-21T02:07:59.884092Z", + "2009-12-21T02:08:26.938173Z", + "2009-12-21T02:08:54.478006Z", + "2009-12-21T02:09:21.536507Z", + "2009-12-21T02:09:49.083093Z", + "2009-12-21T02:10:16.135836Z", + "2009-12-21T02:10:43.683720Z", + "2009-12-21T02:11:10.740608Z", + "2009-12-21T02:11:38.279232Z", + "2009-12-21T02:12:05.335501Z", + "2009-12-21T02:12:32.878374Z", + "2009-12-21T02:12:59.935247Z", + "2009-12-21T02:13:27.479113Z", + "2009-12-21T02:13:54.536390Z", + "2009-12-21T02:14:22.075618Z", + "2009-12-21T02:14:49.135300Z", + "2009-12-21T02:15:16.674125Z", + "2009-12-21T02:15:43.735651Z", + "2009-12-21T02:16:11.276881Z", + "2009-12-21T02:16:38.332000Z", + "2009-12-21T02:17:05.876815Z", + "2009-12-21T02:17:32.934509Z", + "2009-12-21T02:18:00.487248Z", + "2009-12-21T02:18:27.534054Z", + "2009-12-21T02:18:55.074648Z", + "2009-12-21T02:19:22.134360Z", + "2009-12-21T02:19:49.675015Z", + "2009-12-21T02:20:16.735115Z", + "2009-12-21T02:20:44.274933Z", + "2009-12-21T02:21:11.336041Z", + "2009-12-21T02:21:38.879520Z", + "2009-12-21T02:22:05.932173Z", + "2009-12-21T02:44:52.069998Z", + "2009-12-21T02:45:19.126654Z", + "2009-12-21T02:45:46.672464Z", + "2009-12-21T02:46:13.731039Z", + "2009-12-21T02:46:41.268255Z", + "2009-12-21T02:47:08.327358Z", + "2009-12-21T02:47:35.869089Z", + "2009-12-21T02:48:02.926484Z", + "2009-12-21T02:48:30.467626Z", + "2009-12-21T02:48:57.529426Z", + "2009-12-21T02:49:25.069576Z", + "2009-12-21T02:49:52.124535Z", + "2009-12-21T02:50:19.668313Z", + "2009-12-21T02:50:46.727881Z", + "2009-12-21T02:51:14.268133Z", + "2009-12-21T02:51:41.329846Z", + "2009-12-21T02:52:08.868827Z", + "2009-12-21T02:52:35.925327Z", + "2009-12-21T02:53:03.467364Z", + "2009-12-21T02:53:30.524671Z", + 
"2009-12-21T02:53:58.070771Z", + "2009-12-21T02:54:25.123797Z", + "2009-12-21T02:54:52.672550Z", + "2009-12-21T02:55:19.729206Z", + "2009-12-21T02:55:47.266201Z", + "2009-12-21T02:56:14.331141Z", + "2009-12-21T02:56:41.866278Z", + "2009-12-21T02:57:08.929894Z", + "2009-12-21T02:57:36.465823Z", + "2009-12-21T02:58:03.529625Z", + "2009-12-21T02:58:31.066031Z", + "2009-12-21T02:58:58.126549Z", + "2009-12-21T02:59:25.665937Z", + "2009-12-21T02:59:52.720821Z", + "2009-12-21T03:00:20.266243Z", + "2009-12-21T03:00:47.326214Z", + "2009-12-21T03:01:14.871434Z", + "2009-12-21T03:01:41.922517Z", + "2009-12-21T03:02:09.464553Z", + "2009-12-21T03:02:36.521054Z", + "2009-12-21T03:03:04.063524Z", + "2009-12-21T03:03:31.123018Z", + "2009-12-21T03:03:58.667790Z", + "2009-12-21T03:04:25.722750Z", + "2009-12-21T03:04:53.263146Z", + "2009-12-21T03:05:20.325134Z", + "2009-12-21T03:05:47.863136Z", + "2009-12-21T03:06:14.920041Z", + "2009-12-21T03:06:42.466746Z", + "2009-12-21T03:07:09.522812Z", + "2009-12-21T03:07:37.063654Z", + "2009-12-21T03:08:04.119347Z", + "2009-12-21T03:08:31.661295Z", + "2009-12-21T03:08:58.720693Z", + "2009-12-21T03:09:26.261229Z", + "2009-12-21T03:09:53.322612Z", + "2009-12-21T03:10:20.864305Z", + "2009-12-21T03:10:47.917766Z", + "2009-12-21T03:11:15.467609Z", + "2009-12-21T03:11:42.518692Z", + "2009-12-21T03:12:10.065119Z", + "2009-12-21T03:12:37.118857Z", + "2009-12-21T03:13:04.665288Z", + "2009-12-21T03:13:31.719148Z", + "2009-12-21T03:13:59.260320Z", + "2009-12-21T03:14:26.319685Z", + "2009-12-21T03:14:53.859648Z", + "2009-12-21T03:15:20.918439Z", + "2009-12-21T03:15:48.459716Z", + "2009-12-21T03:16:15.523429Z", + "2009-12-21T03:16:43.061060Z", + "2009-12-21T03:17:10.117948Z", + "2009-12-21T03:28:32.858421Z", + "2009-12-21T03:28:59.913275Z", + "2009-12-21T03:29:27.463209Z", + "2009-12-21T03:29:54.518930Z", + "2009-12-21T03:30:22.057077Z", + "2009-12-21T03:30:49.113449Z", + "2009-12-21T03:31:16.656463Z", + "2009-12-21T03:31:43.716996Z", + 
"2009-12-21T03:32:11.253934Z", + "2009-12-21T03:32:38.322854Z", + "2009-12-21T03:33:05.857977Z", + "2009-12-21T03:33:32.916163Z", + "2009-12-21T03:34:00.454683Z", + "2009-12-21T03:34:27.514080Z", + "2009-12-21T03:34:55.055468Z", + "2009-12-21T03:35:22.115626Z", + "2009-12-21T03:35:49.657030Z", + "2009-12-21T03:36:16.713154Z", + "2009-12-21T03:36:44.254818Z", + "2009-12-21T03:37:11.310497Z", + "2009-12-21T03:37:38.854952Z", + "2009-12-21T03:38:05.911236Z", + "2009-12-21T03:38:33.458577Z", + "2009-12-21T03:39:00.514628Z", + "2009-12-21T03:39:28.052258Z", + "2009-12-21T03:39:55.108138Z", + "2009-12-21T03:40:22.656943Z", + "2009-12-21T03:40:49.708041Z", + "2009-12-21T03:41:17.255077Z", + "2009-12-21T03:41:44.309790Z", + "2009-12-21T03:42:11.853442Z", + "2009-12-21T03:42:38.910544Z", + "2009-12-21T03:43:06.453965Z", + "2009-12-21T03:43:33.509096Z", + "2009-12-21T03:44:01.053712Z", + "2009-12-21T03:44:28.110038Z", + "2009-12-21T03:44:55.655084Z", + "2009-12-21T03:45:22.712189Z", + "2009-12-21T03:45:50.252135Z", + "2009-12-21T03:46:17.309299Z", + "2009-12-21T03:46:44.852704Z", + "2009-12-21T03:47:11.911637Z", + "2009-12-21T03:47:39.452823Z", + "2009-12-21T03:48:06.509880Z", + "2009-12-21T03:48:34.051052Z", + "2009-12-21T03:49:01.108231Z", + "2009-12-21T03:49:28.650814Z", + "2009-12-21T03:49:55.707961Z", + "2009-12-21T03:50:23.257691Z", + "2009-12-21T03:50:50.312160Z", + "2009-12-21T03:51:17.850463Z", + "2009-12-21T03:51:44.911629Z", + "2009-12-21T03:52:12.450769Z", + "2009-12-21T03:52:39.511980Z", + "2009-12-21T03:53:07.051912Z", + "2009-12-21T03:53:34.109479Z", + "2009-12-21T03:54:01.649642Z", + "2009-12-21T03:54:28.706805Z", + "2009-12-21T03:54:56.250568Z", + "2009-12-21T03:55:23.306708Z", + "2009-12-21T03:55:50.851278Z", + "2009-12-21T03:56:17.906036Z", + "2009-12-21T03:56:45.451800Z", + "2009-12-21T03:57:12.505379Z", + "2009-12-21T03:57:40.048709Z", + "2009-12-21T03:58:07.106416Z", + "2009-12-21T03:58:34.650411Z", + "2009-12-21T03:59:01.709016Z", + 
"2009-12-21T03:59:29.247814Z", + "2009-12-21T03:59:56.306963Z", + "2009-12-21T04:00:23.848508Z", + "2009-12-21T04:00:50.904198Z", + "2009-12-21T04:23:37.042430Z", + "2009-12-21T04:24:04.105337Z", + "2009-12-21T04:24:31.639840Z", + "2009-12-21T04:24:58.697143Z", + "2009-12-21T04:25:26.239556Z", + "2009-12-21T04:25:53.300147Z", + "2009-12-21T04:26:20.838697Z", + "2009-12-21T04:26:47.898668Z", + "2009-12-21T04:27:15.440431Z", + "2009-12-21T04:27:42.500805Z", + "2009-12-21T04:28:10.038378Z", + "2009-12-21T04:28:37.100536Z", + "2009-12-21T04:29:04.638638Z", + "2009-12-21T04:29:31.701850Z", + "2009-12-21T04:29:59.240199Z", + "2009-12-21T04:30:26.294353Z", + "2009-12-21T04:30:53.838907Z", + "2009-12-21T04:31:20.893619Z", + "2009-12-21T04:31:48.438418Z", + "2009-12-21T04:32:15.495368Z", + "2009-12-21T04:32:43.045227Z", + "2009-12-21T04:33:10.095922Z", + "2009-12-21T04:33:37.636721Z", + "2009-12-21T04:34:04.696520Z", + "2009-12-21T04:34:32.242326Z", + "2009-12-21T04:34:59.292591Z", + "2009-12-21T04:35:26.837549Z", + "2009-12-21T04:35:53.902793Z", + "2009-12-21T04:36:21.437079Z", + "2009-12-21T04:36:48.493264Z", + "2009-12-21T04:37:16.038097Z", + "2009-12-21T04:37:43.091987Z", + "2009-12-21T04:38:10.637642Z", + "2009-12-21T04:38:37.691749Z", + "2009-12-21T04:39:05.240367Z", + "2009-12-21T04:39:32.292473Z", + "2009-12-21T04:39:59.835922Z", + "2009-12-21T04:40:26.892825Z", + "2009-12-21T04:40:54.440668Z", + "2009-12-21T04:41:21.492184Z", + "2009-12-21T04:41:49.037777Z", + "2009-12-21T04:42:16.090892Z", + "2009-12-21T04:42:43.632048Z", + "2009-12-21T04:43:10.691229Z", + "2009-12-21T04:43:38.231997Z", + "2009-12-21T04:44:05.298467Z", + "2009-12-21T04:44:32.834784Z", + "2009-12-21T04:44:59.891528Z", + "2009-12-21T04:45:27.433104Z", + "2009-12-21T04:45:54.490670Z", + "2009-12-21T04:46:22.030850Z", + "2009-12-21T04:46:49.090603Z", + "2009-12-21T04:47:16.639065Z", + "2009-12-21T04:47:43.690163Z", + "2009-12-21T04:48:11.234361Z", + "2009-12-21T04:48:38.290686Z", + 
"2009-12-21T04:49:05.835534Z", + "2009-12-21T04:49:32.889874Z", + "2009-12-21T04:50:00.436148Z", + "2009-12-21T04:50:27.491854Z", + "2009-12-21T04:50:55.033011Z", + "2009-12-21T04:51:22.088778Z", + "2009-12-21T04:51:49.634743Z", + "2009-12-21T04:52:16.688711Z", + "2009-12-21T04:52:44.235343Z", + "2009-12-21T04:53:11.289093Z", + "2009-12-21T04:53:38.832758Z", + "2009-12-21T04:54:05.889056Z", + "2009-12-21T04:54:33.437085Z", + "2009-12-21T04:55:00.488106Z", + "2009-12-21T04:55:28.034210Z", + "2009-12-21T04:55:55.088782Z", + "2009-12-21T05:07:17.829225Z", + "2009-12-21T05:07:44.884991Z", + "2009-12-21T05:08:12.429607Z", + "2009-12-21T05:08:39.484209Z", + "2009-12-21T05:09:07.029974Z", + "2009-12-21T05:09:34.084127Z", + "2009-12-21T05:10:01.629316Z", + "2009-12-21T05:10:28.684216Z", + "2009-12-21T05:10:56.228923Z", + "2009-12-21T05:11:23.288505Z", + "2009-12-21T05:11:50.828080Z", + "2009-12-21T05:12:17.882545Z", + "2009-12-21T05:12:45.428356Z", + "2009-12-21T05:13:12.484399Z", + "2009-12-21T05:13:40.028118Z", + "2009-12-21T05:14:07.082533Z", + "2009-12-21T05:14:34.627351Z", + "2009-12-21T05:15:01.682885Z", + "2009-12-21T05:15:29.228308Z", + "2009-12-21T05:15:56.282026Z", + "2009-12-21T05:16:23.828039Z", + "2009-12-21T05:16:50.882595Z", + "2009-12-21T05:17:18.432640Z", + "2009-12-21T05:17:45.482559Z", + "2009-12-21T05:18:13.025634Z", + "2009-12-21T05:18:40.081655Z", + "2009-12-21T05:19:07.628318Z", + "2009-12-21T05:19:34.681634Z", + "2009-12-21T05:20:02.223955Z", + "2009-12-21T05:20:29.282389Z", + "2009-12-21T05:20:56.828819Z", + "2009-12-21T05:21:23.881531Z", + "2009-12-21T05:21:51.430163Z", + "2009-12-21T05:22:18.480269Z", + "2009-12-21T05:22:46.028587Z", + "2009-12-21T05:23:13.080590Z", + "2009-12-21T05:23:40.622196Z", + "2009-12-21T05:24:07.681764Z", + "2009-12-21T05:24:35.227760Z", + "2009-12-21T05:25:02.280704Z", + "2009-12-21T05:25:29.822755Z", + "2009-12-21T05:25:56.879644Z", + "2009-12-21T05:26:24.422704Z", + "2009-12-21T05:26:51.479174Z", + 
"2009-12-21T05:27:19.022838Z", + "2009-12-21T05:27:46.084784Z", + "2009-12-21T05:28:13.623304Z", + "2009-12-21T05:28:40.677644Z", + "2009-12-21T05:29:08.238868Z", + "2009-12-21T05:29:35.279966Z", + "2009-12-21T05:30:02.822048Z", + "2009-12-21T05:30:29.878518Z", + "2009-12-21T05:30:57.420958Z", + "2009-12-21T05:31:24.477476Z", + "2009-12-21T05:31:52.021936Z", + "2009-12-21T05:32:19.077780Z", + "2009-12-21T05:32:46.622049Z", + "2009-12-21T05:33:13.677248Z", + "2009-12-21T05:33:41.221849Z", + "2009-12-21T05:34:08.277786Z", + "2009-12-21T05:34:35.826880Z", + "2009-12-21T05:35:02.881737Z", + "2009-12-21T05:35:30.419368Z", + "2009-12-21T05:35:57.476412Z", + "2009-12-21T05:36:25.021244Z", + "2009-12-21T05:36:52.075538Z", + "2009-12-21T05:37:19.618987Z", + "2009-12-21T05:37:46.676480Z", + "2009-12-21T05:38:14.219466Z", + "2009-12-21T05:38:41.275591Z", + "2009-12-21T05:39:08.819198Z", + "2009-12-21T05:39:35.875771Z", + "2009-12-21T06:02:54.213325Z", + "2009-12-21T06:03:21.268643Z", + "2009-12-21T06:03:48.812308Z", + "2009-12-21T06:04:15.868872Z", + "2009-12-21T06:04:43.412571Z", + "2009-12-21T06:05:10.473070Z", + "2009-12-21T06:05:38.015914Z", + "2009-12-21T06:06:05.065713Z", + "2009-12-21T06:06:32.610501Z", + "2009-12-21T06:06:59.664437Z", + "2009-12-21T06:07:27.216886Z", + "2009-12-21T06:07:54.263966Z", + "2009-12-21T06:08:21.814891Z", + "2009-12-21T06:08:48.868119Z", + "2009-12-21T06:09:16.415698Z", + "2009-12-21T06:09:43.466454Z", + "2009-12-21T06:10:11.017058Z", + "2009-12-21T06:10:38.067164Z", + "2009-12-21T06:11:05.617783Z", + "2009-12-21T06:11:32.667485Z", + "2009-12-21T06:12:00.207431Z", + "2009-12-21T06:12:27.266394Z", + "2009-12-21T06:12:54.809149Z", + "2009-12-21T06:13:21.866313Z", + "2009-12-21T06:13:49.408074Z", + "2009-12-21T06:14:16.466029Z", + "2009-12-21T06:14:44.007805Z", + "2009-12-21T06:15:11.065977Z", + "2009-12-21T06:15:38.612175Z", + "2009-12-21T06:16:05.665910Z", + "2009-12-21T06:16:33.206896Z", + "2009-12-21T06:17:00.265021Z", + 
"2009-12-21T06:17:27.806969Z", + "2009-12-21T06:17:54.868831Z", + "2009-12-21T06:18:22.406298Z", + "2009-12-21T06:18:49.465290Z", + "2009-12-21T06:19:17.006866Z", + "2009-12-21T06:19:44.064866Z", + "2009-12-21T06:20:11.606080Z", + "2009-12-21T06:20:38.664396Z", + "2009-12-21T06:21:06.206778Z", + "2009-12-21T06:21:33.264686Z", + "2009-12-21T06:22:00.806521Z", + "2009-12-21T06:22:27.870251Z", + "2009-12-21T06:22:55.407622Z", + "2009-12-21T06:23:22.463993Z", + "2009-12-21T06:23:50.007353Z", + "2009-12-21T06:24:17.063105Z", + "2009-12-21T06:24:44.606165Z", + "2009-12-21T06:25:11.665055Z", + "2009-12-21T06:25:39.206987Z", + "2009-12-21T06:26:06.262381Z", + "2009-12-21T06:26:33.807473Z", + "2009-12-21T06:27:00.862733Z", + "2009-12-21T06:27:28.403187Z", + "2009-12-21T06:27:55.463286Z", + "2009-12-21T06:28:23.003957Z", + "2009-12-21T06:28:50.061591Z", + "2009-12-21T06:29:17.605182Z", + "2009-12-21T06:29:44.661368Z", + "2009-12-21T06:30:12.205116Z", + "2009-12-21T06:30:39.261705Z", + "2009-12-21T06:31:06.804562Z", + "2009-12-21T06:31:33.861467Z", + "2009-12-21T06:32:01.402810Z", + "2009-12-21T06:32:28.462609Z", + "2009-12-21T06:32:56.003379Z", + "2009-12-21T06:33:23.061705Z", + "2009-12-21T06:33:50.604133Z", + "2009-12-21T06:34:17.661281Z", + "2009-12-21T06:34:45.203073Z", + "2009-12-21T06:35:12.261230Z", + "2009-12-21T06:46:35.000846Z", + "2009-12-21T06:47:02.058541Z", + "2009-12-21T06:47:29.600205Z", + "2009-12-21T06:47:56.656070Z", + "2009-12-21T06:48:24.200670Z", + "2009-12-21T06:48:51.257011Z", + "2009-12-21T06:49:18.800604Z", + "2009-12-21T06:49:45.856743Z", + "2009-12-21T06:50:13.403592Z", + "2009-12-21T06:50:40.456288Z", + "2009-12-21T06:51:07.999693Z", + "2009-12-21T06:51:35.056484Z", + "2009-12-21T06:52:02.603158Z", + "2009-12-21T06:52:29.662264Z", + "2009-12-21T06:52:57.199450Z", + "2009-12-21T06:53:24.261266Z", + "2009-12-21T06:53:51.800763Z", + "2009-12-21T06:54:18.851040Z", + "2009-12-21T06:54:46.397356Z", + "2009-12-21T06:55:13.455037Z", + 
"2009-12-21T06:55:40.996312Z", + "2009-12-21T06:56:08.054643Z", + "2009-12-21T06:56:35.595424Z", + "2009-12-21T06:57:02.655150Z", + "2009-12-21T06:57:30.196380Z", + "2009-12-21T06:57:57.253827Z", + "2009-12-21T06:58:24.795769Z", + "2009-12-21T06:58:51.853666Z", + "2009-12-21T06:59:19.391669Z", + "2009-12-21T06:59:46.454189Z", + "2009-12-21T07:00:13.996339Z", + "2009-12-21T07:00:41.054959Z", + "2009-12-21T07:01:08.595511Z", + "2009-12-21T07:01:35.659298Z", + "2009-12-21T07:02:03.195734Z", + "2009-12-21T07:02:30.253599Z", + "2009-12-21T07:02:57.794643Z", + "2009-12-21T07:03:24.855348Z", + "2009-12-21T07:03:52.395729Z", + "2009-12-21T07:04:19.453683Z", + "2009-12-21T07:04:46.995114Z", + "2009-12-21T07:05:14.055819Z", + "2009-12-21T07:05:41.594225Z", + "2009-12-21T07:06:08.651517Z", + "2009-12-21T07:06:36.194189Z", + "2009-12-21T07:07:03.251435Z", + "2009-12-21T07:07:30.793457Z", + "2009-12-21T07:07:57.852377Z", + "2009-12-21T07:08:25.393450Z", + "2009-12-21T07:08:52.450309Z", + "2009-12-21T07:09:19.991942Z", + "2009-12-21T07:09:47.050211Z", + "2009-12-21T07:10:14.590619Z", + "2009-12-21T07:10:41.655387Z", + "2009-12-21T07:11:09.191621Z", + "2009-12-21T07:11:36.248045Z", + "2009-12-21T07:12:03.792532Z", + "2009-12-21T07:12:30.850181Z", + "2009-12-21T07:12:58.398115Z", + "2009-12-21T07:13:25.449712Z", + "2009-12-21T07:13:52.999628Z", + "2009-12-21T07:14:20.052996Z", + "2009-12-21T07:14:47.591433Z", + "2009-12-21T07:15:14.647888Z", + "2009-12-21T07:15:42.192877Z", + "2009-12-21T07:16:09.248194Z", + "2009-12-21T07:16:36.791368Z", + "2009-12-21T07:17:03.852347Z", + "2009-12-21T07:17:31.395319Z", + "2009-12-21T07:17:58.449038Z", + "2009-12-21T07:18:25.991001Z", + "2009-12-21T07:18:53.050352Z", + "2009-12-21T07:41:39.188077Z", + "2009-12-21T07:42:06.246232Z", + "2009-12-21T07:42:33.789592Z", + "2009-12-21T07:43:00.840070Z", + "2009-12-21T07:43:28.388331Z", + "2009-12-21T07:43:55.439847Z", + "2009-12-21T07:44:22.980803Z", + "2009-12-21T07:44:50.039982Z", + 
"2009-12-21T07:45:17.580842Z", + "2009-12-21T07:45:44.639699Z", + "2009-12-21T07:46:12.182857Z", + "2009-12-21T07:46:39.240765Z", + "2009-12-21T07:47:06.781907Z", + "2009-12-21T07:47:33.839660Z", + "2009-12-21T07:48:01.381296Z", + "2009-12-21T07:48:28.439980Z", + "2009-12-21T07:48:55.980434Z", + "2009-12-21T07:49:23.038083Z", + "2009-12-21T07:49:50.577312Z", + "2009-12-21T07:50:17.638032Z", + "2009-12-21T07:50:45.179882Z", + "2009-12-21T07:51:12.242805Z", + "2009-12-21T07:51:39.789391Z", + "2009-12-21T07:52:06.844693Z", + "2009-12-21T07:52:34.380250Z", + "2009-12-21T07:53:01.436994Z", + "2009-12-21T07:53:28.979046Z", + "2009-12-21T07:53:56.036136Z", + "2009-12-21T07:54:23.583126Z", + "2009-12-21T07:54:50.638877Z", + "2009-12-21T07:55:18.183230Z", + "2009-12-21T07:55:45.235165Z", + "2009-12-21T07:56:12.782661Z", + "2009-12-21T07:56:39.839131Z", + "2009-12-21T07:57:07.380877Z", + "2009-12-21T07:57:34.438258Z", + "2009-12-21T07:58:01.978421Z", + "2009-12-21T07:58:29.038780Z", + "2009-12-21T07:58:56.582155Z", + "2009-12-21T07:59:23.634742Z", + "2009-12-21T07:59:51.176949Z", + "2009-12-21T08:00:18.240507Z", + "2009-12-21T08:00:45.776479Z", + "2009-12-21T08:01:12.833584Z", + "2009-12-21T08:01:40.377237Z", + "2009-12-21T08:02:07.436713Z", + "2009-12-21T08:02:34.977296Z", + "2009-12-21T08:03:02.033435Z", + "2009-12-21T08:03:29.577678Z", + "2009-12-21T08:03:56.634220Z", + "2009-12-21T08:04:24.177410Z", + "2009-12-21T08:04:51.233967Z", + "2009-12-21T08:05:18.780196Z", + "2009-12-21T08:05:45.833713Z", + "2009-12-21T08:06:13.378314Z", + "2009-12-21T08:06:40.435447Z", + "2009-12-21T08:07:07.979845Z", + "2009-12-21T08:07:35.038389Z", + "2009-12-21T08:08:02.575791Z", + "2009-12-21T08:08:29.629510Z", + "2009-12-21T08:08:57.177209Z", + "2009-12-21T08:09:24.238720Z", + "2009-12-21T08:09:51.774334Z", + "2009-12-21T08:10:18.832231Z", + "2009-12-21T08:10:46.374365Z", + "2009-12-21T08:11:13.432970Z", + "2009-12-21T08:11:40.974405Z", + "2009-12-21T08:12:08.028497Z", + 
"2009-12-21T08:12:35.575735Z", + "2009-12-21T08:13:02.630059Z", + "2009-12-21T08:13:30.174690Z", + "2009-12-21T08:13:57.234428Z", + "2009-12-21T08:25:19.973936Z", + "2009-12-21T08:25:47.026806Z", + "2009-12-21T08:26:14.569964Z", + "2009-12-21T08:26:41.627980Z", + "2009-12-21T08:27:09.169613Z", + "2009-12-21T08:27:36.224066Z", + "2009-12-21T08:28:03.769157Z", + "2009-12-21T08:28:30.826264Z", + "2009-12-21T08:28:58.374582Z", + "2009-12-21T08:29:25.427593Z", + "2009-12-21T08:29:52.975367Z", + "2009-12-21T08:30:20.022732Z", + "2009-12-21T08:30:47.569018Z", + "2009-12-21T08:31:14.625504Z", + "2009-12-21T08:31:42.172209Z", + "2009-12-21T08:32:09.225653Z", + "2009-12-21T08:32:36.768237Z", + "2009-12-21T08:33:03.827261Z", + "2009-12-21T08:33:31.368893Z", + "2009-12-21T08:33:58.423967Z", + "2009-12-21T08:34:25.967012Z", + "2009-12-21T08:34:53.027530Z", + "2009-12-21T08:35:20.563331Z", + "2009-12-21T08:35:47.625291Z", + "2009-12-21T08:36:15.167358Z", + "2009-12-21T08:36:42.232514Z", + "2009-12-21T08:37:09.766872Z", + "2009-12-21T08:37:36.823731Z", + "2009-12-21T08:38:04.368867Z", + "2009-12-21T08:38:31.424344Z", + "2009-12-21T08:38:58.965125Z", + "2009-12-21T08:39:26.030529Z", + "2009-12-21T08:39:53.565523Z", + "2009-12-21T08:40:20.628445Z", + "2009-12-21T08:40:48.167689Z", + "2009-12-21T08:41:15.223368Z", + "2009-12-21T08:41:42.763791Z", + "2009-12-21T08:42:09.824929Z", + "2009-12-21T08:42:37.364142Z", + "2009-12-21T08:43:04.422024Z", + "2009-12-21T08:43:31.964464Z", + "2009-12-21T08:43:59.024781Z", + "2009-12-21T08:44:26.562582Z", + "2009-12-21T08:44:53.622697Z", + "2009-12-21T08:45:21.166982Z", + "2009-12-21T08:45:48.221622Z", + "2009-12-21T08:46:15.762433Z", + "2009-12-21T08:46:42.821151Z", + "2009-12-21T08:47:10.364584Z", + "2009-12-21T08:47:37.421689Z", + "2009-12-21T08:48:04.962531Z", + "2009-12-21T08:48:32.019188Z", + "2009-12-21T08:48:59.564078Z", + "2009-12-21T08:49:26.620144Z", + "2009-12-21T08:49:54.161591Z", + "2009-12-21T08:50:21.219643Z", + 
"2009-12-21T08:50:48.761741Z", + "2009-12-21T08:51:15.818785Z", + "2009-12-21T08:51:43.361861Z", + "2009-12-21T08:52:10.419726Z", + "2009-12-21T08:52:37.960150Z", + "2009-12-21T08:53:05.020668Z", + "2009-12-21T08:53:32.561076Z", + "2009-12-21T08:53:59.618802Z", + "2009-12-21T08:54:27.164049Z", + "2009-12-21T08:54:54.218099Z", + "2009-12-21T08:55:21.765706Z", + "2009-12-21T08:55:48.818530Z", + "2009-12-21T08:56:16.359851Z", + "2009-12-21T08:56:43.421751Z", + "2009-12-21T08:57:10.959366Z", + "2009-12-21T08:57:38.017822Z", + "2009-12-21T09:20:24.154140Z", + "2009-12-21T09:20:51.209819Z", + "2009-12-21T09:21:18.752087Z", + "2009-12-21T09:21:45.810760Z", + "2009-12-21T09:22:13.353231Z", + "2009-12-21T09:22:40.407406Z", + "2009-12-21T09:23:07.952559Z", + "2009-12-21T09:23:35.010627Z", + "2009-12-21T09:24:02.555688Z", + "2009-12-21T09:24:29.608946Z", + "2009-12-21T09:24:57.151588Z", + "2009-12-21T09:25:24.213315Z", + "2009-12-21T09:25:51.753537Z", + "2009-12-21T09:26:18.808828Z", + "2009-12-21T09:26:46.351050Z", + "2009-12-21T09:27:13.407939Z", + "2009-12-21T09:27:40.951186Z", + "2009-12-21T09:28:08.008880Z", + "2009-12-21T09:28:35.555369Z", + "2009-12-21T09:29:02.612926Z", + "2009-12-21T09:29:30.151082Z", + "2009-12-21T09:29:57.208529Z", + "2009-12-21T09:30:24.749558Z", + "2009-12-21T09:30:51.810433Z", + "2009-12-21T09:31:19.350469Z", + "2009-12-21T09:31:46.408956Z", + "2009-12-21T09:32:13.949813Z", + "2009-12-21T09:32:41.006560Z", + "2009-12-21T09:33:08.549486Z", + "2009-12-21T09:33:35.606199Z", + "2009-12-21T09:34:03.149879Z", + "2009-12-21T09:34:30.207776Z", + "2009-12-21T09:34:57.748820Z", + "2009-12-21T09:35:24.804670Z", + "2009-12-21T09:35:52.349792Z", + "2009-12-21T09:36:19.405052Z", + "2009-12-21T09:36:46.946499Z", + "2009-12-21T09:37:14.005791Z", + "2009-12-21T09:37:41.548262Z", + "2009-12-21T09:38:08.611790Z", + "2009-12-21T09:38:36.149607Z", + "2009-12-21T09:39:03.204277Z", + "2009-12-21T09:39:30.747740Z", + "2009-12-21T09:39:57.804613Z", + 
"2009-12-21T09:40:25.344633Z", + "2009-12-21T09:40:52.404950Z", + "2009-12-21T09:41:19.947078Z", + "2009-12-21T09:41:46.999902Z", + "2009-12-21T09:42:14.546561Z", + "2009-12-21T09:42:41.603264Z", + "2009-12-21T09:43:09.149043Z", + "2009-12-21T09:43:36.203166Z", + "2009-12-21T09:44:03.744802Z", + "2009-12-21T09:46:20.002684Z", + "2009-12-21T09:47:42.142078Z", + "2009-12-21T09:48:09.206630Z", + "2009-12-21T09:48:36.745816Z", + "2009-12-21T09:49:03.804531Z", + "2009-12-21T09:49:31.346167Z", + "2009-12-21T09:49:58.409379Z", + "2009-12-21T09:50:25.945180Z", + "2009-12-21T09:50:53.005698Z", + "2009-12-21T09:51:20.549321Z", + "2009-12-21T09:51:47.603444Z", + "2009-12-21T09:52:15.147005Z", + "2009-12-21T09:52:42.201592Z", + "2009-12-21T10:04:04.946263Z", + "2009-12-21T10:04:31.998312Z", + "2009-12-21T10:04:59.545792Z", + "2009-12-21T10:05:26.598895Z", + "2009-12-21T10:05:54.141878Z", + "2009-12-21T10:06:21.198239Z", + "2009-12-21T10:06:48.739685Z", + "2009-12-21T10:07:15.801383Z", + "2009-12-21T10:07:43.343156Z", + "2009-12-21T10:08:10.398524Z", + "2009-12-21T10:08:37.940172Z", + "2009-12-21T10:09:05.004335Z", + "2009-12-21T10:09:32.540337Z", + "2009-12-21T10:09:59.602282Z", + "2009-12-21T10:10:27.141929Z", + "2009-12-21T10:10:54.197593Z", + "2009-12-21T10:11:21.740781Z", + "2009-12-21T10:11:48.798145Z", + "2009-12-21T10:12:16.340832Z", + "2009-12-21T10:12:43.397706Z", + "2009-12-21T10:13:10.939989Z", + "2009-12-21T10:13:37.996863Z", + "2009-12-21T10:14:05.536495Z", + "2009-12-21T10:14:32.599060Z", + "2009-12-21T10:15:00.138806Z", + "2009-12-21T10:15:27.204841Z", + "2009-12-21T10:15:54.744892Z", + "2009-12-21T10:16:21.797127Z", + "2009-12-21T10:16:49.345042Z", + "2009-12-21T10:17:16.398099Z", + "2009-12-21T10:17:43.938210Z", + "2009-12-21T10:18:10.995813Z", + "2009-12-21T10:18:38.537261Z", + "2009-12-21T10:19:05.604293Z", + "2009-12-21T10:19:33.134416Z", + "2009-12-21T10:20:00.196113Z", + "2009-12-21T10:20:27.736970Z", + "2009-12-21T10:20:54.796062Z", + 
"2009-12-21T10:21:22.339510Z", + "2009-12-21T10:21:49.395668Z", + "2009-12-21T10:22:16.938884Z", + "2009-12-21T10:22:43.995802Z", + "2009-12-21T10:23:11.535590Z", + "2009-12-21T10:23:38.592510Z", + "2009-12-21T10:24:06.137601Z", + "2009-12-21T10:24:33.195219Z", + "2009-12-21T10:25:00.739349Z", + "2009-12-21T10:25:27.795648Z", + "2009-12-21T10:25:55.335234Z", + "2009-12-21T10:26:22.391517Z", + "2009-12-21T10:26:49.934189Z", + "2009-12-21T10:27:16.994491Z", + "2009-12-21T10:27:44.540963Z", + "2009-12-21T10:28:11.598472Z", + "2009-12-21T10:28:39.133667Z", + "2009-12-21T10:29:06.193147Z", + "2009-12-21T10:29:33.734191Z", + "2009-12-21T10:30:00.792863Z", + "2009-12-21T10:30:28.329516Z", + "2009-12-21T10:30:55.393492Z", + "2009-12-21T10:31:22.938105Z", + "2009-12-21T10:31:49.992326Z", + "2009-12-21T10:32:17.533168Z", + "2009-12-21T10:32:44.592057Z", + "2009-12-21T10:33:12.138995Z", + "2009-12-21T10:33:39.192409Z", + "2009-12-21T10:34:06.732398Z", + "2009-12-21T10:34:33.798594Z", + "2009-12-21T10:35:01.332967Z", + "2009-12-21T10:35:28.390244Z", + "2009-12-21T10:35:55.936933Z", + "2009-12-21T10:36:22.991991Z", + "2009-12-21T10:59:09.126171Z", + "2009-12-21T10:59:36.183029Z", + "2009-12-21T11:00:03.725857Z", + "2009-12-21T11:00:30.783351Z", + "2009-12-21T11:00:58.324424Z", + "2009-12-21T11:01:25.384105Z", + "2009-12-21T11:01:52.923939Z", + "2009-12-21T11:02:19.982425Z", + "2009-12-21T11:02:47.528883Z", + "2009-12-21T11:03:14.583351Z", + "2009-12-21T11:03:42.123588Z", + "2009-12-21T11:04:09.184510Z", + "2009-12-21T11:04:36.724731Z", + "2009-12-21T11:05:03.782194Z", + "2009-12-21T11:05:31.324463Z", + "2009-12-21T11:05:58.386982Z", + "2009-12-21T11:06:25.923299Z", + "2009-12-21T11:06:52.983269Z", + "2009-12-21T11:07:20.523103Z", + "2009-12-21T11:07:47.582210Z", + "2009-12-21T11:08:15.123222Z", + "2009-12-21T11:08:42.182158Z", + "2009-12-21T11:09:09.719153Z", + "2009-12-21T11:09:36.782883Z", + "2009-12-21T11:10:04.322328Z", + "2009-12-21T11:10:31.385235Z", + 
"2009-12-21T11:10:58.921207Z", + "2009-12-21T11:11:25.986595Z", + "2009-12-21T11:11:53.522768Z", + "2009-12-21T11:12:20.581069Z", + "2009-12-21T11:12:48.121321Z", + "2009-12-21T11:13:15.188680Z", + "2009-12-21T11:13:42.721786Z", + "2009-12-21T11:14:09.781323Z", + "2009-12-21T11:14:37.320961Z", + "2009-12-21T11:15:04.380480Z", + "2009-12-21T11:15:31.920660Z", + "2009-12-21T11:15:58.986634Z", + "2009-12-21T11:16:26.521554Z", + "2009-12-21T11:16:53.578889Z", + "2009-12-21T11:17:21.121546Z", + "2009-12-21T11:17:48.178651Z", + "2009-12-21T11:18:15.719896Z", + "2009-12-21T11:18:42.779592Z", + "2009-12-21T11:19:10.318835Z", + "2009-12-21T11:19:37.377741Z", + "2009-12-21T11:20:04.919807Z", + "2009-12-21T11:20:31.977286Z", + "2009-12-21T11:20:59.518516Z", + "2009-12-21T11:21:26.583672Z", + "2009-12-21T11:21:54.123505Z", + "2009-12-21T11:22:21.181805Z", + "2009-12-21T11:22:48.721783Z", + "2009-12-21T11:23:15.777937Z", + "2009-12-21T11:23:43.324239Z", + "2009-12-21T11:24:10.377205Z", + "2009-12-21T11:24:37.917548Z", + "2009-12-21T11:25:04.977245Z", + "2009-12-21T11:25:32.518820Z", + "2009-12-21T11:25:59.578232Z", + "2009-12-21T11:26:27.122920Z", + "2009-12-21T11:26:54.186045Z", + "2009-12-21T11:27:21.718231Z", + "2009-12-21T11:27:48.776516Z", + "2009-12-21T11:28:16.317015Z", + "2009-12-21T11:28:43.379923Z", + "2009-12-21T11:29:10.916171Z", + "2009-12-21T11:29:37.975914Z", + "2009-12-21T11:30:05.522821Z", + "2009-12-21T11:30:32.579477Z", + "2009-12-21T11:31:00.112717Z", + "2009-12-21T11:31:27.175392Z", + "2009-12-21T11:42:49.917856Z", + "2009-12-21T11:43:16.970480Z", + "2009-12-21T11:43:44.514407Z", + "2009-12-21T11:44:11.577314Z", + "2009-12-21T11:44:39.119009Z", + "2009-12-21T11:45:06.172066Z", + "2009-12-21T11:45:33.713527Z", + "2009-12-21T11:46:00.771641Z", + "2009-12-21T11:46:28.316763Z", + "2009-12-21T11:46:55.371605Z", + "2009-12-21T11:47:22.909513Z", + "2009-12-21T11:47:49.977013Z", + "2009-12-21T11:48:17.512038Z", + "2009-12-21T11:48:44.570338Z", + 
"2009-12-21T11:49:12.112434Z", + "2009-12-21T11:49:39.170750Z", + "2009-12-21T11:50:06.707947Z", + "2009-12-21T11:50:33.771475Z", + "2009-12-21T11:51:01.316612Z", + "2009-12-21T11:51:28.369654Z", + "2009-12-21T11:51:55.914373Z", + "2009-12-21T11:52:22.970238Z", + "2009-12-21T11:52:50.512201Z", + "2009-12-21T11:53:17.570095Z", + "2009-12-21T11:53:45.111202Z", + "2009-12-21T11:54:12.174012Z", + "2009-12-21T11:54:39.711706Z", + "2009-12-21T11:55:06.775528Z", + "2009-12-21T11:55:34.313158Z", + "2009-12-21T11:56:01.369704Z", + "2009-12-21T11:56:28.914145Z", + "2009-12-21T11:56:55.972430Z", + "2009-12-21T11:57:23.510764Z", + "2009-12-21T11:57:50.568935Z", + "2009-12-21T11:58:18.110742Z", + "2009-12-21T11:58:45.172760Z", + "2009-12-21T11:59:12.710235Z", + "2009-12-21T11:59:39.764844Z", + "2009-12-21T12:00:07.309301Z", + "2009-12-21T12:00:34.371836Z", + "2009-12-21T12:01:01.913917Z", + "2009-12-21T12:01:28.968371Z", + "2009-12-21T12:01:56.511446Z", + "2009-12-21T12:02:23.564750Z", + "2009-12-21T12:02:51.108959Z", + "2009-12-21T12:03:18.167072Z", + "2009-12-21T12:03:45.713979Z", + "2009-12-21T12:04:12.770636Z", + "2009-12-21T12:04:40.308468Z", + "2009-12-21T12:05:07.363340Z", + "2009-12-21T12:05:34.908230Z", + "2009-12-21T12:06:01.966157Z", + "2009-12-21T12:06:29.512458Z", + "2009-12-21T12:06:56.566074Z", + "2009-12-21T12:07:24.109956Z", + "2009-12-21T12:07:51.162223Z", + "2009-12-21T12:08:18.707036Z", + "2009-12-21T12:08:45.765397Z", + "2009-12-21T12:09:13.310287Z", + "2009-12-21T12:09:40.366293Z", + "2009-12-21T12:10:07.905148Z", + "2009-12-21T12:10:34.970693Z", + "2009-12-21T12:11:02.513783Z", + "2009-12-21T12:11:29.564219Z", + "2009-12-21T12:11:57.107248Z", + "2009-12-21T12:12:24.164989Z", + "2009-12-21T12:12:51.704419Z", + "2009-12-21T12:13:18.771189Z", + "2009-12-21T12:13:46.305360Z", + "2009-12-21T12:14:13.371928Z", + "2009-12-21T12:14:40.908334Z", + "2009-12-21T12:15:07.965005Z" + ] + } + }, + "groups": { + "e09f9e06-98c2-11ec-aa25-8c8590747994": { + "alias": [ + 
"/" + ], + "links": [ + { + "class": "H5L_TYPE_HARD", + "collection": "datasets", + "id": "e09ffa90-98c2-11ec-aa25-8c8590747994", + "title": "ds1" + } + ] + } + }, + "root": "e09f9e06-98c2-11ec-aa25-8c8590747994" +} diff --git a/docs/Installation/index.rst b/docs/Installation/index.rst deleted file mode 100755 index 06c8e37..0000000 --- a/docs/Installation/index.rst +++ /dev/null @@ -1,129 +0,0 @@ -#################### -Installing hdf5-json -#################### - -Instructions for installing hdf5-json library and utilties. - - -Prerequisites -------------- - -A computer running a 64-bit version of Windows, Mac OS X, or Linux. - -You will also need the following Python packages: - -* Python 2.7, 3.3, 3.4, or 3.5 -* NumPy 1.10.4 or later -* h5py 2.5 or later - -If you are not familiar with installing Python packages, the easiest route is to -use a package manager such as Anaconda (as described below). - -If you have a git client installed on your system, you can directly download the hdf5-json -source from GitHub: ``git clone --recursive https://github.com/HDFGroup/hdf5-json.git``. -Otherwise, you can download a zip file of the source from GitHub (as described below). - - -Installing on Windows ---------------------- - -Anaconda from Continuum Analytics can be used to easily manage the package dependencies -needed for hdf5-json. - -In a browser go to: http://continuum.io/downloads and click the "Windows 64-bit -Python 2.7 Graphical Installer" button. - -Install Anaconda using the default options. - -Once Anaconda is installed select "Anaconda Command Prompt" from the start menu. - -In the command window that appears, create a new anaconda environment using the following command: -``conda create -n hdf5json python=2.7 h5py`` - -Answer 'y' to the prompt, and the packages will be fetched. - -In the same command window, run: ``activate hdf5json`` - -In a browser go to: https://github.com/HDFGroup/hdf5-json and click the "Download ZIP" -button (right side of page). 
Save the file as "hdf5json.zip" to your Downloads directory. - -Alternatively, if you have git installed, you can run: -``git clone --recursive https://github.com/HDFGroup/hdf5-json.git`` to download the hdf5-json source tree. - -If you downloaded the ZIP file, in Windows Explorer, right-click on the file and select -"Extract All...". You can choose any folder as the destination. - -Next, in the command window, cd to the folder you extracted the source files to. - -Run: -``python setup.py install`` -to install the package. - -Installing on Linux/Mac OS X ------------------------------ - -Anaconda from Continuum Analytics can be used to easily manage the package dependencies -needed for hdf5-json. - -In a browser go to: http://continuum.io/downloads and click the "Mac OS X 64-bit -Python 2.7 Graphical Installer" button for Mac OS X or: "Linux 64-bit Python 2.7". - -Install Anaconda using the default options. - -Once Anaconda is installed, open a new shell and run the following on the command line: - -``conda create -n hdf5json python=2.7 h5py`` - -Answer 'y' to the prompt, and the packages will be fetched. - -In the same shell, run: ``source activate hdf5json`` - -Run: ``git clone --recursive https://github.com/HDFGroup/hdf5-json.git`` to download the source -tree. Alternatively, in a browser go to: https://github.com/HDFGroup/hdf5-json and click -the "Download ZIP" button (right side of page). Download the zip file and extract to -the destination directory of your choice. - -Next, cd to the folder you extracted the source files to. - -Run: - -``python setup.py install`` - -to install the package. - -Alternatively, you can install directly from PyPI. - -Run: - -``pip install h5json`` - - -Verification -------------- - -To verify hdf5-json was installed correctly, you can convert a test HDF5 file to json and back. - -Open a new shell (on Windows, run “Annaconda Command Prompt” from the start menu). 
- -In this shell, run the following commands: - -* source activate hdf5json (just: activate hdf5json on Windows) -* cd -* cd util -* python h5tojson.py ../data/hdf5/tall.h5 > tall.json -* python jsontoh5.py tall.json tall.h5 - -At this point the file tall.json should contain a JSON description of the original file and -the file tall.h5 should be an HDF5 equivalent to the original file. - -Running: - -``python testall.py`` - -will run the complete set of unit and integration tests. - -You should also be able to import the h5json package in any Python script: - -``import h5json`` - - diff --git a/docs/Utilities.rst b/docs/Utilities.rst deleted file mode 100755 index 92bad23..0000000 --- a/docs/Utilities.rst +++ /dev/null @@ -1,90 +0,0 @@ -################### -Utilities -################### - -The hdf5-json distribution includes the following utility scripts. These are all -located in the ``util`` and ``util\codegen`` directories. - - - -jsontoh5.py ------------ - -Converts a JSON representation of an HDF5 file to an HDF5 file. - -Usage: - -``jsontoh5.py [-h] `` - - is the input .json file. - is the output file (will be created by the script) - -Options: - * ``-h``: prints help message - -h5tojson.py ------------ - -This script converts the given HDF5 file to a JSON representation of the file. - -Usage: - -``python h5tojson.py [-h] -[D|-d] `` - -Output is a file the hdf5 file base name and the extension ``.json``. - -Options: - * ``-h``: prints help message - * ``-D``: suppress all data output - * ``-d``: suppress data output for datasets (but not attributes) - - - jsontoFortran.py - ---------------- - - This script generates code to create Fortran source code that when compiled and run, - will create an HDF5 file that reflects the JSON input. - - Note: Dataset values are not initialized by the Fortran code. 
- - Usage: - -``python jsontoFortran.py [-h] `` - -positional arguments: - in_filename JSON file to be converted to h5py - out_filename name of HDF5 file to be created by generated code - -optional arguments: - -h, --help show this help message and exit - -jsontoh5py.py - ---------------- - -This script generates code to create Python code (using the h5py package) that when run, -will create an HDF5 file that reflects the JSON input. - -Note: Dataset values are not initialized by the Python code. - -Usage: - -``python jsontoh5py.py [-h] `` - -positional arguments: - in_filename JSON file to be converted to h5py - out_filename name of HDF5 file to be created by generated code - -optional arguments: - -h, --help show this help message and exit - - - - - - - - - - - - diff --git a/docs/bnf/attribute_collection.rst b/docs/bnf/attribute_collection.rst index cd86479..ffe6e2c 100644 --- a/docs/bnf/attribute_collection.rst +++ b/docs/bnf/attribute_collection.rst @@ -6,7 +6,7 @@ Attribute attribute_list: `attribute` ("," `attribute`)* attribute: "{" : "name" ":" `ascii_string` | `unicode_string` "," - : "type" ":" `datatype` | `datatype_object_ref` "," + : "type" ":" `datatype` | `datatype_ref` "," : "shape" ":" `dataspace` "," : "value" ":" `json_value` "," : "creationProperties" ":" `acpl` diff --git a/docs/bnf/dataset.rst b/docs/bnf/dataset.rst index 0a7881d..523366a 100644 --- a/docs/bnf/dataset.rst +++ b/docs/bnf/dataset.rst @@ -5,7 +5,7 @@ Dataset dataset: `identifier` ":" "{" : "alias" ":" `hdf5_path_name_array` : "attributes" ":" `attribute_collection` "," - : "type" ":" `datatype` | `datatype_object_ref` "," + : "type" ":" `datatype` | `datatype_ref` "," : "shape" ":" `dataspace` "," : "value" ":" `json_value` "," : "creationProperties" ":" `dcpl` "," diff --git a/docs/bnf/datatype.rst b/docs/bnf/datatype.rst index 05e2eaf..400607f 100644 --- a/docs/bnf/datatype.rst +++ b/docs/bnf/datatype.rst @@ -15,13 +15,13 @@ Datatype :| `opaque_datatype` :| 
`reference_datatype` :| `string_datatype` - :| `vlen_datatype` + :| `vlen_datatype` .. productionlist:: array_datatype: "{" : "class" ":" "H5T_ARRAY" "," - : "base" ":" `datatype` | `datatype_object_ref` "," + : "base" ":" `datatype` | `datatype_ref` "," : "dims" ":" `dims_array` : "}" @@ -56,7 +56,7 @@ Datatype field_list: `field_def` ("," `field_def`)* field_def: "{" : "name" ":" `ascii_string` "," - : "type" ":" `datatype` | `datatype_object_ref` "," + : "type" ":" `datatype` | `datatype_ref` "," : "}" @@ -167,5 +167,5 @@ Datatype .. productionlist:: vlen_datatype: "{" : "class" ":" "H5T_VLEN" "," - : "base" ":" `datatype` | `datatype_object_ref` "," + : "base" ":" `datatype` | `datatype_ref` "," : "}" diff --git a/docs/bnf/index.rst b/docs/bnf/index.rst index 1e5296d..0b5475e 100644 --- a/docs/bnf/index.rst +++ b/docs/bnf/index.rst @@ -1,5 +1,7 @@ -Production Rules -================ +Backus-Naur Grammar +=================== + +HDF5/JSON specification in the Backus-Naur form. .. toctree:: diff --git a/docs/bp/index.rst b/docs/bp/index.rst deleted file mode 100644 index 62bdf3d..0000000 --- a/docs/bp/index.rst +++ /dev/null @@ -1,2 +0,0 @@ -Recommendations and Best Practices -================================== diff --git a/docs/conf.py b/docs/conf.py index e641566..f56c572 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,61 +1,40 @@ -# -*- coding: utf-8 -*- -# -# PyHexad documentation build configuration file, created by -# sphinx-quickstart on Wed Oct 15 13:55:49 2014. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -import sys -import os - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. 
If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. -sys.path.insert(0, os.path.abspath('.')) -sys.path.insert(0, os.path.abspath('..')) -sys.path.insert(0, os.path.abspath('../add-in')) +from datetime import date +import h5json # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. -#needs_sphinx = '1.0' +# needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo'] +extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo"] # Add any paths that contain templates here, relative to this directory. -templates_path = ['_templates'] +templates_path = ["_templates"] # The suffix of source filenames. -source_suffix = '.rst' +source_suffix = ".rst" # The encoding of source files. -#source_encoding = 'utf-8-sig' +# source_encoding = 'utf-8-sig' # The master toctree document. -master_doc = 'index' +master_doc = "index" # General information about the project. -project = u'HDF5/JSON' -copyright = u'2014, The HDF Group' +project = "HDF5/JSON" +copyright = f"2014 - {date.today().year}, The HDF Group" # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. -version = '0.1' +version = h5json.__version__ # The full version, including alpha/beta/rc tags. -release = '0.1' +release = h5json.__version__ # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. 
@@ -66,198 +45,191 @@ # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: -#today = '' +# today = '' # Else, today_fmt is used as the format for a strftime call. -#today_fmt = '%B %d, %Y' +# today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. -exclude_patterns = ['_build'] +exclude_patterns = ["_build"] # The reST default role (used for this markup: `text`) to use for all # documents. -#default_role = None +# default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. -#add_function_parentheses = True +# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). -#add_module_names = True +# add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. -#show_authors = False +# show_authors = False # The name of the Pygments (syntax highlighting) style to use. -pygments_style = 'sphinx' +pygments_style = "default" # A list of ignored prefixes for module index sorting. -#modindex_common_prefix = [] +# modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. -#keep_warnings = False +# keep_warnings = False todo_include_todos = True -rst_epilog = u""" -.. |product| replace:: HDF5/JSON +rst_epilog = """ +.. |product| replace:: HDF5/JSON """ # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = 'default' +html_theme = "sphinx_book_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
-#html_theme_options = {} +# html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. -#html_theme_path = [] +# html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". -#html_title = None +html_title = f"{project} {release}" # A shorter title for the navigation bar. Default is the same as html_title. -#html_short_title = None +# html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. -#html_logo = None +# html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. -#html_favicon = None +# html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ['_static'] +# html_static_path = ["_static"] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. -#html_extra_path = [] +# html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. -#html_last_updated_fmt = '%b %d, %Y' +# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. -#html_use_smartypants = True +# html_use_smartypants = True # Custom sidebar templates, maps document names to template names. -#html_sidebars = {} +# html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. -#html_additional_pages = {} +# html_additional_pages = {} # If false, no module index is generated. 
-#html_domain_indices = True +# html_domain_indices = True # If false, no index is generated. -#html_use_index = True +# html_use_index = True # If true, the index is split into individual pages for each letter. -#html_split_index = False +# html_split_index = False # If true, links to the reST sources are added to the pages. -#html_show_sourcelink = True +# html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. -#html_show_sphinx = True +# html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. -#html_show_copyright = True +# html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. -#html_use_opensearch = '' +# html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). -#html_file_suffix = None +# html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' -#html_search_language = 'en' +# html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value -#html_search_options = {'type': 'default'} +# html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. -#html_search_scorer = 'scorer.js' +# html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. -htmlhelp_basename = 'HDF5JSONdoc' +htmlhelp_basename = "HDF5JSONdoc" # -- Options for LaTeX output --------------------------------------------- latex_elements = { -# The paper size ('letterpaper' or 'a4paper'). 
- 'papersize': 'letterpaper', - -# The font size ('10pt', '11pt' or '12pt'). -#'pointsize': '10pt', - -# Additional stuff for the LaTeX preamble. -#'preamble': '', - -# Latex figure (float) alignment -#'figure_align': 'htbp', + # The paper size ('letterpaper' or 'a4paper'). + "papersize": "letterpaper", + # The font size ('10pt', '11pt' or '12pt'). + # 'pointsize': '10pt', + # Additional stuff for the LaTeX preamble. + # 'preamble': '', + # Latex figure (float) alignment + # 'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ - ('index', 'HDF5JSON.tex', u'HDF5/JSON Specification', - u'The HDF Group', 'manual'), + ("index", "HDF5JSON.tex", "HDF5/JSON Specification", "The HDF Group", "manual"), ] # The name of an image file (relative to this directory) to place at the top of # the title page. -#latex_logo = None +# latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. -#latex_use_parts = False +# latex_use_parts = False # If true, show page references after internal links. -#latex_show_pagerefs = False +# latex_show_pagerefs = False # If true, show URL addresses after external links. -#latex_show_urls = False +# latex_show_urls = False # Documents to append as an appendix to all manuals. -#latex_appendices = [] +# latex_appendices = [] # If false, no module index is generated. -#latex_domain_indices = True +# latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). -man_pages = [ - ('index', 'hdf5json', u'HDF5/JSON Specification', - [u'The HDF Group'], 1) -] +man_pages = [("index", "hdf5json", "HDF5/JSON Specification", ["The HDF Group"], 1)] # If true, show URL addresses after external links. 
-#man_show_urls = False +# man_show_urls = False # -- Options for Texinfo output ------------------------------------------- @@ -266,20 +238,25 @@ # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ - ('index', 'HDF5/JSON', u'HDF5/JSON Specification', - u'The HDF Group', 'HDF5/JSON', - 'A JSON representation of HDF5 expressions.', - 'Miscellaneous'), + ( + "index", + "HDF5/JSON", + "HDF5/JSON Specification", + "The HDF Group", + "HDF5/JSON", + "A JSON representation of HDF5 expressions.", + "Miscellaneous", + ), ] # Documents to append as an appendix to all manuals. -#texinfo_appendices = [] +# texinfo_appendices = [] # If false, no module index is generated. -#texinfo_domain_indices = True +# texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. -#texinfo_show_urls = 'footnote' +# texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. -#texinfo_no_detailmenu = False +# texinfo_no_detailmenu = False diff --git a/docs/examples/array_dset.json b/docs/examples/array_dset.json index c290ed6..e6db095 100644 --- a/docs/examples/array_dset.json +++ b/docs/examples/array_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "146c9fb5-7b51-11e4-a549-3c15c2da029e": { "shape": { diff --git a/docs/examples/array_dset.rst b/docs/examples/array_dset.rst index f8e2171..683ff8d 100644 --- a/docs/examples/array_dset.rst +++ b/docs/examples/array_dset.rst @@ -2,4 +2,4 @@ An HDF5 Array Datatype ====================== .. literalinclude:: array_dset.json - :language: javascript + :language: json diff --git a/docs/examples/classic.rst b/docs/examples/classic.rst index 5ab37b0..d5ef5bd 100644 --- a/docs/examples/classic.rst +++ b/docs/examples/classic.rst @@ -1,8 +1,8 @@ The "Classic" ============= -This example is equivalent to the one in section 4 of -`DDL in BNF for HDF5 `_. 
+This example is equivalent to the one in Section 4 of +`DDL in BNF for HDF5 `_. .. literalinclude:: sample.json - :language: javascript + :language: json diff --git a/docs/examples/compound.json b/docs/examples/compound.json index fa53b07..c6751ec 100644 --- a/docs/examples/compound.json +++ b/docs/examples/compound.json @@ -1,4 +1,5 @@ { + "apiVersion": "1.0.0", "datasets": { "2fd9e630-7b51-11e4-b2b8-3c15c2da029e": { "shape": { diff --git a/docs/examples/compound.rst b/docs/examples/compound.rst index 9058553..ee4b9c3 100644 --- a/docs/examples/compound.rst +++ b/docs/examples/compound.rst @@ -2,4 +2,4 @@ An HDF5 Compound Datatype ========================= .. literalinclude:: compound.json - :language: javascript + :language: json diff --git a/docs/examples/datatype_object.json b/docs/examples/datatype_object.json index 647b15c..489a3b4 100644 --- a/docs/examples/datatype_object.json +++ b/docs/examples/datatype_object.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "25d6f985-7b51-11e4-a037-3c15c2da029e": { "shape": { diff --git a/docs/examples/datatype_object.rst b/docs/examples/datatype_object.rst index cc76333..4676d1d 100644 --- a/docs/examples/datatype_object.rst +++ b/docs/examples/datatype_object.rst @@ -2,4 +2,4 @@ An HDF5 Datatype Object ======================= .. literalinclude:: datatype_object.json - :language: javascript + :language: json diff --git a/docs/examples/empty.json b/docs/examples/empty.json index b37b973..edca1a1 100644 --- a/docs/examples/empty.json +++ b/docs/examples/empty.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "groups": { "913d8791-7b51-11e4-89fd-3c15c2da029e": {} }, diff --git a/docs/examples/empty.rst b/docs/examples/empty.rst index dea9a3f..cde2353 100644 --- a/docs/examples/empty.rst +++ b/docs/examples/empty.rst @@ -2,4 +2,4 @@ An "empty" HDF5 File ==================== .. 
literalinclude:: empty.json - :language: javascript + :language: json diff --git a/docs/examples/enum_attr.json b/docs/examples/enum_attr.json index 48d3072..17cdd1a 100644 --- a/docs/examples/enum_attr.json +++ b/docs/examples/enum_attr.json @@ -1,7 +1,8 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "a615cf0a-7b51-11e4-bc7e-3c15c2da029e": { + "alias": ["/DS1"], "attributes": [ { "name": "A1", @@ -54,6 +55,7 @@ }, "groups": { "a614dc9c-7b51-11e4-a3f7-3c15c2da029e": { + "alias": ["/"], "links": [ { "class": "H5L_TYPE_HARD", diff --git a/docs/examples/enum_attr.rst b/docs/examples/enum_attr.rst index 9904c48..5a87a8a 100644 --- a/docs/examples/enum_attr.rst +++ b/docs/examples/enum_attr.rst @@ -2,4 +2,4 @@ An HDF5 Enumerated Datatype (Attribute) ======================================= .. literalinclude:: enum_attr.json - :language: javascript + :language: json diff --git a/docs/examples/fixed_string_dset.json b/docs/examples/fixed_string_dset.json index 2ba416b..d3c4c86 100644 --- a/docs/examples/fixed_string_dset.json +++ b/docs/examples/fixed_string_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "99d645d1-7bf7-11e4-92bc-3c15c2da029e": { "shape": { diff --git a/docs/examples/fixed_string_dset.rst b/docs/examples/fixed_string_dset.rst index 09a1cb5..40db043 100644 --- a/docs/examples/fixed_string_dset.rst +++ b/docs/examples/fixed_string_dset.rst @@ -2,4 +2,4 @@ An HDF5 Fixed-Length String Datatype (Dataset) ============================================== .. literalinclude:: fixed_string_dset.json - :language: javascript + :language: json diff --git a/docs/examples/index.rst b/docs/examples/index.rst index 6718c16..3f8eb56 100644 --- a/docs/examples/index.rst +++ b/docs/examples/index.rst @@ -1,6 +1,8 @@ Examples ======== +File content examples in HDF5/JSON. + .. 
toctree:: classic diff --git a/docs/examples/null_objref_dset.json b/docs/examples/null_objref_dset.json index 0ab07ea..ea74724 100644 --- a/docs/examples/null_objref_dset.json +++ b/docs/examples/null_objref_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "9faa470f-7bf7-11e4-8eb6-3c15c2da029e": { "shape": { diff --git a/docs/examples/null_objref_dset.rst b/docs/examples/null_objref_dset.rst index 4e4fe07..36d70ea 100644 --- a/docs/examples/null_objref_dset.rst +++ b/docs/examples/null_objref_dset.rst @@ -2,4 +2,4 @@ An HDF5 Uninitialized HDF5 Object Reference =========================================== .. literalinclude:: null_objref_dset.json - :language: javascript + :language: json diff --git a/docs/examples/nullspace_dset.json b/docs/examples/nullspace_dset.json index 9521706..10120d2 100644 --- a/docs/examples/nullspace_dset.json +++ b/docs/examples/nullspace_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "23d3e919-7b53-11e4-961d-3c15c2da029e": { "shape": { diff --git a/docs/examples/nullspace_dset.rst b/docs/examples/nullspace_dset.rst index 5e75217..7e8c118 100644 --- a/docs/examples/nullspace_dset.rst +++ b/docs/examples/nullspace_dset.rst @@ -2,4 +2,4 @@ An HDF5 Dataset with an HDF5 NULL Dataspace =========================================== .. 
literalinclude:: nullspace_dset.json - :language: javascript + :language: json diff --git a/docs/examples/objref_attr.json b/docs/examples/objref_attr.json index 463e61b..b233721 100644 --- a/docs/examples/objref_attr.json +++ b/docs/examples/objref_attr.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "a09a7f14-7bf7-11e4-8eef-3c15c2da029e": { "attributes": [ diff --git a/docs/examples/objref_attr.rst b/docs/examples/objref_attr.rst index aa7898e..72732e5 100644 --- a/docs/examples/objref_attr.rst +++ b/docs/examples/objref_attr.rst @@ -2,4 +2,4 @@ An HDF5 Object Reference Datatype (Attribute) ============================================= .. literalinclude:: objref_attr.json - :language: javascript + :language: json diff --git a/docs/examples/objref_dset.json b/docs/examples/objref_dset.json index 1b72d54..a783aa5 100644 --- a/docs/examples/objref_dset.json +++ b/docs/examples/objref_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "23674021-7bed-11e4-aba3-3c15c2da029e": { "shape": { @@ -31,22 +31,22 @@ "2365cacf-7bed-11e4-b15d-3c15c2da029e": { "links": [ { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "DS1", - "collection": "datasets", + "collection": "datasets", "id": "23674021-7bed-11e4-aba3-3c15c2da029e" }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "DS2", - "collection": "datasets", + "collection": "datasets", "id": "23675094-7bed-11e4-b08a-3c15c2da029e" }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "G1", - "collection": "groups", - "href": "23676461-7bed-11e4-b95c-3c15c2da029e" + "collection": "groups", + "id": "23676461-7bed-11e4-b95c-3c15c2da029e" } ] }, diff --git a/docs/examples/regionref_attr.json b/docs/examples/regionref_attr.json index 2091a59..94f0185 100644 --- a/docs/examples/regionref_attr.json +++ b/docs/examples/regionref_attr.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", 
"datasets": { "a27c4d1c-7bf7-11e4-8fa1-3c15c2da029e": { "attributes": [ diff --git a/docs/examples/regionref_attr.rst b/docs/examples/regionref_attr.rst index 62d7a08..149b8f0 100644 --- a/docs/examples/regionref_attr.rst +++ b/docs/examples/regionref_attr.rst @@ -2,4 +2,4 @@ An HDF5 Region Reference Datatype (Attribute) ============================================= .. literalinclude:: regionref_attr.json - :language: javascript + :language: json diff --git a/docs/examples/resizable.json b/docs/examples/resizable.json index 1bba8aa..3530592 100644 --- a/docs/examples/resizable.json +++ b/docs/examples/resizable.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "a36df1ba-7bf7-11e4-b0ea-3c15c2da029e": { "dcpl": { @@ -54,7 +54,7 @@ "shape": { "class": "H5S_SIMPLE", "dims": [10], - "maxdims": [H5S_UNLIMITED] + "maxdims": ["H5S_UNLIMITED"] }, "type": { "base": "H5T_STD_I64LE", @@ -66,7 +66,7 @@ "shape": { "class": "H5S_SIMPLE", "dims": [10, 10], - "maxdims": [10, H5S_UNLIMITED] + "maxdims": [10, "H5S_UNLIMITED"] }, "type": { "base": "H5T_STD_I64LE", diff --git a/docs/examples/resizable.rst b/docs/examples/resizable.rst index 5608269..b11fc2e 100644 --- a/docs/examples/resizable.rst +++ b/docs/examples/resizable.rst @@ -2,4 +2,4 @@ A resizable HDF5 Dataset ======================== .. 
literalinclude:: resizable.json - :language: javascript + :language: json diff --git a/docs/examples/sample.json b/docs/examples/sample.json index a72d441..66a3f43 100644 --- a/docs/examples/sample.json +++ b/docs/examples/sample.json @@ -1,51 +1,53 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "id": "e203fee7-89b4-4216-894d-7aef0e3a199d", "root": "903d1d75-e617-4767-a3bf-0cb3ee509027", "groups": { "903d1d75-e617-4767-a3bf-0cb3ee509027": { - "attributes": [{ - "name": "attr1", - "type": { - "class": "H5T_STRING", - "length": 17, - "strPad": "H5T_STR_NULLTERM", - "charSet": "H5T_CSET_ASCII" - }, - "shape": { - "class": "H5S_SCALAR" - }, - "value": "string attribute" - }], + "attributes": [ + { + "name": "attr1", + "type": { + "class": "H5T_STRING", + "length": 17, + "strPad": "H5T_STR_NULLTERM", + "charSet": "H5T_CSET_ASCII" + }, + "shape": { + "class": "H5S_SCALAR" + }, + "value": "string attribute" + } + ], "links": [ { "class": "H5L_TYPE_HARD", "title": "dset1", - "collection": "datasets", + "collection": "datasets", "id": "30292613-8d2a-4dc4-a277-b9d59d5b0d20" }, { "class": "H5L_TYPE_HARD", "title": "group1", - "collection": "groups", + "collection": "groups", "id": "be8dcb22-b411-4439-85e9-ea384a685ae0" }, { "class": "H5L_TYPE_HARD", "title": "group2", - "collection": "groups", + "collection": "groups", "id": "be8dcb22-b411-4439-85e9-ea384a685ae0" }, { "class": "H5L_TYPE_HARD", "title": "dset2", - "collection": "datasets", + "collection": "datasets", "id": "0a68caca-629a-44aa-9f37-311e7ffb8417" }, { "class": "H5L_TYPE_HARD", "title": "dset3", - "collection": "datasets", + "collection": "datasets", "id": "4b43748e-817f-44c6-a9f1-16e242fd374b" }, { @@ -56,18 +58,20 @@ { "class": "H5L_TYPE_HARD", "title": "type1", - "collection": "datatypes", + "collection": "datatypes", "id": "a93ff089-d466-44e7-b3f0-09db34ec2ef5" } ] }, "be8dcb22-b411-4439-85e9-ea384a685ae0": { - "links": [{ - "class": "H5L_TYPE_HARD", - "title": "dset3", - "collection": "datasets", - 
"id": "42f5e3a2-5e70-4faf-9893-fd216257a0d9" - }] + "links": [ + { + "class": "H5L_TYPE_HARD", + "title": "dset3", + "collection": "datasets", + "id": "42f5e3a2-5e70-4faf-9893-fd216257a0d9" + } + ] } }, "datasets": { @@ -155,7 +159,7 @@ ] }, "42f5e3a2-5e70-4faf-9893-fd216257a0d9": { - "type": "a93ff089-d466-44e7-b3f0-09db34ec2ef5", + "type": "datatypes/a93ff089-d466-44e7-b3f0-09db34ec2ef5", "shape": { "class": "H5S_SIMPLE", "dims": [5], diff --git a/docs/examples/scalar.json b/docs/examples/scalar.json index 62db2ea..4d03560 100644 --- a/docs/examples/scalar.json +++ b/docs/examples/scalar.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "41e49e63-7b86-11e4-852f-3c15c2da029e": { "shape": { diff --git a/docs/examples/scalar.rst b/docs/examples/scalar.rst index e8a67f3..2385010 100644 --- a/docs/examples/scalar.rst +++ b/docs/examples/scalar.rst @@ -2,4 +2,4 @@ HDF5 scalar Datasets and Attributes =================================== .. literalinclude:: scalar.json - :language: javascript + :language: json diff --git a/docs/examples/tall.json b/docs/examples/tall.json index b2adb19..e7e9036 100644 --- a/docs/examples/tall.json +++ b/docs/examples/tall.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "7f335a2e-7ab1-11e4-87a5-3c15c2da029e": { "attributes": [ diff --git a/docs/examples/tall.rst b/docs/examples/tall.rst index 78b2b45..c9819a7 100644 --- a/docs/examples/tall.rst +++ b/docs/examples/tall.rst @@ -2,4 +2,4 @@ A sample File ============= .. 
literalinclude:: tall.json - :language: javascript + :language: json diff --git a/docs/examples/tgroup.json b/docs/examples/tgroup.json index be762fe..13bf6a0 100644 --- a/docs/examples/tgroup.json +++ b/docs/examples/tgroup.json @@ -1,107 +1,113 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "groups": { "a6c2fdf8-7bf7-11e4-9882-3c15c2da029e": { "links": [ { - "href": "groups/a6c3f58c-7bf7-11e4-a370-3c15c2da029e", - "title": "g1" - }, + "collection": "groups", + "title": "g1", + "class": "H5L_TYPE_HARD", + "id": "a6c3f58c-7bf7-11e4-a370-3c15c2da029e" + }, { - "href": "groups/a6c42494-7bf7-11e4-ab57-3c15c2da029e", - "title": "g2" - }, + "collection": "groups", + "title": "g2", + "class": "H5L_TYPE_HARD", + "id": "a6c42494-7bf7-11e4-ab57-3c15c2da029e" + }, { - "href": "a6c46ab3-7bf7-11e4-9229-3c15c2da029e", - "title": "g3" + "id": "a6c46ab3-7bf7-11e4-9229-3c15c2da029e", + "title": "g3", + "class": "H5L_TYPE_HARD", + "collection": "groups" } ] - }, + }, "a6c3f58c-7bf7-11e4-a370-3c15c2da029e": { "links": [ { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g1.1", - "collection": "groups", + "collection": "groups", "id": "a6c40ab5-7bf7-11e4-9279-3c15c2da029e" - }, + }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g1.2", - "collection": "groups", + "collection": "groups", "id": "a6c417f5-7bf7-11e4-b5b9-3c15c2da029e" } ] - }, + }, "a6c40ab5-7bf7-11e4-9279-3c15c2da029e": {}, "a6c417f5-7bf7-11e4-b5b9-3c15c2da029e": {}, "a6c42494-7bf7-11e4-ab57-3c15c2da029e": { "links": [ { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g2.1", - "collection": "groups", + "collection": "groups", "id": "a6c43326-7bf7-11e4-ad08-3c15c2da029e" } ] - }, + }, "a6c43326-7bf7-11e4-ad08-3c15c2da029e": { "links": [ { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g2.1.1", - "collection": "groups", + "collection": "groups", "id": "a6c4420a-7bf7-11e4-b535-3c15c2da029e" - }, + }, { - "class": "H5L_TYPE_HARD", + "class": 
"H5L_TYPE_HARD", "title": "g2.1.2", - "collection": "groups", + "collection": "groups", "id": "a6c45070-7bf7-11e4-b666-3c15c2da029e" - }, + }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g2.1.3", - "collection": "groups", + "collection": "groups", "id": "a6c45e26-7bf7-11e4-bb94-3c15c2da029e" } ] - }, + }, "a6c4420a-7bf7-11e4-b535-3c15c2da029e": {}, "a6c45070-7bf7-11e4-b666-3c15c2da029e": {}, "a6c45e26-7bf7-11e4-bb94-3c15c2da029e": {}, "a6c46ab3-7bf7-11e4-9229-3c15c2da029e": { "links": [ { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g3.1", - "collection": "groups", + "collection": "groups", "id": "a6c4782b-7bf7-11e4-b1e0-3c15c2da029e" - }, + }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g3.2", - "collection": "groups", + "collection": "groups", "id": "a6c484ca-7bf7-11e4-973a-3c15c2da029e" - }, + }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g3.3", - "collection": "groups", - "id" "a6c4918f-7bf7-11e4-a8fc-3c15c2da029e" - }, + "collection": "groups", + "id": "a6c4918f-7bf7-11e4-a8fc-3c15c2da029e" + }, { - "class": "H5L_TYPE_HARD", + "class": "H5L_TYPE_HARD", "title": "g3.4", - "collection": "groups", - "href": "a6c49d2b-7bf7-11e4-a6ca-3c15c2da029e" + "collection": "groups", + "id": "a6c49d2b-7bf7-11e4-a6ca-3c15c2da029e" } ] - }, + }, "a6c4782b-7bf7-11e4-b1e0-3c15c2da029e": {}, "a6c484ca-7bf7-11e4-973a-3c15c2da029e": {}, "a6c4918f-7bf7-11e4-a8fc-3c15c2da029e": {}, "a6c49d2b-7bf7-11e4-a6ca-3c15c2da029e": {} - }, + }, "root": "a6c2fdf8-7bf7-11e4-9882-3c15c2da029e" } diff --git a/docs/examples/tgroup.rst b/docs/examples/tgroup.rst index 6d97bee..83d54d0 100644 --- a/docs/examples/tgroup.rst +++ b/docs/examples/tgroup.rst @@ -2,4 +2,4 @@ A few HDF5 Groups ================= .. 
literalinclude:: tgroup.json - :language: javascript + :language: json diff --git a/docs/examples/vlen_dset.json b/docs/examples/vlen_dset.json index f170ee8..7342a5d 100644 --- a/docs/examples/vlen_dset.json +++ b/docs/examples/vlen_dset.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "89dfb414-7b83-11e4-8384-3c15c2da029e": { "shape": { diff --git a/docs/examples/vlen_dset.rst b/docs/examples/vlen_dset.rst index 6a6f8b4..b8bcfe8 100644 --- a/docs/examples/vlen_dset.rst +++ b/docs/examples/vlen_dset.rst @@ -2,4 +2,4 @@ An HDF5 variable-length Datatype (Dataset) ========================================== .. literalinclude:: vlen_dset.json - :language: javascript + :language: json diff --git a/docs/examples/vlen_string_attr.json b/docs/examples/vlen_string_attr.json index cf23194..71f7768 100644 --- a/docs/examples/vlen_string_attr.json +++ b/docs/examples/vlen_string_attr.json @@ -1,5 +1,5 @@ { - "apiVersion": "0.0.0", + "apiVersion": "1.0.0", "datasets": { "a9217e6b-7bf7-11e4-9645-3c15c2da029e": { "attributes": [ @@ -32,8 +32,10 @@ "a9208b38-7bf7-11e4-b1be-3c15c2da029e": { "links": [ { - "href": "datasets/a9217e6b-7bf7-11e4-9645-3c15c2da029e", - "title": "DS1" + "collection": "datasets", + "title": "DS1", + "id": "a9217e6b-7bf7-11e4-9645-3c15c2da029e", + "class": "H5L_TYPE_HARD" } ] } diff --git a/docs/examples/vlen_string_attr.rst b/docs/examples/vlen_string_attr.rst index 9d65552..9ceff1d 100644 --- a/docs/examples/vlen_string_attr.rst +++ b/docs/examples/vlen_string_attr.rst @@ -2,4 +2,4 @@ An HDF5 variable-length String Datatype (Attribute) =================================================== .. 
literalinclude:: vlen_string_attr.json - :language: javascript + :language: json diff --git a/docs/index.rst b/docs/index.rst index 46ca4ea..f81c329 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -1,16 +1,14 @@ - HDF5/JSON ========= -Specification and tools for representing HDF5 in JSON +Specification for representation and tools for manipulation of HDF5 content in +JSON by the `HDF Group `_. -Contents: +What is Here: .. toctree:: :maxdepth: 2 - Installation/index - Utiltities - bnf/index + specs examples/index - + tools/h5json diff --git a/docs/requirements.txt b/docs/requirements.txt new file mode 100644 index 0000000..b880117 --- /dev/null +++ b/docs/requirements.txt @@ -0,0 +1,2 @@ +file:.#egg=h5json +sphinx-book-theme diff --git a/docs/schema/attribute.rst b/docs/schema/attribute.rst new file mode 100644 index 0000000..90d0342 --- /dev/null +++ b/docs/schema/attribute.rst @@ -0,0 +1,5 @@ +Attribute +========= + +.. literalinclude:: ../../h5json/schema/attribute.schema.json + :language: json diff --git a/docs/schema/dataset.rst b/docs/schema/dataset.rst new file mode 100644 index 0000000..a87a2fa --- /dev/null +++ b/docs/schema/dataset.rst @@ -0,0 +1,5 @@ +Dataset +======= + +.. literalinclude:: ../../h5json/schema/dataset.schema.json + :language: json diff --git a/docs/schema/dataspaces.rst b/docs/schema/dataspaces.rst new file mode 100644 index 0000000..5a00c9b --- /dev/null +++ b/docs/schema/dataspaces.rst @@ -0,0 +1,5 @@ +Dataspace +========= + +.. literalinclude:: ../../h5json/schema/dataspaces.schema.json + :language: json diff --git a/docs/schema/datatypes.rst b/docs/schema/datatypes.rst new file mode 100644 index 0000000..4b6f6b4 --- /dev/null +++ b/docs/schema/datatypes.rst @@ -0,0 +1,5 @@ +Datatype +======== + +.. 
literalinclude:: ../../h5json/schema/datatypes.schema.json + :language: json diff --git a/docs/schema/file.rst b/docs/schema/file.rst new file mode 100644 index 0000000..ebca784 --- /dev/null +++ b/docs/schema/file.rst @@ -0,0 +1,7 @@ +File +==== + +Main schema, describing the overall file content as HDF5/JSON. + +.. literalinclude:: ../../h5json/schema/hdf5.schema.json + :language: json diff --git a/docs/schema/filters.rst b/docs/schema/filters.rst new file mode 100644 index 0000000..a360654 --- /dev/null +++ b/docs/schema/filters.rst @@ -0,0 +1,5 @@ +Filter +====== + +.. literalinclude:: ../../h5json/schema/filters.schema.json + :language: json diff --git a/docs/schema/group.rst b/docs/schema/group.rst new file mode 100644 index 0000000..4904e53 --- /dev/null +++ b/docs/schema/group.rst @@ -0,0 +1,5 @@ +Group +===== + +.. literalinclude:: ../../h5json/schema/group.schema.json + :language: json diff --git a/docs/schema/index.rst b/docs/schema/index.rst new file mode 100644 index 0000000..2616507 --- /dev/null +++ b/docs/schema/index.rst @@ -0,0 +1,15 @@ +JSON Schema +=========== + +HDF5/JSON specification as JSON Schema. + +.. toctree:: + :maxdepth: 1 + + file + group + dataset + datatypes + dataspaces + attribute + filters diff --git a/docs/specs.rst b/docs/specs.rst new file mode 100644 index 0000000..a3ac0b5 --- /dev/null +++ b/docs/specs.rst @@ -0,0 +1,8 @@ +Specification +============= + +.. toctree:: + :maxdepth: 1 + + bnf/index + schema/index diff --git a/docs/tools/h5json.rst b/docs/tools/h5json.rst new file mode 100755 index 0000000..8fbed3a --- /dev/null +++ b/docs/tools/h5json.rst @@ -0,0 +1,129 @@ +############## +h5json Package +############## + +This package provides CLI tools for conversion between HDF5 files and HDF5/JSON, +and HDF5/JSON validation. 
+ +Installation +============ + +The standard method using the *pip* tool is recommended:: + + pip install h5json + +If interested in an unreleased version, install directly from the repository:: + + pip install git+https://github.com/HDFGroup/hdf5-json.git@{LABEL} + +``{LABEL}`` is a branch, tag, or commit identifier. *pip* `documentation +`_ explains +available install features. + +For development, create a fork of the `repository +`_ and execute:: + + $ mkdir my-h5json-dev + $ cd my-h5json-dev + $ git clone https://{MYUSERNAME}@github.com/{MYUSERNAME}/hdf5-json.git + $ cd h5json + $ pip install -e . + +`GitHub documentation +`_ +explains this workflow in great detail. + +Verification +------------ + +To verify h5json was installed correctly convert an HDF5 file to HDF5/JSON and +back. Run the following commands: + +.. code-block:: shell + + $ h5tojson sample.h5 > sample.json + $ h5jvalidate sample.json + $ jsontoh5 sample.json new-sample.h5 + +The file ``sample.json`` should contain HDF5/JSON description of the original +file and the file ``new-sample.h5`` should be an HDF5 equivalent to the original +file ``sample.h5``. + + +CLI Tools +========= + +The h5json distribution provides three command-line tools described below. + +jsontoh5 +-------- + +Generate an HDF5 file with the content, storage features, and data described in +an HDF5/JSON file. + +Usage:: + + jsontoh5.py [-h] + +where: + +```` + Input HDF5/JSON file. + +```` + Output HDF5 file that will be created. + +Options: + +-h + Print help message. + +h5tojson +-------- + +Convert the input HDF5 file to its HDF5/JSON representation. Output is printed +to `stdout`. + +Usage:: + + h5tojson [-h] [-D] [-d] + +where: + +```` + HDF5 file. + +Options: + +-h + Print help message. +-D + Suppress all data output. Output HDF5/JSON will not contain any dataset or + attribute values. +-d + Suppress data output for datasets only. + + +h5jvalidate +----------- + +Validate generated HDF5/JSON files against the schema. 
Validation errors are +printed to `stderr`. Command's exit status indicates the overall success (``0``) +or failure (``1``). + +Usage:: + + h5jvalidate [-h|--help] [-s|--stop] JSON_LOC [JSON_LOC ...] + +where: + +``JSON_LOC`` + HDF5/JSON location (files or folders). If a folder, all files with ``.json`` + extension will be selected for validation. + +Options: + +-s, --stop + Stop after first HDF5/JSON file failed validation (default: False) +-h, --help + Print help message. diff --git a/h5json/__init__.py b/h5json/__init__.py index 79791a4..5f7d7bf 100644 --- a/h5json/__init__.py +++ b/h5json/__init__.py @@ -12,7 +12,7 @@ """ - This is the h5json package, a mapping between HDF5 objects and JSON + This is the h5json package, a mapping between HDF5 objects and JSON """ from __future__ import absolute_import @@ -20,4 +20,8 @@ from .hdf5dtype import getTypeResponse from .hdf5dtype import getItemSize from .hdf5dtype import createDataType -from .hdf5db import Hdf5db \ No newline at end of file +from .hdf5db import Hdf5db + +from . import _version + +__version__ = _version.get_versions()["version"] diff --git a/h5json/_version.py b/h5json/_version.py new file mode 100644 index 0000000..d71a213 --- /dev/null +++ b/h5json/_version.py @@ -0,0 +1,657 @@ +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. 
Generated by +# versioneer-0.20 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). + git_refnames = "$Format:%d$" + git_full = "$Format:%H$" + git_date = "$Format:%ci$" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: # pylint: disable=too-few-public-methods + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "pep440" + cfg.tag_prefix = "" + cfg.parentdir_prefix = "None" + cfg.versionfile_source = "h5json/_version.py" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + + return decorate + + +# pylint:disable=too-many-arguments,consider-using-with # noqa +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use 
git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s*" % tag_prefix, + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. 
+ branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. 
+ date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post0.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post0.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. 
+ + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for _ in cfg.versionfile_source.split("/"): + root = os.path.dirname(root) + except NameError: + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None, + } + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } diff --git a/h5json/apiversion.py b/h5json/apiversion.py new file mode 100644 index 0000000..3df8bea --- /dev/null +++ b/h5json/apiversion.py @@ -0,0 +1,15 @@ +############################################################################## +# Copyright by The HDF Group. # +# All rights reserved. # +# # +# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # +# Utilities. The full HDF5 REST Server copyright notice, including # +# terms governing use, modification, and redistribution, is contained in # +# the file COPYING, which can be found at the root of the source code # +# distribution tree. If you do not have access to this file, you may # +# request a copy from help@hdfgroup.org. # +############################################################################## + +# IMPORTANT: HDF5/JSON apiVersion key value is set here. Update only when the +# HDF5/JSON spec changes. 
+_apiver = "1.1.1" diff --git a/h5json/h5tojson/__init__.py b/h5json/h5tojson/__init__.py new file mode 100644 index 0000000..36dc58d --- /dev/null +++ b/h5json/h5tojson/__init__.py @@ -0,0 +1,11 @@ +############################################################################## +# Copyright by The HDF Group. # +# All rights reserved. # +# # +# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # +# Utilities. The full HDF5 REST Server copyright notice, including # +# terms governing use, modification, and redistribution, is contained in # +# the file COPYING, which can be found at the root of the source code # +# distribution tree. If you do not have access to this file, you may # +# request a copy from help@hdfgroup.org. # +############################################################################## diff --git a/h5tojson/h5tojson.py b/h5json/h5tojson/h5tojson.py similarity index 62% rename from h5tojson/h5tojson.py rename to h5json/h5tojson/h5tojson.py index 83a2348..89a65bd 100755 --- a/h5tojson/h5tojson.py +++ b/h5json/h5tojson/h5tojson.py @@ -14,19 +14,17 @@ import argparse import os.path as op import tempfile - import logging import logging.handlers - from h5json import Hdf5db from h5json import hdf5dtype -""" -DumpJson - return json representation of all objects within the given file -""" - class DumpJson: + """ + DumpJson - return json representation of all objects within the given file + """ + def __init__(self, db, app_logger=None, options=None): self.options = options self.db = db @@ -39,63 +37,61 @@ def __init__(self, db, app_logger=None, options=None): def dumpAttribute(self, col_name, uuid, attr_name): self.log.info("dumpAttribute: [" + attr_name + "]") item = self.db.getAttributeItem(col_name, uuid, attr_name) - response = { 'name': attr_name } - typeItem = item['type'] - response['type'] = hdf5dtype.getTypeResponse(typeItem) - response['shape'] = item['shape'] + response = {"name": attr_name} + typeItem = item["type"] + 
response["type"] = hdf5dtype.getTypeResponse(typeItem) + response["shape"] = item["shape"] if not self.options.D: - if 'value' not in item: + if "value" not in item: self.log.warning("no value key in attribute: " + attr_name) else: - response['value'] = item['value'] # dump values unless header -D was passed + response["value"] = item[ + "value" + ] # dump values unless header -D was passed return response - def dumpAttributes(self, col_name, uuid): attr_list = self.db.getAttributeItems(col_name, uuid) self.log.info("dumpAttributes: " + uuid) items = [] for attr in attr_list: - item = self.dumpAttribute(col_name, uuid, attr['name']) + item = self.dumpAttribute(col_name, uuid, attr["name"]) items.append(item) return items def dumpLink(self, uuid, name): item = self.db.getLinkItemByUuid(uuid, name) - for key in ('ctime', 'mtime', 'href'): + for key in ("ctime", "mtime", "href"): if key in item: del item[key] return item - def dumpLinks(self, uuid): link_list = self.db.getLinkItems(uuid) items = [] for link in link_list: - item = self.dumpLink(uuid, link['title']) + item = self.dumpLink(uuid, link["title"]) items.append(item) return items - def dumpGroup(self, uuid): item = self.db.getGroupItemByUuid(uuid) - if 'alias' in item: - alias = item['alias'] + if "alias" in item: + alias = item["alias"] if alias: self.log.info("dumpGroup alias: [" + alias[0] + "]") - for key in ('ctime', 'mtime', 'linkCount', 'attributeCount', 'id'): + for key in ("ctime", "mtime", "linkCount", "attributeCount", "id"): if key in item: del item[key] - attributes = self.dumpAttributes('groups', uuid) + attributes = self.dumpAttributes("groups", uuid) if attributes: - item['attributes'] = attributes + item["attributes"] = attributes links = self.dumpLinks(uuid) if links: - item['links'] = links + item["links"] = links return item - def dumpGroups(self): groups = {} item = self.dumpGroup(self.root_uuid) @@ -105,53 +101,51 @@ def dumpGroups(self): item = self.dumpGroup(uuid) groups[uuid] = item - 
self.json['groups'] = groups - + self.json["groups"] = groups def dumpDataset(self, uuid): - response = { } + response = {} self.log.info("dumpDataset: " + uuid) item = self.db.getDatasetItemByUuid(uuid) - if 'alias' in item: - alias = item['alias'] + if "alias" in item: + alias = item["alias"] if alias: self.log.info("dumpDataset alias: [" + alias[0] + "]") - response['alias'] = item['alias'] + response["alias"] = item["alias"] - typeItem = item['type'] - response['type'] = hdf5dtype.getTypeResponse(typeItem) - shapeItem = item['shape'] + typeItem = item["type"] + response["type"] = hdf5dtype.getTypeResponse(typeItem) + shapeItem = item["shape"] shape_rsp = {} num_elements = 1 - shape_rsp['class'] = shapeItem['class'] - if 'dims' in shapeItem: - shape_rsp['dims'] = shapeItem['dims'] - for dim in shapeItem['dims']: + shape_rsp["class"] = shapeItem["class"] + if "dims" in shapeItem: + shape_rsp["dims"] = shapeItem["dims"] + for dim in shapeItem["dims"]: num_elements *= dim - if 'maxdims' in shapeItem: + if "maxdims" in shapeItem: maxdims = [] - for dim in shapeItem['maxdims']: + for dim in shapeItem["maxdims"]: if dim == 0: - maxdims.append('H5S_UNLIMITED') + maxdims.append("H5S_UNLIMITED") else: maxdims.append(dim) - shape_rsp['maxdims'] = maxdims - response['shape'] = shape_rsp + shape_rsp["maxdims"] = maxdims + response["shape"] = shape_rsp - if 'creationProperties' in item: - response['creationProperties'] = item['creationProperties'] + if "creationProperties" in item: + response["creationProperties"] = item["creationProperties"] - attributes = self.dumpAttributes('datasets', uuid) + attributes = self.dumpAttributes("datasets", uuid) if attributes: - response['attributes'] = attributes - + response["attributes"] = attributes if not (self.options.D or self.options.d): if num_elements > 0: value = self.db.getDatasetValuesByUuid(uuid) - response['value'] = value # dump values unless header flag was passed + response["value"] = value # dump values unless header flag 
was passed else: - response['value'] = [] # empty list + response["value"] = [] # empty list return response def dumpDatasets(self): @@ -162,20 +156,19 @@ def dumpDatasets(self): item = self.dumpDataset(uuid) datasets[uuid] = item - self.json['datasets'] = datasets + self.json["datasets"] = datasets def dumpDatatype(self, uuid): - response = { } + response = {} item = self.db.getCommittedTypeItemByUuid(uuid) - response['alias'] = item['alias'] - typeItem = item['type'] - response['type'] = hdf5dtype.getTypeResponse(typeItem) - attributes = self.dumpAttributes('datatypes', uuid) + response["alias"] = item["alias"] + typeItem = item["type"] + response["type"] = hdf5dtype.getTypeResponse(typeItem) + attributes = self.dumpAttributes("datatypes", uuid) if attributes: - response['attributes'] = attributes + response["attributes"] = attributes return response - def dumpDatatypes(self): uuids = self.db.getCollection("datatypes") if uuids: @@ -184,41 +177,45 @@ def dumpDatatypes(self): item = self.dumpDatatype(uuid) datatypes[uuid] = item - self.json['datatypes'] = datatypes + self.json["datatypes"] = datatypes def dumpFile(self): - self.root_uuid = self.db.getUUIDByPath('/') + self.root_uuid = self.db.getUUIDByPath("/") db_version_info = self.db.getVersionInfo() - self.json['apiVersion'] = db_version_info['hdf5-json-version'] - self.json['root'] = self.root_uuid + self.json["apiVersion"] = db_version_info["hdf5-json-version"] + self.json["root"] = self.root_uuid self.dumpGroups() self.dumpDatasets() self.dumpDatatypes() - + print(json.dumps(self.json, sort_keys=True, indent=4)) -""" - Generate a temporary filename to avoid problems with trying to create a dbfile - in a read-only directory. (See: https://github.com/HDFGroup/h5serv/issues/37) -""" + def getTempFileName(): + """ + Generate a temporary filename to avoid problems with trying to create a dbfile + in a read-only directory. 
(See: https://github.com/HDFGroup/h5serv/issues/37) + """ f = tempfile.NamedTemporaryFile(delete=False) f.close() return f.name def main(): - parser = argparse.ArgumentParser(usage='%(prog)s [-h] [-D|-d] ') - parser.add_argument('-D', action='store_true', help='surpress all data output') - parser.add_argument('-d', action='store_true', help='surpress data output for' + - ' datasets (but not attribute values)') - parser.add_argument('filename', nargs='+', help='HDF5 to be converted to json') + parser = argparse.ArgumentParser(usage="%(prog)s [-h] [-D|-d] ") + parser.add_argument("-D", action="store_true", help="surpress all data output") + parser.add_argument( + "-d", + action="store_true", + help="surpress data output for" + " datasets (but not attribute values)", + ) + parser.add_argument("filename", nargs="+", help="HDF5 to be converted to json") args = parser.parse_args() # create logger @@ -226,7 +223,7 @@ def main(): # log.setLevel(logging.WARN) log.setLevel(logging.INFO) # add log handler - handler = logging.FileHandler('./h5tojson.log') + handler = logging.FileHandler("./h5tojson.log") # add handler to logger log.addHandler(handler) @@ -244,4 +241,5 @@ def main(): dumper.dumpFile() -main() +if __name__ == "__main__": + main() diff --git a/h5json/hdf5db.py b/h5json/hdf5db.py index 9075d3b..314dfcc 100644 --- a/h5json/hdf5db.py +++ b/h5json/hdf5db.py @@ -9,62 +9,6 @@ # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. # ############################################################################## - -from __future__ import absolute_import - -import six - -if six.PY3: - unicode = str - - - -""" -This class is used to manage UUID lookup tables for primary HDF objects (Groups, Datasets, - and Datatypes). For HDF5 files that are read/write, this information is managed within - the file itself in the "__db__" group. 
For read-only files, the data is managed in - an external file (domain filename with ".db" extension). - - "___db__" ("root" for read-only case) - description: Group object (member of root group). Only objects below this group are used - for UUID data - members: "{groups}", "{datasets}", "{datatypes}", "{objects}", "{paths}" - attrs: 'rootUUID': UUID of the root group - -"{groups}" - description: contains map of UUID->group objects - members: hard link to each anonymous group (i.e. groups which are not - linked to by anywhere else). Link name is the UUID - attrs: group reference (or path for read-only files) to the group (for non- - anonymous groups). - -"{datasets}" - description: contains map of UUID->dataset objects - members: hard link to each anonymous dataset (i.e. datasets which are not - linked to by anywhere else). Link name is the UUID - attrs: dataset reference (or path for read-only files) to the dataset (for non- - anonymous datasets). - -"{dataset_props}: - description contains dataset creation properties" - members: sub-group with link name as UUID. Sub-group attributes are the creation props - -"{datatypes}" - description: contains map of UUID->datatyped objects - members: hard link to each anonymous datatype (i.e. datatypes which are not - linked to by anywhere else). Link name is the UUID - attrs: datatype reference (or path for read-only files) to the datatype (for non- - anonymous datatypes). - -"{addr}" - description: contains map of file offset to UUID. 
- members: none - attrs: map of file offset to UUID - - - - -""" import errno import time import h5py @@ -74,8 +18,9 @@ import os import json import logging +from .hdf5dtype import getTypeItem, createDataType, getItemSize +from .apiversion import _apiver -from .hdf5dtype import getTypeItem, createDataType, getItemSize # global dictionary to direct back to the Hdf5db instance by filename # (needed for visititems callback) @@ -86,28 +31,44 @@ # standard compress filters _HDF_FILTERS = { - 1: {'class': 'H5Z_FILTER_DEFLATE', 'alias': 'gzip', 'options': ['level']}, - 2: {'class': 'H5Z_FILTER_SHUFFLE', 'alias': 'shuffle'}, - 3: {'class': 'H5Z_FILTER_FLETCHER32', 'alias': 'fletcher32'}, - 4: {'class': 'H5Z_FILTER_SZIP', 'alias': 'szip', 'options': ['bitsPerPixel', 'coding', 'pixelsPerBlock', 'pixelsPerScanLine']}, - 5: {'class': 'H5Z_FILTER_NBIT'}, - 6: {'class': 'H5Z_FILTER_SCALEOFFSET', 'alias': 'scaleoffset', 'options': ['scaleType']}, - 32000: {'class': 'H5Z_FILTER_LZF', 'alias': 'lzf'} + 1: {"class": "H5Z_FILTER_DEFLATE", "alias": "gzip", "options": ["level"]}, + 2: {"class": "H5Z_FILTER_SHUFFLE", "alias": "shuffle"}, + 3: {"class": "H5Z_FILTER_FLETCHER32", "alias": "fletcher32"}, + 4: { + "class": "H5Z_FILTER_SZIP", + "alias": "szip", + "options": ["bitsPerPixel", "coding", "pixelsPerBlock", "pixelsPerScanLine"], + }, + 5: {"class": "H5Z_FILTER_NBIT"}, + 6: { + "class": "H5Z_FILTER_SCALEOFFSET", + "alias": "scaleoffset", + "options": ["scaleType", "scaleOffset"], + }, + 32000: {"class": "H5Z_FILTER_LZF", "alias": "lzf"}, } -_HDF_FILTER_OPTION_ENUMS = {'coding': {h5py.h5z.SZIP_EC_OPTION_MASK: 'H5_SZIP_EC_OPTION_MASK', - h5py.h5z.SZIP_NN_OPTION_MASK: 'H5_SZIP_NN_OPTION_MASK'}, - 'scaleType': {h5py.h5z.SO_FLOAT_DSCALE: 'H5Z_SO_FLOAT_DSCALE', - h5py.h5z.SO_FLOAT_ESCALE: 'H5Z_SO_FLOAT_ESCALE', - h5py.h5z.SO_INT: 'H5Z_SO_INT'}} +_HDF_FILTER_OPTION_ENUMS = { + "coding": { + h5py.h5z.SZIP_EC_OPTION_MASK: "H5_SZIP_EC_OPTION_MASK", + h5py.h5z.SZIP_NN_OPTION_MASK: 
"H5_SZIP_NN_OPTION_MASK", + }, + "scaleType": { + h5py.h5z.SO_FLOAT_DSCALE: "H5Z_SO_FLOAT_DSCALE", + h5py.h5z.SO_FLOAT_ESCALE: "H5Z_SO_FLOAT_ESCALE", + h5py.h5z.SO_INT: "H5Z_SO_INT", + }, +} # h5py supported filters -_H5PY_FILTERS = {'gzip': 1, - 'shuffle': 2, - 'fletcher32': 3, - 'szip': 4, - 'scaleoffset': 6, - 'lzf': 32000} +_H5PY_FILTERS = { + "gzip": 1, + "shuffle": 2, + "fletcher32": 3, + "szip": 4, + "scaleoffset": 6, + "lzf": 32000, +} _H5PY_COMPRESSION_FILTERS = ("gzip", "lzf", "szip") @@ -118,27 +79,76 @@ def visitObj(path, obj): class Hdf5db: + """ + This class is used to manage UUID lookup tables for primary HDF objects (Groups, Datasets, + and Datatypes). For HDF5 files that are read/write, this information is managed within + the file itself in the "__db__" group. For read-only files, the data is managed in + an external file (domain filename with ".db" extension). + + "___db__" ("root" for read-only case) + description: Group object (member of root group). Only objects below this group are used + for UUID data + members: "{groups}", "{datasets}", "{datatypes}", "{objects}", "{paths}" + attrs: 'rootUUID': UUID of the root group + + "{groups}" + description: contains map of UUID->group objects + members: hard link to each anonymous group (i.e. groups which are not + linked to by anywhere else). Link name is the UUID + attrs: group reference (or path for read-only files) to the group (for non- + anonymous groups). + + "{datasets}" + description: contains map of UUID->dataset objects + members: hard link to each anonymous dataset (i.e. datasets which are not + linked to by anywhere else). Link name is the UUID + attrs: dataset reference (or path for read-only files) to the dataset (for non- + anonymous datasets). + + "{dataset_props}: + description contains dataset creation properties" + members: sub-group with link name as UUID. 
Sub-group attributes are the creation props + + "{datatypes}" + description: contains map of UUID->datatyped objects + members: hard link to each anonymous datatype (i.e. datatypes which are not + linked to by anywhere else). Link name is the UUID + attrs: datatype reference (or path for read-only files) to the datatype (for non- + anonymous datatypes). + + "{addr}" + description: contains map of file offset to UUID. + members: none + attrs: map of file offset to UUID + """ @staticmethod def createHDF5File(filePath): # create an "empty" hdf5 file - if op.isfile(filePath): - raise IOError(errno.EEXIST, "Resource already exists") + # if op.isfile(filePath): + # raise IOError(errno.EEXIST, "Resource already exists") - f = h5py.File(filePath, 'w') + f = h5py.File(filePath, "w") f.close() @staticmethod def getVersionInfo(): versionInfo = {} - versionInfo['hdf5-json-version'] = "1.1.0" # todo - have this auto-synch with package version - versionInfo['h5py_version'] = h5py.version.version - versionInfo['hdf5_version'] = h5py.version.hdf5_version + versionInfo["hdf5-json-version"] = _apiver + versionInfo["h5py_version"] = h5py.version.version + versionInfo["hdf5_version"] = h5py.version.hdf5_version return versionInfo - def __init__(self, filePath, dbFilePath=None, readonly=False, - app_logger=None, root_uuid=None, update_timestamps=True, - userid=None): + def __init__( + self, + filePath, + dbFilePath=None, + readonly=False, + app_logger=None, + root_uuid=None, + update_timestamps=True, + userid=None, + ): if app_logger: self.log = app_logger else: @@ -148,7 +158,7 @@ def __init__(self, filePath, dbFilePath=None, readonly=False, if not h5py.is_hdf5(filePath): raise IOError(errno.EINVAL, "not an HDF5 file") - mode = 'r' + mode = "r" if readonly: self.readonly = True else: @@ -156,15 +166,14 @@ def __init__(self, filePath, dbFilePath=None, readonly=False, # file is read-only self.readonly = True else: - mode = 'r+' + mode = "r+" self.readonly = False - self.log.info("init 
-- filePath: " + filePath + " mode: " + mode) self.update_timestamps = update_timestamps - self.f = h5py.File(filePath, mode, libver='latest') + self.f = h5py.File(filePath, mode, libver="latest") self.root_uuid = root_uuid @@ -176,12 +185,12 @@ def __init__(self, filePath, dbFilePath=None, readonly=False, dirname = op.dirname(self.f.filename) basename = op.basename(self.f.filename) if len(dirname) > 0: - dbFilePath = dirname + '/.' + basename + dbFilePath = dirname + "/." + basename else: - dbFilePath = '.' + basename - dbMode = 'r+' + dbFilePath = "." + basename + dbMode = "r+" if not op.isfile(dbFilePath): - dbMode = 'w' + dbMode = "w" self.log.info("dbFilePath: " + dbFilePath + " mode: " + dbMode) self.dbf = h5py.File(dbFilePath, dbMode) else: @@ -191,11 +200,11 @@ def __init__(self, filePath, dbFilePath=None, readonly=False, _db[filePath] = self def __enter__(self): - self.log.info('Hdf5db __enter') + self.log.info("Hdf5db __enter") return self def __exit__(self, type, value, traceback): - self.log.info('Hdf5db __exit') + self.log.info("Hdf5db __exit") filename = self.f.filename self.f.flush() self.f.close() @@ -236,6 +245,7 @@ def getTimeStampName(self, uuid, objType="object", name=None): Note - should only be called once per object """ + def setCreateTime(self, uuid, objType="object", name=None, timestamp=None): if not self.update_timestamps: return @@ -245,7 +255,7 @@ def setCreateTime(self, uuid, objType="object", name=None, timestamp=None): timestamp = time.time() if ts_name in ctime_grp.attrs: self.log.warning("modifying create time for object: " + ts_name) - ctime_grp.attrs.create(ts_name, timestamp, dtype='int64') + ctime_grp.attrs.create(ts_name, timestamp, dtype="int64") """ getCreateTime - gets the create time timestamp for the @@ -257,6 +267,7 @@ def setCreateTime(self, uuid, objType="object", name=None, timestamp=None): returns - create time for object, or create time for root if not set """ + def getCreateTime(self, uuid, objType="object", 
name=None, useRoot=True): ctime_grp = self.dbGrp["{ctime}"] ts_name = self.getTimeStampName(uuid, objType, name) @@ -281,6 +292,7 @@ def getCreateTime(self, uuid, objType="object", name=None, useRoot=True): returns - nothing """ + def setModifiedTime(self, uuid, objType="object", name=None, timestamp=None): if not self.update_timestamps: return @@ -288,7 +300,7 @@ def setModifiedTime(self, uuid, objType="object", name=None, timestamp=None): ts_name = self.getTimeStampName(uuid, objType, name) if timestamp is None: timestamp = time.time() - mtime_grp.attrs.create(ts_name, timestamp, dtype='int64') + mtime_grp.attrs.create(ts_name, timestamp, dtype="int64") """ getModifiedTime - gets the modified time timestamp for the @@ -300,6 +312,7 @@ def setModifiedTime(self, uuid, objType="object", name=None, timestamp=None): returns - create time for object, or create time for root if not set """ + def getModifiedTime(self, uuid, objType="object", name=None, useRoot=True): mtime_grp = self.dbGrp["{mtime}"] ts_name = self.getTimeStampName(uuid, objType, name) @@ -321,6 +334,7 @@ def getModifiedTime(self, uuid, objType="object", name=None, useRoot=True): getAclGroup - return the db group "{acl}" if present, otherwise return None """ + def getAclGroup(self, create=False): if not self.dbGrp: return None # file not initialized @@ -334,21 +348,23 @@ def getAclGroup(self, create=False): """ getAclDtype - return detype for ACL """ + def getAclDtype(self): fields = [] - fields.append(('userid', np.int32)) - fields.append(('create', np.int8)) - fields.append(('read', np.int8)) - fields.append(('update', np.int8)) - fields.append(('delete', np.int8)) - fields.append(('readACL', np.int8)) - fields.append(('updateACL', np.int8)) + fields.append(("userid", np.int32)) + fields.append(("create", np.int8)) + fields.append(("read", np.int8)) + fields.append(("update", np.int8)) + fields.append(("delete", np.int8)) + fields.append(("readACL", np.int8)) + fields.append(("updateACL", np.int8)) dt 
= np.dtype(fields) return dt """ getAclDataset - return ACL datset for given uuid """ + def getAclDataset(self, obj_uuid, create=False): acl_group = self.getAclGroup(create=create) @@ -369,6 +385,7 @@ def getAclDataset(self, obj_uuid, create=False): """ getNumAcls - return number of acls associatted with given uuid """ + def getNumAcls(self, obj_uuid): acl_group = self.getAclGroup() if acl_group is None: @@ -381,6 +398,7 @@ def getNumAcls(self, obj_uuid): """ convertAclNdArrayToDict - helper function - return acl item to dict """ + def convertAclNdArrayToDict(self, acl_ndarray): fields = acl_ndarray.dtype.fields.keys() acl = {} @@ -389,34 +407,33 @@ def convertAclNdArrayToDict(self, acl_ndarray): acl[field] = value return acl - """ - Get default acl - returns dict obj - """ def getDefaultAcl(self): + """Get default acl - returns dict obj""" + dt = self.getAclDtype() acl = {} for field in dt.fields.keys(): - if field == 'userid': + if field == "userid": acl[field] = 0 else: acl[field] = 1 # default is allowed return acl - """ - getAcl - return ACL for given uuid and userid - returns ACL associated with the given uuid, or if none exists, - the ACL associatted with the root group. - - If an ACL is not present for a userid/obj and ACL will be returned - via the following precedence: - - 1) obj_uuid, user_id - 2) root_uuid, user_id - 3) obj_uuid, 0 - 4) root_uuid, 0 - 5) 'all perm' ACL - """ def getAcl(self, obj_uuid, userid): + """ + getAcl - return ACL for given uuid and userid + returns ACL associated with the given uuid, or if none exists, + the ACL associatted with the root group. 
+ + If an ACL is not present for a userid/obj and ACL will be returned + via the following precedence: + + 1) obj_uuid, user_id + 2) root_uuid, user_id + 3) obj_uuid, 0 + 4) root_uuid, 0 + 5) 'all perm' ACL + """ acl_grp = self.getAclGroup() if acl_grp is not None: @@ -447,12 +464,11 @@ def getAcl(self, obj_uuid, userid): return acl - """ - get ACL for specific uuid and user - return None if not found - """ def getAclByObjAndUser(self, obj_uuid, userid): - + """ + get ACL for specific uuid and user + return None if not found + """ acl = None acl_dset = self.getAclDataset(obj_uuid) @@ -463,7 +479,7 @@ def getAclByObjAndUser(self, obj_uuid, userid): acl = None for i in range(num_acls): item = acls[i] - if item['userid'] == userid: + if item["userid"] == userid: acl = item break @@ -471,12 +487,10 @@ def getAclByObjAndUser(self, obj_uuid, userid): acl = self.convertAclNdArrayToDict(acl) return acl - """ - getAcls - get all acls for given uuid - """ - def getAcls(self, obj_uuid): - + """ + getAcls - get all acls for given uuid + """ acls = [] acl_dset = self.getAclDataset(obj_uuid) @@ -491,10 +505,10 @@ def getAcls(self, obj_uuid): return acls - """ - setAcl - set the acl for given uuid. - """ def setAcl(self, obj_uuid, acl): + """ + setAcl - set the acl for given uuid. + """ acl_dset = self.getAclDataset(obj_uuid, create=True) if acl_dset is None: @@ -502,7 +516,7 @@ def setAcl(self, obj_uuid, acl): self.log.error(msg) raise IOError(errno.EIO, msg) - userid = acl['userid'] + userid = acl["userid"] # iterate through elements, looking for user_id acls = acl_dset[...] 
@@ -512,14 +526,14 @@ def setAcl(self, obj_uuid, acl): for i in range(num_acls): item = acls[i] - if item['userid'] == userid: + if item["userid"] == userid: # update this element user_index = i break if user_index is None: # userid not found - add row - acl_dset.resize(((num_acls+1),)) + acl_dset.resize(((num_acls + 1),)) user_index = num_acls # update the acl dataset @@ -552,9 +566,9 @@ def initFile(self): self.dbGrp.create_group("{groups}") self.dbGrp.create_group("{datasets}") self.dbGrp.create_group("{datatypes}") - self.dbGrp.create_group("{addr}") # store object address - self.dbGrp.create_group("{ctime}") # stores create timestamps - self.dbGrp.create_group("{mtime}") # store modified timestamps + self.dbGrp.create_group("{addr}") # store object address + self.dbGrp.create_group("{ctime}") # stores create timestamps + self.dbGrp.create_group("{mtime}") # store modified timestamps mtime = op.getmtime(self.f.filename) ctime = mtime @@ -565,15 +579,15 @@ def initFile(self): def visit(self, path, obj): name = obj.__class__.__name__ - if len(path) >= 6 and path[:6] == '__db__': + if len(path) >= 6 and path[:6] == "__db__": return # don't include the db objects - self.log.info('visit: ' + path + ' name: ' + name) + self.log.info("visit: " + path + " name: " + name) col = None - if name == 'Group': + if name == "Group": col = self.dbGrp["{groups}"].attrs - elif name == 'Dataset': + elif name == "Dataset": col = self.dbGrp["{datasets}"].attrs - elif name == 'Datatype': + elif name == "Datatype": col = self.dbGrp["{datatypes}"].attrs else: msg = "Unknown object type: " + __name__ + " found during scan of HDF5 file" @@ -586,7 +600,7 @@ def visit(self, path, obj): # storing db in the file itself, so we can link to the object directly col[id] = obj.ref # save attribute ref to object else: - #store path to object + # store path to object col[id] = obj.name addr = h5py.h5o.get_info(obj.id).addr # store reverse map as an attribute @@ -609,21 +623,26 @@ def 
getDatasetCreationProps(self, dset_uuid): try: prop_list = json.loads(prop_str) except ValueError as ve: - msg = "Unable to load creation properties for dataset:[" + dset_uuid + "]: " + ve.message + msg = ( + "Unable to load creation properties for dataset:[" + + dset_uuid + + "]: " + + ve.message + ) self.log.error(msg) raise IOError(errno.EIO, msg) # fill in Filter class values - if 'filters' in prop_list: - prop_filters = prop_list['filters'] + if "filters" in prop_list: + prop_filters = prop_list["filters"] for prop_filter in prop_filters: - if 'class' not in prop_filter: - filter_id = prop_filter['id'] + if "class" not in prop_filter: + filter_id = prop_filter["id"] if filter_id in _HDF_FILTERS: hdf_filter = _HDF_FILTERS[filter_id] - prop_filter['class'] = hdf_filter['class'] + prop_filter["class"] = hdf_filter["class"] else: - prop_filter['class'] = 'H5Z_FILTER_USER' + prop_filter["class"] = "H5Z_FILTER_USER" return prop_list @@ -631,7 +650,7 @@ def getDatasetCreationProps(self, dset_uuid): # Set dataset creation property # def setDatasetCreationProps(self, dset_uuid, prop_dict): - self.log.info('setDataProp([' + dset_uuid + ']') + self.log.info("setDataProp([" + dset_uuid + "]") if not prop_dict: # just ignore if empty dictionary return @@ -640,7 +659,11 @@ def setDatasetCreationProps(self, dset_uuid, prop_dict): dbPropsGrp = self.dbGrp["{dataset_props}"] if dset_uuid in dbPropsGrp.attrs: # this should be write once - msg = "Unexpected error setting dataset creation properties for dataset:[" + dset_uuid + "]" + msg = ( + "Unexpected error setting dataset creation properties for dataset:[" + + dset_uuid + + "]" + ) self.log.error(msg) raise IOError(errno.EIO, msg) prop_str = json.dumps(prop_dict) @@ -656,13 +679,13 @@ def getUUIDByAddress(self, addr): obj_uuid = addrGrp.attrs[str(addr)] if obj_uuid and type(obj_uuid) is not str: # convert bytes to unicode - obj_uuid = obj_uuid.decode('utf-8') + obj_uuid = obj_uuid.decode("utf-8") return obj_uuid - """ - Get 
the number of links in a group to an object - """ def getNumLinksToObjectInGroup(self, grp, obj): + """ + Get the number of links in a group to an object + """ objAddr = h5py.h5o.get_info(obj.id).addr numLinks = 0 for name in grp: @@ -679,10 +702,10 @@ def getNumLinksToObjectInGroup(self, grp, obj): return numLinks - """ - Get the number of links to the given object - """ def getNumLinksToObject(self, obj): + """ + Get the number of links to the given object + """ self.initFile() groups = self.dbGrp["{groups}"] numLinks = 0 @@ -711,16 +734,16 @@ def getNumLinksToObject(self, obj): def getUUIDByPath(self, path): self.initFile() self.log.info("getUUIDByPath: [" + path + "]") - if len(path) >= 6 and path[:6] == '__db__': + if len(path) >= 6 and path[:6] == "__db__": msg = "getUUIDByPath called with invalid path: [" + path + "]" self.log.error(msg) raise IOError(errno.EIO, msg) - if path == '/': + if path == "/": # just return the root UUID root_uuid = self.dbGrp.attrs["rootUUID"] if root_uuid and type(root_uuid) is not str: # convert bytes to unicode - root_uuid = root_uuid.decode('utf-8') + root_uuid = root_uuid.decode("utf-8") return root_uuid obj = self.f[path] # will throw KeyError if object doesn't exist @@ -729,22 +752,22 @@ def getUUIDByPath(self, path): return obj_uuid def getObjByPath(self, path): - if len(path) >= 6 and path[:6] == '__db__': - return None # don't include the db objects + if len(path) >= 6 and path[:6] == "__db__": + return None # don't include the db objects obj = self.f[path] # will throw KeyError if object doesn't exist return obj def getObjectByUuid(self, col_type, obj_uuid): - #col_type should be either "datasets", "groups", or "datatypes" + # col_type should be either "datasets", "groups", or "datatypes" if col_type not in ("datasets", "groups", "datatypes"): msg = "Unexpectd error, invalid col_type: [" + col_type + "]" self.log.error(msg) raise IOError(errno.EIO, msg) if col_type == "groups" and obj_uuid == 
self.dbGrp.attrs["rootUUID"]: - return self.f['/'] # returns root group + return self.f["/"] # returns root group obj = None # Group, Dataset, or Datatype - col_name = '{' + col_type + '}' + col_name = "{" + col_type + "}" # get the collection group for this collection type col = self.dbGrp[col_name] if obj_uuid in col.attrs: @@ -774,64 +797,65 @@ def getGroupObjByUuid(self, obj_uuid): def getDatasetTypeItemByUuid(self, obj_uuid): dset = self.getDatasetObjByUuid(obj_uuid) # throws exception if not found - item = { 'id': obj_uuid } - item['type'] = getTypeItem(dset.dtype) + item = {"id": obj_uuid} + item["type"] = getTypeItem(dset.dtype) if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = self.getModifiedTime(obj_uuid) + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = self.getModifiedTime(obj_uuid) return item - """ - getNullReference - return a null object reference - """ def getNullReference(self): + """ + getNullReference - return a null object reference + """ tmpGrp = None if "{tmp}" not in self.dbGrp: tmpGrp = self.dbGrp.create_group("{tmp}") else: tmpGrp = self.dbGrp["{tmp}"] - if 'nullref' not in tmpGrp: + if "nullref" not in tmpGrp: dt = h5py.special_dtype(ref=h5py.Reference) - tmpGrp.create_dataset('nullref', (1,), dtype=dt) - nullref_dset = tmpGrp['nullref'] + tmpGrp.create_dataset("nullref", (1,), dtype=dt) + nullref_dset = tmpGrp["nullref"] return nullref_dset[0] - """ - getNullRegionReference - return a null region reference - """ def getNullRegionReference(self): + """ + getNullRegionReference - return a null region reference + """ tmpGrp = None if "{tmp}" not in self.dbGrp: tmpGrp = self.dbGrp.create_group("{tmp}") else: tmpGrp = self.dbGrp["{tmp}"] - if 'nullregref' not in tmpGrp: + if "nullregref" not in tmpGrp: dt = h5py.special_dtype(ref=h5py.RegionReference) - tmpGrp.create_dataset('nullregref', (1,), dtype=dt) - nullregref_dset = tmpGrp['nullregref'] + tmpGrp.create_dataset("nullregref", 
(1,), dtype=dt) + nullregref_dset = tmpGrp["nullregref"] return nullregref_dset[0] def getShapeItemByDsetObj(self, obj): item = {} if obj.shape is None: # new with h5py 2.6, null space datasets will return None for shape - item['class'] = 'H5S_NULL' + item["class"] = "H5S_NULL" elif len(obj.shape) == 0: # check to see if this is a null space vs a scalar dataset we'll do # this by seeing if an exception is raised when reading the dataset # h5py issue https://github.com/h5py/h5py/issues/279 will provide a # better way to determine null spaces + # Update 3/10/17: Above issue is closed, but waiting on 2.7 final release try: val = obj[...] - if not val: + if val is None: self.log.warning("no value returned for scalar dataset") - item['class'] = 'H5S_SCALAR' + item["class"] = "H5S_SCALAR" except IOError: - item['class'] = 'H5S_NULL' + item["class"] = "H5S_NULL" else: - item['class'] = 'H5S_SIMPLE' - item['dims'] = obj.shape + item["class"] = "H5S_SIMPLE" + item["dims"] = obj.shape maxshape = [] include_maxdims = False for i in range(len(obj.shape)): @@ -844,7 +868,7 @@ def getShapeItemByDsetObj(self, obj): include_maxdims = True maxshape.append(extent) if include_maxdims: - item['maxdims'] = maxshape + item["maxdims"] = maxshape return item def getShapeItemByAttrObj(self, obj): @@ -852,13 +876,13 @@ def getShapeItemByAttrObj(self, obj): if obj.shape is None or obj.get_storage_size() == 0: # If storage size is 0, assume this is a null space obj # See: h5py issue https://github.com/h5py/h5py/issues/279 - item['class'] = 'H5S_NULL' + item["class"] = "H5S_NULL" else: if obj.shape: - item['class'] = 'H5S_SIMPLE' - item['dims'] = obj.shape + item["class"] = "H5S_SIMPLE" + item["dims"] = obj.shape else: - item['class'] = 'H5S_SCALAR' + item["class"] = "H5S_SCALAR" return item # @@ -875,39 +899,39 @@ def getHDF5DatasetCreationProperties(self, obj_uuid, type_class): # alloc time nAllocTime = plist.get_alloc_time() if nAllocTime == h5py.h5d.ALLOC_TIME_DEFAULT: - 
creationProps['allocTime'] = 'H5D_ALLOC_TIME_DEFAULT' + creationProps["allocTime"] = "H5D_ALLOC_TIME_DEFAULT" elif nAllocTime == h5py.h5d.ALLOC_TIME_LATE: - creationProps['allocTime'] = 'H5D_ALLOC_TIME_LATE' + creationProps["allocTime"] = "H5D_ALLOC_TIME_LATE" elif nAllocTime == h5py.h5d.ALLOC_TIME_EARLY: - creationProps['allocTime'] = 'H5D_ALLOC_TIME_EARLY' + creationProps["allocTime"] = "H5D_ALLOC_TIME_EARLY" elif nAllocTime == h5py.h5d.ALLOC_TIME_INCR: - creationProps['allocTime'] = 'H5D_ALLOC_TIME_INCR' + creationProps["allocTime"] = "H5D_ALLOC_TIME_INCR" else: self.log.warning("Unknown alloc time value: " + str(nAllocTime)) # fill time nFillTime = plist.get_fill_time() if nFillTime == h5py.h5d.FILL_TIME_ALLOC: - creationProps['fillTime'] = 'H5D_FILL_TIME_ALLOC' + creationProps["fillTime"] = "H5D_FILL_TIME_ALLOC" elif nFillTime == h5py.h5d.FILL_TIME_NEVER: - creationProps['fillTime'] = 'H5D_FILL_TIME_NEVER' + creationProps["fillTime"] = "H5D_FILL_TIME_NEVER" elif nFillTime == h5py.h5d.FILL_TIME_IFSET: - creationProps['fillTime'] = 'H5D_FILL_TIME_IFSET' + creationProps["fillTime"] = "H5D_FILL_TIME_IFSET" else: self.log.warning("unknown fill time value: " + str(nFillTime)) - if type_class not in ('H5T_VLEN', 'H5T_OPAQUE'): + if type_class not in ("H5T_VLEN", "H5T_OPAQUE"): if plist.fill_value_defined() == h5py.h5d.FILL_VALUE_USER_DEFINED: - creationProps['fillValue'] = self.bytesArrayToList(dset.fillvalue) + creationProps["fillValue"] = self.bytesArrayToList(dset.fillvalue) # layout nLayout = plist.get_layout() if nLayout == h5py.h5d.COMPACT: - creationProps['layout'] = {'class': 'H5D_COMPACT'} + creationProps["layout"] = {"class": "H5D_COMPACT"} elif nLayout == h5py.h5d.CONTIGUOUS: - creationProps['layout'] = {'class': 'H5D_CONTIGUOUS'} + creationProps["layout"] = {"class": "H5D_CONTIGUOUS"} elif nLayout == h5py.h5d.CHUNKED: - creationProps['layout'] = {'class': 'H5D_CHUNKED', 'dims': dset.chunks } + creationProps["layout"] = {"class": "H5D_CHUNKED", "dims": 
dset.chunks} else: self.log.warning("Unknown layout value:" + str(nLayout)) @@ -919,14 +943,14 @@ def getHDF5DatasetCreationProperties(self, obj_uuid, type_class): opt_values = filter_info[2] filter_prop = {} filter_id = filter_info[0] - filter_prop['id'] = filter_id + filter_prop["id"] = filter_id if filter_info[3]: - filter_prop['name'] = self.bytesArrayToList(filter_info[3]) + filter_prop["name"] = self.bytesArrayToList(filter_info[3]) if filter_id in _HDF_FILTERS: hdf_filter = _HDF_FILTERS[filter_id] - filter_prop['class'] = hdf_filter['class'] - if 'options' in hdf_filter: - filter_opts = hdf_filter['options'] + filter_prop["class"] = hdf_filter["class"] + if "options" in hdf_filter: + filter_opts = hdf_filter["options"] for i in range(len(filter_opts)): if len(opt_values) <= i: break # end of option values @@ -943,11 +967,11 @@ def getHDF5DatasetCreationProperties(self, obj_uuid, type_class): filter_prop[option_name] = opt_value else: # custom filter - filter_prop['class'] = 'H5Z_FILTER_USER' + filter_prop["class"] = "H5Z_FILTER_USER" if opt_values: - filter_prop['parameters'] = opt_values + filter_prop["parameters"] = opt_values filter_props.append(filter_prop) - creationProps['filters'] = filter_props + creationProps["filters"] = filter_props return creationProps @@ -967,14 +991,14 @@ def getDatasetItemByUuid(self, obj_uuid): raise IOError(errno.ENXIO, msg) # fill in the item info for the dataset - item = { 'id': obj_uuid } + item = {"id": obj_uuid} alias = [] if dset.name and not dset.name.startswith("/__db__"): - alias.append(dset.name) # just use the default h5py path for now - item['alias'] = alias + alias.append(dset.name) # just use the default h5py path for now + item["alias"] = alias - item['attributeCount'] = len(dset.attrs) + item["attributeCount"] = len(dset.attrs) # check if the dataset is using a committed type typeid = h5py.h5d.DatasetID.get_type(dset.id) @@ -984,56 +1008,60 @@ def getDatasetItemByUuid(self, obj_uuid): addr = 
h5py.h5o.get_info(typeid).addr type_uuid = self.getUUIDByAddress(addr) committedType = self.getCommittedTypeItemByUuid(type_uuid) - typeItem = committedType['type'] - typeItem['uuid'] = type_uuid + typeItem = committedType["type"] + typeItem["uuid"] = type_uuid else: typeItem = getTypeItem(dset.dtype) - item['type'] = typeItem + item["type"] = typeItem # get shape - item['shape'] = self.getShapeItemByDsetObj(dset) + item["shape"] = self.getShapeItemByDsetObj(dset) if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = self.getModifiedTime(obj_uuid) + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = self.getModifiedTime(obj_uuid) creationProps = self.getDatasetCreationProps(obj_uuid) if creationProps: # if chunks is not in the db props, add it from the dataset prop # (so auto-chunk values can be returned) - if dset.chunks and 'layout' not in creationProps: - creationProps['layout'] = {'class': 'H5D_CHUNKED', - 'dims': dset.chunks} + if dset.chunks and "layout" not in creationProps: + creationProps["layout"] = {"class": "H5D_CHUNKED", "dims": dset.chunks} else: # no db-tracked creation properties, pull properties from library - creationProps = self.getHDF5DatasetCreationProperties(obj_uuid, typeItem['class']) + creationProps = self.getHDF5DatasetCreationProperties( + obj_uuid, typeItem["class"] + ) if creationProps: - item['creationProperties'] = creationProps + item["creationProperties"] = creationProps return item - """ - createTypeFromItem - create type given dictionary definition - """ def createTypeFromItem(self, attr_type): + """ + createTypeFromItem - create type given dictionary definition + """ dt = None - if type(attr_type) in (six.text_type, six.binary_type) and len(attr_type) == UUID_LEN: + if isinstance(attr_type, (str, bytes)) and len(attr_type) == UUID_LEN: # assume attr_type is a uuid of a named datatype tgt = self.getCommittedTypeObjByUuid(attr_type) if tgt is None: - msg = "Unable to create 
attribute, committed type with uuid of: " + attr_type + " not found" + msg = ( + "Unable to create attribute, committed type with uuid of: " + + attr_type + + " not found" + ) self.log.info(msg) raise IOError(errno.ENXIO, msg) dt = tgt # can use the object as the dt parameter else: - try: dt = createDataType(attr_type) except KeyError as ke: - msg = "Unable to create type: " + ke.message + msg = "Unable to create type: " + str(ke) self.log.info(msg) raise IOError(errno.EINVAL, msg) except TypeError as te: @@ -1046,11 +1074,11 @@ def createTypeFromItem(self, attr_type): raise IOError(errno, errno.EIO, msg) return dt - """ - createCommittedType - creates new named datatype - Returns item - """ def createCommittedType(self, datatype, obj_uuid=None): + """ + createCommittedType - creates new named datatype + Returns item + """ self.log.info("createCommittedType") self.initFile() if self.readonly: @@ -1068,7 +1096,7 @@ def createCommittedType(self, datatype, obj_uuid=None): msg = "Unexpected failure to create committed datatype" self.log.error(msg) raise IOError(errno.EIO, msg) - newType = datatypes[obj_uuid] # this will be a h5py Datatype class + newType = datatypes[obj_uuid] # this will be a h5py Datatype class # store reverse map as an attribute addr = h5py.h5o.get_info(newType.id).addr addrGrp = self.dbGrp["{addr}"] @@ -1077,19 +1105,19 @@ def createCommittedType(self, datatype, obj_uuid=None): now = time.time() self.setCreateTime(obj_uuid, timestamp=now) self.setModifiedTime(obj_uuid, timestamp=now) - item = { 'id': obj_uuid } - item['attributeCount'] = len(newType.attrs) - #item['type'] = hdf5dtype.getTypeItem(datatype.dtype) + item = {"id": obj_uuid} + item["attributeCount"] = len(newType.attrs) + # item['type'] = hdf5dtype.getTypeItem(datatype.dtype) if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = self.getModifiedTime(obj_uuid) + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = 
self.getModifiedTime(obj_uuid) return item - """ - getCommittedTypeObjByUuid - get obj from {datatypes} collection - Returns type obj - """ def getCommittedTypeObjByUuid(self, obj_uuid): + """ + getCommittedTypeObjByUuid - get obj from {datatypes} collection + Returns type obj + """ self.log.info("getCommittedTypeObjByUuid(" + obj_uuid + ")") self.initFile() datatype = None @@ -1106,11 +1134,11 @@ def getCommittedTypeObjByUuid(self, obj_uuid): return datatype - """ - getCommittedTypeItemByUuid - get json from {datatypes} collection - Returns type obj - """ def getCommittedTypeItemByUuid(self, obj_uuid): + """ + getCommittedTypeItemByUuid - get json from {datatypes} collection + Returns type obj + """ self.log.info("getCommittedTypeItemByUuid(" + obj_uuid + ")") self.initFile() datatype = self.getCommittedTypeObjByUuid(obj_uuid) @@ -1125,24 +1153,24 @@ def getCommittedTypeItemByUuid(self, obj_uuid): self.log.info(msg) raise IOError(errno.ENXIO, msg) - item = { 'id': obj_uuid } + item = {"id": obj_uuid} alias = [] if datatype.name and not datatype.name.startswith("/__db__"): - alias.append(datatype.name) # just use the default h5py path for now - item['alias'] = alias - item['attributeCount'] = len(datatype.attrs) - item['type'] = getTypeItem(datatype.dtype) + alias.append(datatype.name) # just use the default h5py path for now + item["alias"] = alias + item["attributeCount"] = len(datatype.attrs) + item["type"] = getTypeItem(datatype.dtype) if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = self.getModifiedTime(obj_uuid) + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = self.getModifiedTime(obj_uuid) return item - """ - Get attribute given an object and name - returns: JSON object - """ def getAttributeItemByObj(self, obj, name, includeData=True): + """ + Get attribute given an object and name + returns: JSON object + """ if name not in obj.attrs: msg = "Attribute: [" + name + "] not found in object: " + 
obj.name self.log.info(msg) @@ -1152,7 +1180,7 @@ def getAttributeItemByObj(self, obj, name, includeData=True): attrObj = h5py.h5a.open(obj.id, np.string_(name)) attr = None - item = { 'name': name } + item = {"name": name} # check if the dataset is using a committed type typeid = attrObj.get_type() @@ -1162,20 +1190,20 @@ def getAttributeItemByObj(self, obj, name, includeData=True): addr = h5py.h5o.get_info(typeid).addr type_uuid = self.getUUIDByAddress(addr) committedType = self.getCommittedTypeItemByUuid(type_uuid) - typeItem = committedType['type'] - typeItem['uuid'] = type_uuid + typeItem = committedType["type"] + typeItem["uuid"] = type_uuid else: typeItem = getTypeItem(attrObj.dtype) - item['type'] = typeItem + item["type"] = typeItem # todo - don't include data for OPAQUE until JSON serialization # issues are addressed - if type(typeItem) == dict and typeItem['class'] in ('H5T_OPAQUE'): + if type(typeItem) == dict and typeItem["class"] in ("H5T_OPAQUE"): includeData = False shape_json = self.getShapeItemByAttrObj(attrObj) - item['shape'] = shape_json - if shape_json['class'] == 'H5S_NULL': + item["shape"] = shape_json + if shape_json["class"] == "H5S_NULL": includeData = False if includeData: try: @@ -1184,7 +1212,7 @@ def getAttributeItemByObj(self, obj, name, includeData=True): self.log.warning("type error reading attribute") if includeData and attr is not None: - if shape_json['class'] == 'H5S_SCALAR': + if shape_json["class"] == "H5S_SCALAR": data = self.getDataValue(typeItem, attr) else: dims = shape_json["dims"] @@ -1192,8 +1220,8 @@ def getAttributeItemByObj(self, obj, name, includeData=True): # convert numpy object to python list # values = self.toList(typeItem, attr) data = self.toList(rank, typeItem, attr) - #data = self.bytesToString(data) - item['value'] = data + # data = self.bytesToString(data) + item["value"] = data # timestamps will be added by getAttributeItem() return item @@ -1226,8 +1254,12 @@ def getAttributeItems(self, col_type, 
obj_uuid, marker=None, limit=0): item = self.getAttributeItemByObj(obj, name, False) # mix-in timestamps if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid, objType="attribute", name=name) - item['mtime'] = self.getModifiedTime(obj_uuid, objType="attribute", name=name) + item["ctime"] = self.getCreateTime( + obj_uuid, objType="attribute", name=name + ) + item["mtime"] = self.getModifiedTime( + obj_uuid, objType="attribute", name=name + ) items.append(item) count += 1 @@ -1236,8 +1268,9 @@ def getAttributeItems(self, col_type, obj_uuid, marker=None, limit=0): return items def getAttributeItem(self, col_type, obj_uuid, name): - self.log.info("getAttributeItemByUuid(" + col_type + ", " + obj_uuid - + ", " + name + ")") + self.log.info( + "getAttributeItemByUuid(" + col_type + ", " + obj_uuid + ", " + name + ")" + ) self.initFile() obj = self.getObjectByUuid(col_type, obj_uuid) if obj is None: @@ -1247,9 +1280,17 @@ def getAttributeItem(self, col_type, obj_uuid, name): return None item = self.getAttributeItemByObj(obj, name) if item is None: - if self.getModifiedTime(obj_uuid, objType="attribute", name=name, useRoot=False): + if self.getModifiedTime( + obj_uuid, objType="attribute", name=name, useRoot=False + ): # attribute has been removed - msg = "Attribute: [" + name + "] of object: " + obj_uuid + " has been previously deleted" + msg = ( + "Attribute: [" + + name + + "] of object: " + + obj_uuid + + " has been previously deleted" + ) self.log.info(msg) raise IOError(errno.ENOENT, msg) msg = "Attribute: [" + name + "] of object: " + obj_uuid + " not found" @@ -1257,46 +1298,48 @@ def getAttributeItem(self, col_type, obj_uuid, name): raise IOError(errno.ENXIO, msg) # mix-in timestamps if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid, objType="attribute", name=name) - item['mtime'] = self.getModifiedTime(obj_uuid, objType="attribute", name=name) + item["ctime"] = self.getCreateTime(obj_uuid, objType="attribute", name=name) 
+ item["mtime"] = self.getModifiedTime( + obj_uuid, objType="attribute", name=name + ) return item - """ - isDimensionList - return True if this attribute json looks like a dimension list - """ def isDimensionList(self, attr_name, attr_type): + """ + isDimensionList - return True if this attribute json looks like a dimension list + """ if attr_name != "DIMENSION_LIST": return False if type(attr_type) is not dict: return False - if attr_type['class'] != "H5T_VLEN": + if attr_type["class"] != "H5T_VLEN": return False - base_type = attr_type['base'] - if base_type['class'] != 'H5T_REFERENCE': + base_type = attr_type["base"] + if base_type["class"] != "H5T_REFERENCE": return False return True - """ - isReferenceList - return True if this attribute json looks like a reference list - """ def isReferenceList(self, attr_name, attr_type): + """ + isReferenceList - return True if this attribute json looks like a reference list + """ if attr_name != "REFERENCE_LIST": return False if type(attr_type) is not dict: return False - if attr_type['class'] != "H5T_COMPOUND": + if attr_type["class"] != "H5T_COMPOUND": return False return True - """ - makeDimensionList - work-around for h5py problems saving dimension list - - types which are vlen's of references are not working directly, so use dim_scale api - Note: this is a work-around for h5py issue: - https://github.com/h5py/h5py/issues/553 - """ def makeDimensionList(self, obj, shape, value): + """ + makeDimensionList - work-around for h5py problems saving dimension list - + types which are vlen's of references are not working directly, so use dim_scale api + Note: this is a work-around for h5py issue: + https://github.com/h5py/h5py/issues/553 + """ dset_refs = self.listToRef(value) for i in range(len(dset_refs)): refs = dset_refs[i] @@ -1307,7 +1350,9 @@ def makeDimensionList(self, obj, shape, value): for j in range(len(refs)): scale_obj = self.f[refs[j]] if scale_obj is None: - self.log.warning("dimension list, missing obj 
reference: " + value[i]) + self.log.warning( + "dimension list, missing obj reference: " + value[i] + ) continue if "CLASS" not in scale_obj.attrs: self.log.warning("dimension list, no scale obj") @@ -1321,28 +1366,29 @@ def makeDimensionList(self, obj, shape, value): except RuntimeError: self.log.error("got runtime error attaching scale") - """ - writeNdArrayToAttribute - create an attribute given numpy array - """ def writeNdArrayToAttribute(self, attrs, attr_name, npdata, shape, dt): + """ + writeNdArrayToAttribute - create an attribute given numpy array + """ attrs.create(attr_name, npdata, shape=shape, dtype=dt) - """ - create a scalar string attribute using nullterm padding - """ def makeNullTermStringAttribute(self, obj, attr_name, strLength, value): + """ + create a scalar string attribute using nullterm padding + """ self.log.info( - "make nullterm, length: " + str(strLength) + " value:" + str(value)) - if type(value) == unicode: - value = str(value) + "make nullterm, length: " + str(strLength) + " value:" + str(value) + ) + value = str(value) if strLength < len(value): - self.log.warning("makeNullTermStringAttribute: value string longer than length") - #value = value[:strLength] # truncate to length - - - if six.PY3 and type(attr_name) is str: + self.log.warning( + "makeNullTermStringAttribute: value string longer than length" + ) + # value = value[:strLength] # truncate to length + + if isinstance(attr_name, str): try: - attr_name = attr_name.encode('ascii') + attr_name = attr_name.encode("ascii") except UnicodeDecodeError: raise TypeError("non-ascii attribute name not allowed") @@ -1353,7 +1399,7 @@ def makeNullTermStringAttribute(self, obj, attr_name, strLength, value): sid = h5py.h5s.create(h5py.h5s.SCALAR) aid = h5py.h5a.create(obj.id, attr_name, tid, sid) # write the value - dtype_code = 'S' + str(strLength) + dtype_code = "S" + str(strLength) ndarr = np.array(value, dtype=np.dtype(dtype_code)) aid.write(ndarr) @@ -1363,7 +1409,7 @@ def 
makeAttribute(self, obj, attr_name, shape, attr_type, value): attribute) """ is_committed_type = False - if type(attr_type) in (str, unicode) and len(attr_type) == UUID_LEN: + if isinstance(attr_type, str) and len(attr_type) == UUID_LEN: # assume attr_type is a uuid of a named datatype is_committed_type = True @@ -1382,11 +1428,8 @@ def makeAttribute(self, obj, attr_name, shape, attr_type, value): else: tmpGrp = self.dbGrp["{tmp}"] tmpGrp.attrs.create(attr_name, 0, shape=(), dtype=dt) - if six.PY3: - b_attr_name = attr_name.encode('utf-8') - tmpAttr = h5py.h5a.open(tmpGrp.id, name=b_attr_name) - else: - tmpAttr = h5py.h5a.open(tmpGrp.id, name=attr_name) + b_attr_name = attr_name.encode("utf-8") + tmpAttr = h5py.h5a.open(tmpGrp.id, name=b_attr_name) if not tmpAttr: msg = "Unexpected error creating datatype for nullspace attribute" self.log.error(msg) @@ -1397,10 +1440,7 @@ def makeAttribute(self, obj, attr_name, shape, attr_type, value): if attr_name in obj.attrs: self.log.info("deleting attribute: " + attr_name) del obj.attrs[attr_name] - if six.PY3: - attr_id = h5py.h5a.create(obj.id, b_attr_name, tid, sid) - else: - attr_id = h5py.h5a.create(obj.id, attr_name, tid, sid) + attr_id = h5py.h5a.create(obj.id, b_attr_name, tid, sid) # delete the temp attribute del tmpGrp.attrs[attr_name] if not attr_id: @@ -1420,11 +1460,19 @@ def makeAttribute(self, obj, attr_name, shape, attr_type, value): # convert python list to numpy object strPad = None strLength = 0 - if type(attr_type) == dict and attr_type['class'] == 'H5T_STRING' and "strPad" in attr_type: + if ( + type(attr_type) == dict + and attr_type["class"] == "H5T_STRING" + and "strPad" in attr_type + ): strPad = attr_type["strPad"] - strLength = attr_type['length'] + strLength = attr_type["length"] - if rank == 0 and type(strLength) == int and strPad == "H5T_STR_NULLTERM": + if ( + rank == 0 + and type(strLength) == int + and strPad == "H5T_STR_NULLTERM" + ): self.makeNullTermStringAttribute(obj, attr_name, 
strLength, value) else: typeItem = getTypeItem(dt) @@ -1432,17 +1480,20 @@ def makeAttribute(self, obj, attr_name, shape, attr_type, value): # create numpy array npdata = np.zeros(shape, dtype=dt) - + if rank == 0: npdata[()] = self.toNumPyValue(attr_type, value, npdata[()]) else: self.toNumPyArray(rank, attr_type, value, npdata) - self.writeNdArrayToAttribute(obj.attrs, attr_name, npdata, shape, dt) + self.writeNdArrayToAttribute( + obj.attrs, attr_name, npdata, shape, dt + ) """ createAttribute - create an attribute """ + def createAttribute(self, col_name, obj_uuid, attr_name, shape, attr_type, value): self.log.info("createAttribute: [" + attr_name + "]") @@ -1466,7 +1517,9 @@ def createAttribute(self, col_name, obj_uuid, attr_name, shape, attr_type, value now = time.time() self.setCreateTime(obj_uuid, objType="attribute", name=attr_name, timestamp=now) - self.setModifiedTime(obj_uuid, objType="attribute", name=attr_name, timestamp=now) + self.setModifiedTime( + obj_uuid, objType="attribute", name=attr_name, timestamp=now + ) self.setModifiedTime(obj_uuid, timestamp=now) # owner entity is modified def deleteAttribute(self, col_name, obj_uuid, attr_name): @@ -1478,19 +1531,28 @@ def deleteAttribute(self, col_name, obj_uuid, attr_name): obj = self.getObjectByUuid(col_name, obj_uuid) if attr_name not in obj.attrs: - msg = "Attribute with name: [" + attr_name + "] of object: " + obj_uuid + " not found" + msg = ( + "Attribute with name: [" + + attr_name + + "] of object: " + + obj_uuid + + " not found" + ) self.log.info(msg) raise IOError(errno.ENXIO, msg) del obj.attrs[attr_name] now = time.time() - self.setModifiedTime(obj_uuid, objType="attribute", name=attr_name, timestamp=now) + self.setModifiedTime( + obj_uuid, objType="attribute", name=attr_name, timestamp=now + ) return True """ Return a json-serializable representation of the numpy value """ + def getDataValue(self, typeItem, value, dimension=0, dims=None): if dimension > 0: if type(dims) not in (list, 
tuple): @@ -1505,24 +1567,24 @@ def getDataValue(self, typeItem, value, dimension=0, dims=None): raise IOError(errno.EIO, msg) nElements = dims[rank - dimension] for i in range(nElements): - item_value = self.getDataValue(typeItem, value[i], - dimension=(dimension-1), - dims=dims) + item_value = self.getDataValue( + typeItem, value[i], dimension=(dimension - 1), dims=dims + ) out.append(item_value) return out # done for array case out = None - typeClass = typeItem['class'] + typeClass = typeItem["class"] if isinstance(value, (np.ndarray, np.generic)): value = value.tolist() # convert numpy object to list - if typeClass == 'H5T_COMPOUND': + if typeClass == "H5T_COMPOUND": if type(value) not in (list, tuple): msg = "Unexpected type for compound value" self.log.error(msg) raise IOError(errno.EIO, msg) - fields = typeItem['fields'] + fields = typeItem["fields"] if len(fields) != len(value): msg = "Number of elements in compound type does not match type" self.log.error(msg) @@ -1531,49 +1593,44 @@ def getDataValue(self, typeItem, value, dimension=0, dims=None): out = [] for i in range(nFields): field = fields[i] - item_value = self.getDataValue(field['type'], value[i]) + item_value = self.getDataValue(field["type"], value[i]) out.append(item_value) - elif typeClass == 'H5T_VLEN': + elif typeClass == "H5T_VLEN": if type(value) not in (list, tuple): msg = "Unexpected type for vlen value" self.log.error(msg) raise IOError(errno.EIO, msg) - baseType = typeItem['base'] + baseType = typeItem["base"] out = [] nElements = len(value) for i in range(nElements): item_value = self.getDataValue(baseType, value[i]) out.append(item_value) - elif typeClass == 'H5T_REFERENCE': + elif typeClass == "H5T_REFERENCE": out = self.refToList(value) - elif typeClass == 'H5T_OPAQUE': + elif typeClass == "H5T_OPAQUE": out = "???" 
# todo - elif typeClass == 'H5T_ARRAY': + elif typeClass == "H5T_ARRAY": type_dims = typeItem["dims"] if type(type_dims) not in (list, tuple): msg = "unexpected type for type array dimensions" self.log.error(msg) raise IOError(errno.EIO, msg) rank = len(type_dims) - baseType = typeItem['base'] - out = self.getDataValue(baseType, value, dimension=rank, - dims=type_dims) + baseType = typeItem["base"] + out = self.getDataValue(baseType, value, dimension=rank, dims=type_dims) - elif typeClass in ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_ENUM'): + elif typeClass in ("H5T_INTEGER", "H5T_FLOAT", "H5T_ENUM"): out = value # just copy value - elif typeClass == 'H5T_STRING': - if six.PY3: - if "charSet" in typeItem: - charSet = typeItem["charSet"] - else: - charSet = "H5T_CSET_ASCII" - if charSet == "H5T_CSET_ASCII": - out = value.decode("utf-8") - else: - out = value + elif typeClass == "H5T_STRING": + if "charSet" in typeItem: + charSet = typeItem["charSet"] + else: + charSet = "H5T_CSET_ASCII" + if charSet == "H5T_CSET_ASCII" and isinstance(value, bytes): + out = value.decode("utf-8") else: - # things are simpler in PY2 out = value else: msg = "Unexpected type class: " + typeClass @@ -1581,20 +1638,20 @@ def getDataValue(self, typeItem, value, dimension=0, dims=None): raise IOError(errno.ENINVAL, msg) return out - """ - Return a numpy value based on json representation - """ - def getRefValue(self, typeItem, value): + def getRefValue(self, typeItem: dict, value: list): + """ + Return a numpy value based on json representation + """ out = None - typeClass = typeItem['class'] - if typeClass == 'H5T_COMPOUND': + typeClass = typeItem["class"] + if typeClass == "H5T_COMPOUND": - if type(value) not in (list, tuple): - msg = "Unexpected type for compound value" + if not isinstance(value, (list, tuple)): + msg = f"Unexpected type for compound value: {type(value)}" self.log.error(msg) raise IOError(errno.EIO, msg) - fields = typeItem['fields'] + fields = typeItem["fields"] if len(fields) 
!= len(value): msg = "Number of elements in compound type does not match type" self.log.error(msg) @@ -1603,30 +1660,30 @@ def getRefValue(self, typeItem, value): out = [] for i in range(nFields): field = fields[i] - item_value = self.getRefValue(field['type'], value[i]) + item_value = self.getRefValue(field["type"], value[i]) out.append(item_value) - elif typeClass == 'H5T_VLEN': + elif typeClass == "H5T_VLEN": if type(value) not in (list, tuple): msg = "Unexpected type for vlen value" self.log.error(msg) raise IOError(errno.EIO, msg) - baseType = typeItem['base'] + baseType = typeItem["base"] out = [] nElements = len(value) for i in range(nElements): item_value = self.getRefValue(baseType, value[i]) out.append(item_value) - elif typeClass == 'H5T_REFERENCE': + elif typeClass == "H5T_REFERENCE": out = self.listToRef(value) - elif typeClass == 'H5T_OPAQUE': + elif typeClass == "H5T_OPAQUE": out = "???" # todo - elif typeClass == 'H5T_ARRAY': - out = value - elif typeClass in ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_ENUM'): + elif typeClass == "H5T_ARRAY": + out = self.toRef(len(typeItem["dims"]), typeItem["base"], value) + elif typeClass in ("H5T_INTEGER", "H5T_FLOAT", "H5T_ENUM"): out = value # just copy value - elif typeClass == 'H5T_STRING': - if typeItem['charSet'] == 'H5T_CSET_UTF8': + elif typeClass == "H5T_STRING": + if typeItem["charSet"] == "H5T_CSET_UTF8": # out = value.encode('utf-8') out = value else: @@ -1643,13 +1700,14 @@ def getRefValue(self, typeItem, value): """ Return a numpy value based on json representation """ + def toNumPyValue(self, typeItem, src, des): - typeClass = 'H5T_INTEGER' # default to int type + typeClass = "H5T_INTEGER" # default to int type if type(typeItem) is dict: - typeClass = typeItem['class'] - if typeClass == 'H5T_COMPOUND': - fields = typeItem['fields'] + typeClass = typeItem["class"] + if typeClass == "H5T_COMPOUND": + fields = typeItem["fields"] if len(fields) != len(src): msg = "Number of elements in compound type does not 
match type" self.log.error(msg) @@ -1658,40 +1716,42 @@ def toNumPyValue(self, typeItem, src, des): for i in range(nFields): field = fields[i] - field_name = field['name'] + field_name = field["name"] des[field_name] = src[i] - elif typeClass == 'H5T_VLEN': + elif typeClass == "H5T_VLEN": if type(src) not in (list, tuple): msg = "Unexpected type for vlen value" self.log.error(msg) raise IOError(errno.EIO, msg) - baseType = typeItem['base'] + baseType = typeItem["base"] dt = self.createTypeFromItem(baseType) des = np.array(src, dtype=dt) - elif typeClass == 'H5T_REFERENCE': + elif typeClass == "H5T_REFERENCE": des = src # self.listToRef(src) - elif typeClass == 'H5T_OPAQUE': + elif typeClass == "H5T_OPAQUE": des = "???" # todo - elif typeClass == 'H5T_ARRAY': + elif typeClass == "H5T_ARRAY": des = src - elif typeClass in ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_ENUM'): + elif typeClass in ("H5T_INTEGER", "H5T_FLOAT", "H5T_ENUM"): des = src # just copy value - elif typeClass == 'H5T_STRING': - if typeItem['charSet'] == 'H5T_CSET_UTF8': + elif typeClass == "H5T_STRING": + if typeItem["charSet"] == "H5T_CSET_UTF8": des = src # src.encode('utf-8') else: if type(src) is str: try: - src.encode('ascii') + src.encode("ascii") except UnicodeDecodeError: - raise TypeError("non-ascii value not allowed with H5T_CSET_ASCII") + raise TypeError( + "non-ascii value not allowed with H5T_CSET_ASCII" + ) des = src - + else: msg = "Unexpected type class: " + typeClass self.log.info(msg) @@ -1701,6 +1761,7 @@ def toNumPyValue(self, typeItem, src, des): """ copy src data to numpy array """ + def toNumPyArray(self, rank, typeItem, src, des): if rank == 0: @@ -1719,23 +1780,23 @@ def toNumPyArray(self, rank, typeItem, src, des): rv = self.toNumPyValue(typeItem, src_sec, des_sec) # if the numpy object is writeable, des_sec will be # already updated. 
Otherwise, update the des by assignment - if not hasattr(des_sec, 'flags') or not des_sec.flags['WRITEABLE']: + if not hasattr(des_sec, "flags") or not des_sec.flags["WRITEABLE"]: des[i] = rv - """ - Convert json list to h5py compatible values - """ def toRef(self, rank, typeItem, data): + """ + Convert json list to h5py compatible values + """ out = None - if type(typeItem) in (str, unicode): + if isinstance(typeItem, str): # commited type - get json representation committed_type_item = self.getCommittedTypeItemByUuid(typeItem) - typeItem = committed_type_item['type'] + typeItem = committed_type_item["type"] - typeClass = typeItem['class'] - if typeClass in ('H5T_INTEGER', 'H5T_FLOAT'): - out = data # just use as is + typeClass = typeItem["class"] + if typeClass in ("H5T_INTEGER", "H5T_FLOAT"): + out = data # just use as is elif rank == 0: # scalar value @@ -1755,10 +1816,11 @@ def toRef(self, rank, typeItem, data): """ Convert list to json serializable values. """ + def toList(self, rank, typeItem, data): out = None - typeClass = typeItem['class'] - if typeClass in ('H5T_INTEGER', 'H5T_FLOAT'): + typeClass = typeItem["class"] + if typeClass in ("H5T_INTEGER", "H5T_FLOAT"): out = data.tolist() # just use as is elif rank == 0: @@ -1779,6 +1841,7 @@ def toList(self, rank, typeItem, data): """ Create ascii representation of vlen data object """ + def vlenToList(self, data): # todo - verify that data is a numpy.ndarray out = None @@ -1786,7 +1849,7 @@ def vlenToList(self, data): out = [] else: try: - if data.dtype.kind != 'O': + if data.dtype.kind != "O": out = data.tolist() else: out = [] @@ -1800,6 +1863,7 @@ def vlenToList(self, data): """ Create ascii representation of ref data object """ + def refToList(self, data): # todo - verify that data is a numpy.ndarray out = None @@ -1815,8 +1879,7 @@ def refToList(self, data): elif self.getCommittedTypeObjByUuid(uuid): out = "datatypes/" + uuid else: - self.log.warning( - "uuid in region ref not found: [" + uuid + "]") + 
self.log.warning("uuid in region ref not found: [" + uuid + "]") return None else: out = "null" @@ -1831,28 +1894,33 @@ def refToList(self, data): """ Convert ascii representation of data references to data ref """ + def listToRef(self, data): out = None if not data: # null reference out = self.getNullReference() - elif type(data) in (bytes, str, unicode): + elif isinstance(data, (bytes, str)): obj_ref = None # object reference should be in the form: / for prefix in ("datasets", "groups", "datatypes"): if data.startswith(prefix): - uuid_ref = data[len(prefix):] - if len(uuid_ref) == (UUID_LEN + 1) and uuid_ref.startswith('/'): + uuid_ref = data[len(prefix) :] + if len(uuid_ref) == (UUID_LEN + 1) and uuid_ref.startswith("/"): obj = self.getObjectByUuid(prefix, uuid_ref[1:]) if obj: obj_ref = obj.ref else: - msg = "Invalid object refence value: [" + uuid_ref + "] not found" + msg = ( + "Invalid object reference value: [" + + uuid_ref + + "] not found" + ) self.log.info(msg) raise IOError(errno.ENXIO, msg) break if not obj_ref: - msg = "Invalid object refence value: [" + data + "]" + msg = "Invalid object reference value: [" + data + "]" self.log.info(msg) raise IOError(errno.EINVAL, msg) else: @@ -1866,62 +1934,61 @@ def listToRef(self, data): # assume region ref out = self.createRegionReference(data) else: - msg = "Invalid object refence value type: [" + str(type(data)) + "]" + msg = "Invalid object reference value type: [" + str(type(data)) + "]" self.log.info(msg) raise IOError(errno.EINVAL, msg) return out - - """ - Convert list that may contain bytes type elements to list of string elements - """ + def bytesArrayToList(self, data): - if type(data) in (bytes, str, unicode): + """ + Convert list that may contain bytes type elements to list of string elements + """ + if isinstance(data, (bytes, str)): is_list = False elif isinstance(data, (np.ndarray, np.generic)): if len(data.shape) == 0: is_list = False data = data.tolist() # tolist will return a scalar in this 
case - if type(data) in (list, tuple): + if isinstance(data, (list, tuple)): is_list = True else: is_list = False else: - is_list = True - elif type(data) in (list, tuple): + is_list = True + elif isinstance(data, (list, tuple)): is_list = True else: is_list = False - + if is_list: out = [] for item in data: - out.append(self.bytesArrayToList(item)) # recursive call - elif type(data) is bytes: - if six.PY3: - out = data.decode("utf-8") - else: - out = data + out.append(self.bytesArrayToList(item)) # recursive call + elif isinstance(data, bytes): + out = data.decode("utf-8") else: out = data - + return out - - """ - Get item description of region reference value - """ + def getRegionReference(self, regionRef): - selectionEnums = {h5py.h5s.SEL_NONE: 'H5S_SEL_NONE', - h5py.h5s.SEL_ALL: 'H5S_SEL_ALL', - h5py.h5s.SEL_POINTS: 'H5S_SEL_POINTS', - h5py.h5s.SEL_HYPERSLABS: 'H5S_SEL_HYPERSLABS'} + """ + Get item description of region reference value + """ + selectionEnums = { + h5py.h5s.SEL_NONE: "H5S_SEL_NONE", + h5py.h5s.SEL_ALL: "H5S_SEL_ALL", + h5py.h5s.SEL_POINTS: "H5S_SEL_POINTS", + h5py.h5s.SEL_HYPERSLABS: "H5S_SEL_HYPERSLABS", + } item = {} objid = h5py.h5r.dereference(regionRef, self.f.file.file.id) if objid: - item['id'] = self.getUUIDByAddress(h5py.h5o.get_info(objid).addr) + item["id"] = self.getUUIDByAddress(h5py.h5o.get_info(objid).addr) else: - self.log.info("region reference unable to find item with objid: " + objid) - return item + self.log.info("region reference unable to find item with objid: " + objid) + return item sel = h5py.h5r.get_region(regionRef, objid) select_type = sel.get_select_type() @@ -1929,7 +1996,7 @@ def getRegionReference(self, regionRef): msg = "Unexpected selection type: " + regionRef.typecode self.log.error(msg) raise IOError(errno.EIO, msg) - item['select_type'] = selectionEnums[select_type] + item["select_type"] = selectionEnums[select_type] pointlist = None if select_type == h5py.h5s.SEL_POINTS: # retrieve a numpy array of selection 
points @@ -1945,43 +2012,45 @@ def getRegionReference(self, regionRef): for i in range(len(coord2)): coord2[i] = coord2[i] + 1 - item['selection'] = pointlist + item["selection"] = pointlist return item - """ - Create region reference from item description of region reference value - """ def createRegionReference(self, item): - selectionEnums = {'H5S_SEL_NONE': h5py.h5s.SEL_NONE, - 'H5S_SEL_ALL': h5py.h5s.SEL_ALL, - 'H5S_SEL_POINTS': h5py.h5s.SEL_POINTS, - 'H5S_SEL_HYPERSLABS': h5py.h5s.SEL_HYPERSLABS} + """ + Create region reference from item description of region reference value + """ + selectionEnums = { + "H5S_SEL_NONE": h5py.h5s.SEL_NONE, + "H5S_SEL_ALL": h5py.h5s.SEL_ALL, + "H5S_SEL_POINTS": h5py.h5s.SEL_POINTS, + "H5S_SEL_HYPERSLABS": h5py.h5s.SEL_HYPERSLABS, + } region_ref = None - if 'select_type' not in item: + if "select_type" not in item: msg = "select_type not provided for region selection" self.log.info(msg) raise IOError(errno.EINVAL, msg) - select_type = item['select_type'] + select_type = item["select_type"] if select_type not in selectionEnums.keys(): msg = "selection type: [" + select_type + "] is not valid" self.log.info(msg) raise IOError(errno.EINVAL, msg) dset = None - if select_type == 'H5S_SEL_NONE': - if 'id' not in item: - # select none on null dataset, return null ref - out = self.getNullReference() - return out + if select_type == "H5S_SEL_NONE": + if "id" not in item: + # select none on null dataset, return null ref + out = self.getNullReference() + return out else: # select_type != 'H5S_SEL_NONE' - if 'id' not in item: + if "id" not in item: msg = "id not provided for region selection" self.log.info(msg) raise IOError(errno.EINVAL, msg) # Otherwise need to provide uuid of dataset - uuid_ref = item['id'] + uuid_ref = item["id"] if len(uuid_ref) != UUID_LEN: msg = "uuid value: [" + uuid_ref + "] for region reference is not valid" self.log.info(msg) @@ -1995,8 +2064,8 @@ def createRegionReference(self, item): self.log.info(msg) raise 
IOError(errno.EINVAL, msg) - if select_type in ('H5S_SEL_POINTS', 'H5S_SEL_HYPERSLABS'): - if 'selection' not in item: + if select_type in ("H5S_SEL_POINTS", "H5S_SEL_HYPERSLABS"): + if "selection" not in item: msg = "selection key not provided for region selection" self.log.info(msg) raise IOError(errno.EINVAL, msg) @@ -2005,98 +2074,98 @@ def createRegionReference(self, item): space_id = h5py.h5d.DatasetID.get_space(dset.id) h5py.h5s.SpaceID.select_none(space_id) - if select_type == 'H4S_SEL_NONE': - pass # did select_none above - elif select_type == 'H5S_SEL_ALL': + if select_type == "H4S_SEL_NONE": + pass # did select_none above + elif select_type == "H5S_SEL_ALL": h5py.h5s.SpaceID.select_all(space_id) - elif select_type == 'H5S_SEL_POINTS': - selection = item['selection'] + elif select_type == "H5S_SEL_POINTS": + selection = item["selection"] for point in selection: if len(point) != rank: - msg = "point selection number of elements must mach rank of referenced dataset" - self.log.info(msg) - raise IOError(errno.EINVAL, msg) + msg = "point selection number of elements must mach rank of referenced dataset" + self.log.info(msg) + raise IOError(errno.EINVAL, msg) h5py.h5s.SpaceID.select_elements(space_id, selection) - elif select_type == 'H5S_SEL_HYPERSLABS': - selection = item['selection'] + elif select_type == "H5S_SEL_HYPERSLABS": + selection = item["selection"] for slab in selection: - # each item should be a two element array defining the hyperslab boundary - if len(slab) != 2: - msg = "selection value not valid (not a 2 element array)" - self.log.info(msg) - raise IOError(errno.EINVAL, msg) - start = slab[0] - if type(start) == list: - start = tuple(start) - if type(start) is not tuple or len(start) != rank: - msg = "selection value not valid, start element should have number " - msg += "elements equal to rank of referenced dataset" + # each item should be a two element array defining the hyperslab boundary + if len(slab) != 2: + msg = "selection value not 
valid (not a 2 element array)" + self.log.info(msg) + raise IOError(errno.EINVAL, msg) + start = slab[0] + if type(start) == list: + start = tuple(start) + if type(start) is not tuple or len(start) != rank: + msg = "selection value not valid, start element should have number " + msg += "elements equal to rank of referenced dataset" + self.log.info(msg) + raise IOError(errno.EINVAL, msg) + stop = slab[1] + if type(stop) == list: + stop = tuple(stop) + if type(stop) is not tuple or len(stop) != rank: + msg = "selection value not valid, count element should have number " + msg += "elements equal to rank of referenced dataset" + self.log.info(msg) + raise IOError(errno.EINVAL, msg) + count = [] + for i in range(rank): + if start[i] < 0: + msg = "start value for hyperslab selection must be non-negative" self.log.info(msg) raise IOError(errno.EINVAL, msg) - stop = slab[1] - if type(stop) == list: - stop = tuple(stop) - if type(stop) is not tuple or len(stop) != rank: - msg = "selection value not valid, count element should have number " - msg += "elements equal to rank of referenced dataset" + if stop[i] <= start[i]: + msg = "stop value must be greater than start value for hyperslab selection" self.log.info(msg) raise IOError(errno.EINVAL, msg) - count = [] - for i in range(rank): - if start[i] < 0: - msg = "start value for hyperslab selection must be non-negative" - self.log.info(msg) - raise IOError(errno.EINVAL, msg) - if stop[i] <= start[i]: - msg = "stop value must be greater than start value for hyperslab selection" - self.log.info(msg) - raise IOError(errno.EINVAL, msg) - count.append(stop[i] - start[i]) - count = tuple(count) + count.append(stop[i] - start[i]) + count = tuple(count) - h5py.h5s.SpaceID.select_hyperslab(space_id, start, count, op=h5py.h5s.SELECT_OR) + h5py.h5s.SpaceID.select_hyperslab( + space_id, start, count, op=h5py.h5s.SELECT_OR + ) # now that we've selected the desired region in the space, return a region reference - - if six.PY3: - dset_name 
= dset.name.encode('utf-8') - else: - dset_name = dset.name - region_ref = h5py.h5r.create(self.f.id, dset_name, h5py.h5r.DATASET_REGION, space_id) + dset_name = dset.name.encode("utf-8") + region_ref = h5py.h5r.create( + self.f.id, dset_name, h5py.h5r.DATASET_REGION, space_id + ) return region_ref - """ - Convert a list to a tuple, recursively. - Example. [[1,2],[3,4]] -> ((1,2),(3,4)) - """ def toTuple(self, rank, data): - if type(data) in (list, tuple): + """ + Convert a list to a tuple, recursively. + Example. [[1,2],[3,4]] -> ((1,2),(3,4)) + """ + if isinstance(data, (list, tuple)): if rank > 0: - return list(self.toTuple(rank-1, x) for x in data) + return list(self.toTuple(rank - 1, x) for x in data) else: - return tuple(self.toTuple(rank-1, x) for x in data) + return tuple(self.toTuple(rank - 1, x) for x in data) else: return data - """ - Get values from dataset identified by obj_uuid. - If a slices list or tuple is provided, it should have the same - number of elements as the rank of the dataset. - """ def getDatasetValuesByUuid(self, obj_uuid, slices=Ellipsis, format="json"): + """ + Get values from dataset identified by obj_uuid. + If a slices list or tuple is provided, it should have the same + number of elements as the rank of the dataset. 
+ """ dset = self.getDatasetObjByUuid(obj_uuid) if format not in ("json", "binary"): msg = "only json and binary formats are supported" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if dset is None: msg = "Dataset: " + obj_uuid + " not found" self.log.info(msg) raise IOError(errno.ENXIO, msg) - + values = None dt = dset.dtype typeItem = getTypeItem(dt) @@ -2105,13 +2174,13 @@ def getDatasetValuesByUuid(self, obj_uuid, slices=Ellipsis, format="json"): msg = "Only JSON is supported for for this data type" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if dset.shape is None: # null space dataset (with h5py 2.6.0) - return None - + return None + rank = len(dset.shape) - + if rank == 0: # check for null dataspace try: @@ -2119,7 +2188,7 @@ def getDatasetValuesByUuid(self, obj_uuid, slices=Ellipsis, format="json"): except IOError: # assume null dataspace, return none return None - if not val: + if val is None: self.log.warning("no value returned from scalar dataset") if type(slices) != list and type(slices) != tuple and slices is not Ellipsis: @@ -2131,17 +2200,15 @@ def getDatasetValuesByUuid(self, obj_uuid, slices=Ellipsis, format="json"): msg = "Unexpected error: getDatasetValuesByUuid: number of dims in selection not same as rank" self.log.error(msg) raise IOError(errno.EIO, msg) - - if dt.kind == 'O': + + if dt.kind == "O": if format != "json": msg = "Only JSON is supported for for this data type" self.log.info(msg) raise IOError(errno.EINVAL, msg) # numpy object type - could be a vlen string or generic vlen h5t_check = h5py.h5t.check_dtype(vlen=dt) - if h5t_check == str or h5t_check == unicode: - values = dset[slices].tolist() # just dump to list - elif six.PY3 and h5t_check == bytes: + if h5t_check == str or h5t_check == bytes: values = self.bytesArrayToList(dset[slices]) elif h5t_check is not None: # other vlen data @@ -2156,91 +2223,95 @@ def getDatasetValuesByUuid(self, obj_uuid, slices=Ellipsis, format="json"): msg = "Unexpected error, 
object type unknown" self.log.error(msg) raise IOError(errno.EIO, msg) - elif dt.kind == 'V' and len(dt) <= 1 and len(dt.shape) == 0: + elif dt.kind == "V" and len(dt) <= 1 and len(dt.shape) == 0 and not dt.names: # opaque type - skip for now self.log.warning("unable to get opaque type values") - values = "????" - elif dt.kind == 'S' and six.PY3: - # For Python3 fixed string values will be returned as bytes, - # so finese them into strings - if format != "json": - msg = "Only JSON is supported for for this data type" - self.log.info(msg) - raise IOError(errno.EINVAL, msg) + values = "????" + elif dt.kind == "S" and format == "json": values = self.bytesArrayToList(dset[slices]) - elif len(dt) > 1: + elif len(dt) > 1 or dt.names: # compound type if format == "json": values = self.bytesArrayToList(dset[slices]) else: values = dset[slices].tobytes() - else: values = dset[slices] - + # just use tolist to dump - if format == "json": + if format == "json": values = values.tolist() else: - #values = base64.b64encode(dset[slices].tobytes()) + # values = base64.b64encode(dset[slices].tobytes()) values = values.tobytes() - + return values - + """ doDatasetQueryByUuid: return rows based on query string Return rows from a dataset that matches query string. 
- + Note: Only supported for compound_type/one-dimensional datasets """ - def doDatasetQueryByUuid(self, obj_uuid, query, start=0, stop=-1, step=1, limit=None): + + def doDatasetQueryByUuid( + self, obj_uuid, query, start=0, stop=-1, step=1, limit=None + ): self.log.info("doQueryByUuid - uuid: " + obj_uuid + " query:" + query) - self.log.info("start: " + str(start) + " stop: " + str(stop) + " step: " + str(step) + " limit: " + str(limit)) - dset = self.getDatasetObjByUuid(obj_uuid) + self.log.info( + "start: " + + str(start) + + " stop: " + + str(stop) + + " step: " + + str(step) + + " limit: " + + str(limit) + ) + dset = self.getDatasetObjByUuid(obj_uuid) if dset is None: msg = "Dataset: " + obj_uuid + " not found" self.log.info(msg) raise IOError(errno.ENXIO, msg) - + values = [] dt = dset.dtype typeItem = getTypeItem(dt) - itemSize = getItemSize(typeItem) - if typeItem['class'] != "H5T_COMPOUND": + # itemSize = getItemSize(typeItem) + if typeItem["class"] != "H5T_COMPOUND": msg = "Only compound type datasets can be used as query target" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if dset.shape is None: # null space dataset (with h5py 2.6.0) - return None - + return None + rank = len(dset.shape) if rank != 1: msg = "One one-dimensional datasets can be used as query target" self.log.info(msg) raise IOError(errno.EINVAL, msg) - values = [] indexes = [] count = 0 - + num_elements = dset.shape[0] if stop == -1: stop = num_elements elif stop > num_elements: stop = num_elements block_size = self._getBlockSize(dset) - self.log.info("block_size: " + str(block_size)) - + self.log.info("block_size: " + str(block_size)) + field_names = list(dset.dtype.fields.keys()) - eval_str = self._getEvalStr(query, field_names) - + eval_str = self._getEvalStr(query, field_names) + while start < stop: if limit and (count == limit): break # no more rows for this batch - end = start + block_size + end = start + block_size if end > stop: end = stop rows = dset[start:end] # read 
from dataset @@ -2255,21 +2326,21 @@ def doDatasetQueryByUuid(self, obj_uuid, query, start=0, stop=-1, step=1, limit= count += 1 if limit and (count == limit): break # no more rows for this batch - + start = end # go to next block - - + # values = self.getDataValue(item_type, values, dimension=1, dims=(len(values),)) - - self.log.info("got " + str(count) + " query matches") + + self.log.info("got " + str(count) + " query matches") return (indexes, values) - + """ _getBlockSize: Get number of rows to read from disk - + heurestic to get reasonable sized chunk of data to fetch. make multiple of chunk_size if possible - """ + """ + def _getBlockSize(self, dset): target_block_size = 256 * 1000 if dset.chunks: @@ -2281,12 +2352,13 @@ def _getBlockSize(self, dset): else: block_size = target_block_size return block_size - + """ _getEvalStr: Get eval string for given query - + Gets Eval string to use with numpy where method. - """ + """ + def _getEvalStr(self, query, field_names): i = 0 eval_str = "" @@ -2294,7 +2366,7 @@ def _getEvalStr(self, query, field_names): end_quote_char = None var_count = 0 paren_count = 0 - black_list = ( "import", ) # field names that are not allowed + black_list = ("import",) # field names that are not allowed self.log.info("getEvalStr(" + query + ")") for item in black_list: if item in field_names: @@ -2303,8 +2375,8 @@ def _getEvalStr(self, query, field_names): raise IOError(errno.EINVAL, msg) while i < len(query): ch = query[i] - if (i+1) < len(query): - ch_next = query[i+1] + if (i + 1) < len(query): + ch_next = query[i + 1] else: ch_next = None if var_name and not ch.isalnum(): @@ -2317,7 +2389,7 @@ def _getEvalStr(self, query, field_names): eval_str += "rows['" + var_name + "']" var_name = None var_count += 1 - + if end_quote_char: if ch == end_quote_char: # end of literal @@ -2327,16 +2399,16 @@ def _getEvalStr(self, query, field_names): end_quote_char = ch eval_str += ch elif ch.isalpha(): - if ch == 'b' and ch_next in ("'", '"'): - 
eval_str += 'b' # start of a byte string literal + if ch == "b" and ch_next in ("'", '"'): + eval_str += "b" # start of a byte string literal elif var_name is None: var_name = ch # start of a variable else: var_name += ch - elif ch == '(' and end_quote_char is None: + elif ch == "(" and end_quote_char is None: paren_count += 1 eval_str += ch - elif ch == ')' and end_quote_char is None: + elif ch == ")" and end_quote_char is None: paren_count -= 1 if paren_count < 0: msg = "Mismatched paren" @@ -2346,7 +2418,7 @@ def _getEvalStr(self, query, field_names): else: # just add to eval_str eval_str += ch - i = i+1 + i = i + 1 if end_quote_char: msg = "no matching quote character" self.log.info("EINVAL: " + msg) @@ -2359,20 +2431,21 @@ def _getEvalStr(self, query, field_names): msg = "Mismatched paren" self.log.info("EINVAL: " + msg) raise IOError(errno.EINVAL, msg) - + return eval_str """ Get values from dataset identified by obj_uuid using the given point selection. """ + def getDatasetPointSelectionByUuid(self, obj_uuid, points): dset = self.getDatasetObjByUuid(obj_uuid) if dset is None: msg = "Dataset: " + obj_uuid + " not found" self.log.info(msg) raise IOError(errno.ENXIO, msg) - + rank = len(dset.shape) values = np.zeros(len(points), dtype=dset.dtype) try: @@ -2394,24 +2467,25 @@ def getDatasetPointSelectionByUuid(self, obj_uuid, points): setDatasetValuesByUuid - update the given dataset values with supplied data and optionally a hyperslab selection (slices) """ + def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): dset = self.getDatasetObjByUuid(obj_uuid) - + if format not in ("json", "binary"): msg = "only json and binary formats are supported" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if format == "binary" and type(data) is not bytes: - msg ="data must be of type bytes for binary writing" + msg = "data must be of type bytes for binary writing" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if dset is None: msg = 
"Dataset: " + obj_uuid + " not found" self.log.info(msg) - raise IOError(errno.ENXIO, msg) - + raise IOError(errno.ENXIO, msg) + dt = dset.dtype typeItem = getTypeItem(dt) itemSize = getItemSize(typeItem) @@ -2419,11 +2493,11 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): arraySize = 1 for extent in dset.shape: arraySize *= arraySize - + if itemSize == "H5T_VARIABLE" and format == "binary": msg = "Only JSON is supported for for this data type" self.log.info(msg) - raise IOError(errno.EINVAL, msg) + raise IOError(errno.EINVAL, msg) if slices is None: slices = [] @@ -2432,24 +2506,22 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): s = slice(0, dset.shape[dim], 1) slices.append(s) slices = tuple(slices) - - + if type(slices) != tuple: msg = "setDatasetValuesByUuid: bad type for dim parameter" self.log.error(msg) - raise IOError(erno.EIO, msg) - + raise IOError(errno.EIO, msg) if len(slices) != rank: msg = "number of dims in selection not same as rank" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + npoints = 1 np_shape = [] for i in range(rank): s = slices[i] - + if s.start < 0 or s.step <= 0 or s.stop < s.start: msg = "invalid slice specification" self.log.info(msg) @@ -2459,28 +2531,27 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): self.log.info(msg) raise IOError(errno.EINVAL, msg) np_shape.append(s.stop - s.start) - + count = (s.stop - s.start) // s.step if count <= 0: msg = "invalid slice specification" self.log.info(msg) - raise IOError(errno.EINVAL, msg) - - npoints *= count - + raise IOError(errno.EINVAL, msg) + + npoints *= count + np_shape = tuple(np_shape) # for comparison with ndarray shape - - self.log.info("selection shape:" + str(np_shape)) + self.log.info("selection shape:" + str(np_shape)) # need some special conversion for compound types -- # each element must be a tuple, but the JSON decoder # gives us a list instead. 
- if format != "binary" and len(dset.dtype) > 1 and type(data) in (list, tuple): + if format != "binary" and dset.dtype.names and isinstance(data, (list, tuple)): data = self.toTuple(rank, data) - #for i in range(len(data)): + # for i in range(len(data)): # converted_data.append(self.toTuple(data[i])) - #data = converted_data + # data = converted_data else: h5t_check = h5py.check_dtype(ref=dset.dtype) if h5t_check in (h5py.Reference, h5py.RegionReference): @@ -2488,22 +2559,37 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): if format == "binary": msg = "Only JSON is supported for for this data type" self.log.info(msg) - raise IOError(errno.EINVAL, msg) + raise IOError(errno.EINVAL, msg) data = self.listToRef(data) - + if format == "binary": - if npoints*itemSize != len(data): - msg = "Expected: " + str(npoints*itemSize) + " bytes, but got: " + str(len(data)) + if npoints * itemSize != len(data): + msg = ( + "Expected: " + + str(npoints * itemSize) + + " bytes, but got: " + + str(len(data)) + ) self.log.info(msg) raise IOError(errno.EINVAL, msg) - arr = np.fromstring(data, dtype=dset.dtype) - arr = arr.reshape(np_shape) # conform to selection shape - + if dset.dtype.shape == (): + arr = np.fromstring(data, dtype=dset.dtype) + arr = arr.reshape(np_shape) # conform to selection shape + else: + # tricy array type! + arr = np.empty(np_shape, dtype=dset.dtype) + base_arr = np.fromstring(data, dtype=dset.dtype.base) + base_shape = list(np_shape) + base_shape.extend(dset.dtype.shape) # add on the type dimensions + base_arr = base_arr.reshape(base_shape) + arr[...] 
= base_arr else: # data is json if npoints == 1 and len(dset.dtype) > 1: # convert to tuple for compound singleton writes - data = [tuple(data),] + data = [ + tuple(data), + ] arr = np.array(data, dtype=dset.dtype) # raise an exception of the array shape doesn't match the selection shape @@ -2523,16 +2609,16 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): if selection_extent == 1: np_index += 1 continue # skip singleton selection - + # selection/data mismatch! msg = "data shape doesn't match selection shape" msg += "--data shape: " + str(arr.shape) msg += "--selection shape: " + str(np_shape) - + self.log.info(msg) raise IOError(errno.EINVAL, msg) - - # write temp numpy array to dataset + + # write temp numpy array to dataset if rank == 1: s = slices[0] try: @@ -2555,31 +2641,32 @@ def setDatasetValuesByUuid(self, obj_uuid, data, slices=None, format="json"): setDatasetValuesByPointSelection - Update the dataset values using the given data and point selection """ + def setDatasetValuesByPointSelection(self, obj_uuid, data, points, format="json"): dset = self.getDatasetObjByUuid(obj_uuid) - + if format not in ("json", "binary"): msg = "only json and binary formats are supported" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if format == "binary" and type(data) is not bytes: - msg ="data must be of type bytes for binary writing" + msg = "data must be of type bytes for binary writing" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + if dset is None: msg = "Dataset: " + obj_uuid + " not found" self.log.info(msg) - raise IOError(errno.ENXIO, msg) - + raise IOError(errno.ENXIO, msg) + dt = dset.dtype typeItem = getTypeItem(dt) itemSize = getItemSize(typeItem) if itemSize == "H5T_VARIABLE" and format == "binary": msg = "Only JSON is supported for for this data type" self.log.info(msg) - raise IOError(errno.EINVAL, msg) + raise IOError(errno.EINVAL, msg) rank = len(dset.shape) @@ -2587,11 +2674,12 @@ def 
setDatasetValuesByPointSelection(self, obj_uuid, data, points, format="json" # each element must be a tuple, but the JSON decoder # gives us a list instead. if format == "json" and len(dset.dtype) > 1 and type(data) in (list, tuple): - converted_data = self.toTuple(rank, data) - #for i in range(len(data)): + raise NotImplementedError("need some special conversion for compound types") + # converted_data = self.toTuple(rank, data) + # for i in range(len(data)): # converted_data.append(self.toTuple(data[i])) - #data = converted_data - + # data = converted_data + if format == "json": try: @@ -2607,11 +2695,11 @@ def setDatasetValuesByPointSelection(self, obj_uuid, data, points, format="json" msg = "setDatasetValuesByPointSelection, out of range error" self.log.info(msg) raise IOError(errno.EINVAL, msg) - + else: - #binary + # binary arr = np.fromstring(data, dtype=dset.dtype) - dset[points] = arr # coordinate write + dset[points] = arr # coordinate write # update modified time self.setModifiedTime(obj_uuid) @@ -2621,8 +2709,10 @@ def setDatasetValuesByPointSelection(self, obj_uuid, data, points, format="json" createDataset - creates new dataset given shape and datatype Returns item """ - def createDataset(self, datatype, datashape, max_shape=None, - creation_props=None, obj_uuid=None): + + def createDataset( + self, datatype, datashape, max_shape=None, creation_props=None, obj_uuid=None + ): self.initFile() if self.readonly: msg = "Unable to create dataset (Updates are not allowed)" @@ -2646,11 +2736,11 @@ def createDataset(self, datatype, datashape, max_shape=None, if "fillValue" in creation_props: fillvalue = creation_props["fillValue"] if "trackTimes" in creation_props: - kwargs['track_times'] = creation_props["trackTimes"] + kwargs["track_times"] = creation_props["trackTimes"] if "layout" in creation_props: layout = creation_props["layout"] if "dims" in layout: - kwargs['chunks'] = tuple(layout["dims"]) + kwargs["chunks"] = tuple(layout["dims"]) if "filters" in 
creation_props: filter_props = creation_props["filters"] for filter_prop in filter_props: @@ -2660,42 +2750,56 @@ def createDataset(self, datatype, datashape, max_shape=None, raise IOError(errno.EINVAL, msg) filter_id = filter_prop["id"] if filter_id not in _HDF_FILTERS: - self.log.info("unknown filter id: " + str(filter_id) + " ignoring") + self.log.info( + "unknown filter id: " + str(filter_id) + " ignoring" + ) continue hdf_filter = _HDF_FILTERS[filter_id] self.log.info("got filter: " + str(filter_id)) if "alias" not in hdf_filter: - self.log.info("unsupported filter id: " + str(filter_id) + " ignoring") + self.log.info( + "unsupported filter id: " + str(filter_id) + " ignoring" + ) continue filter_alias = hdf_filter["alias"] if not h5py.h5z.filter_avail(filter_id): - self.log.info("compression filter not available, filter: " + filter_alias + " will be ignored") + self.log.info( + "compression filter not available, filter: " + + filter_alias + + " will be ignored" + ) continue if filter_alias in _H5PY_COMPRESSION_FILTERS: - if kwargs.get('compression'): - self.log.info("compression filter already set, filter: " + filter_alias + " will be ignored") + if kwargs.get("compression"): + self.log.info( + "compression filter already set, filter: " + + filter_alias + + " will be ignored" + ) continue - kwargs['compression'] = filter_alias - self.log.info("setting compression filter to: " + kwargs['compression']) + kwargs["compression"] = filter_alias + self.log.info( + "setting compression filter to: " + kwargs["compression"] + ) if filter_alias == "gzip": # check for an optional compression value if "level" in filter_prop: - kwargs['compression_opts'] = filter_prop["level"] + kwargs["compression_opts"] = filter_prop["level"] elif filter_alias == "szip": bitsPerPixel = None - coding = 'nn' + coding = "nn" if "bitsPerPixel" in filter_prop: bitsPerPixel = filter_prop["bitsPerPixel"] if "coding" in filter_prop: if filter_prop["coding"] == "H5_SZIP_EC_OPTION_MASK": - coding 
= 'ec' + coding = "ec" elif filter_prop["coding"] == "H5_SZIP_NN_OPTION_MASK": - coding = 'nn' + coding = "nn" else: msg = "invalid szip option: 'coding'" self.log.info(msg) @@ -2705,31 +2809,37 @@ def createDataset(self, datatype, datashape, max_shape=None, if "pixelsPerBlock" in filter_props: self.log.info("ignoring szip option: 'pixelsPerBlock'") if "pixelsPerScanline" in filter_props: - self.log.info("ignoring szip option: 'pixelsPerScanline'") + self.log.info( + "ignoring szip option: 'pixelsPerScanline'" + ) if bitsPerPixel: - kwargs['compression_opts'] = (coding, bitsPerPixel) + kwargs["compression_opts"] = (coding, bitsPerPixel) else: if filter_alias == "shuffle": - kwargs['shuffle'] = True + kwargs["shuffle"] = True elif filter_alias == "fletcher32": - kwargs['fletcher32'] = True + kwargs["fletcher32"] = True elif filter_alias == "scaleoffset": if "scaleOffset" not in filter_prop: msg = "No scale_offset provided for scale offset filter" self.log(msg) raise IOError(errno.EINVAL, msg) - kwargs['scaleoffset'] = filter_prop["scaleOffset"] + kwargs["scaleoffset"] = filter_prop["scaleOffset"] else: - self.log.info("Unexpected filter name: " + filter_alias + " , ignoring") + self.log.info( + "Unexpected filter name: " + + filter_alias + + " , ignoring" + ) dt_ref = self.createTypeFromItem(datatype) if dt_ref is None: - msg = 'Unexpected error, no type returned' + msg = "Unexpected error, no type returned" self.log.error(msg) raise IOError(errno.EIO, msg) dt = dt_ref - if hasattr(dt_ref, 'dtype'): + if hasattr(dt_ref, "dtype"): # dt_ref is actualy a handle to a committed type # get the dtype prop, but use dt_ref for the actual dataset creation dt = dt_ref.dtype @@ -2738,7 +2848,7 @@ def createDataset(self, datatype, datashape, max_shape=None, # for compound types, need to convert from list to dataset compatible element if len(dt) != len(fillvalue): - msg = 'fillvalue has incorrect number of elements' + msg = "fillvalue has incorrect number of elements" 
self.log.info(msg) raise IOError(errno.EINVAL, msg) ndscalar = np.zeros((), dtype=dt) @@ -2748,7 +2858,7 @@ def createDataset(self, datatype, datashape, max_shape=None, fillvalue = ndscalar if fillvalue: - kwargs['fillvalue'] = fillvalue + kwargs["fillvalue"] = fillvalue dataset_id = None if datashape is None: @@ -2762,27 +2872,25 @@ def createDataset(self, datatype, datashape, max_shape=None, tmpGrp = self.dbGrp.create_group("{tmp}") else: tmpGrp = self.dbGrp["{tmp}"] - tmpDataset = tmpGrp.create_dataset(obj_uuid, shape=(1,), - dtype=dt_ref) + tmpDataset = tmpGrp.create_dataset(obj_uuid, shape=(1,), dtype=dt_ref) tid = tmpDataset.id.get_type() sid = sid = h5py.h5s.create(h5py.h5s.NULL) # now create the permanent dataset gid = datasets.id - if six.PY3: - b_obj_uuid = obj_uuid.encode('utf-8') - dataset_id = h5py.h5d.create(gid, b_obj_uuid, tid, sid) - else: - dataset_id = h5py.h5d.create(gid, obj_uuid, tid, sid) + b_obj_uuid = obj_uuid.encode("utf-8") + dataset_id = h5py.h5d.create(gid, b_obj_uuid, tid, sid) # delete the temp dataset del tmpGrp[obj_uuid] else: - # create the dataset - try: newDataset = datasets.create_dataset( - obj_uuid, shape=datashape, maxshape=max_shape, - dtype=dt_ref, **kwargs) + obj_uuid, + shape=datashape, + maxshape=max_shape, + dtype=dt_ref, + **kwargs, + ) except ValueError as ve: msg = "Unable to create dataset" try: @@ -2796,7 +2904,7 @@ def createDataset(self, datatype, datashape, max_shape=None, dataset_id = newDataset.id if dataset_id is None: - msg = 'Unexpected failure to create dataset' + msg = "Unexpected failure to create dataset" self.log.error(msg) raise IOError(errno.EIO, msg) # store reverse map as an attribute @@ -2813,18 +2921,19 @@ def createDataset(self, datatype, datashape, max_shape=None, self.setCreateTime(obj_uuid, timestamp=now) self.setModifiedTime(obj_uuid, timestamp=now) - item['id'] = obj_uuid + item["id"] = obj_uuid if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = 
self.getModifiedTime(obj_uuid) - item['attributeCount'] = 0 + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = self.getModifiedTime(obj_uuid) + item["attributeCount"] = 0 return item """ Resize existing Dataset """ + def resizeDataset(self, obj_uuid, shape): - self.log.info("resizeDataset(") # + obj_uuid + "): ") # + str(shape)) + self.log.info("resizeDataset(") # + obj_uuid + "): ") # + str(shape)) self.initFile() if self.readonly: msg = "Unable to resize dataset (Updates are not allowed)" @@ -2840,7 +2949,7 @@ def resizeDataset(self, obj_uuid, shape): msg = "Unable to resize dataset, cannot make extent smaller" self.log.info(msg) raise IOError(errno.EINVAL, msg) - if dset.maxshape[i] != None and shape[i] > dset.maxshape[i]: + if dset.maxshape[i] is not None and shape[i] > dset.maxshape[i]: msg = "Unable to resize dataset, max extent exceeded" self.log.info(msg) raise IOError(errno.EINVAL, msg) @@ -2853,6 +2962,7 @@ def resizeDataset(self, obj_uuid, shape): """ Check if link points to given target (as a HardLink) """ + def isObjectHardLinked(self, parentGroup, targetGroup, linkName): try: linkObj = parentGroup.get(linkName, None, False, True) @@ -2860,11 +2970,11 @@ def isObjectHardLinked(self, parentGroup, targetGroup, linkName): except TypeError: # UDLink? 
Ignore for now return False - if linkClass == 'SoftLink': + if linkClass == "SoftLink": return False - elif linkClass == 'ExternalLink': + elif linkClass == "ExternalLink": return False - elif linkClass == 'HardLink': + elif linkClass == "HardLink": if parentGroup[linkName] == targetGroup: return True else: @@ -2874,8 +2984,9 @@ def isObjectHardLinked(self, parentGroup, targetGroup, linkName): """ Delete Dataset, Group or Datatype by UUID """ + def deleteObjectByUuid(self, objtype, obj_uuid): - if objtype not in ('group', 'dataset', 'datatype'): + if objtype not in ("group", "dataset", "datatype"): msg = "unexpected objtype: " + objtype self.log.error(msg) raise IOError(errno.EIO, msg) @@ -2886,7 +2997,7 @@ def deleteObjectByUuid(self, objtype, obj_uuid): self.log.info(msg) raise IOError(errno.EPERM, msg) - if obj_uuid == self.dbGrp.attrs["rootUUID"] and objtype == 'group': + if obj_uuid == self.dbGrp.attrs["rootUUID"] and objtype == "group": # can't delete root group msg = "Unable to delete group (root group may not be deleted)" self.log.info(msg) @@ -2894,10 +3005,10 @@ def deleteObjectByUuid(self, objtype, obj_uuid): dbCol = None tgt = None - if objtype == 'dataset': + if objtype == "dataset": tgt = self.getDatasetObjByUuid(obj_uuid) dbCol = self.dbGrp["{datasets}"] - elif objtype == 'group': + elif objtype == "group": tgt = self.getGroupObjByUuid(obj_uuid) dbCol = self.dbGrp["{groups}"] else: # datatype @@ -2910,7 +3021,7 @@ def deleteObjectByUuid(self, objtype, obj_uuid): raise IOError(errno.ENXIO, msg) # unlink from root (if present) - self.unlinkObject(self.f['/'], tgt) + self.unlinkObject(self.f["/"], tgt) groups = self.dbGrp["{groups}"] # iterate through each group in the file and unlink tgt if it is linked @@ -2925,9 +3036,9 @@ def deleteObjectByUuid(self, objtype, obj_uuid): grp = self.f[grpRef] for linkName in grp: if self.isObjectHardLinked(grp, tgt, linkName): - linkList.append({'group': grp, 'link': linkName}) + linkList.append({"group": grp, "link": 
linkName}) for item in linkList: - self.unlinkObjectItem(item['group'], tgt, item['link']) + self.unlinkObjectItem(item["group"], tgt, item["link"]) addr = h5py.h5o.get_info(tgt.id).addr addrGrp = self.dbGrp["{addr}"] @@ -2944,7 +3055,9 @@ def deleteObjectByUuid(self, objtype, obj_uuid): self.log.warning("did not find: " + obj_uuid + " in anonymous collection") if obj_uuid in dbCol.attrs: - self.log.info("removing: " + obj_uuid + " from non-anonymous collection") + self.log.info( + "removing: " + obj_uuid + " from non-anonymous collection" + ) del dbCol.attrs[obj_uuid] dbRemoved = True @@ -2975,16 +3088,16 @@ def getGroupItemByUuid(self, obj_uuid): if "__db__" in grp: linkCount -= 1 # don't include the db group - item = { 'id': obj_uuid } + item = {"id": obj_uuid} alias = [] if grp.name and not grp.name.startswith("/__db__"): - alias.append(grp.name) # just use the default h5py path for now - item['alias'] = alias - item['attributeCount'] = len(grp.attrs) - item['linkCount'] = linkCount + alias.append(grp.name) # just use the default h5py path for now + item["alias"] = alias + item["attributeCount"] = len(grp.attrs) + item["linkCount"] = linkCount if self.update_timestamps: - item['ctime'] = self.getCreateTime(obj_uuid) - item['mtime'] = self.getModifiedTime(obj_uuid) + item["ctime"] = self.getCreateTime(obj_uuid) + item["mtime"] = self.getModifiedTime(obj_uuid) return item @@ -2994,6 +3107,7 @@ def getGroupItemByUuid(self, obj_uuid): linkName: name of link return: item dictionary with link attributes, or None if not found """ + def getLinkItemByObj(self, parent, link_name): if link_name not in parent: return None @@ -3001,49 +3115,48 @@ def getLinkItemByObj(self, parent, link_name): if link_name == "__db__": return None # don't provide link to db group # "http://somefile/#h5path(somepath)") - item = { 'title': link_name } + item = {"title": link_name} # get the link object, one of HardLink, SoftLink, or ExternalLink try: linkObj = parent.get(link_name, None, 
False, True) linkClass = linkObj.__class__.__name__ except TypeError: # UDLink? set class as 'user' - linkClass = 'UDLink' # user defined links - item['class'] = 'H5L_TYPE_USER_DEFINED' - if linkClass == 'SoftLink': - item['class'] = 'H5L_TYPE_SOFT' - item['h5path'] = linkObj.path - item['href'] = '#h5path(' + linkObj.path + ')' - elif linkClass == 'ExternalLink': - item['class'] = 'H5L_TYPE_EXTERNAL' - item['h5path'] = linkObj.path - item['file'] = linkObj.filename - item['href'] = '#h5path(' + linkObj.path + ')' - elif linkClass == 'HardLink': + linkClass = "UDLink" # user defined links + item["class"] = "H5L_TYPE_USER_DEFINED" + if linkClass == "SoftLink": + item["class"] = "H5L_TYPE_SOFT" + item["h5path"] = linkObj.path + item["href"] = "#h5path(" + linkObj.path + ")" + elif linkClass == "ExternalLink": + item["class"] = "H5L_TYPE_EXTERNAL" + item["h5path"] = linkObj.path + item["file"] = linkObj.filename + item["href"] = "#h5path(" + linkObj.path + ")" + elif linkClass == "HardLink": # Hardlink doesn't have any properties itself, just get the linked # object obj = parent[link_name] addr = h5py.h5o.get_info(obj.id).addr - item['class'] = 'H5L_TYPE_HARD' - item['id'] = self.getUUIDByAddress(addr) + item["class"] = "H5L_TYPE_HARD" + item["id"] = self.getUUIDByAddress(addr) class_name = obj.__class__.__name__ - if class_name == 'Dataset': - item['href'] = 'datasets/' + item['id'] - item['collection'] = 'datasets' - elif class_name == 'Group': - item['href'] = 'groups/' + item['id'] - item['collection'] = 'groups' - elif class_name == 'Datatype': - item['href'] = 'datatypes/' + item['id'] - item['collection'] = 'datatypes' + if class_name == "Dataset": + item["href"] = "datasets/" + item["id"] + item["collection"] = "datasets" + elif class_name == "Group": + item["href"] = "groups/" + item["id"] + item["collection"] = "groups" + elif class_name == "Datatype": + item["href"] = "datatypes/" + item["id"] + item["collection"] = "datatypes" else: - 
self.log.warning("unexpected object type: " + item['type']) + self.log.warning("unexpected object type: " + item["type"]) return item def getLinkItemByUuid(self, grpUuid, link_name): - self.log.info( - "db.getLinkItemByUuid(" + grpUuid + ", [" + link_name + "])") + self.log.info("db.getLinkItemByUuid(" + grpUuid + ", [" + link_name + "])") if not link_name: msg = "link_name not specified" self.log.info(msg) @@ -3060,13 +3173,25 @@ def getLinkItemByUuid(self, grpUuid, link_name): # add timestamps if item: if self.update_timestamps: - item['ctime'] = self.getCreateTime(grpUuid, objType="link", name=link_name) - item['mtime'] = self.getModifiedTime(grpUuid, objType="link", name=link_name) + item["ctime"] = self.getCreateTime( + grpUuid, objType="link", name=link_name + ) + item["mtime"] = self.getModifiedTime( + grpUuid, objType="link", name=link_name + ) else: self.log.info("link not found") - mtime = self.getModifiedTime(grpUuid, objType="link", name=link_name, useRoot=False) + mtime = self.getModifiedTime( + grpUuid, objType="link", name=link_name, useRoot=False + ) if mtime: - msg = "Link [" + link_name + "] of: " + grpUuid + " has been previously deleted" + msg = ( + "Link [" + + link_name + + "] of: " + + grpUuid + + " has been previously deleted" + ) self.log.info(msg) raise IOError(errno.ENOENT, msg) else: @@ -3123,7 +3248,13 @@ def unlinkItem(self, grpUuid, link_name): raise IOError(errno.ENXIO, msg) if link_name not in grp: - msg = "Link: [" + link_name + "] of group: " + grpUuid + " not found, cannot remove link" + msg = ( + "Link: [" + + link_name + + "] of group: " + + grpUuid + + " not found, cannot remove link" + ) self.log.info(msg) raise IOError(errno.ENXIO, msg) @@ -3136,7 +3267,7 @@ def unlinkItem(self, grpUuid, link_name): try: linkObj = grp.get(link_name, None, False, True) linkClass = linkObj.__class__.__name__ - if linkClass == 'HardLink': + if linkClass == "HardLink": # we can safely reference the object obj = grp[link_name] except TypeError: 
@@ -3161,7 +3292,7 @@ def unlinkItem(self, grpUuid, link_name): def getCollection(self, col_type, marker=None, limit=None): self.log.info("db.getCollection(" + col_type + ")") - #col_type should be either "datasets", "groups", or "datatypes" + # col_type should be either "datasets", "groups", or "datatypes" if col_type not in ("datasets", "groups", "datatypes"): msg = "Unexpected col_type: [" + col_type + "]" self.log.error(msg) @@ -3205,12 +3336,14 @@ def getCollection(self, col_type, marker=None, limit=None): """ Get the DB Collection names """ + def getDBCollections(self): return ("{groups}", "{datasets}", "{datatypes}") """ Return the db collection the uuid belongs to """ + def getDBCollection(self, obj_uuid): dbCollections = self.getDBCollections() for dbCollectionName in dbCollections: @@ -3238,7 +3371,7 @@ def unlinkObjectItem(self, parentGrp, tgtObj, link_name): linkClass = linkObj.__class__.__name__ # only deal with HardLinks linkDeleted = False - if linkClass == 'HardLink': + if linkClass == "HardLink": obj = parentGrp[link_name] if tgtObj is None or obj == tgtObj: @@ -3249,13 +3382,13 @@ def unlinkObjectItem(self, parentGrp, tgtObj, link_name): # also remove the attribute UUID key addr = h5py.h5o.get_info(obj.id).addr obj_uuid = self.getUUIDByAddress(addr) - self.log.info("converting: " + obj_uuid - + " to anonymous obj") + self.log.info("converting: " + obj_uuid + " to anonymous obj") dbCol = self.getDBCollection(obj_uuid) del dbCol.attrs[obj_uuid] # remove the object ref - dbCol[obj_uuid] = obj # add a hardlink - self.log.info("deleting link: [" + link_name + "] from: " - + parentGrp.name) + dbCol[obj_uuid] = obj # add a hardlink + self.log.info( + "deleting link: [" + link_name + "] from: " + parentGrp.name + ) del parentGrp[link_name] linkDeleted = True else: @@ -3302,7 +3435,7 @@ def linkObject(self, parentUUID, childUUID, link_name): if childUUID in dbCol: # convert to a ref del dbCol[childUUID] # remove hardlink - dbCol.attrs[childUUID] = 
childObj.ref # create a ref + dbCol.attrs[childUUID] = childObj.ref # create a ref # set link timestamps now = time.time() @@ -3371,7 +3504,7 @@ def createGroup(self, obj_uuid=None): addrGrp = self.dbGrp["{addr}"] addrGrp.attrs[str(addr)] = obj_uuid - #set timestamps + # set timestamps now = time.time() self.setCreateTime(obj_uuid, timestamp=now) self.setModifiedTime(obj_uuid, timestamp=now) @@ -3382,9 +3515,9 @@ def getNumberOfGroups(self): self.initFile() count = 0 groups = self.dbGrp["{groups}"] - count += len(groups) # anonymous groups + count += len(groups) # anonymous groups count += len(groups.attrs) # linked groups - count += 1 # add of for root group + count += 1 # add of for root group return count @@ -3392,7 +3525,7 @@ def getNumberOfDatasets(self): self.initFile() count = 0 datasets = self.dbGrp["{datasets}"] - count += len(datasets) # anonymous datasets + count += len(datasets) # anonymous datasets count += len(datasets.attrs) # linked datasets return count @@ -3400,6 +3533,6 @@ def getNumberOfDatatypes(self): self.initFile() count = 0 datatypes = self.dbGrp["{datatypes}"] - count += len(datatypes) # anonymous datatypes + count += len(datatypes) # anonymous datatypes count += len(datatypes.attrs) # linked datatypes return count diff --git a/h5json/hdf5dtype.py b/h5json/hdf5dtype.py index e1f3be8..9493da9 100755 --- a/h5json/hdf5dtype.py +++ b/h5json/hdf5dtype.py @@ -19,128 +19,124 @@ from h5py.h5t import check_dtype from h5py.h5r import Reference from h5py.h5r import RegionReference -import six -if six.PY3: - unicode = str - -""" -Convert the given type item to a predefined type string for -predefined integer and floating point types ("H5T_STD_I64LE", et. al). -For compound types, recursively iterate through the typeItem and do same -conversion for fields of the compound type. -""" def getTypeResponse(typeItem): - + """ + Convert the given type item to a predefined type string for + predefined integer and floating point types ("H5T_STD_I64LE", et. al). 
+ For compound types, recursively iterate through the typeItem and do same + conversion for fields of the compound type. + """ response = None - if 'uuid' in typeItem: + if "uuid" in typeItem: # committed type, just return uuid - response = 'datatypes/' + typeItem['uuid'] - elif typeItem['class'] == 'H5T_INTEGER' or typeItem['class'] == 'H5T_FLOAT': + response = "datatypes/" + typeItem["uuid"] + elif typeItem["class"] == "H5T_INTEGER" or typeItem["class"] == "H5T_FLOAT": # just return the class and base for pre-defined types response = {} - response['class'] = typeItem['class'] - response['base'] = typeItem['base'] - elif typeItem['class'] == 'H5T_OPAQUE': + response["class"] = typeItem["class"] + response["base"] = typeItem["base"] + elif typeItem["class"] == "H5T_OPAQUE": response = {} - response['class'] = 'H5T_OPAQUE' - response['size'] = typeItem['size'] - elif typeItem['class'] == 'H5T_REFERENCE': + response["class"] = "H5T_OPAQUE" + response["size"] = typeItem["size"] + elif typeItem["class"] == "H5T_REFERENCE": response = {} - response['class'] = 'H5T_REFERENCE' - response['base'] = typeItem['base'] - elif typeItem['class'] == 'H5T_COMPOUND': + response["class"] = "H5T_REFERENCE" + response["base"] = typeItem["base"] + elif typeItem["class"] == "H5T_COMPOUND": response = {} - response['class'] = 'H5T_COMPOUND' + response["class"] = "H5T_COMPOUND" fieldList = [] - for field in typeItem['fields']: - fieldItem = { } - fieldItem['name'] = field['name'] - fieldItem['type'] = getTypeResponse(field['type']) # recursive call + for field in typeItem["fields"]: + fieldItem = {} + fieldItem["name"] = field["name"] + fieldItem["type"] = getTypeResponse(field["type"]) # recursive call fieldList.append(fieldItem) - response['fields'] = fieldList + response["fields"] = fieldList else: - response = {} # otherwise, return full type + response = {} # otherwise, return full type for k in typeItem.keys(): - if k == 'base': + if k == "base": if type(typeItem[k]) == dict: 
response[k] = getTypeResponse(typeItem[k]) # recursive call else: response[k] = typeItem[k] # predefined type - elif k not in ('size', 'base_size'): + elif k not in ("size", "base_size"): response[k] = typeItem[k] return response - - -""" + + +def getItemSize(typeItem): + """ Get size of an item in bytes. For variable length types (e.g. variable length strings), return the string "H5T_VARIABLE" -""" -def getItemSize(typeItem): + """ # handle the case where we are passed a primitive type first - if type(typeItem) in [six.string_types, six.text_type, six.binary_type]: - for type_prefix in ("H5T_STD_I", "H5T_STD_U", "H5T_IEEE_F"): + if isinstance(typeItem, bytes): + typeItem = typeItem.decode("ascii") + if isinstance(typeItem, str): + for type_prefix in ("H5T_STD_I", "H5T_STD_U", "H5T_IEEE_F"): if typeItem.startswith(type_prefix): - num_bits = typeItem[len(type_prefix):] - if num_bits[-2:] in ('LE', 'BE'): + num_bits = typeItem[len(type_prefix) :] + if num_bits[-2:] in ("LE", "BE"): num_bits = num_bits[:-2] try: return int(num_bits) // 8 except ValueError: raise TypeError("Invalid Type") # none of the expect primative types mathched - raise TypeError("Invalid Type") + raise TypeError("Invalid Type") if type(typeItem) != dict: raise TypeError("invalid type") item_size = 0 - if 'class' not in typeItem: + if "class" not in typeItem: raise KeyError("'class' not provided") - typeClass = typeItem['class'] + typeClass = typeItem["class"] - - if typeClass == 'H5T_INTEGER': - if 'base' not in typeItem: + if typeClass == "H5T_INTEGER": + if "base" not in typeItem: raise KeyError("'base' not provided") - item_size = getItemSize(typeItem['base']) - - elif typeClass == 'H5T_FLOAT': - if 'base' not in typeItem: + item_size = getItemSize(typeItem["base"]) + + elif typeClass == "H5T_FLOAT": + if "base" not in typeItem: raise KeyError("'base' not provided") - item_size = getItemSize(typeItem['base']) - - elif typeClass == 'H5T_STRING': - if 'length' not in typeItem: + item_size = 
getItemSize(typeItem["base"]) + + elif typeClass == "H5T_STRING": + if "length" not in typeItem: raise KeyError("'length' not provided") - item_size = typeItem["length"] - - elif typeClass == 'H5T_VLEN': + item_size = typeItem["length"] + + elif typeClass == "H5T_VLEN": item_size = "H5T_VARIABLE" - elif typeClass == 'H5T_OPAQUE': - if 'size' not in typeItem: + elif typeClass == "H5T_OPAQUE": + if "size" not in typeItem: raise KeyError("'size' not provided") - item_size = int(typeItem['size']) - - elif typeClass == 'H5T_ARRAY': - if 'dims' not in typeItem: + item_size = int(typeItem["size"]) + + elif typeClass == "H5T_ARRAY": + if "dims" not in typeItem: raise KeyError("'dims' must be provided for array types") - if 'base' not in typeItem: + if "base" not in typeItem: raise KeyError("'base' not provided") - item_size = getItemSize(typeItem['base']) - - elif typeClass == 'H5T_ENUM': - if 'base' not in typeItem: + item_size = getItemSize(typeItem["base"]) + + elif typeClass == "H5T_ENUM": + if "base" not in typeItem: raise KeyError("'base' must be provided for enum types") - item_size = getItemSize(typeItem['base']) - - elif typeClass == 'H5T_REFERENCE': + item_size = getItemSize(typeItem["base"]) + + elif typeClass == "H5T_REFERENCE": item_size = "H5T_VARIABLE" - elif typeClass == 'H5T_COMPOUND': - if 'fields' not in typeItem: + elif typeClass == "H5T_COMPOUND": + if "fields" not in typeItem: raise KeyError("'fields' not provided for compound type") - fields = typeItem['fields'] + fields = typeItem["fields"] if type(fields) is not list: raise TypeError("Type Error: expected list type for 'fields'") if not fields: @@ -149,25 +145,24 @@ def getItemSize(typeItem): for field in fields: if type(field) != dict: raise TypeError("Expected dictionary type for field") - if 'type' not in field: + if "type" not in field: raise KeyError("'type' missing from field") - subtype_size = getItemSize(field['type']) # recursive call + subtype_size = getItemSize(field["type"]) # recursive 
call if subtype_size == "H5T_VARIABLE": item_size = "H5T_VARIABLE" break # don't need to look at the rest - - item_size += subtype_size + + item_size += subtype_size else: raise TypeError("Invalid type class") - - # calculate array type - if 'dims' in typeItem and type(item_size) is int: - dims = typeItem['dims'] + + # calculate array type + if "dims" in typeItem and type(item_size) is int: + dims = typeItem["dims"] for dim in dims: - item_size *= dim - - return item_size + item_size *= dim + return item_size """ @@ -175,43 +170,42 @@ def getItemSize(typeItem): For primitive types, return string with typename For compound types return array of dictionary items """ + + def getTypeItem(dt): - + predefined_int_types = { - 'int8': 'H5T_STD_I8', - 'uint8': 'H5T_STD_U8', - 'int16': 'H5T_STD_I16', - 'uint16': 'H5T_STD_U16', - 'int32': 'H5T_STD_I32', - 'uint32': 'H5T_STD_U32', - 'int64': 'H5T_STD_I64', - 'uint64': 'H5T_STD_U64' - } - predefined_float_types = { - 'float32': 'H5T_IEEE_F32', - 'float64': 'H5T_IEEE_F64' + "int8": "H5T_STD_I8", + "uint8": "H5T_STD_U8", + "int16": "H5T_STD_I16", + "uint16": "H5T_STD_U16", + "int32": "H5T_STD_I32", + "uint32": "H5T_STD_U32", + "int64": "H5T_STD_I64", + "uint64": "H5T_STD_U64", } - + predefined_float_types = {"float32": "H5T_IEEE_F32", "float64": "H5T_IEEE_F64"} + type_info = {} - if len(dt) > 1: + if len(dt) > 1 or dt.names: # compound type names = dt.names - type_info['class'] = 'H5T_COMPOUND' + type_info["class"] = "H5T_COMPOUND" fields = [] for name in names: - field = { 'name': name } - field['type'] = getTypeItem(dt[name]) + field = {"name": name} + field["type"] = getTypeItem(dt[name]) fields.append(field) - type_info['fields'] = fields + type_info["fields"] = fields elif dt.shape: # array type if dt.base == dt: raise TypeError("Expected base type to be different than parent") # array type - type_info['dims'] = dt.shape - type_info['class'] = 'H5T_ARRAY' - type_info['base'] = getTypeItem(dt.base) - elif dt.kind == 'O': + 
type_info["dims"] = dt.shape + type_info["class"] = "H5T_ARRAY" + type_info["base"] = getTypeItem(dt.base) + elif dt.kind == "O": # vlen string or data # # check for h5py variable length extension @@ -219,140 +213,144 @@ def getTypeItem(dt): if vlen_check is not None and type(vlen_check) != np.dtype: vlen_check = np.dtype(vlen_check) ref_check = check_dtype(ref=dt.base) - if vlen_check == six.binary_type: - type_info['class'] = 'H5T_STRING' - type_info['length'] = 'H5T_VARIABLE' - type_info['charSet'] = 'H5T_CSET_ASCII' - type_info['strPad'] = 'H5T_STR_NULLTERM' - elif vlen_check == six.text_type: - type_info['class'] = 'H5T_STRING' - type_info['length'] = 'H5T_VARIABLE' - type_info['charSet'] = 'H5T_CSET_UTF8' - type_info['strPad'] = 'H5T_STR_NULLTERM' - elif type(vlen_check) == np.dtype: + if vlen_check == bytes: + type_info["class"] = "H5T_STRING" + type_info["length"] = "H5T_VARIABLE" + type_info["charSet"] = "H5T_CSET_ASCII" + type_info["strPad"] = "H5T_STR_NULLTERM" + elif vlen_check == str: + type_info["class"] = "H5T_STRING" + type_info["length"] = "H5T_VARIABLE" + type_info["charSet"] = "H5T_CSET_UTF8" + type_info["strPad"] = "H5T_STR_NULLTERM" + elif isinstance(vlen_check, np.dtype): # vlen data - type_info['class'] = 'H5T_VLEN' - type_info['size'] = 'H5T_VARIABLE' - type_info['base'] = getTypeItem(vlen_check) + type_info["class"] = "H5T_VLEN" + type_info["size"] = "H5T_VARIABLE" + type_info["base"] = getTypeItem(vlen_check) elif vlen_check is not None: - #unknown vlen type + # unknown vlen type raise TypeError("Unknown h5py vlen type: " + str(vlen_check)) elif ref_check is not None: # a reference type - type_info['class'] = 'H5T_REFERENCE' + type_info["class"] = "H5T_REFERENCE" if ref_check is Reference: - type_info['base'] = 'H5T_STD_REF_OBJ' # objref + type_info["base"] = "H5T_STD_REF_OBJ" # objref elif ref_check is RegionReference: - type_info['base'] = 'H5T_STD_REF_DSETREG' # region ref + type_info["base"] = "H5T_STD_REF_DSETREG" # region ref else: 
raise TypeError("unexpected reference type") else: raise TypeError("unknown object type") - elif dt.kind == 'V': + elif dt.kind == "V": # void type - type_info['class'] = 'H5T_OPAQUE' - type_info['size'] = dt.itemsize - type_info['tag'] = '' # todo - determine tag - elif dt.base.kind == 'S': + type_info["class"] = "H5T_OPAQUE" + type_info["size"] = dt.itemsize + type_info["tag"] = "" # todo - determine tag + elif dt.base.kind == "S": # Fixed length string type - type_info['class'] = 'H5T_STRING' - type_info['charSet'] = 'H5T_CSET_ASCII' - type_info['length'] = dt.itemsize - type_info['strPad'] = 'H5T_STR_NULLPAD' - elif dt.base.kind == 'U': + type_info["class"] = "H5T_STRING" + type_info["charSet"] = "H5T_CSET_ASCII" + type_info["length"] = dt.itemsize + type_info["strPad"] = "H5T_STR_NULLPAD" + elif dt.base.kind == "U": # Fixed length unicode type raise TypeError("Fixed length unicode type is not supported") - - elif dt.kind == 'b': + + elif dt.kind == "b": # boolean type - h5py stores as enum - if dt.base == dt: - raise TypeError("Expected base type to be different than parent") - baseType = getBaseType(dt) - type_info['class'] = 'H5T_ENUM' - type_info['mapping'] = {"false": 0, "true": 1} - type_info['base'] = getTypeItem(dt.base) - elif dt.kind == 'f': + # assume LE unless the numpy byteorder is '>' + byteorder = "LE" + if dt.base.byteorder == ">": + byteorder = "BE" + # this mapping is an h5py convention for boolean support + members = [{"name": "FALSE", "value": 0}, {"name": "TRUE", "value": 1}] + type_info["class"] = "H5T_ENUM" + type_info["members"] = members + base_info = {"class": "H5T_INTEGER"} + base_info["base"] = "H5T_STD_I8" + byteorder + type_info["base"] = base_info + + elif dt.kind == "f": # floating point type - type_info['class'] = 'H5T_FLOAT' - byteorder = 'LE' - if dt.byteorder == '>': - byteorder = 'BE' + type_info["class"] = "H5T_FLOAT" + byteorder = "LE" + if dt.byteorder == ">": + byteorder = "BE" if dt.name in predefined_float_types: - 
#maps to one of the HDF5 predefined types - type_info['base'] = predefined_float_types[dt.base.name] + byteorder + # maps to one of the HDF5 predefined types + type_info["base"] = predefined_float_types[dt.base.name] + byteorder else: raise TypeError("Unexpected floating point type: " + dt.name) - elif dt.kind == 'i' or dt.kind == 'u': + elif dt.kind == "i" or dt.kind == "u": # integer type - + # assume LE unless the numpy byteorder is '>' - byteorder = 'LE' - if dt.base.byteorder == '>': - byteorder = 'BE' - - # numpy integer type - but check to see if this is the hypy + byteorder = "LE" + if dt.base.byteorder == ">": + byteorder = "BE" + + # numpy integer type - but check to see if this is the h5py # enum extension mapping = check_dtype(enum=dt) if mapping: # yes, this is an enum! - type_info['class'] = 'H5T_ENUM' - type_info['mapping'] = mapping + type_info["class"] = "H5T_ENUM" + type_info["members"] = [{"name": n, "value": v} for n, v in mapping.items()] if dt.name not in predefined_int_types: raise TypeError("Unexpected integer type: " + dt.name) - #maps to one of the HDF5 predefined types - base_info = { "class": "H5T_INTEGER" } - base_info['base'] = predefined_int_types[dt.name] + byteorder + # maps to one of the HDF5 predefined types + base_info = {"class": "H5T_INTEGER"} + base_info["base"] = predefined_int_types[dt.name] + byteorder type_info["base"] = base_info else: - type_info['class'] = 'H5T_INTEGER' + type_info["class"] = "H5T_INTEGER" base_name = dt.name - + if dt.name not in predefined_int_types: raise TypeError("Unexpected integer type: " + dt.name) - - type_info['base'] = predefined_int_types[base_name] + byteorder - + + type_info["base"] = predefined_int_types[base_name] + byteorder + else: # unexpected kind raise TypeError("unexpected dtype kind: " + dt.kind) - - + return type_info def getNumpyTypename(hdf5TypeName, typeClass=None): predefined_int_types = { - 'H5T_STD_I8': 'i1', - 'H5T_STD_U8': 'u1', - 'H5T_STD_I16': 'i2', - 'H5T_STD_U16': 
'u2', - 'H5T_STD_I32': 'i4', - 'H5T_STD_U32': 'u4', - 'H5T_STD_I64': 'i8', - 'H5T_STD_U64': 'u8' - } - predefined_float_types = { - 'H5T_IEEE_F32': 'f4', - 'H5T_IEEE_F64': 'f8' + "H5T_STD_I8": "i1", + "H5T_STD_U8": "u1", + "H5T_STD_I16": "i2", + "H5T_STD_U16": "u2", + "H5T_STD_I32": "i4", + "H5T_STD_U32": "u4", + "H5T_STD_I64": "i8", + "H5T_STD_U64": "u8", } + predefined_float_types = {"H5T_IEEE_F32": "f4", "H5T_IEEE_F64": "f8"} if len(hdf5TypeName) < 3: raise Exception("Type Error: invalid typename: ") - endian = '<' # default endian + endian = "<" # default endian key = hdf5TypeName - if hdf5TypeName.endswith('LE'): + if hdf5TypeName.endswith("LE"): key = hdf5TypeName[:-2] - elif hdf5TypeName.endswith('BE'): + elif hdf5TypeName.endswith("BE"): key = hdf5TypeName[:-2] - endian = '>' + endian = ">" - if key in predefined_int_types and (typeClass == None or - typeClass == 'H5T_INTEGER'): + if key in predefined_int_types and ( + typeClass == None or typeClass == "H5T_INTEGER" + ): return endian + predefined_int_types[key] - if key in predefined_float_types and (typeClass == None or - typeClass == 'H5T_FLOAT'): + if key in predefined_float_types and ( + typeClass == None or typeClass == "H5T_FLOAT" + ): return endian + predefined_float_types[key] raise TypeError("Type Error: invalid type") @@ -360,7 +358,7 @@ def getNumpyTypename(hdf5TypeName, typeClass=None): def createBaseDataType(typeItem): dtRet = None - if type(typeItem) in (str, unicode): + if isinstance(typeItem, str): # should be one of the predefined types dtName = getNumpyTypename(typeItem) dtRet = np.dtype(dtName) @@ -369,116 +367,152 @@ def createBaseDataType(typeItem): if type(typeItem) != dict: raise TypeError("Type Error: invalid type") - if 'class' not in typeItem: + if "class" not in typeItem: raise KeyError("'class' not provided") - typeClass = typeItem['class'] - - if typeClass == 'H5T_INTEGER': - if 'base' not in typeItem: + typeClass = typeItem["class"] + + if typeClass == "H5T_INTEGER": + if 
"base" not in typeItem: raise KeyError("'base' not provided") - if 'dims' in typeItem: + if "dims" in typeItem: raise TypeError("'dims' not supported for integer types") - baseType = getNumpyTypename(typeItem['base'], typeClass='H5T_INTEGER') + baseType = getNumpyTypename(typeItem["base"], typeClass="H5T_INTEGER") dtRet = np.dtype(baseType) - elif typeClass == 'H5T_FLOAT': - if 'base' not in typeItem: + elif typeClass == "H5T_FLOAT": + if "base" not in typeItem: raise KeyError("'base' not provided") - if 'dims' in typeItem: + if "dims" in typeItem: raise TypeError("'dims' not supported for floating point types") - baseType = getNumpyTypename(typeItem['base'], typeClass='H5T_FLOAT') + baseType = getNumpyTypename(typeItem["base"], typeClass="H5T_FLOAT") dtRet = np.dtype(baseType) - elif typeClass == 'H5T_STRING': - if 'length' not in typeItem: + elif typeClass == "H5T_STRING": + if "length" not in typeItem: raise KeyError("'length' not provided") - if 'charSet' not in typeItem: + if "charSet" not in typeItem: raise KeyError("'charSet' not provided") - if typeItem['length'] == 'H5T_VARIABLE': - if 'dims' in typeItem: + if typeItem["length"] == "H5T_VARIABLE": + if "dims" in typeItem: raise TypeError("'dims' not supported for variable types") - if typeItem['charSet'] == 'H5T_CSET_ASCII': + if typeItem["charSet"] == "H5T_CSET_ASCII": dtRet = special_dtype(vlen=bytes) - elif typeItem['charSet'] == 'H5T_CSET_UTF8': - dtRet = special_dtype(vlen=unicode) + elif typeItem["charSet"] == "H5T_CSET_UTF8": + dtRet = special_dtype(vlen=str) else: raise TypeError("unexpected 'charSet' value") else: - nStrSize = typeItem['length'] + nStrSize = typeItem["length"] if type(nStrSize) != int: raise TypeError("expecting integer value for 'length'") type_code = None - if typeItem['charSet'] == 'H5T_CSET_ASCII': - type_code = 'S' - elif typeItem['charSet'] == 'H5T_CSET_UTF8': + if typeItem["charSet"] == "H5T_CSET_ASCII": + type_code = "S" + elif typeItem["charSet"] == "H5T_CSET_UTF8": raise 
TypeError("fixed-width unicode strings are not supported") else: raise TypeError("unexpected 'charSet' value") dtRet = np.dtype(type_code + str(nStrSize)) # fixed size string - elif typeClass == 'H5T_VLEN': - if 'dims' in typeItem: + elif typeClass == "H5T_VLEN": + if "dims" in typeItem: raise TypeError("'dims' not supported for vlen types") - if 'base' not in typeItem: + if "base" not in typeItem: raise KeyError("'base' not provided") - baseType = createBaseDataType(typeItem['base']) + baseType = createBaseDataType(typeItem["base"]) dtRet = special_dtype(vlen=np.dtype(baseType)) - elif typeClass == 'H5T_OPAQUE': - if 'dims' in typeItem: + elif typeClass == "H5T_OPAQUE": + if "dims" in typeItem: raise TypeError("'dims' not supported for opaque types") - if 'size' not in typeItem: + if "size" not in typeItem: raise KeyError("'size' not provided") - nSize = int(typeItem['size']) + nSize = int(typeItem["size"]) if nSize <= 0: raise TypeError("'size' must be non-negative") - dtRet = np.dtype('V' + str(nSize)) - elif typeClass == 'H5T_ARRAY': - if not 'dims' in typeItem: + dtRet = np.dtype("V" + str(nSize)) + elif typeClass == "H5T_ARRAY": + if not "dims" in typeItem: raise KeyError("'dims' must be provided for array types") - if 'base' not in typeItem: + if "base" not in typeItem: raise KeyError("'base' not provided") - arrayBaseType = typeItem['base'] + arrayBaseType = typeItem["base"] if type(arrayBaseType) is dict: if "class" not in arrayBaseType: raise KeyError("'class' not provided for array base type") - if arrayBaseType["class"] not in ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_STRING'): - raise TypeError("Array Type base type must be integer, float, or string") + if arrayBaseType["class"] not in ( + "H5T_INTEGER", + "H5T_FLOAT", + "H5T_STRING", + "H5T_COMPOUND", + ): + raise TypeError( + f"{arrayBaseType['class']}: H5T_ARRAY base type not supported." 
+ ) dt_base = createDataType(arrayBaseType) - - if type(typeItem['dims']) == int: - dims = (typeItem['dims']) # make into a tuple - elif type(typeItem['dims']) not in (list, tuple): + + if type(typeItem["dims"]) == int: + dims = typeItem["dims"] # make into a tuple + elif type(typeItem["dims"]) not in (list, tuple): raise TypeError("expected list or integer for dims") else: - dims = typeItem['dims'] + dims = typeItem["dims"] # create an array type of the base type - + dtRet = np.dtype((dt_base, dims)) - - - elif typeClass == 'H5T_REFERENCE': - if 'dims' in typeItem: + + elif typeClass == "H5T_REFERENCE": + if "dims" in typeItem: raise TypeError("'dims' not supported for reference types") - if 'base' not in typeItem: + if "base" not in typeItem: raise KeyError("'base' not provided") - if typeItem['base'] == 'H5T_STD_REF_OBJ': - dtRet = special_dtype(ref=Reference) - elif typeItem['base'] == 'H5T_STD_REF_DSETREG': - dtRet = special_dtype(ref=RegionReference) + if typeItem["base"] == "H5T_STD_REF_OBJ": + dtRet = special_dtype(ref=Reference) + elif typeItem["base"] == "H5T_STD_REF_DSETREG": + dtRet = special_dtype(ref=RegionReference) else: raise TypeError("Invalid base type for reference type") + elif typeClass == "H5T_ENUM": + if "base" not in typeItem: + raise KeyError("Expected 'base' to be provided for enum type") + base_json = typeItem["base"] + if "class" not in base_json: + raise KeyError("Expected class field in base type") + if base_json["class"] != "H5T_INTEGER": + raise TypeError("Only integer base types can be used with enum type") + if "members" not in typeItem: + raise KeyError("'members' not provided for enum type") + members = typeItem["members"] + if len(members) == 0: + raise KeyError("empty enum members") + + dt = createBaseDataType(base_json) + values_dict = dict((m["name"], m["value"]) for m in members) + if ( + dt.kind == "i" + and dt.name == "int8" + and len(members) == 2 + and "TRUE" in values_dict + and "FALSE" in values_dict + ): + # convert 
to numpy boolean type + dtRet = np.dtype("bool") + else: + # not a boolean enum, use h5py special dtype + dtRet = special_dtype(enum=(dt, values_dict)) else: raise TypeError("Invalid type class") - return dtRet + """ -Create a numpy datatype given a json type +Create a numpy datatype given a json type """ + + def createDataType(typeItem): dtRet = None - if type(typeItem) in [six.string_types, six.text_type, six.binary_type]: + if isinstance(typeItem, (str, bytes)): # should be one of the predefined types dtName = getNumpyTypename(typeItem) dtRet = np.dtype(dtName) @@ -487,15 +521,14 @@ def createDataType(typeItem): if type(typeItem) != dict: raise TypeError("invalid type") - - if 'class' not in typeItem: + if "class" not in typeItem: raise KeyError("'class' not provided") - typeClass = typeItem['class'] + typeClass = typeItem["class"] - if typeClass == 'H5T_COMPOUND': - if 'fields' not in typeItem: + if typeClass == "H5T_COMPOUND": + if "fields" not in typeItem: raise KeyError("'fields' not provided for compound type") - fields = typeItem['fields'] + fields = typeItem["fields"] if type(fields) is not list: raise TypeError("Type Error: expected list type for 'fields'") if not fields: @@ -505,26 +538,25 @@ def createDataType(typeItem): if type(field) != dict: raise TypeError("Expected dictionary type for field") - if 'name' not in field: + if "name" not in field: raise KeyError("'name' missing from field") - if 'type' not in field: + if "type" not in field: raise KeyError("'type' missing from field") - field_name = field['name'] - if type(field_name) == unicode: + field_name = field["name"] + if isinstance(field_name, str): # verify the field name is ascii try: - ascii_name = field_name.encode('ascii') + field_name.encode("ascii") except UnicodeDecodeError: raise TypeError("non-ascii field name not allowed") - if not six.PY3: - field['name'] = ascii_name - - dt = createDataType(field['type']) # recursive call + + dt = createDataType(field["type"]) # recursive call if 
dt is None: raise Exception("unexpected error") - subtypes.append((field['name'], dt)) # append tuple - + subtypes.append((field_name, dt)) # append tuple + dtRet = np.dtype(subtypes) + else: dtRet = createBaseDataType(typeItem) # create non-compound dt return dtRet diff --git a/h5json/jsontoh5/__init__.py b/h5json/jsontoh5/__init__.py new file mode 100644 index 0000000..af6ddb0 --- /dev/null +++ b/h5json/jsontoh5/__init__.py @@ -0,0 +1,13 @@ +############################################################################## +# Copyright by The HDF Group. # +# All rights reserved. # +# # +# This file is part of H5Serv (HDF5 REST Server) Service, Libraries and # +# Utilities. The full HDF5 REST Server copyright notice, including # +# terms governing use, modification, and redistribution, is contained in # +# the file COPYING, which can be found at the root of the source code # +# distribution tree. If you do not have access to this file, you may # +# request a copy from help@hdfgroup.org. # +############################################################################## + +from __future__ import absolute_import diff --git a/jsontoh5/jsontoh5.py b/h5json/jsontoh5/jsontoh5.py similarity index 79% rename from jsontoh5/jsontoh5.py rename to h5json/jsontoh5/jsontoh5.py index 85589be..fcdb5c1 100755 --- a/jsontoh5/jsontoh5.py +++ b/h5json/jsontoh5/jsontoh5.py @@ -9,12 +9,6 @@ # distribution tree. If you do not have access to this file, you may # # request a copy from help@hdfgroup.org. 
# ############################################################################## -import six - -if six.PY3: - unicode = str - -import sys import json import argparse import h5py @@ -30,6 +24,7 @@ h5writer.writeFile() """ + class Writeh5: def __init__(self, db, json, options=None): self.options = options @@ -43,13 +38,13 @@ def __init__(self, db, json, options=None): def createLink(self, link_obj, parent_uuid): title = link_obj["title"] link_class = link_obj["class"] - if link_class == 'H5L_TYPE_HARD': + if link_class == "H5L_TYPE_HARD": child_uuid = link_obj["id"] self.db.linkObject(parent_uuid, child_uuid, title) - elif link_class == 'H5L_TYPE_SOFT': + elif link_class == "H5L_TYPE_SOFT": h5path = link_obj["h5path"] self.db.createSoftLink(parent_uuid, h5path, title) - elif link_class == 'H5L_TYPE_EXTERNAL': + elif link_class == "H5L_TYPE_EXTERNAL": h5path = link_obj["h5path"] link_file = link_obj["file"] self.db.createExternalLink(parent_uuid, link_file, h5path, title) @@ -60,19 +55,19 @@ def createLink(self, link_obj, parent_uuid): # Create HDF5 dataset object and write data values # def createDataset(self, uuid, body): - datatype = body['type'] - if type(datatype) in (str, unicode) and datatype.startswith("datatypes/"): - #committed datatype, just pass in the UUID part - datatype = datatype[len("datatypes/"):] + datatype = body["type"] + if isinstance(datatype, str) and datatype.startswith("datatypes/"): + # committed datatype, just pass in the UUID part + datatype = datatype[len("datatypes/") :] dims = () # if no space in body, default to scalar - max_shape=None - fill_value=None - creation_props=None - if 'creationProperties' in body: - creation_props = body['creationProperties'] + max_shape = None + fill_value = None + creation_props = None + if "creationProperties" in body: + creation_props = body["creationProperties"] if "shape" in body: shape = body["shape"] - if shape["class"] == 'H5S_SIMPLE': + if shape["class"] == "H5S_SIMPLE": dims = shape["dims"] if 
type(dims) == int: # convert int to array @@ -81,18 +76,23 @@ def createDataset(self, uuid, body): if "maxdims" in shape: max_shape = shape["maxdims"] if type(max_shape) == int: - #convert to array + # convert to array dim1 = max_shape max_shape = [dim1] # convert H5S_UNLIMITED's to None's for i in range(len(max_shape)): - if max_shape[i] == 'H5S_UNLIMITED': + if max_shape[i] == "H5S_UNLIMITED": max_shape[i] = None - elif shape["class"] == 'H5S_NULL': + elif shape["class"] == "H5S_NULL": dims = None - self.db.createDataset(datatype, dims, max_shape=max_shape, creation_props=creation_props, - obj_uuid=uuid) + self.db.createDataset( + datatype, + dims, + max_shape=max_shape, + creation_props=creation_props, + obj_uuid=uuid, + ) if "value" in body: data = body["value"] @@ -103,9 +103,9 @@ def createDataset(self, uuid, body): def createAttribute(self, attr_json, col_name, uuid): attr_name = attr_json["name"] datatype = attr_json["type"] - if type(datatype) in (str, unicode) and datatype.startswith("datatypes/"): - #committed datatype, just pass in the UUID part - datatype = datatype[len("datatypes/"):] + if isinstance(datatype, str) and datatype.startswith("datatypes/"): + # committed datatype, just pass in the UUID part + datatype = datatype[len("datatypes/") :] attr_value = None if "value" in attr_json: @@ -113,22 +113,21 @@ def createAttribute(self, attr_json, col_name, uuid): dims = None if "shape" in attr_json: shape = attr_json["shape"] - if shape["class"] == 'H5S_SIMPLE': + if shape["class"] == "H5S_SIMPLE": dims = shape["dims"] if type(dims) == int: # convert int to array dim1 = shape dims = [dim1] - elif shape["class"] == 'H5S_SCALAR': + elif shape["class"] == "H5S_SCALAR": dims = () # empty tuple for scalar self.db.createAttribute(col_name, uuid, attr_name, dims, datatype, attr_value) - # # create committed datatype HDF5 object # def createDatatype(self, uuid, body): - datatype = body['type'] + datatype = body["type"] self.db.createCommittedType(datatype, 
obj_uuid=uuid) # @@ -161,7 +160,6 @@ def createObjects(self): json_obj = datasets[uuid] self.createDataset(uuid, json_obj) - # # Create all the attributes for HDF5 objects defined in the JSON file # Note: this needs to be done after createObjects since an attribute @@ -198,7 +196,9 @@ def createAttributes(self): if attribute["name"] == "DIMENSION_LIST": # defer dimension list creation until after we've created all other # attributes (otherwsie attach_scale may fail) - dimension_list_attrs.append({"attribute": attribute, "uuid": uuid}) + dimension_list_attrs.append( + {"attribute": attribute, "uuid": uuid} + ) else: self.createAttribute(attribute, "datasets", uuid) @@ -223,20 +223,21 @@ def createLinks(self): for link in links: self.createLink(link, uuid) - def writeFile(self): self.root_uuid = self.json["root"] - self.createObjects() # create datasets, groups, committed datatypes - self.createAttributes() # create attributes for objects - self.createLinks() # link it all together + self.createObjects() # create datasets, groups, committed datatypes + self.createAttributes() # create attributes for objects + self.createLinks() # link it all together + def main(): - - parser = argparse.ArgumentParser(usage='%(prog)s [-h] ') - parser.add_argument('in_filename', nargs='+', help='JSon file to be converted to h5') - parser.add_argument('out_filename', nargs='+', help='name of HDF5 output file') + parser = argparse.ArgumentParser(usage="%(prog)s [-h] ") + parser.add_argument( + "in_filename", nargs="+", help="JSon file to be converted to h5" + ) + parser.add_argument("out_filename", nargs="+", help="name of HDF5 output file") args = parser.parse_args() # create logger @@ -244,13 +245,13 @@ def main(): # log.setLevel(logging.WARN) log.setLevel(logging.INFO) # add log handler - handler = logging.FileHandler('./jsontoh5.log') + handler = logging.FileHandler("./jsontoh5.log") # add handler to logger log.addHandler(handler) text = open(args.in_filename[0]).read() - + # parse 
the json file h5json = json.loads(text) @@ -259,17 +260,19 @@ def main(): root_uuid = h5json["root"] filename = args.out_filename[0] - + # create the file, will raise IOError if there's a problem - Hdf5db.createHDF5File(filename) + Hdf5db.createHDF5File(filename) - with Hdf5db(filename, root_uuid=root_uuid, update_timestamps=False, app_logger=log) as db: + with Hdf5db( + filename, root_uuid=root_uuid, update_timestamps=False, app_logger=log + ) as db: h5writer = Writeh5(db, h5json) h5writer.writeFile() # open with h5py and remove the _db_ group # Note: this will delete any anonymous (un-linked) objects - f = h5py.File(filename, 'a') + f = h5py.File(filename, "a") if "__db__" in f: del f["__db__"] f.close() @@ -277,4 +280,5 @@ def main(): print("done!") -main() +if __name__ == "__main__": + main() diff --git a/data/json/sample.json b/h5json/schema/__init__.py similarity index 100% rename from data/json/sample.json rename to h5json/schema/__init__.py diff --git a/h5json/schema/attribute.schema.json b/h5json/schema/attribute.schema.json new file mode 100644 index 0000000..4dbb724 --- /dev/null +++ b/h5json/schema/attribute.schema.json @@ -0,0 +1,66 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/attribute.schema.json", + "title": "HDF5/JSON Attribute Schema", + "description": "JSON Schema describing HDF5 attribute.", + "type": "object", + "$defs": { + "attributes": { + "description": "HDF5 attributes attached to an HDF5 group or dataset.", + "type": "array", + "items": { + "$ref": "#/$defs/attribute" + } + }, + "attribute": { + "description": "HDF5 attribute.", + "type": "object", + "properties": { + "name": { + "type": "string", + "minLength": 1 + }, + "type": { + "$ref": "datatypes.schema.json#/$defs/datatype" + }, + "shape": { + "description": "HDF5 attribute's shape.", + "type": "object", + "$ref": "dataspaces.schema.json#/$defs/dataspace" + }, + "creationProperties": { + "type": "object", + 
"properties": { + "nameCharEncoding": { + "type": "string", + "enum": [ + "H5T_CSET_ASCII", + "H5T_CSET_UTF8" + ] + } + }, + "additionalProperties": false, + "required": [ + "nameCharEncoding" + ] + }, + "value": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "array" + }, + { + "type": "null" + } + ] + } + } + } + } +} diff --git a/h5json/schema/dataset.schema.json b/h5json/schema/dataset.schema.json new file mode 100644 index 0000000..4e412e4 --- /dev/null +++ b/h5json/schema/dataset.schema.json @@ -0,0 +1,205 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/dataset.schema.json", + "title": "HDF5/JSON Dataset Schema", + "description": "JSON Schema describing HDF5 dataset.", + "type": "object", + "$defs": { + "dataset": { + "type": "object", + "patternProperties": { + "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$": { + "type": "object", + "properties": { + "alias": { + "description": "HDF5 dataset path names", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "pattern": "^(/[^/]+)+$" + } + }, + "type": { + "description": "HDF5 dataset's datatype.", + "$ref": "datatypes.schema.json#/$defs/datatype" + }, + "shape": { + "description": "HDF5 dataset's shape.", + "type": "object", + "minProperties": 1, + "$ref": "dataspaces.schema.json#/$defs/dataspace" + }, + "creationProperties": { + "type": "object", + "properties": { + "allocTime": { + "type": "string", + "enum": [ + "H5D_ALLOC_TIME_DEFAULT", + "H5D_ALLOC_TIME_EARLY", + "H5D_ALLOC_TIME_INCR", + "H5D_ALLOC_TIME_LATE" + ] + }, + "attributeCreationOrder": { + "type": "string", + "enum": [ + "H5P_CRT_ORDER_TRACKED", + "H5P_CRT_ORDER_INDEXED" + ] + }, + "attributePhaseChange": { + "type": "object", + "properties": { + "maxCompact": { + "type": "integer", + "minimum": 0 + }, + "minDense": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "maxCompact", + "minDense" 
+ ] + }, + "fillTime": { + "type": "string", + "enum": [ + "H5D_FILL_TIME_IFSET", + "H5D_FILL_TIME_ALLOC", + "H5D_FILL_TIME_NEVER" + ] + }, + "fillValue": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "null" + }, + { + "type": "array" + } + ] + }, + "trackTimes": { + "type": "boolean" + }, + "filters": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "filters.schema.json#/$defs/filter" + } + }, + "layout": { + "oneOf": [ + { + "type": "object", + "properties": { + "class": { + "const": "H5D_COMPACT" + } + }, + "required": [ + "class" + ] + }, + { + "type": "object", + "properties": { + "class": { + "const": "H5D_CHUNKED" + }, + "dims": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "exclusiveMinimum": 0 + } + } + }, + "required": [ + "class", + "dims" + ] + }, + { + "type": "object", + "properties": { + "class": { + "const": "H5D_CONTIGUOUS" + }, + "externalStorage": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "offset": { + "type": "integer", + "minimum": 0 + }, + "size": { + "type": "integer", + "exclusiveMinimum": 0 + } + } + } + } + }, + "required": [ + "class" + ] + } + ] + } + }, + "required": [ + "layout" + ] + }, + "value": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "number" + }, + { + "type": "array" + }, + { + "type": "null" + } + ] + }, + "attributes": { + "description": "All attributes of one HDF5 dataset.", + "type": "array", + "$ref": "attribute.schema.json#/$defs/attributes" + } + }, + "required": [ + "type", + "shape" + ] + } + }, + "additionalProperties": false + } + } +} diff --git a/h5json/schema/dataspaces.schema.json b/h5json/schema/dataspaces.schema.json new file mode 100644 index 0000000..0032b8b --- /dev/null +++ b/h5json/schema/dataspaces.schema.json @@ -0,0 +1,86 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": 
"https://hdfgroup.org/schemas/hdf5/json/dataspaces.schema.json", + "title": "HDF5/JSON Dataspace Schema", + "description": "JSON Schema for HDF5 dataspaces.", + "type": "object", + "$defs": { + "dataspace": { + "description": "HDF5 dataspaces.", + "oneOf": [ + { + "$ref": "#/$defs/null_dataspace" + }, + { + "$ref": "#/$defs/scalar_dataspace" + }, + { + "$ref": "#/$defs/simple_dataspace" + } + ] + }, + "null_dataspace": { + "description": "HDF5 null dataspace.", + "type": "object", + "properties": { + "class": { + "const": "H5S_NULL" + } + }, + "additionalProperties": false, + "required": [ + "class" + ] + }, + "scalar_dataspace": { + "description": "HDF5 scalar dataspace.", + "type": "object", + "properties": { + "class": { + "const": "H5S_SCALAR" + } + }, + "additionalProperties": false, + "required": [ + "class" + ] + }, + "simple_dataspace": { + "description": "HDF5 simple dataspace.", + "type": "object", + "properties": { + "class": { + "const": "H5S_SIMPLE" + }, + "dims": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "minimum": 0 + } + }, + "maxdims": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "type": "integer", + "exclusiveMinimum": 0 + }, + { + "const": "H5S_UNLIMITED" + } + ] + } + } + }, + "additionalProperties": false, + "required": [ + "class", + "dims" + ] + } + } +} diff --git a/h5json/schema/datatypes.schema.json b/h5json/schema/datatypes.schema.json new file mode 100644 index 0000000..94f995b --- /dev/null +++ b/h5json/schema/datatypes.schema.json @@ -0,0 +1,611 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/datatypes.schema.json", + "title": "HDF5 Datatype JSON Schema", + "description": "JSON Schema describing HDF5/JSON datatypes.", + "type": "object", + "$defs": { + "committed": { + "description": "HDF5 committed datatype.", + "type": "object", + "patternProperties": { + 
"^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$": { + "type": "object", + "properties": { + "alias": { + "description": "HDF5 committed datatype path names", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "pattern": "^(/[^/]+)+$" + } + }, + "type": { + "$ref": "#/$defs/datatype" + } + }, + "required": [ + "type" + ] + } + }, + "additionalProperties": false + }, + "datatype": { + "oneOf": [ + { + "$ref": "#/$defs/array_datatype" + }, + { + "$ref": "#/$defs/bitfield_datatype" + }, + { + "$ref": "#/$defs/compound_datatype" + }, + { + "$ref": "#/$defs/enumeration_datatype" + }, + { + "$ref": "#/$defs/floating_point_datatype" + }, + { + "$ref": "#/$defs/integer_datatype" + }, + { + "$ref": "#/$defs/opaque_datatype" + }, + { + "$ref": "#/$defs/reference_datatype" + }, + { + "$ref": "#/$defs/string_datatype" + }, + { + "$ref": "#/$defs/vlen_datatype" + }, + { + "type": "string", + "pattern": "^datatypes/[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$" + } + ] + }, + "string_datatype": { + "description": "HDF5 string datatype.", + "type": "object", + "properties": { + "class": { + "const": "H5T_STRING" + }, + "charSet": { + "type": "string", + "enum": [ + "H5T_CSET_ASCII", + "H5T_CSET_UTF8" + ] + }, + "length": { + "oneOf": [ + { + "type": "integer", + "exclusiveMinimum": 0 + }, + { + "const": "H5T_VARIABLE" + } + ] + }, + "strPad": { + "type": "string", + "enum": [ + "H5T_STR_NULLTERM", + "H5T_STR_NULLPAD", + "H5T_STR_SPACEPAD" + ] + } + }, + "required": [ + "class", + "length", + "strPad" + ] + }, + "integer_datatype": { + "description": "HDF5 integer datatypes", + "type": "object", + "oneOf": [ + { + "description": "HDF5 predefined integer datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_INTEGER" + }, + "base": { + "type": "string", + "enum": [ + "H5T_STD_I8BE", + "H5T_STD_I8LE", + "H5T_STD_I16BE", + "H5T_STD_I16LE", + "H5T_STD_I32BE", + "H5T_STD_I32LE", + "H5T_STD_I64BE", + 
"H5T_STD_I64LE", + "H5T_STD_U8BE", + "H5T_STD_U8LE", + "H5T_STD_U16BE", + "H5T_STD_U16LE", + "H5T_STD_U32BE", + "H5T_STD_U32LE", + "H5T_STD_U64BE", + "H5T_STD_U64LE" + ] + } + }, + "required": [ + "class", + "base" + ] + }, + { + "description": "HDF5 user-defined integer datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_INTEGER" + }, + "bitOffset": { + "type": "integer", + "minimum": 0 + }, + "byteOrder": { + "type": "string", + "enum": [ + "H5T_ORDER_LE", + "H5T_ORDER_BE" + ] + }, + "lsbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "msbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "precision": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "size": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "signType": { + "type": "string", + "enum": [ + "H5T_SGN_NONE", + "H5T_SGN_2" + ] + } + }, + "required": [ + "class", + "size", + "precision", + "bitOffset" + ] + } + ] + }, + "floating_point_datatype": { + "description": "HDF5 floating-point datatypes", + "oneOf": [ + { + "description": "HDF5 predefined floating-point datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_FLOAT" + }, + "base": { + "type": "string", + "enum": [ + "H5T_IEEE_F32BE", + "H5T_IEEE_F32LE", + "H5T_IEEE_F64BE", + "H5T_IEEE_F64LE" + ] + } + }, + "required": [ + "class", + "base" + ] + }, + { + "description": "HDF5 user-defined floating-point datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_FLOAT" + }, + "bitOffset": { + "type": "integer", + "minimum": 0 + }, + "byteOrder": { + "type": "string", + "enum": [ + "H5T_ORDER_LE", + "H5T_ORDER_BE" + ] + }, + "expBias": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "expBits": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "expBitPos": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "intlbPad": { + "type": "string", 
+ "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "lsbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "mantBits": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "mantBitPos": { + "type": "integer", + "minimum": 0 + }, + "mantNorm": { + "type": "string", + "enum": [ + "H5T_NORM_IMPLIED", + "H5T_NORM_MSBSET", + "H5T_NORM_NONE" + ] + }, + "msbitPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "msbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "precision": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "signBitPos": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "size": { + "type": "integer", + "exclusiveMinimum": 0 + } + }, + "additionalProperties": false, + "required": [ + "class", + "size", + "precision", + "bitOffset" + ] + } + ] + }, + "array_datatype": { + "type": "object", + "properties": { + "class": { + "const": "H5T_ARRAY" + }, + "base": { + "allOf": [ + { + "$comment": "Placeholder for what the grammar calls datatype_object_ref." 
+ }, + { + "$ref": "#/$defs/datatype" + } + ] + }, + "dims": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "exclusiveMinimum": 0 + } + } + }, + "additionalProperties": false, + "required": [ + "class", + "base", + "dims" + ] + }, + "bitfield_datatype": { + "description": "HDF5 bitfield datatypes", + "oneOf": [ + { + "description": "HDF5 predefined bitfield datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_BITFIELD" + }, + "base": { + "type": "string", + "enum": [ + "H5T_STD_I8BE", + "H5T_STD_I8LE", + "H5T_STD_I16BE", + "H5T_STD_I16LE", + "H5T_STD_I32BE", + "H5T_STD_I32LE", + "H5T_STD_I64BE", + "H5T_STD_I64LE", + "H5T_STD_U8BE", + "H5T_STD_U8LE", + "H5T_STD_U16BE", + "H5T_STD_U16LE", + "H5T_STD_U32BE", + "H5T_STD_U32LE", + "H5T_STD_U64BE", + "H5T_STD_U64LE" + ] + } + }, + "additionalProperties": false, + "required": [ + "class", + "base" + ] + }, + { + "description": "HDF5 user-defined bitfield datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_BITFIELD" + }, + "bitOffset": { + "type": "integer", + "minimum": 0 + }, + "byteOrder": { + "type": "string", + "enum": [ + "H5T_ORDER_LE", + "H5T_ORDER_BE" + ] + }, + "lsbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "msbPad": { + "type": "string", + "enum": [ + "H5T_PAD_ZERO", + "H5T_PAD_ONE", + "H5T_PAD_BACKGROUND" + ] + }, + "precision": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "size": { + "type": "integer", + "exclusiveMinimum": 0 + } + }, + "additionalProperties": false, + "required": [ + "class", + "size", + "precision", + "bitOffset", + "byteOrder" + ] + } + ] + }, + "compound_datatype": { + "description": "HDF5 compound datatype.", + "type": "object", + "properties": { + "class": { + "const": "H5T_COMPOUND" + }, + "fields": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "minLength": 
1 + }, + "type": { + "allOf": [ + { + "$comment": "Placeholder for what the grammar calls datatype_object_ref." + }, + { + "$ref": "#/$defs/datatype" + } + ] + } + }, + "additionalProperties": false, + "required": [] + } + } + }, + "additionalProperties": false, + "required": [ + "class", + "fields" + ] + }, + "enumeration_datatype": { + "description": "HDF5 enumerated datatype.", + "type": "object", + "properties": { + "class": { + "const": "H5T_ENUM" + }, + "base": { + "allOf": [ + { + "$ref": "#/$defs/integer_datatype" + }, + { + "$comment": "The grammar calls this datatype_reference." + } + ] + }, + "members": { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "name": { + "type": "string", + "minLength": 1 + }, + "value": { + "type": "integer" + } + }, + "additionalProperties": false, + "required": [ + "name", + "value" + ] + } + } + }, + "additionalProperties": false, + "required": [ + "class", + "base", + "members" + ] + }, + "opaque_datatype": { + "description": "HDF5 opaque datatype.", + "type": "object", + "properties": { + "class": { + "const": "H5T_OPAQUE" + }, + "size": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "tag": { + "type": "string", + "minLength": 1 + } + }, + "additionalProperties": false, + "required": [ + "class", + "size" + ] + }, + "reference_datatype": { + "description": "HDF5 object and region reference datatypes.", + "type": "object", + "properties": { + "class": { + "const": "H5T_REFERENCE" + }, + "base": { + "type": "string", + "enum": [ + "H5T_STD_REF_OBJ", + "H5T_STD_REF_DSETREG" + ] + } + }, + "required": [ + "class", + "base" + ] + }, + "vlen_datatype": { + "type": "object", + "properties": { + "class": { + "const": "H5T_VLEN" + }, + "base": { + "allOf": [ + { + "$comment": "Placeholder for what the grammar calls datatype_object_ref." 
+ }, + { + "$ref": "#/$defs/datatype" + } + ] + } + }, + "additionalProperties": false, + "required": [ + "class", + "base" + ] + } + } +} diff --git a/h5json/schema/filters.schema.json b/h5json/schema/filters.schema.json new file mode 100644 index 0000000..962662a --- /dev/null +++ b/h5json/schema/filters.schema.json @@ -0,0 +1,197 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/filters.schema.json", + "title": "HDF5/JSON Filters Schema", + "description": "JSON Schema describing supported HDF5 filters.", + "type": "object", + "$defs": { + "filter": { + "oneOf": [ + { + "$ref": "#/$defs/deflate_filter" + }, + { + "$ref": "#/$defs/fletcher32_filter" + }, + { + "$ref": "#/$defs/lzf_filter" + }, + { + "$ref": "#/$defs/nbit_filter" + }, + { + "$ref": "#/$defs/scaleoffset_filter" + }, + { + "$ref": "#/$defs/shuffle_filter" + }, + { + "$ref": "#/$defs/szip_filter" + }, + { + "$ref": "#/$defs/other_filter" + } + ] + }, + "other_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_USER" + }, + "id": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "parameters": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "exclusiveMinimum": 0 + } + } + }, + "required": [ + "class", + "id" + ] + }, + "deflate_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_DEFLATE" + }, + "id": { + "const": 1 + }, + "level": { + "type": "integer", + "minimum": 0, + "maximum": 9 + } + }, + "required": [ + "class" + ] + }, + "fletcher32_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_FLETCHER32" + }, + "id": { + "const": 3 + } + }, + "required": [ + "class" + ] + }, + "lzf_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_LZF" + }, + "id": { + "const": 32000 + } + }, + "required": [ + "class" + ] + }, + "nbit_filter": { + "type": "object", + "properties": { + "class": 
{ + "const": "H5Z_FILTER_NBIT" + }, + "id": { + "const": 5 + } + }, + "required": [ + "class" + ] + }, + "scaleoffset_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_SCALEOFFSET" + }, + "id": { + "const": 6 + }, + "scaleType": { + "type": "string", + "enum": [ + "H5Z_SO_FLOAT_DSCALE", + "H5Z_SO_FLOAT_ESCALE", + "H5Z_SO_INT" + ] + }, + "scaleOffset": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "class", + "scaleType", + "scaleOffset" + ] + }, + "shuffle_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_SHUFFLE" + }, + "id": { + "const": 2 + } + }, + "required": [ + "class" + ] + }, + "szip_filter": { + "type": "object", + "properties": { + "class": { + "const": "H5Z_FILTER_SZIP" + }, + "id": { + "const": 4 + }, + "bitsPerPixel": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "pixelsPerBlock": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "pixelsPerScanline": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "coding": { + "type": "string", + "enum": [ + "H5_SZIP_EC_OPTION_MASK", + "H5_SZIP_NN_OPTION_MASK" + ] + } + }, + "required": [ + "class" + ] + } + } +} diff --git a/h5json/schema/group.schema.json b/h5json/schema/group.schema.json new file mode 100644 index 0000000..8ff1402 --- /dev/null +++ b/h5json/schema/group.schema.json @@ -0,0 +1,255 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/group.schema.json", + "title": "HDF5/JSON Group Schema", + "description": "JSON Schema describing HDF5 group.", + "type": "object", + "$defs": { + "group": { + "type": "object", + "patternProperties": { + "^[a-f0-9]{8}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{4}-[a-f0-9]{12}$": { + "type": "object", + "properties": { + "alias": { + "description": "HDF5 group path names", + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "pattern": "^(/|(/[^/]+)+)$" + } + }, + "created": { + "description": 
"UTC date/time of group creation.", + "type": "string", + "format": "date-time" + }, + "lastModified": { + "description": "UTC date/time of last group content modification.", + "type": "string", + "format": "date-time" + }, + "attributes": { + "description": "All attributes of this HDF5 group.", + "type": "array", + "$ref": "attribute.schema.json#/$defs/attributes" + }, + "links": { + "description": "All links whose source is this HDF5 group.", + "type": "array", + "$ref": "#/$defs/links" + }, + "creationProperties": { + "type": "object", + "properties": { + "track_times": { + "type": "boolean" + }, + "localHeapSizeHint": { + "type": "integer", + "minimum": 0 + }, + "filters": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "filters.schema.json#/$defs/deflate_filter" + } + }, + "linkCreationOrder": { + "type": "string", + "enum": [ + "H5P_CRT_ORDER_TRACKED", + "H5P_CRT_ORDER_INDEXED" + ] + }, + "linkPhaseChange": { + "type": "object", + "properties": { + "maxCompact": { + "type": "integer", + "minimum": 0 + }, + "minDense": { + "type": "integer", + "minimum": 0 + } + }, + "additionalProperties": false + }, + "linksEstimate": { + "type": "object", + "properties": { + "numEntries": { + "type": "integer", + "minimum": 0 + }, + "nameLength": { + "type": "integer", + "minimum": 0 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + } + } + } + } + }, + "links": { + "type": "array", + "minItems": 1, + "items": { + "oneOf": [ + { + "$ref": "#/$defs/hard_link" + }, + { + "$ref": "#/$defs/soft_link" + }, + { + "$ref": "#/$defs/external_link" + }, + { + "$ref": "#/$defs/user_defined_link" + } + ] + } + }, + "hard_link": { + "type": "object", + "properties": { + "class": { + "const": "H5L_TYPE_HARD" + }, + "title": { + "$ref": "#/$defs/link_name" + }, + "collection": { + "type": "string", + "enum": [ + "datasets", + "datatypes", + "groups" + ] + }, + "id": { + "type": "string", + "format": "uuid" + }, + "creationProperties": { + 
"$ref": "#/$defs/link_creation_props" + } + }, + "required": [ + "class", + "title", + "collection", + "id" + ] + }, + "soft_link": { + "type": "object", + "properties": { + "class": { + "const": "H5L_TYPE_SOFT" + }, + "title": { + "$ref": "#/$defs/link_name" + }, + "h5path": { + "$ref": "#/$defs/h5path" + }, + "creationProperties": { + "$ref": "#/$defs/link_creation_props" + } + }, + "required": [ + "class", + "title", + "h5path" + ] + }, + "external_link": { + "type": "object", + "properties": { + "class": { + "const": "H5L_TYPE_EXTERNAL" + }, + "title": { + "$ref": "#/$defs/link_name" + }, + "file": { + "type": "string" + }, + "h5path": { + "$ref": "#/$defs/h5path" + }, + "creationProperties": { + "$ref": "#/$defs/link_creation_props" + } + }, + "required": [ + "class", + "title", + "file", + "h5path" + ] + }, + "user_defined_link": { + "type": "object", + "properties": { + "class": { + "const": "H5L_TYPE_USER_DEFINED" + }, + "title": { + "$ref": "#/$defs/link_name" + }, + "creationProperties": { + "$ref": "#/$defs/link_creation_props" + }, + "target": { + "type": "array", + "minItems": 1, + "items": { + "type": "integer", + "minimum": 0, + "maximum": 255 + } + } + }, + "required": [ + "class", + "title" + ] + }, + "link_name": { + "type": "string", + "pattern": "^[^/]+$" + }, + "link_creation_props": { + "type": "object", + "properties": { + "charSet": { + "type": "string", + "enum": [ + "H5T_CSET_ASCII", + "H5T_CSET_UTF8" + ] + }, + "creationOrder": { + "type": "integer", + "minimum": 0 + } + }, + "additionalProperties": false + }, + "h5path": { + "type": "string", + "pattern": "^(/?[^/]+)+$" + } + } +} diff --git a/h5json/schema/hdf5.schema.json b/h5json/schema/hdf5.schema.json new file mode 100644 index 0000000..df97f8c --- /dev/null +++ b/h5json/schema/hdf5.schema.json @@ -0,0 +1,246 @@ +{ + "$schema": "https://json-schema.org/draft/2020-12/schema#", + "$id": "https://hdfgroup.org/schemas/hdf5/json/hdf5.schema.json", + "title": "HDF5 File JSON Schema", + 
"description": "JSON Schema describing HDF5 file.", + "type": "object", + "required": [ + "apiVersion", + "root" + ], + "properties": { + "apiVersion": { + "description": "HDF5/JSON version identifier.", + "type": "string", + "enum": [ + "1.0.0", + "1.1.0", + "1.1.1" + ] + }, + "id": { + "description": "HDF5 file identifier.", + "type": "string" + }, + "root": { + "description": "Unique identifier of the HDF5 root group.", + "type": "string" + }, + "created": { + "description": "UTC date/time of file creation.", + "type": "string", + "format": "date-time" + }, + "lastModified": { + "description": "UTC date/time of last file content modification.", + "type": "string", + "format": "date-time" + }, + "userblock": { + "description": "HDF5 file user block.", + "type": "array", + "items": { + "type": "integer" + }, + "minItems": 1 + }, + "userblockSize": { + "description": "HDF5 file user block size.", + "type": "integer", + "minimum": 512, + "multipleOf": 2 + }, + "driverInfo": { + "description": "HDF5 file driver information.", + "oneOf": [ + { + "type": "object", + "properties": { + "memberSize": { + "type": "integer", + "exclusiveMinimum": 0 + } + } + }, + { + "type": "array", + "minItems": 1, + "items": { + "type": "object", + "properties": { + "dataMap": { + "type": "string", + "enum": [ + "H5FD_MEM_SUPER", + "H5FD_MEM_BTREE", + "H5FD_MEM_DRAW", + "H5FD_MEM_GHEAP", + "H5FD_MEM_LHEAP", + "H5FD_MEM_OHDR" + ] + }, + "fileName": { + "type": "string" + }, + "address": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "relaxFlag": { + "type": "boolean" + } + }, + "required": [ + "dataMap", + "fileName", + "address", + "relaxFlag" + ] + } + } + ] + }, + "creationProperties": { + "type": "object", + "properties": { + "chunkStorageConfig": { + "type": "object", + "properties": { + "chunkStorageBTreeHalfRank": { + "type": "integer", + "exclusiveMinimum": 0 + } + }, + "required": [ + "chunkStorageBTreeHalfRank" + ] + }, + "freeListConfig": { + "type": "object", + 
"properties": { + "freeListVersion": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "freeListVersion" + ] + }, + "sizeConfig": { + "type": "object", + "properties": { + "lengthSizeInBytes": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "offsetSizeInBytes": { + "type": "integer", + "exclusiveMinimum": 0 + } + }, + "required": [ + "lengthSizeInBytes", + "offsetSizeInBytes" + ] + }, + "sohmConfig": { + "type": "object", + "properties": { + "maxList": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "minBTree": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "version": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "indexes": { + "type": "array", + "minItems": 1, + "items": { + "description": "HDF5 shared object header message (SOHM) index.", + "type": "object", + "properties": { + "minMessageSize": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "messageTypes": { + "type": "array", + "minItems": 1, + "items": { + "type": "string", + "enum": [ + "H5O_SHMESG_ATTR_FLAG", + "H5O_SHMESG_DTYPE_FLAG", + "H5O_SHMESG_FILL_FLAG", + "H5O_SHMESG_PLINE_FLAG", + "H5O_SHMESG_SDSPACE_FLAG" + ] + } + } + }, + "required": [ + "minMessageSize", + "messageTypes" + ] + } + } + }, + "required": [ + "maxList", + "minBTree", + "version", + "indexes" + ] + }, + "superBlockVersion": { + "type": "integer", + "minimum": 0 + }, + "symbolTableConfig": { + "type": "object", + "properties": { + "nodeSize": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "treeRank": { + "type": "integer", + "exclusiveMinimum": 0 + }, + "version": { + "type": "integer", + "minimum": 0 + } + }, + "required": [ + "nodeSize", + "treeRank", + "version" + ] + } + } + }, + "groups": { + "description": "All groups in HDF5 file.", + "type": "object", + "$ref": "group.schema.json#/$defs/group" + }, + "datasets": { + "description": "All datasets in HDF5 file.", + "type": "object", + "$ref": "dataset.schema.json#/$defs/dataset" + }, + "datatypes": { + "description": "All 
committed datatypes in HDF5 file.", + "type": "object", + "$ref": "datatypes.schema.json#/$defs/committed" + } + } +} diff --git a/h5json/validator/__init__.py b/h5json/validator/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/h5json/validator/validator.py b/h5json/validator/validator.py new file mode 100644 index 0000000..b6bb044 --- /dev/null +++ b/h5json/validator/validator.py @@ -0,0 +1,98 @@ +############################################################################## +# Copyright by The HDF Group. # +# All rights reserved. # +# # +# This file is part of h5json. The full copyright notice, including # +# terms governing use, modification, and redistribution, is contained in # +# the file COPYING, which can be found at the root of the source code # +# distribution tree. If you do not have access to this file, you may # +# request a copy from help@hdfgroup.org. # +############################################################################## +import sys +import argparse +from pathlib import Path +import importlib.resources +import json +import jsonschema +from h5json import schema + + +def prepare_validator() -> jsonschema.Draft202012Validator: + """Return a configured jsonschema.Draft202012Validator instance.""" + with importlib.resources.open_text(schema, "hdf5.schema.json") as f: + h5schema = json.load(f) + + schema_store = dict() + schema_components = [ + "attribute.schema.json", + "filters.schema.json", + "group.schema.json", + "datatypes.schema.json", + "dataspaces.schema.json", + "dataset.schema.json", + ] + for sc in schema_components: + with importlib.resources.open_text(schema, sc) as f: + temp = json.load(f) + schema_store[temp["$id"]] = temp + resolver = jsonschema.RefResolver(h5schema["$id"], h5schema, store=schema_store) + return jsonschema.Draft202012Validator(h5schema, resolver=resolver) + + +def main() -> None: + parser = argparse.ArgumentParser( + description="HDF5/JSON validator", + epilog="Copyright 2021 The HDF Group", 
+ formatter_class=argparse.ArgumentDefaultsHelpFormatter, + ) + parser.add_argument( + "jsonloc", + nargs="+", + help="JSON location (files or folders)", + metavar="JSON_LOC", + type=Path, + ) + parser.add_argument( + "--stop", + "-s", + action="store_true", + help="Stop after first HDF5/JSON file failed validation", + ) + args = parser.parse_args() + + # Find all JSON files for validation... + json_files = list() + for p in args.jsonloc: + if p.is_file(): + json_files.append(p) + elif p.is_dir(): + json_files.extend([f for f in p.glob("*.json")]) + if not json_files: + sys.exit("No JSON files for validation found.") + + validator = prepare_validator() + + # Validate HDF5/JSON files... + valid_errors = False + for h5j in json_files: + print(f"Validating {str(h5j)} ... ", end="") + try: + with h5j.open() as f: + inst = json.load(f) + validator.validate(inst) + print("pass") + except jsonschema.exceptions.ValidationError: + print("FAIL") + valid_errors = True + inst_name = str(h5j) + print(f"HDF5/JSON validation failed for {inst_name}", file=sys.stderr) + for err in validator.iter_errors(inst): + print(f"{inst_name} ---> {err}", file=sys.stderr) + if args.stop: + sys.exit("HDF5/JSON validation failed.") + if valid_errors: + sys.exit("HDF5/JSON validation failed.") + + +if __name__ == "__main__": + main() diff --git a/setup.cfg b/setup.cfg index 79bc678..7cafe08 100644 --- a/setup.cfg +++ b/setup.cfg @@ -3,3 +3,11 @@ # 3. If at all possible, it is good practice to do this. If you cannot, you # will need to generate wheels for each Python version that you support. universal=1 + +[versioneer] +# Automatic version numbering scheme +VCS = git +style = pep440 +versionfile_source = h5json/_version.py +versionfile_build = h5json/_version.py +tag_prefix = '' diff --git a/setup.py b/setup.py index d932f8b..809d1d6 100755 --- a/setup.py +++ b/setup.py @@ -1,115 +1,55 @@ -"""A setuptools based setup module for h5json. 
- -See: -https://packaging.python.org/en/latest/distributing.html -https://github.com/pypa/sampleproject -""" - -# Always prefer setuptools over distutils -from setuptools import setup, find_packages -# To use a consistent encoding +from setuptools import setup from codecs import open from os import path +import versioneer here = path.abspath(path.dirname(__file__)) # Get the long description from the README file -with open(path.join(here, 'README.rst'), encoding='utf-8') as f: +with open(path.join(here, "README.rst"), encoding="utf-8") as f: long_description = f.read() setup( - name='h5json', - - # Versions should comply with PEP440. For a discussion on single-sourcing - # the version across setup.py and the project code, see - # https://packaging.python.org/en/latest/single_source_version.html - version='1.1.0', - - description='HDF5/JSON Tools', + name="h5json", + version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), + description="HDF5/JSON Tools", long_description=long_description, - - # The project's main homepage. - url='https://github.com/HDFGroup/hdf5-json', - - # Author details - author='John Readey', - author_email='jreadey@hdfgroup.org', - - # Choose your license - license='BSD', - - # See https://pypi.python.org/pypi?%3Aaction=list_classifiers + url="https://github.com/HDFGroup/hdf5-json", + author="John Readey", + author_email="jreadey@hdfgroup.org", + license="BSD", classifiers=[ - # How mature is this project? Common values are - # 3 - Alpha - # 4 - Beta - # 5 - Production/Stable - 'Development Status :: 5 - Production/Stable', - - # Indicate who your project is intended for - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - - # Pick your license as you wish (should match "license" above) - 'License :: OSI Approved :: BSD License', - - # Specify the Python versions you support here. In particular, ensure - # that you indicate whether you support Python 2, Python 3 or both. 
- 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.2', - 'Programming Language :: Python :: 3.3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', + "Development Status :: 5 - Production/Stable", + "Intended Audience :: Developers", + "Topic :: Software Development :: Build Tools", + "License :: OSI Approved :: BSD License", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", ], - - # What does your project relate to? - keywords='json hdf5 numpy array data', - - # You can just specify the packages manually here if your project is - # simple. Or you can use find_packages(). - packages=find_packages(exclude=['docs', 'test']), - - # Alternatively, if you want to distribute just a my_module.py, uncomment - # this: - # py_modules=["my_module"], - - # List run-time dependencies here. These will be installed by pip when - # your project is installed. For an analysis of "install_requires" vs pip's - # requirements files see: - # https://packaging.python.org/en/latest/requirements.html - install_requires=['numpy>=1.10.4', 'h5py>=2.5'], - - # List additional groups of dependencies here (e.g. development - # dependencies). 
You can install these using the following syntax, - # for example: - # $ pip install -e .[dev,test] + keywords="json hdf5 numpy array data datacube", + packages=[ + "h5json", + "h5json.h5tojson", + "h5json.jsontoh5", + "h5json.validator", + ], + python_requires=">=3.7", + install_requires=["numpy>=1.16.6", "h5py>=3.0", "jsonschema>=4.4.0"], + setup_requires=["pkgconfig"], + zip_safe=False, extras_require={ - 'dev': ['check-manifest'], - 'test': ['coverage'], + "dev": ["check-manifest"], + "test": ["coverage"], }, - - # If there are data files included in your packages that need to be - # installed, specify them here. If using Python 2.6 or less, then these - # have to be included in MANIFEST.in as well. - package_data={ - 'h5json': ['data/json/*', 'data/hdf5/*'] + package_data={"h5json": ["data/json/*", "data/hdf5/*", "schema/*.json"]}, + entry_points={ + "console_scripts": [ + "h5tojson = h5json.h5tojson.h5tojson:main", + "jsontoh5 = h5json.jsontoh5.jsontoh5:main", + "h5jvalidate = h5json.validator.validator:main", + ] }, - - # Although 'package_data' is the preferred approach, in some case you may - # need to place data files outside of your packages. See: - # http://docs.python.org/3.4/distutils/setupscript.html#installing-additional-files # noqa - # In this case, 'data_file' will be installed into '/my_data' - #data_files=[('my_data', ['data/data_file'])], - - # To provide executable scripts, use entry points in preference to the - # "scripts" keyword. Entry points provide cross-platform support and allow - # pip to create the appropriate form of executable for the target platform. 
- #entry_points={ - # 'console_scripts': [ - # 'h5tojson=h5tojson:h5tojson', 'jsontoh5=jsontoh5:jsontoh5', - # ], - #}, - scripts=['h5tojson/h5tojson.py', 'jsontoh5/jsontoh5.py'], ) diff --git a/test/__init__.py b/test/__init__.py index cca48e2..c537f1f 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -1 +1 @@ -# test module +# test module diff --git a/test/integ/__init__.py b/test/integ/__init__.py index 4d54173..62cb504 100644 --- a/test/integ/__init__.py +++ b/test/integ/__init__.py @@ -1 +1 @@ -# Integration test module +# Integration test module diff --git a/test/integ/h5tojson_test.py b/test/integ/h5tojson_test.py index 30f4ada..fc6dd65 100644 --- a/test/integ/h5tojson_test.py +++ b/test/integ/h5tojson_test.py @@ -11,20 +11,16 @@ ############################################################################## import sys import os -import stat -from shutil import copyfile - - """ main """ -top_dir = os.path.abspath(os.path.join("..","..")) +top_dir = os.path.abspath(os.path.join("..", "..")) -data_dir = os.path.join(top_dir, "data","hdf5") +data_dir = os.path.join(top_dir, "data", "hdf5") -out_dir = os.path.join(top_dir, "test","integ","json_out") +out_dir = os.path.join(top_dir, "test", "integ", "json_out") test_files = ( "array_dset.h5", @@ -32,11 +28,14 @@ # bitfields not supported yet # "bitfield_attr.h5", # "bitfield_dset.h5", + "bool_attr.h5", + "bool_dset.h5", "committed_type.h5", + "comp_complex.h5", "compound.h5", "compound_array.h5", "compound_array_attr.h5", - #"compound_array_vlen_string.h5", # crashes python w/ Linux! + # "compound_array_vlen_string.h5", # crashes python w/ Linux! 
"compound_array_dset.h5", "compound_attr.h5", "compound_committed.h5", @@ -80,6 +79,7 @@ "resizable.h5", "sample.h5", "scalar.h5", + "scalar_array_dset.h5", "scalar_attr.h5", "tall.h5", "tall_with_udlink.h5", @@ -92,11 +92,11 @@ "vlen_dset.h5", "vlen_string_attr.h5", "vlen_string_dset.h5", - # "vlen_string_dset_utc.h5", + "vlen_string_dset_utc.h5", "vlen_string_nullterm_attr.h5", "vlen_string_nullterm_dset.h5", "vlen_unicode_attr.h5", - "zerodim.h5" + "zerodim.h5", ) # mkdir for output files @@ -106,21 +106,23 @@ # delete any output files from previous run for out_file in os.listdir(out_dir): split_ext = os.path.splitext(out_file) - if split_ext[1] == '.json': + if split_ext[1] == ".json": os.unlink(os.path.join(out_dir, out_file)) - - -# convert test files to json +# convert test files to json and validate for test_file in test_files: split_ext = os.path.splitext(test_file) file_path = os.path.join(data_dir, test_file) out_file = os.path.join(out_dir, split_ext[0] + ".json") if not os.path.exists(file_path): sys.exit("file: " + file_path + " not found") - cmd = "python ../../h5tojson/h5tojson.py " + file_path + " >" + out_file + cmd = "python ../../h5json/h5tojson/h5tojson.py " + file_path + " >" + out_file print("cmd:", cmd) - rc = os.system(cmd) if rc != 0: sys.exit("h5tojson failed converting: " + test_file) + + cmd = "python ../../h5json/validator/validator.py " + out_file + print("cmd:", cmd) + if rc != 0: + sys.exit("HDF5/JSON validation failed for: " + out_file) diff --git a/test/integ/jsontoh5_test.py b/test/integ/jsontoh5_test.py index 0308bfc..7da562f 100644 --- a/test/integ/jsontoh5_test.py +++ b/test/integ/jsontoh5_test.py @@ -19,11 +19,11 @@ """ main """ -top_dir = os.path.abspath(os.path.join("..","..")) +top_dir = os.path.abspath(os.path.join("..", "..")) -data_dir = os.path.join(top_dir, "data","json") +data_dir = os.path.join(top_dir, "data", "json") -out_dir = os.path.join(top_dir, "test","integ","h5_out") +out_dir = os.path.join(top_dir, "test", 
"integ", "h5_out") test_files = ( # "array_dset.json", @@ -31,6 +31,8 @@ # bitfields not supported yet # "bitfield_attr.json", # "bitfield_dset.json", + "bool_attr.json", + "bool_dset.json", "committed_type.json", "compound.json", # "compound_array.json", @@ -44,8 +46,8 @@ "dset_creationprop.json", # "dset_gzip.json", "empty.json", - # "enum_attr.json", - # "enum_dset.json", + "enum_attr.json", + "enum_dset.json", "fillvalue.json", "h5ex_d_alloc.json", "h5ex_d_checksum.json", @@ -73,7 +75,8 @@ "resizable.json", # "sample.json", "scalar.json", - #"scalar_attr.json", + # "scalar_attr.json", + # "scalar_array_dset.json", "tall.json", "tall_with_udlink.json", "tgroup.json", @@ -81,26 +84,27 @@ # "tstr.json", "types_attr.json", "types_dset.json", - "zerodim.json" + "zerodim.json", ) # these files require a more recent version of hf5 lib (1.8.15 or later) test_files_latest = ( - "fixed_string_attr.json", - "fixed_string_dset.json", - "null_space_attr.json", - "null_space_dset.json", - "objref_attr.json", - "regionref_attr.json", - #"regionref_dset.json", - "scalar_attr.json", - "vlen_attr.json", - "vlen_dset.json", - "vlen_string_attr.json", - "vlen_string_dset.json", - "vlen_string_nullterm_attr.json", - "vlen_string_nullterm_dset.json", - "vlen_unicode_attr.json" + "fixed_string_attr.json", + "fixed_string_dset.json", + "null_space_attr.json", + "null_space_dset.json", + "objref_attr.json", + "regionref_attr.json", + # "regionref_dset.json", + "scalar_attr.json", + "vlen_attr.json", + "vlen_dset.json", + "vlen_string_attr.json", + "vlen_string_dset.json", + "vlen_string_nullterm_attr.json", + "vlen_string_nullterm_dset.json", + "vlen_string_dset_utc.json", + "vlen_unicode_attr.json", ) # mkdir for output files @@ -110,16 +114,18 @@ # delete any output files from previous run for out_file in os.listdir(out_dir): split_ext = os.path.splitext(out_file) - if split_ext[1] == '.h5': + if split_ext[1] == ".h5": os.unlink(os.path.join(out_dir, out_file)) -if 
hdf5_version_tuple[1] > 8 or (hdf5_version_tuple[1] == 8 and hdf5_version_tuple[2] > 14): +if hdf5_version_tuple[1] > 8 or ( + hdf5_version_tuple[1] == 8 and hdf5_version_tuple[2] > 14 +): # add in additional test files print("adding library version dependendent files") test_files = list(test_files) for filename in test_files_latest: test_files.append(filename) - + # convert test files to json for test_file in test_files: split_ext = os.path.splitext(test_file) @@ -127,7 +133,7 @@ out_file = os.path.join(out_dir, split_ext[0] + ".h5") if not os.path.exists(file_path): sys.exit("file: " + file_path + " not found") - cmd = "python ../../jsontoh5/jsontoh5.py " + file_path + " " + out_file + cmd = "python ../../h5json/jsontoh5/jsontoh5.py " + file_path + " " + out_file print("cmd:", cmd) rc = os.system(cmd) if rc != 0: diff --git a/test/unit/__init__.py b/test/unit/__init__.py index 0832b91..80d8eb8 100644 --- a/test/unit/__init__.py +++ b/test/unit/__init__.py @@ -1 +1 @@ -# Unit test module +# Unit test module diff --git a/test/unit/hdf5dbTest.py b/test/unit/hdf5dbTest.py index ec657ff..9b6432f 100755 --- a/test/unit/hdf5dbTest.py +++ b/test/unit/hdf5dbTest.py @@ -10,32 +10,31 @@ # request a copy from help@hdfgroup.org. 
# ############################################################################## import unittest -import sys import os import time -import base64 import errno import os.path as op import stat import logging import shutil - from h5json import Hdf5db + UUID_LEN = 36 # length for uuid strings + def getFile(name, tgt, ro=False): - src = '../../data/hdf5/' + name + src = "data/hdf5/" + name logging.info("copying file to this directory: " + src) filepath = "./out/" + tgt if op.isfile(filepath): # make sure it's writable, before we copy over it - os.chmod(filepath, stat.S_IWRITE|stat.S_IREAD) + os.chmod(filepath, stat.S_IWRITE | stat.S_IREAD) shutil.copyfile(src, filepath) if ro: - logging.info('make read-only') + logging.info("make read-only") os.chmod(filepath, stat.S_IREAD) return filepath @@ -44,9 +43,11 @@ def removeFile(name): try: os.stat(name) except OSError: - return; # file does not exist + return + # file does not exist os.remove(name) + class Hdf5dbTest(unittest.TestCase): def __init__(self, *args, **kwargs): super(Hdf5dbTest, self).__init__(*args, **kwargs) @@ -61,18 +62,18 @@ def __init__(self, *args, **kwargs): self.log.setLevel(logging.INFO) # create logger - handler = logging.FileHandler('./hdf5dbtest.log') + handler = logging.FileHandler("./hdf5dbtest.log") # add handler to logger self.log.addHandler(handler) if lhStdout is not None: self.log.removeHandler(lhStdout) - #self.log.propagate = False # prevent log out going to stdout - self.log.info('init!') + # self.log.propagate = False # prevent log out going to stdout + self.log.info("init!") - #create directory for test output files - if not os.path.exists('./out'): - os.makedirs('./out') + # create directory for test output files + if not os.path.exists("./out"): + os.makedirs("./out") def testInvalidPath(self): filepath = "/tmp/thisisnotafile.h5" @@ -84,7 +85,7 @@ def testInvalidPath(self): self.assertEqual(e.strerror, "file not found") def testInvalidFile(self): - filepath = getFile('notahdf5file.h5', 
'notahdf5file.h5') + filepath = getFile("notahdf5file.h5", "notahdf5file.h5") try: with Hdf5db(filepath, app_logger=self.log) as db: self.assertTrue(False) # shouldn't get here @@ -92,32 +93,31 @@ def testInvalidFile(self): self.assertEqual(e.errno, errno.EINVAL) self.assertEqual(e.strerror, "not an HDF5 file") - def testGetUUIDByPath(self): # get test file g1Uuid = None - filepath = getFile('tall.h5', 'getuuidbypath.h5') + filepath = getFile("tall.h5", "getuuidbypath.h5") with Hdf5db(filepath, app_logger=self.log) as db: - g1Uuid = db.getUUIDByPath('/g1') + g1Uuid = db.getUUIDByPath("/g1") self.assertEqual(len(g1Uuid), UUID_LEN) - obj = db.getObjByPath('/g1') - self.assertEqual(obj.name, '/g1') + obj = db.getObjByPath("/g1") + self.assertEqual(obj.name, "/g1") for name in obj: g = obj[name] g1links = db.getLinkItems(g1Uuid) self.assertEqual(len(g1links), 2) for item in g1links: - self.assertEqual(len(item['id']), UUID_LEN) + self.assertEqual(len(item["id"]), UUID_LEN) # end of with will close file # open again and verify we can get obj by name with Hdf5db(filepath, app_logger=self.log) as db: obj = db.getGroupObjByUuid(g1Uuid) - g1 = db.getObjByPath('/g1') + g1 = db.getObjByPath("/g1") self.assertEqual(obj, g1) def testGetCounts(self): - filepath = getFile('tall.h5', 'testgetcounts_tall.h5') + filepath = getFile("tall.h5", "testgetcounts_tall.h5") with Hdf5db(filepath, app_logger=self.log) as db: cnt = db.getNumberOfGroups() self.assertEqual(cnt, 6) @@ -126,7 +126,7 @@ def testGetCounts(self): cnt = db.getNumberOfDatatypes() self.assertEqual(cnt, 0) - filepath = getFile('empty.h5', 'testgetcounts_empty.h5') + filepath = getFile("empty.h5", "testgetcounts_empty.h5") with Hdf5db(filepath, app_logger=self.log) as db: cnt = db.getNumberOfGroups() self.assertEqual(cnt, 1) @@ -135,14 +135,13 @@ def testGetCounts(self): cnt = db.getNumberOfDatatypes() self.assertEqual(cnt, 0) - def testGroupOperations(self): # get test file - filepath = getFile('tall.h5', 
'tall_del_g11.h5') + filepath = getFile("tall.h5", "tall_del_g11.h5") with Hdf5db(filepath, app_logger=self.log) as db: - rootuuid = db.getUUIDByPath('/') + rootuuid = db.getUUIDByPath("/") root = db.getGroupObjByUuid(rootuuid) - self.assertEqual('/', root.name) + self.assertEqual("/", root.name) rootLinks = db.getLinkItems(rootuuid) self.assertEqual(len(rootLinks), 2) g1uuid = db.getUUIDByPath("/g1") @@ -154,123 +153,122 @@ def testGroupOperations(self): def testCreateGroup(self): # get test file - filepath = getFile('tall.h5', 'tall_newgrp.h5') + filepath = getFile("tall.h5", "tall_newgrp.h5") with Hdf5db(filepath, app_logger=self.log) as db: - rootUuid = db.getUUIDByPath('/') + rootUuid = db.getUUIDByPath("/") numRootChildren = len(db.getLinkItems(rootUuid)) self.assertEqual(numRootChildren, 2) newGrpUuid = db.createGroup() newGrp = db.getGroupObjByUuid(newGrpUuid) self.assertNotEqual(newGrp, None) - db.linkObject(rootUuid, newGrpUuid, 'g3') + db.linkObject(rootUuid, newGrpUuid, "g3") numRootChildren = len(db.getLinkItems(rootUuid)) self.assertEqual(numRootChildren, 3) # verify linkObject can be called idempotent-ly - db.linkObject(rootUuid, newGrpUuid, 'g3') + db.linkObject(rootUuid, newGrpUuid, "g3") def testGetLinkItemsBatch(self): # get test file - filepath = getFile('group100.h5', 'getlinkitemsbatch.h5') + filepath = getFile("group100.h5", "getlinkitemsbatch.h5") marker = None count = 0 with Hdf5db(filepath, app_logger=self.log) as db: - rootUuid = db.getUUIDByPath('/') + rootUuid = db.getUUIDByPath("/") while True: # get items 13 at a time batch = db.getLinkItems(rootUuid, marker=marker, limit=13) if len(batch) == 0: - break # done! + break # done! 
count += len(batch) lastItem = batch[len(batch) - 1] - marker = lastItem['title'] + marker = lastItem["title"] self.assertEqual(count, 100) def testGetItemHardLink(self): - filepath = getFile('tall.h5', 'getitemhardlink.h5') + filepath = getFile("tall.h5", "getitemhardlink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - grpUuid = db.getUUIDByPath('/g1/g1.1') + grpUuid = db.getUUIDByPath("/g1/g1.1") item = db.getLinkItemByUuid(grpUuid, "dset1.1.1") - self.assertTrue('id' in item) - self.assertEqual(item['title'], 'dset1.1.1') - self.assertEqual(item['class'], 'H5L_TYPE_HARD') - self.assertEqual(item['collection'], 'datasets') - self.assertTrue('target' not in item) - self.assertTrue('mtime' in item) - self.assertTrue('ctime' in item) + self.assertTrue("id" in item) + self.assertEqual(item["title"], "dset1.1.1") + self.assertEqual(item["class"], "H5L_TYPE_HARD") + self.assertEqual(item["collection"], "datasets") + self.assertTrue("target" not in item) + self.assertTrue("mtime" in item) + self.assertTrue("ctime" in item) def testGetItemSoftLink(self): - filepath = getFile('tall.h5', 'getitemsoftlink.h5') + filepath = getFile("tall.h5", "getitemsoftlink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - grpUuid = db.getUUIDByPath('/g1/g1.2/g1.2.1') + grpUuid = db.getUUIDByPath("/g1/g1.2/g1.2.1") item = db.getLinkItemByUuid(grpUuid, "slink") - self.assertTrue('id' not in item) - self.assertEqual(item['title'], 'slink') - self.assertEqual(item['class'], 'H5L_TYPE_SOFT') - self.assertEqual(item['h5path'], 'somevalue') - self.assertTrue('mtime' in item) - self.assertTrue('ctime' in item) + self.assertTrue("id" not in item) + self.assertEqual(item["title"], "slink") + self.assertEqual(item["class"], "H5L_TYPE_SOFT") + self.assertEqual(item["h5path"], "somevalue") + self.assertTrue("mtime" in item) + self.assertTrue("ctime" in item) def testGetItemExternalLink(self): - filepath = getFile('tall_with_udlink.h5', 'getitemexternallink.h5') + filepath = 
getFile("tall_with_udlink.h5", "getitemexternallink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - grpUuid = db.getUUIDByPath('/g1/g1.2') + grpUuid = db.getUUIDByPath("/g1/g1.2") item = db.getLinkItemByUuid(grpUuid, "extlink") - self.assertTrue('uuid' not in item) - self.assertEqual(item['title'], 'extlink') - self.assertEqual(item['class'], 'H5L_TYPE_EXTERNAL') - self.assertEqual(item['h5path'], 'somepath') - self.assertEqual(item['file'], 'somefile') - self.assertTrue('mtime' in item) - self.assertTrue('ctime' in item) + self.assertTrue("uuid" not in item) + self.assertEqual(item["title"], "extlink") + self.assertEqual(item["class"], "H5L_TYPE_EXTERNAL") + self.assertEqual(item["h5path"], "somepath") + self.assertEqual(item["file"], "somefile") + self.assertTrue("mtime" in item) + self.assertTrue("ctime" in item) def testGetItemUDLink(self): - filepath = getFile('tall_with_udlink.h5', 'getitemudlink.h5') + filepath = getFile("tall_with_udlink.h5", "getitemudlink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - grpUuid = db.getUUIDByPath('/g2') + grpUuid = db.getUUIDByPath("/g2") item = db.getLinkItemByUuid(grpUuid, "udlink") - self.assertTrue('uuid' not in item) - self.assertEqual(item['title'], 'udlink') - self.assertEqual(item['class'], 'H5L_TYPE_USER_DEFINED') - self.assertTrue('h5path' not in item) - self.assertTrue('file' not in item) - self.assertTrue('mtime' in item) - self.assertTrue('ctime' in item) + self.assertTrue("uuid" not in item) + self.assertEqual(item["title"], "udlink") + self.assertEqual(item["class"], "H5L_TYPE_USER_DEFINED") + self.assertTrue("h5path" not in item) + self.assertTrue("file" not in item) + self.assertTrue("mtime" in item) + self.assertTrue("ctime" in item) def testGetNumLinks(self): items = None - filepath = getFile('tall.h5', 'getnumlinks.h5') + filepath = getFile("tall.h5", "getnumlinks.h5") with Hdf5db(filepath, app_logger=self.log) as db: - g1= db.getObjByPath('/g1') + g1 = db.getObjByPath("/g1") numLinks 
= db.getNumLinksToObject(g1) self.assertEqual(numLinks, 1) def testGetLinks(self): - g12_links = ('extlink', 'g1.2.1') + g12_links = ("extlink", "g1.2.1") hardLink = None externalLink = None - filepath = getFile('tall_with_udlink.h5', 'getlinks.h5') + filepath = getFile("tall_with_udlink.h5", "getlinks.h5") with Hdf5db(filepath, app_logger=self.log) as db: - grpUuid = db.getUUIDByPath('/g1/g1.2') + grpUuid = db.getUUIDByPath("/g1/g1.2") items = db.getLinkItems(grpUuid) self.assertEqual(len(items), 2) for item in items: - self.assertTrue(item['title'] in g12_links) - if item['class'] == 'H5L_TYPE_HARD': + self.assertTrue(item["title"] in g12_links) + if item["class"] == "H5L_TYPE_HARD": hardLink = item - elif item['class'] == 'H5L_TYPE_EXTERNAL': + elif item["class"] == "H5L_TYPE_EXTERNAL": externalLink = item - self.assertEqual(hardLink['collection'], 'groups') - self.assertTrue('id' in hardLink) - self.assertTrue('id' not in externalLink) - self.assertEqual(externalLink['h5path'], 'somepath') - self.assertEqual(externalLink['file'], 'somefile') - + self.assertEqual(hardLink["collection"], "groups") + self.assertTrue("id" in hardLink) + self.assertTrue("id" not in externalLink) + self.assertEqual(externalLink["h5path"], "somepath") + self.assertEqual(externalLink["file"], "somefile") def testDeleteLink(self): # get test file - filepath = getFile('tall.h5', 'deletelink.h5') + filepath = getFile("tall.h5", "deletelink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - rootUuid = db.getUUIDByPath('/') + rootUuid = db.getUUIDByPath("/") numRootChildren = len(db.getLinkItems(rootUuid)) self.assertEqual(numRootChildren, 2) db.unlinkItem(rootUuid, "g2") @@ -279,9 +277,9 @@ def testDeleteLink(self): def testDeleteUDLink(self): # get test file - filepath = getFile('tall_with_udlink.h5', 'deleteudlink.h5') + filepath = getFile("tall_with_udlink.h5", "deleteudlink.h5") with Hdf5db(filepath, app_logger=self.log) as db: - g2Uuid = db.getUUIDByPath('/g2') + g2Uuid = 
db.getUUIDByPath("/g2") numG2Children = len(db.getLinkItems(g2Uuid)) self.assertEqual(numG2Children, 3) got_exception = False @@ -294,36 +292,35 @@ def testDeleteUDLink(self): numG2Children = len(db.getLinkItems(g2Uuid)) self.assertEqual(numG2Children, 3) - def testReadOnlyGetUUID(self): # get test file - filepath = getFile('tall.h5', 'readonlygetuuid.h5', ro=True) + filepath = getFile("tall.h5", "readonlygetuuid.h5", ro=True) # remove db file! - removeFile('./out/.' + 'readonlygetuuid.h5') + removeFile("./out/." + "readonlygetuuid.h5") g1Uuid = None with Hdf5db(filepath, app_logger=self.log) as db: - g1Uuid = db.getUUIDByPath('/g1') + g1Uuid = db.getUUIDByPath("/g1") self.assertEqual(len(g1Uuid), UUID_LEN) - obj = db.getObjByPath('/g1') - self.assertEqual(obj.name, '/g1') + obj = db.getObjByPath("/g1") + self.assertEqual(obj.name, "/g1") # end of with will close file # open again and verify we can get obj by name with Hdf5db(filepath, app_logger=self.log) as db: obj = db.getGroupObjByUuid(g1Uuid) - g1 = db.getObjByPath('/g1') + g1 = db.getObjByPath("/g1") self.assertEqual(obj, g1) g1links = db.getLinkItems(g1Uuid) self.assertEqual(len(g1links), 2) for item in g1links: - self.assertEqual(len(item['id']), UUID_LEN) + self.assertEqual(len(item["id"]), UUID_LEN) def testReadDataset(self): - filepath = getFile('tall.h5', 'readdataset.h5') - d111_values = None - d112_values = None - with Hdf5db(filepath, app_logger=self.log) as db: - d111Uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.1') + filepath = getFile("tall.h5", "readdataset.h5") + d111_values = None + d112_values = None + with Hdf5db(filepath, app_logger=self.log) as db: + d111Uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.1") self.assertEqual(len(d111Uuid), UUID_LEN) d111_values = db.getDatasetValuesByUuid(d111Uuid) self.assertTrue(type(d111_values) is list) @@ -332,37 +329,36 @@ def testReadDataset(self): arr = d111_values[i] self.assertEqual(len(arr), 10) for j in range(10): - self.assertEqual(arr[j], i*j) + 
self.assertEqual(arr[j], i * j) - d112Uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.2') + d112Uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.2") self.assertEqual(len(d112Uuid), UUID_LEN) d112_values = db.getDatasetValuesByUuid(d112Uuid) self.assertTrue(type(d112_values) is list) self.assertEqual(len(d112_values), 20) for i in range(20): self.assertEqual(d112_values[i], i) - + def testReadDatasetBinary(self): - filepath = getFile('tall.h5', 'readdatasetbinary.h5') - d111_values = None - d112_values = None - with Hdf5db(filepath, app_logger=self.log) as db: - d111Uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.1') + filepath = getFile("tall.h5", "readdatasetbinary.h5") + d111_values = None + d112_values = None + with Hdf5db(filepath, app_logger=self.log) as db: + d111Uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.1") self.assertEqual(len(d111Uuid), UUID_LEN) d111_data = db.getDatasetValuesByUuid(d111Uuid, format="binary") - self.assertTrue(type(d111_data) is bytes) + self.assertTrue(type(d111_data) is bytes) self.assertEqual(len(d111_data), 400) # 10x10x(4 byte type) - - d112Uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.2') + + d112Uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.2") self.assertEqual(len(d112Uuid), UUID_LEN) d112_data = db.getDatasetValuesByUuid(d112Uuid, format="binary") - self.assertEqual(len(d112_data), 80) # 20x(4 byte type) - - + self.assertEqual(len(d112_data), 80) # 20x(4 byte type) + def testReadCompoundDataset(self): - filepath = getFile('compound.h5', 'readcompound.h5') - with Hdf5db(filepath, app_logger=self.log) as db: - dset_uuid = db.getUUIDByPath('/dset') + filepath = getFile("compound.h5", "readcompound.h5") + with Hdf5db(filepath, app_logger=self.log) as db: + dset_uuid = db.getUUIDByPath("/dset") self.assertEqual(len(dset_uuid), UUID_LEN) dset_values = db.getDatasetValuesByUuid(dset_uuid) @@ -373,687 +369,779 @@ def testReadCompoundDataset(self): self.assertEqual(elem[2], 63) self.assertEqual(elem[3], 29.88) self.assertEqual(elem[4], "SE 10") - + def 
testReadDatasetCreationProp(self): - filepath = getFile('compound.h5', 'readdatasetcreationprop.h5') - with Hdf5db(filepath, app_logger=self.log) as db: - dset_uuid = db.getUUIDByPath('/dset') + filepath = getFile("compound.h5", "readdatasetcreationprop.h5") + with Hdf5db(filepath, app_logger=self.log) as db: + dset_uuid = db.getUUIDByPath("/dset") self.assertEqual(len(dset_uuid), UUID_LEN) dset_item = db.getDatasetItemByUuid(dset_uuid) - self.assertTrue('creationProperties' in dset_item) - creationProp = dset_item['creationProperties'] - self.assertTrue('fillValue' in creationProp) - fillValue = creationProp['fillValue'] - + self.assertTrue("creationProperties" in dset_item) + creationProp = dset_item["creationProperties"] + self.assertTrue("fillValue" in creationProp) + fillValue = creationProp["fillValue"] + self.assertEqual(fillValue[0], 999) self.assertEqual(fillValue[1], "99:90") self.assertEqual(fillValue[2], 999) self.assertEqual(fillValue[3], 999.0) self.assertEqual(fillValue[4], "N") - - def testCreateScalarDataset(self): creation_props = { - "allocTime": "H5D_ALLOC_TIME_LATE", - "fillTime": "H5D_FILL_TIME_IFSET", - "fillValue": "", - "layout": { - "class": "H5D_CONTIGUOUS" - } - } + "allocTime": "H5D_ALLOC_TIME_LATE", + "fillTime": "H5D_FILL_TIME_IFSET", + "fillValue": "", + "layout": {"class": "H5D_CONTIGUOUS"}, + } datatype = { - "charSet": "H5T_CSET_ASCII", - "class": "H5T_STRING", - "length": 1, - "strPad": "H5T_STR_NULLPAD" - } - filepath = getFile('empty.h5', 'createscalardataset.h5') + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 1, + "strPad": "H5T_STR_NULLPAD", + } + filepath = getFile("empty.h5", "createscalardataset.h5") with Hdf5db(filepath, app_logger=self.log) as db: dims = () # if no space in body, default to scalar - max_shape=None + max_shape = None - db.createDataset(datatype, dims, max_shape=max_shape, creation_props=creation_props) + db.createDataset( + datatype, dims, max_shape=max_shape, 
creation_props=creation_props + ) def testCreate1dDataset(self): datatype = "H5T_STD_I64LE" dims = (10,) - filepath = getFile('empty.h5', 'create1ddataset.h5') + filepath = getFile("empty.h5", "create1ddataset.h5") dset_uuid = None with Hdf5db(filepath, app_logger=self.log) as db: rsp = db.createDataset(datatype, dims) - dset_uuid = rsp['id'] + dset_uuid = rsp["id"] item = db.getDatasetItemByUuid(dset_uuid) - self.assertEqual(item['attributeCount'], 0) - type_item = item['type'] - self.assertEqual(type_item['class'], 'H5T_INTEGER') - self.assertEqual(type_item['base'], 'H5T_STD_I64LE') - shape_item = item['shape'] - self.assertEqual(shape_item['class'], 'H5S_SIMPLE') - self.assertEqual(shape_item['dims'], (10,)) + self.assertEqual(item["attributeCount"], 0) + type_item = item["type"] + self.assertEqual(type_item["class"], "H5T_INTEGER") + self.assertEqual(type_item["base"], "H5T_STD_I64LE") + shape_item = item["shape"] + self.assertEqual(shape_item["class"], "H5S_SIMPLE") + self.assertEqual(shape_item["dims"], (10,)) def testCreate2dExtendableDataset(self): datatype = "H5T_STD_I64LE" dims = (10, 10) max_shape = (None, 10) - filepath = getFile('empty.h5', 'create2dextendabledataset.h5') + filepath = getFile("empty.h5", "create2dextendabledataset.h5") dset_uuid = None with Hdf5db(filepath, app_logger=self.log) as db: rsp = db.createDataset(datatype, dims, max_shape=max_shape) - dset_uuid = rsp['id'] + dset_uuid = rsp["id"] item = db.getDatasetItemByUuid(dset_uuid) - self.assertEqual(item['attributeCount'], 0) - type_item = item['type'] - self.assertEqual(type_item['class'], 'H5T_INTEGER') - self.assertEqual(type_item['base'], 'H5T_STD_I64LE') - shape_item = item['shape'] - self.assertEqual(shape_item['class'], 'H5S_SIMPLE') - self.assertEqual(shape_item['dims'], (10,10)) - self.assertTrue('maxdims' in shape_item) - self.assertEqual(shape_item['maxdims'], [0, 10]) + self.assertEqual(item["attributeCount"], 0) + type_item = item["type"] + 
self.assertEqual(type_item["class"], "H5T_INTEGER") + self.assertEqual(type_item["base"], "H5T_STD_I64LE") + shape_item = item["shape"] + self.assertEqual(shape_item["class"], "H5S_SIMPLE") + self.assertEqual(shape_item["dims"], (10, 10)) + self.assertTrue("maxdims" in shape_item) + self.assertEqual(shape_item["maxdims"], [0, 10]) def testCreateCommittedTypeDataset(self): - filepath = getFile('empty.h5', 'createcommittedtypedataset.h5') + filepath = getFile("empty.h5", "createcommittedtypedataset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") - datatype = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 15} + datatype = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": 15, + } item = db.createCommittedType(datatype) - type_uuid = item['id'] + type_uuid = item["id"] dims = () # if no space in body, default to scalar rsp = db.createDataset(type_uuid, dims, max_shape=None, creation_props=None) - dset_uuid = rsp['id'] + dset_uuid = rsp["id"] item = db.getDatasetItemByUuid(dset_uuid) - type_item = item['type'] - self.assertTrue('uuid' in type_item) - self.assertEqual(type_item['uuid'], type_uuid) + type_item = item["type"] + self.assertTrue("uuid" in type_item) + self.assertEqual(type_item["uuid"], type_uuid) def testCreateCommittedCompoundTypeDataset(self): - filepath = getFile('empty.h5', 'createcommittedcompoundtypedataset.h5') + filepath = getFile("empty.h5", "createcommittedcompoundtypedataset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") - datatype = {'class': 'H5T_COMPOUND', - 'fields': [] } + datatype = {"class": "H5T_COMPOUND", "fields": []} type_fields = [] - type_fields.append({'name': 'field_1', 'type': 'H5T_STD_I64BE' }) - type_fields.append({'name': 'field_2', 'type': 'H5T_IEEE_F64BE' }) + 
type_fields.append({"name": "field_1", "type": "H5T_STD_I64BE"}) + type_fields.append({"name": "field_2", "type": "H5T_IEEE_F64BE"}) - datatype['fields'] = type_fields + datatype["fields"] = type_fields - creation_props = { - "fillValue": [ - 0, - 0.0 ] - } + creation_props = {"fillValue": [0, 0.0]} - item = db.createCommittedType(datatype) - type_uuid = item['id'] + item = db.createCommittedType(datatype) + type_uuid = item["id"] dims = () # if no space in body, default to scalar - rsp = db.createDataset(type_uuid, dims, max_shape=None, creation_props=creation_props) - dset_uuid = rsp['id'] + rsp = db.createDataset( + type_uuid, dims, max_shape=None, creation_props=creation_props + ) + dset_uuid = rsp["id"] item = db.getDatasetItemByUuid(dset_uuid) - type_item = item['type'] - self.assertTrue('uuid' in type_item) - self.assertEqual(type_item['uuid'], type_uuid) - - + type_item = item["type"] + self.assertTrue("uuid" in type_item) + self.assertEqual(type_item["uuid"], type_uuid) def testReadZeroDimDataset(self): - filepath = getFile('zerodim.h5', 'readzerodeimdataset.h5') - + filepath = getFile("zerodim.h5", "readzerodeimdataset.h5") + with Hdf5db(filepath, app_logger=self.log) as db: - dsetUuid = db.getUUIDByPath('/dset') + dsetUuid = db.getUUIDByPath("/dset") self.assertEqual(len(dsetUuid), UUID_LEN) dset_value = db.getDatasetValuesByUuid(dsetUuid) self.assertEqual(dset_value, 42) - - + def testReadNullSpaceDataset(self): - filepath = getFile('null_space_dset.h5', 'readnullspacedataset.h5') - + filepath = getFile("null_space_dset.h5", "readnullspacedataset.h5") + + with Hdf5db(filepath, app_logger=self.log) as db: + dsetUuid = db.getUUIDByPath("/DS1") + self.assertEqual(len(dsetUuid), UUID_LEN) + obj = db.getDatasetObjByUuid(dsetUuid) + shape_item = db.getShapeItemByDsetObj(obj) + self.assertTrue("class" in shape_item) + self.assertEqual(shape_item["class"], "H5S_NULL") + + def testReadScalarSpaceArrayDataset(self): + filepath = getFile("scalar_array_dset.h5", 
"readscalarspacearraydataset.h5") + with Hdf5db(filepath, app_logger=self.log) as db: - dsetUuid = db.getUUIDByPath('/DS1') + dsetUuid = db.getUUIDByPath("/DS1") self.assertEqual(len(dsetUuid), UUID_LEN) obj = db.getDatasetObjByUuid(dsetUuid) shape_item = db.getShapeItemByDsetObj(obj) - self.assertTrue('class' in shape_item) - self.assertEqual(shape_item['class'], 'H5S_NULL') - + self.assertTrue("class" in shape_item) + self.assertEqual(shape_item["class"], "H5S_SCALAR") + def testReadNullSpaceAttribute(self): - filepath = getFile('null_space_attr.h5', 'readnullspaceattr.h5') - + filepath = getFile("null_space_attr.h5", "readnullspaceattr.h5") + with Hdf5db(filepath, app_logger=self.log) as db: - rootUuid = db.getUUIDByPath('/') + rootUuid = db.getUUIDByPath("/") self.assertEqual(len(rootUuid), UUID_LEN) - item = db.getAttributeItem("groups", rootUuid, "attr1") - self.assertTrue('shape' in item) - shape_item = item['shape'] - self.assertTrue('class' in shape_item) - self.assertEqual(shape_item['class'], 'H5S_NULL') + item = db.getAttributeItem("groups", rootUuid, "attr1") + self.assertTrue("shape" in item) + shape_item = item["shape"] + self.assertTrue("class" in shape_item) + self.assertEqual(shape_item["class"], "H5S_NULL") def testReadAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('tall.h5', 'readattribute.h5') + filepath = getFile("tall.h5", "readattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - rootUuid = db.getUUIDByPath('/') + rootUuid = db.getUUIDByPath("/") self.assertEqual(len(rootUuid), UUID_LEN) - item = db.getAttributeItem("groups", rootUuid, "attr1") + item = db.getAttributeItem("groups", rootUuid, "attr1") def testWriteScalarAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writescalarattribute.h5') + filepath = getFile("empty.h5", "writescalarattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = 
db.getUUIDByPath("/") dims = () datatype = "H5T_STD_I32LE" value = 42 db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) - item = db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") - self.assertEqual(item['value'], 42) + item = db.getAttributeItem("groups", root_uuid, "A1") + self.assertEqual(item["name"], "A1") + self.assertEqual(item["value"], 42) now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - - self.assertEqual(item_type['class'], 'H5T_INTEGER') - self.assertEqual(item_type['base'], 'H5T_STD_I32LE') - self.assertEqual(len(item_type.keys()), 2) # just two keys should be returned - + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + + self.assertEqual(item_type["class"], "H5T_INTEGER") + self.assertEqual(item_type["base"], "H5T_STD_I32LE") + self.assertEqual( + len(item_type.keys()), 2 + ) # just two keys should be returned def testWriteFixedStringAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writefixedstringattribute.h5') + filepath = getFile("empty.h5", "writefixedstringattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () - datatype = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLPAD', - 'length': 13} + datatype = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLPAD", + "length": 13, + } value = "Hello, world!" 
- db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) + db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) item = db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") - self.assertEqual(item['value'], "Hello, world!") + self.assertEqual(item["name"], "A1") + self.assertEqual(item["value"], "Hello, world!") now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - self.assertEqual(item_type['length'], 13) - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLPAD') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + self.assertEqual(item_type["length"], 13) + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLPAD") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") def testWriteFixedNullTermStringAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writefixednulltermstringattribute.h5') + filepath = getFile("empty.h5", "writefixednulltermstringattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () - datatype = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 13} + datatype = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": 13, + } value = b"Hello, world!" 
# write the attribute - db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) + db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) # read it back item = db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") + self.assertEqual(item["name"], "A1") # the following compare fails - see issue #34 - #self.assertEqual(item['value'], "Hello, world!") + # self.assertEqual(item['value'], "Hello, world!") now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - self.assertEqual(item_type['length'], 13) - self.assertEqual(item_type['class'], 'H5T_STRING') + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + self.assertEqual(item_type["length"], 13) + self.assertEqual(item_type["class"], "H5T_STRING") # NULLTERM get's converted to NULLPAD since the numpy dtype does not # support other padding conventions. 
- self.assertEqual(item_type['strPad'], 'H5T_STR_NULLPAD') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') + self.assertEqual(item_type["strPad"], "H5T_STR_NULLPAD") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") def testWriteVlenStringAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writevlenstringattribute.h5') + filepath = getFile("empty.h5", "writevlenstringattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () - datatype = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 'H5T_VARIABLE' } - - #value = np.string_("Hello, world!") + datatype = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": "H5T_VARIABLE", + } + + # value = np.string_("Hello, world!") value = "Hello, world!" - db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) + db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) item = db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") - self.assertEqual(item['value'], "Hello, world!") + self.assertEqual(item["name"], "A1") + self.assertEqual(item["value"], "Hello, world!") now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLTERM') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - self.assertEqual(item_type['length'], 'H5T_VARIABLE') + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") + 
self.assertEqual(item_type["strPad"], "H5T_STR_NULLTERM") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + self.assertEqual(item_type["length"], "H5T_VARIABLE") def testReadVlenStringDataset(self): item = None - filepath = getFile('vlen_string_dset.h5', 'vlen_string_dset.h5') + filepath = getFile("vlen_string_dset.h5", "vlen_string_dset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - dset_uuid = db.getUUIDByPath('/DS1') - item = db.getDatasetItemByUuid(dset_uuid) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SIMPLE') - dims = shape['dims'] + dset_uuid = db.getUUIDByPath("/DS1") + item = db.getDatasetItemByUuid(dset_uuid) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + dims = shape["dims"] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 4) - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_STRING') + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") # actual padding is SPACEPAD - See issue #32 - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLTERM') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - self.assertEqual(item_type['length'], 'H5T_VARIABLE') + self.assertEqual(item_type["strPad"], "H5T_STR_NULLTERM") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + self.assertEqual(item_type["length"], "H5T_VARIABLE") row = db.getDatasetValuesByUuid(dset_uuid, (slice(0, 1),)) - self.assertEqual(row, ['Parting']) + self.assertEqual(row, ["Parting"]) def testReadVlenStringDataset_utc(self): item = None - filepath = getFile('vlen_string_dset_utc.h5', 'vlen_string_dset_utc.h5') + filepath = getFile("vlen_string_dset_utc.h5", "vlen_string_dset_utc.h5") with Hdf5db(filepath, app_logger=self.log) as db: - dset_uuid = db.getUUIDByPath('/ds1') - item = db.getDatasetItemByUuid(dset_uuid) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SIMPLE') - dims = shape['dims'] + dset_uuid = db.getUUIDByPath("/ds1") + item = 
db.getDatasetItemByUuid(dset_uuid) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + dims = shape["dims"] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 2293) - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLTERM') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - self.assertEqual(item_type['length'], 'H5T_VARIABLE') + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLTERM") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + self.assertEqual(item_type["length"], "H5T_VARIABLE") # next line throws conversion error - see issue #19 - #row = db.getDatasetValuesByUuid(dset_uuid, (slice(0, 1),)) - + # row = db.getDatasetValuesByUuid(dset_uuid, (slice(0, 1),)) + def testReadFixedStringDataset(self): item = None - filepath = getFile('fixed_string_dset.h5', 'fixed_string_dset.h5') + filepath = getFile("fixed_string_dset.h5", "fixed_string_dset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - dset_uuid = db.getUUIDByPath('/DS1') - item = db.getDatasetItemByUuid(dset_uuid) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SIMPLE') - dims = shape['dims'] + dset_uuid = db.getUUIDByPath("/DS1") + item = db.getDatasetItemByUuid(dset_uuid) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + dims = shape["dims"] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 4) - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLPAD') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - self.assertEqual(item_type['length'], 7) + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLPAD") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + 
self.assertEqual(item_type["length"], 7) + row = db.getDatasetValuesByUuid(dset_uuid) + self.assertEqual(row, ["Parting", "is such", "sweet", "sorrow."]) row = db.getDatasetValuesByUuid(dset_uuid, (slice(0, 1),)) - self.assertEqual(row, ['Parting']) - + self.assertEqual( + row, + [ + "Parting", + ], + ) + row = db.getDatasetValuesByUuid(dset_uuid, (slice(2, 3),)) + self.assertEqual( + row, + [ + "sweet", + ], + ) + + def testReadFixedStringDatasetBinary(self): + item = None + filepath = getFile("fixed_string_dset.h5", "fixed_string_dset.h5") + with Hdf5db(filepath, app_logger=self.log) as db: + dset_uuid = db.getUUIDByPath("/DS1") + item = db.getDatasetItemByUuid(dset_uuid) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + dims = shape["dims"] + self.assertEqual(len(dims), 1) + self.assertEqual(dims[0], 4) + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLPAD") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + self.assertEqual(item_type["length"], 7) + row = db.getDatasetValuesByUuid(dset_uuid, format="binary") + self.assertEqual(row, b"Partingis suchsweet\x00\x00sorrow.") + row = db.getDatasetValuesByUuid(dset_uuid, (slice(0, 1),), format="binary") + self.assertEqual(row, b"Parting") + row = db.getDatasetValuesByUuid(dset_uuid, (slice(2, 3),), format="binary") + self.assertEqual(row, b"sweet\x00\x00") def testWriteVlenUnicodeAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writevlenunicodeattribute.h5') + filepath = getFile("empty.h5", "writevlenunicodeattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () - datatype = { 'charSet': 'H5T_CSET_UTF8', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 'H5T_VARIABLE' } - value = 
u'\u6b22\u8fce\u63d0\u4ea4\u5fae\u535a\u641c\u7d22\u4f7f\u7528\u53cd\u9988\uff0c\u8bf7\u76f4\u63a5' + datatype = { + "charSet": "H5T_CSET_UTF8", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": "H5T_VARIABLE", + } + value = u"\u6b22\u8fce\u63d0\u4ea4\u5fae\u535a\u641c\u7d22\u4f7f\u7528\u53cd\u9988\uff0c\u8bf7\u76f4\u63a5" db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) - item = db.getAttributeItem("groups", root_uuid, "A1") + item = db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") - self.assertEqual(item['value'], value) + self.assertEqual(item["name"], "A1") + self.assertEqual(item["value"], value) now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLTERM') - self.assertEqual(item_type['charSet'], 'H5T_CSET_UTF8') - self.assertEqual(item_type['length'], 'H5T_VARIABLE') - + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLTERM") + self.assertEqual(item_type["charSet"], "H5T_CSET_UTF8") + self.assertEqual(item_type["length"], "H5T_VARIABLE") def testWriteIntAttribute(self): # getAttributeItemByUuid item = None - filepath = getFile('empty.h5', 'writeintattribute.h5') + filepath = getFile("empty.h5", "writeintattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = (5,) datatype = "H5T_STD_I16LE" value = [2, 3, 5, 7, 11] db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) - item = 
db.getAttributeItem("groups", root_uuid, "A1") - self.assertEqual(item['name'], "A1") - self.assertEqual(item['value'], [2, 3, 5, 7, 11]) + item = db.getAttributeItem("groups", root_uuid, "A1") + self.assertEqual(item["name"], "A1") + self.assertEqual(item["value"], [2, 3, 5, 7, 11]) now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SIMPLE') - item_type = item['type'] - self.assertEqual(item_type['class'], 'H5T_INTEGER') - self.assertEqual(item_type['base'], 'H5T_STD_I16LE') + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + item_type = item["type"] + self.assertEqual(item_type["class"], "H5T_INTEGER") + self.assertEqual(item_type["base"], "H5T_STD_I16LE") def testCreateReferenceAttribute(self): - filepath = getFile('empty.h5', 'createreferencedataset.h5') + filepath = getFile("empty.h5", "createreferencedataset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () # if no space in body, default to scalar - rsp = db.createDataset("H5T_STD_I64LE", dims, max_shape=None, creation_props=None) - dset_uuid = rsp['id'] - db.linkObject(root_uuid, dset_uuid, 'DS1') + rsp = db.createDataset( + "H5T_STD_I64LE", dims, max_shape=None, creation_props=None + ) + dset_uuid = rsp["id"] + db.linkObject(root_uuid, dset_uuid, "DS1") dims = (1,) - datatype = { "class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"} + datatype = {"class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"} ds1_ref = "datasets/" + dset_uuid - value = [ds1_ref,] + value = [ + ds1_ref, + ] db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) - item = db.getAttributeItem("groups", root_uuid, "A1") + item = db.getAttributeItem("groups", root_uuid, "A1") - attr_type = item['type'] + attr_type = 
item["type"] self.assertEqual(attr_type["class"], "H5T_REFERENCE") self.assertEqual(attr_type["base"], "H5T_STD_REF_OBJ") - attr_value = item['value'] + attr_value = item["value"] self.assertEqual(len(attr_value), 1) self.assertEqual(attr_value[0], ds1_ref) def testCreateVlenReferenceAttribute(self): - filepath = getFile('empty.h5', 'createreferenceattribute.h5') + filepath = getFile("empty.h5", "createreferenceattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = () # if no space in body, default to scalar - rsp = db.createDataset("H5T_STD_I64LE", dims, max_shape=None, creation_props=None) - dset_uuid = rsp['id'] - db.linkObject(root_uuid, dset_uuid, 'DS1') + rsp = db.createDataset( + "H5T_STD_I64LE", dims, max_shape=None, creation_props=None + ) + dset_uuid = rsp["id"] + db.linkObject(root_uuid, dset_uuid, "DS1") dims = (1,) - datatype = {"class": "H5T_VLEN", - "base": { "class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"} + datatype = { + "class": "H5T_VLEN", + "base": {"class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"}, } ds1_ref = "datasets/" + dset_uuid - value = [[ds1_ref,],] + value = [ + [ + ds1_ref, + ], + ] db.createAttribute("groups", root_uuid, "A1", dims, datatype, value) - item = db.getAttributeItem("groups", root_uuid, "A1") + item = db.getAttributeItem("groups", root_uuid, "A1") - attr_type = item['type'] + attr_type = item["type"] self.assertEqual(attr_type["class"], "H5T_VLEN") base_type = attr_type["base"] # todo - this should be H5T_REFERENCE, not H5T_OPAQUE # See h5py issue: https://github.com/h5py/h5py/issues/553 import h5py + # test based on h5py version until we change install requirements - if h5py.version.version_tuple[1] >= 6: + if h5py.version.version_tuple >= (2, 6, 0): self.assertEqual(base_type["class"], "H5T_REFERENCE") else: self.assertEqual(base_type["class"], "H5T_OPAQUE") def testCreateReferenceListAttribute(self): - filepath = 
getFile('empty.h5', 'createreferencelistattribute.h5') + filepath = getFile("empty.h5", "createreferencelistattribute.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') + root_uuid = db.getUUIDByPath("/") dims = (10,) - rsp = db.createDataset("H5T_STD_I64LE", dims, max_shape=None, creation_props=None) - dset_uuid = rsp['id'] - db.linkObject(root_uuid, dset_uuid, 'dset') - - rsp = db.createDataset("H5T_STD_I64LE", dims, max_shape=None, creation_props=None) - xscale_uuid = rsp['id'] - nullterm_string_type = { - "charSet": "H5T_CSET_ASCII", - "class": "H5T_STRING", - "length": 16, - "strPad": "H5T_STR_NULLTERM" + rsp = db.createDataset( + "H5T_STD_I64LE", dims, max_shape=None, creation_props=None + ) + dset_uuid = rsp["id"] + db.linkObject(root_uuid, dset_uuid, "dset") + + rsp = db.createDataset( + "H5T_STD_I64LE", dims, max_shape=None, creation_props=None + ) + xscale_uuid = rsp["id"] + nullterm_string_type = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 16, + "strPad": "H5T_STR_NULLTERM", } scalar_dims = () db.createAttribute( - "datasets", xscale_uuid, "CLASS", scalar_dims, - nullterm_string_type, "DIMENSION_SCALE") - db.linkObject(root_uuid, xscale_uuid, 'xscale') - + "datasets", + xscale_uuid, + "CLASS", + scalar_dims, + nullterm_string_type, + "DIMENSION_SCALE", + ) + db.linkObject(root_uuid, xscale_uuid, "xscale") ref_dims = (1,) - datatype = {"class": "H5T_VLEN", - "base": { "class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"} + datatype = { + "class": "H5T_VLEN", + "base": {"class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"}, } xscale_ref = "datasets/" + xscale_uuid - value = [(xscale_ref,),] - db.createAttribute("datasets", dset_uuid, "DIMENSION_LIST", ref_dims, datatype, value) + value = [ + (xscale_ref,), + ] + db.createAttribute( + "datasets", dset_uuid, "DIMENSION_LIST", ref_dims, datatype, value + ) item = db.getAttributeItem("datasets", dset_uuid, "DIMENSION_LIST") - attr_type = 
item['type'] + attr_type = item["type"] self.assertEqual(attr_type["class"], "H5T_VLEN") base_type = attr_type["base"] # todo - this should be H5T_REFERENCE, not H5T_OPAQUE self.assertEqual(base_type["class"], "H5T_REFERENCE") - - def testReadCommittedType(self): - filepath = getFile('committed_type.h5', 'readcommitted_type.h5') + filepath = getFile("committed_type.h5", "readcommitted_type.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') - type_uuid = db.getUUIDByPath('/Sensor_Type') + root_uuid = db.getUUIDByPath("/") + type_uuid = db.getUUIDByPath("/Sensor_Type") item = db.getCommittedTypeItemByUuid(type_uuid) - self.assertTrue('type' in item) - item_type = item['type'] - self.assertTrue(item_type['class'], 'H5T_COMPOUND') - ds1_uuid = db.getUUIDByPath('/DS1') + self.assertTrue("type" in item) + item_type = item["type"] + self.assertTrue(item_type["class"], "H5T_COMPOUND") + ds1_uuid = db.getUUIDByPath("/DS1") item = db.getDatasetItemByUuid(ds1_uuid) - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SIMPLE') - dims = shape['dims'] + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SIMPLE") + dims = shape["dims"] self.assertEqual(len(dims), 1) self.assertEqual(dims[0], 4) - item_type = item['type'] - self.assertTrue('class' in item_type) - self.assertEqual(item_type['class'], 'H5T_COMPOUND') - self.assertTrue('uuid' in item_type) - self.assertEqual(item_type['uuid'], type_uuid) - - item = db.getAttributeItem("groups", root_uuid, "attr1") - shape = item['shape'] - self.assertEqual(shape['class'], 'H5S_SCALAR') - item_type = item['type'] - self.assertTrue('class' in item_type) - self.assertEqual(item_type['class'], 'H5T_COMPOUND') - self.assertTrue('uuid' in item_type) - self.assertEqual(item_type['uuid'], type_uuid) - + item_type = item["type"] + self.assertTrue("class" in item_type) + self.assertEqual(item_type["class"], "H5T_COMPOUND") + self.assertTrue("uuid" in item_type) + 
self.assertEqual(item_type["uuid"], type_uuid) + + item = db.getAttributeItem("groups", root_uuid, "attr1") + shape = item["shape"] + self.assertEqual(shape["class"], "H5S_SCALAR") + item_type = item["type"] + self.assertTrue("class" in item_type) + self.assertEqual(item_type["class"], "H5T_COMPOUND") + self.assertTrue("uuid" in item_type) + self.assertEqual(item_type["uuid"], type_uuid) def testWriteCommittedType(self): - filepath = getFile('empty.h5', 'writecommittedtype.h5') + filepath = getFile("empty.h5", "writecommittedtype.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') - datatype = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 15} + root_uuid = db.getUUIDByPath("/") + datatype = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": 15, + } item = db.createCommittedType(datatype) - type_uuid = item['id'] + type_uuid = item["id"] item = db.getCommittedTypeItemByUuid(type_uuid) - self.assertEqual(item['id'], type_uuid) - self.assertEqual(item['attributeCount'], 0) + self.assertEqual(item["id"], type_uuid) + self.assertEqual(item["attributeCount"], 0) now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) - self.assertEqual(len(item['alias']), 0) # anonymous, so no alias + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + self.assertEqual(len(item["alias"]), 0) # anonymous, so no alias - item_type = item['type'] + item_type = item["type"] - self.assertEqual(item_type['class'], 'H5T_STRING') - self.assertEqual(item_type['strPad'], 'H5T_STR_NULLPAD') - self.assertEqual(item_type['charSet'], 'H5T_CSET_ASCII') - self.assertEqual(item_type['length'], 15) + self.assertEqual(item_type["class"], "H5T_STRING") + self.assertEqual(item_type["strPad"], "H5T_STR_NULLPAD") + self.assertEqual(item_type["charSet"], "H5T_CSET_ASCII") + 
self.assertEqual(item_type["length"], 15) def testWriteCommittedCompoundType(self): - filepath = getFile('empty.h5', 'writecommittedcompoundtype.h5') + filepath = getFile("empty.h5", "writecommittedcompoundtype.h5") with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') - datatype = {'class': 'H5T_COMPOUND', - 'fields': [] } + root_uuid = db.getUUIDByPath("/") + datatype = {"class": "H5T_COMPOUND", "fields": []} - fixed_str_type = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 15} + fixed_str_type = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": 15, + } var_str_type = { - "charSet": "H5T_CSET_ASCII", - "class": "H5T_STRING", - "length": "H5T_VARIABLE", - "strPad": "H5T_STR_NULLTERM" } + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": "H5T_VARIABLE", + "strPad": "H5T_STR_NULLTERM", + } type_fields = [] - type_fields.append({'name': 'field_1', 'type': 'H5T_STD_I64BE' }) - type_fields.append({'name': 'field_2', 'type': 'H5T_IEEE_F64BE' }) - type_fields.append({'name': 'field_3', 'type': fixed_str_type }) - type_fields.append({'name': 'field_4', 'type': var_str_type }) - datatype['fields'] = type_fields - + type_fields.append({"name": "field_1", "type": "H5T_STD_I64BE"}) + type_fields.append({"name": "field_2", "type": "H5T_IEEE_F64BE"}) + type_fields.append({"name": "field_3", "type": fixed_str_type}) + type_fields.append({"name": "field_4", "type": var_str_type}) + datatype["fields"] = type_fields item = db.createCommittedType(datatype) - type_uuid = item['id'] + type_uuid = item["id"] item = db.getCommittedTypeItemByUuid(type_uuid) - self.assertEqual(item['id'], type_uuid) - self.assertEqual(item['attributeCount'], 0) + self.assertEqual(item["id"], type_uuid) + self.assertEqual(item["attributeCount"], 0) now = int(time.time()) - self.assertTrue(item['ctime'] > now - 5) - self.assertTrue(item['mtime'] > now - 5) 
- self.assertEqual(len(item['alias']), 0) # anonymous, so no alias + self.assertTrue(item["ctime"] > now - 5) + self.assertTrue(item["mtime"] > now - 5) + self.assertEqual(len(item["alias"]), 0) # anonymous, so no alias - item_type = item['type'] + item_type = item["type"] - self.assertEqual(item_type['class'], 'H5T_COMPOUND') - fields = item_type['fields'] + self.assertEqual(item_type["class"], "H5T_COMPOUND") + fields = item_type["fields"] self.assertEqual(len(fields), 4) # todo - the last field class should be H5T_STRING, but it is getting # saved to HDF5 as Opaque - see: https://github.com/h5py/h5py/issues/613 # this is fixed in h5py v. 2.6.0 - check the version until 2.6.0 becomes # available via pip and anaconda. import h5py - if h5py.version.version_tuple[1] >= 6: - field_classes = ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_STRING', 'H5T_STRING') + + if h5py.version.version_tuple >= (2, 6, 0): + field_classes = ("H5T_INTEGER", "H5T_FLOAT", "H5T_STRING", "H5T_STRING") else: - field_classes = ('H5T_INTEGER', 'H5T_FLOAT', 'H5T_STRING', 'H5T_OPAQUE') + field_classes = ("H5T_INTEGER", "H5T_FLOAT", "H5T_STRING", "H5T_OPAQUE") for i in range(4): field = fields[i] - self.assertEqual(field['name'], 'field_' + str(i+1)) - field_type = field['type'] - self.assertEqual(field_type['class'], field_classes[i]) - - + self.assertEqual(field["name"], "field_" + str(i + 1)) + field_type = field["type"] + self.assertEqual(field_type["class"], field_classes[i]) def testToRef(self): - filepath = getFile('empty.h5', 'toref.h5') + filepath = getFile("empty.h5", "toref.h5") with Hdf5db(filepath, app_logger=self.log) as db: - type_item = {'order': 'H5T_ORDER_LE', 'base_size': 1, 'class': 'H5T_INTEGER', 'base': 'H5T_STD_I8LE', 'size': 1} + type_item = { + "order": "H5T_ORDER_LE", + "base_size": 1, + "class": "H5T_INTEGER", + "base": "H5T_STD_I8LE", + "size": 1, + } data_list = [2, 3, 5, 7, 11] ref_value = db.toRef(1, type_item, data_list) self.assertEqual(ref_value, data_list) - type_item = { 
"charSet": "H5T_CSET_ASCII", - "class": "H5T_STRING", - "length": 8, - "strPad": "H5T_STR_NULLPAD" } - data_list = [ "Hypertext", "as", "engine", "of", "state" ] + type_item = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "length": 8, + "strPad": "H5T_STR_NULLPAD", + } + data_list = ["Hypertext", "as", "engine", "of", "state"] ref_value = db.toRef(1, type_item, data_list) - def testToTuple(self): - filepath = getFile('empty.h5', 'totuple.h5') - data1d = [1,2,3] - data2d = [[1,2],[3,4]] - data3d = [[[1,2],[3,4]], [[5,6],[7,8]]] + filepath = getFile("empty.h5", "totuple.h5") + data1d = [1, 2, 3] + data2d = [[1, 2], [3, 4]] + data3d = [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] with Hdf5db(filepath, app_logger=self.log) as db: - self.assertEqual(db.toTuple(1, data1d ), [1,2,3] ) - self.assertEqual(db.toTuple(2, data2d ), [[1,2],[3,4]] ) - self.assertEqual(db.toTuple(1, data2d ), [(1,2),(3,4)] ) - self.assertEqual(db.toTuple(3, data3d), [[[1,2],[3,4]], [[5,6],[7,8]]] ) - self.assertEqual(db.toTuple(2, data3d), [[(1,2),(3,4)], [(5,6),(7,8)]] ) - self.assertEqual(db.toTuple(1, data3d), [((1,2),(3,4)), ((5,6),(7,8))] ) - - + self.assertEqual(db.toTuple(1, data1d), [1, 2, 3]) + self.assertEqual(db.toTuple(2, data2d), [[1, 2], [3, 4]]) + self.assertEqual(db.toTuple(1, data2d), [(1, 2), (3, 4)]) + self.assertEqual( + db.toTuple(3, data3d), [[[1, 2], [3, 4]], [[5, 6], [7, 8]]] + ) + self.assertEqual( + db.toTuple(2, data3d), [[(1, 2), (3, 4)], [(5, 6), (7, 8)]] + ) + self.assertEqual( + db.toTuple(1, data3d), [((1, 2), (3, 4)), ((5, 6), (7, 8))] + ) + def testBytesArrayToList(self): - filepath = getFile('empty.h5', 'bytestostring.h5') + filepath = getFile("empty.h5", "bytestostring.h5") with Hdf5db(filepath, app_logger=self.log) as db: - - val = db.bytesArrayToList(b'Hello') + + val = db.bytesArrayToList(b"Hello") self.assertTrue(type(val) is str) - val = db.bytesArrayToList([b'Hello',]) + val = db.bytesArrayToList( + [ + b"Hello", + ] + ) + self.assertEqual(len(val), 
1) self.assertTrue(type(val[0]) is str) - + self.assertEqual(val[0], "Hello") + import numpy as np - - data = np.array([b'Hello']) + + data = np.array([b"Hello"]) val = db.bytesArrayToList(data) + self.assertEqual(len(val), 1) self.assertTrue(type(val[0]) is str) - - + self.assertEqual(val[0], "Hello") + def testGetDataValue(self): # typeItem, value, dimension=0, dims=None): - filepath = getFile('empty.h5', 'bytestostring.h5') - string_type = { 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM', - 'length': 'H5T_VARIABLE' } - + filepath = getFile("empty.h5", "bytestostring.h5") + string_type = { + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + "length": "H5T_VARIABLE", + } + with Hdf5db(filepath, app_logger=self.log) as db: - + import numpy as np - - data = np.array([b'Hello']) - val = db.getDataValue(string_type, data, dimension=1,dims=(1,)) + + data = np.array([b"Hello"]) + val = db.getDataValue(string_type, data, dimension=1, dims=(1,)) self.assertTrue(type(val[0]) is str) - def testGetAclDataset(self): - filepath = getFile('tall.h5', 'getacldataset.h5') + filepath = getFile("tall.h5", "getacldataset.h5") with Hdf5db(filepath, app_logger=self.log) as db: - d111_uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.1') + d111_uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.1") num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 0) acl_dset = db.getAclDataset(d111_uuid, create=True) @@ -1065,21 +1153,21 @@ def testGetAclDataset(self): self.assertEqual(num_acls, 0) def testSetAcl(self): - filepath = getFile('tall.h5', 'setacl.h5') + filepath = getFile("tall.h5", "setacl.h5") user1 = 123 user2 = 456 with Hdf5db(filepath, app_logger=self.log) as db: - d111_uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.1') + d111_uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.1") num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 0) # add read/write acl for user1 acl_user1 = db.getAcl(d111_uuid, user1) - 
self.assertEqual(acl_user1['userid'], 0) - acl_user1['userid'] = user1 - acl_user1['readACL'] = 0 - acl_user1['updateACL'] = 0 + self.assertEqual(acl_user1["userid"], 0) + acl_user1["userid"] = user1 + acl_user1["readACL"] = 0 + acl_user1["updateACL"] = 0 num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 0) @@ -1088,39 +1176,38 @@ def testSetAcl(self): num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 1) - # add read-only acl for user2 acl_user2 = db.getAcl(d111_uuid, user2) - self.assertEqual(acl_user2['userid'], 0) - acl_user2['userid'] = user2 - acl_user2['create'] = 0 - acl_user2['read'] = 1 - acl_user2['update'] = 0 - acl_user2['delete'] = 0 - acl_user2['readACL'] = 0 - acl_user2['updateACL'] = 0 + self.assertEqual(acl_user2["userid"], 0) + acl_user2["userid"] = user2 + acl_user2["create"] = 0 + acl_user2["read"] = 1 + acl_user2["update"] = 0 + acl_user2["delete"] = 0 + acl_user2["readACL"] = 0 + acl_user2["updateACL"] = 0 db.setAcl(d111_uuid, acl_user2) num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 2) # fetch and verify acls acl = db.getAcl(d111_uuid, user1) - self.assertEqual(acl['userid'], user1) - self.assertEqual(acl['create'], 1) - self.assertEqual(acl['read'], 1) - self.assertEqual(acl['update'], 1) - self.assertEqual(acl['delete'], 1) - self.assertEqual(acl['readACL'], 0) - self.assertEqual(acl['updateACL'], 0) + self.assertEqual(acl["userid"], user1) + self.assertEqual(acl["create"], 1) + self.assertEqual(acl["read"], 1) + self.assertEqual(acl["update"], 1) + self.assertEqual(acl["delete"], 1) + self.assertEqual(acl["readACL"], 0) + self.assertEqual(acl["updateACL"], 0) acl = db.getAcl(d111_uuid, user2) - self.assertEqual(acl['userid'], user2) - self.assertEqual(acl['create'], 0) - self.assertEqual(acl['read'], 1) - self.assertEqual(acl['update'], 0) - self.assertEqual(acl['delete'], 0) - self.assertEqual(acl['readACL'], 0) - self.assertEqual(acl['updateACL'], 0) + self.assertEqual(acl["userid"], user2) + 
self.assertEqual(acl["create"], 0) + self.assertEqual(acl["read"], 1) + self.assertEqual(acl["update"], 0) + self.assertEqual(acl["delete"], 0) + self.assertEqual(acl["readACL"], 0) + self.assertEqual(acl["updateACL"], 0) num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 2) @@ -1129,25 +1216,24 @@ def testSetAcl(self): acls = db.getAcls(d111_uuid) self.assertEqual(len(acls), 2) - def testRootAcl(self): - filepath = getFile('tall.h5', 'rootacl.h5') + filepath = getFile("tall.h5", "rootacl.h5") user1 = 123 with Hdf5db(filepath, app_logger=self.log) as db: - root_uuid = db.getUUIDByPath('/') - d111_uuid = db.getUUIDByPath('/g1/g1.1/dset1.1.1') + root_uuid = db.getUUIDByPath("/") + d111_uuid = db.getUUIDByPath("/g1/g1.1/dset1.1.1") num_acls = db.getNumAcls(d111_uuid) self.assertEqual(num_acls, 0) # add read/write acl for user1 at root acl_root = db.getAcl(root_uuid, 0) - self.assertEqual(acl_root['userid'], 0) - acl_root['create'] = 0 - acl_root['read'] = 1 - acl_root['update'] = 0 - acl_root['delete'] = 0 - acl_root['readACL'] = 0 - acl_root['updateACL'] = 0 + self.assertEqual(acl_root["userid"], 0) + acl_root["create"] = 0 + acl_root["read"] = 1 + acl_root["update"] = 0 + acl_root["delete"] = 0 + acl_root["readACL"] = 0 + acl_root["updateACL"] = 0 num_acls = db.getNumAcls(root_uuid) self.assertEqual(num_acls, 0) @@ -1158,68 +1244,71 @@ def testRootAcl(self): acl = db.getAcl(d111_uuid, user1) num_acls = db.getNumAcls(d111_uuid) # this will fetch the root acl self.assertEqual(num_acls, 0) - self.assertEqual(acl['userid'], 0) - self.assertEqual(acl['create'], 0) - self.assertEqual(acl['read'], 1) - self.assertEqual(acl['update'], 0) - self.assertEqual(acl['delete'], 0) - self.assertEqual(acl['readACL'], 0) - self.assertEqual(acl['updateACL'], 0) - + self.assertEqual(acl["userid"], 0) + self.assertEqual(acl["create"], 0) + self.assertEqual(acl["read"], 1) + self.assertEqual(acl["update"], 0) + self.assertEqual(acl["delete"], 0) + 
self.assertEqual(acl["readACL"], 0) + self.assertEqual(acl["updateACL"], 0) + def testGetEvalStr(self): - queries = { "date == 23": "rows['date'] == 23", - "wind == b'W 5'": "rows['wind'] == b'W 5'", - "temp > 61": "rows['temp'] > 61", - "(date >=22) & (date <= 24)": "(rows['date'] >=22) & (rows['date'] <= 24)", - "(date == 21) & (temp > 70)": "(rows['date'] == 21) & (rows['temp'] > 70)", - "(wind == b'E 7') | (wind == b'S 7')": "(rows['wind'] == b'E 7') | (rows['wind'] == b'S 7')" } - - fields = ["date", "wind", "temp" ] - filepath = getFile('empty.h5', 'getevalstring.h5') + queries = { + "date == 23": "rows['date'] == 23", + "wind == b'W 5'": "rows['wind'] == b'W 5'", + "temp > 61": "rows['temp'] > 61", + "(date >=22) & (date <= 24)": "(rows['date'] >=22) & (rows['date'] <= 24)", + "(date == 21) & (temp > 70)": "(rows['date'] == 21) & (rows['temp'] > 70)", + "(wind == b'E 7') | (wind == b'S 7')": "(rows['wind'] == b'E 7') | (rows['wind'] == b'S 7')", + } + + fields = ["date", "wind", "temp"] + filepath = getFile("empty.h5", "getevalstring.h5") with Hdf5db(filepath, app_logger=self.log) as db: - + for query in queries.keys(): eval_str = db._getEvalStr(query, fields) self.assertEqual(eval_str, queries[query]) - #print(query, "->", eval_str) - + # print(query, "->", eval_str) + def testBadQuery(self): - queries = ( "foobar", # no variable used - "wind = b'abc", # non-closed literal - "(wind = b'N') & (temp = 32", # missing paren - "foobar > 42", # invalid field name - "import subprocess; subprocess.call(['ls', '/'])") # injection attack - - fields = ("date", "wind", "temp" ) - filepath = getFile('empty.h5', 'badquery.h5') + queries = ( + "foobar", # no variable used + "wind = b'abc", # non-closed literal + "(wind = b'N') & (temp = 32", # missing paren + "foobar > 42", # invalid field name + "import subprocess; subprocess.call(['ls', '/'])", + ) # injection attack + + fields = ("date", "wind", "temp") + filepath = getFile("empty.h5", "badquery.h5") with 
Hdf5db(filepath, app_logger=self.log) as db: - + for query in queries: try: eval_str = db._getEvalStr(query, fields) self.assertTrue(False) # shouldn't get here except IOError as e: pass # ok - + def testInjectionBlock(self): - queries = ( - "import subprocess; subprocess.call(['ls', '/'])", ) # injection attack - - fields = ("import", "subprocess", "call" ) - filepath = getFile('empty.h5', 'injectionblock.h5') + queries = ( + "import subprocess; subprocess.call(['ls', '/'])", + ) # injection attack + + fields = ("import", "subprocess", "call") + filepath = getFile("empty.h5", "injectionblock.h5") with Hdf5db(filepath, app_logger=self.log) as db: - + for query in queries: try: eval_str = db._getEvalStr(query, fields) self.assertTrue(False) # shouldn't get here - except IOError as e: + except IOError: pass # ok - - -if __name__ == '__main__': - #setup test files +if __name__ == "__main__": + # setup test files unittest.main() diff --git a/test/unit/hdf5dtypeTest.py b/test/unit/hdf5dtypeTest.py index 814b8e3..0f67d7b 100755 --- a/test/unit/hdf5dtypeTest.py +++ b/test/unit/hdf5dtypeTest.py @@ -12,12 +12,10 @@ import unittest import logging import numpy as np -import sys from h5py import special_dtype from h5py import check_dtype -import six - - +from h5py import Reference +from h5py import RegionReference from h5json import hdf5dtype @@ -29,450 +27,593 @@ def __init__(self, *args, **kwargs): self.logger.setLevel(logging.INFO) def testBaseIntegerTypeItem(self): - dt = np.dtype('') - self.assertEqual(dt.kind, 'u') + dt = hdf5dtype.createDataType("H5T_STD_U32BE") + self.assertEqual(dt.name, "uint32") + self.assertEqual(dt.byteorder, ">") + self.assertEqual(dt.kind, "u") - dt = hdf5dtype.createDataType('H5T_STD_I16LE') - self.assertEqual(dt.name, 'int16') - self.assertEqual(dt.kind, 'i') + dt = hdf5dtype.createDataType("H5T_STD_I16LE") + self.assertEqual(dt.name, "int16") + self.assertEqual(dt.kind, "i") - dt = hdf5dtype.createDataType('H5T_IEEE_F64LE') - 
self.assertEqual(dt.name, 'float64') - self.assertEqual(dt.kind, 'f') + dt = hdf5dtype.createDataType("H5T_IEEE_F64LE") + self.assertEqual(dt.name, "float64") + self.assertEqual(dt.kind, "f") - dt = hdf5dtype.createDataType('H5T_IEEE_F32LE') - self.assertEqual(dt.name, 'float32') - self.assertEqual(dt.kind, 'f') + dt = hdf5dtype.createDataType("H5T_IEEE_F32LE") + self.assertEqual(dt.name, "float32") + self.assertEqual(dt.kind, "f") - typeItem = { 'class': 'H5T_INTEGER', 'base': 'H5T_STD_I32BE' } + typeItem = {"class": "H5T_INTEGER", "base": "H5T_STD_I32BE"} typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'int32') - self.assertEqual(dt.kind, 'i') + self.assertEqual(dt.name, "int32") + self.assertEqual(dt.kind, "i") self.assertEqual(typeSize, 4) def testCreateBaseStringType(self): - typeItem = { 'class': 'H5T_STRING', 'charSet': 'H5T_CSET_ASCII', 'length': 6 } + typeItem = {"class": "H5T_STRING", "charSet": "H5T_CSET_ASCII", "length": 6} typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - if six.PY3: - self.assertEqual(dt.name, 'bytes48') - else: - self.assertEqual(dt.name, 'string48') - self.assertEqual(dt.kind, 'S') + self.assertEqual(dt.name, "bytes48") + self.assertEqual(dt.kind, "S") self.assertEqual(typeSize, 6) def testCreateBaseUnicodeType(self): - typeItem = { 'class': 'H5T_STRING', 'charSet': 'H5T_CSET_UTF8', 'length': 32 } + typeItem = {"class": "H5T_STRING", "charSet": "H5T_CSET_UTF8", "length": 32} try: - dt = hdf5dtype.createDataType(typeItem) + # dt = hdf5dtype.createDataType(typeItem) + hdf5dtype.createDataType(typeItem) self.assertTrue(False) # expected exception except TypeError: pass def testCreateNullTermStringType(self): - typeItem = { 'class': 'H5T_STRING', 'charSet': 'H5T_CSET_ASCII', - 'length': 6, 'strPad': 'H5T_STR_NULLTERM'} + typeItem = { + "class": "H5T_STRING", + "charSet": "H5T_CSET_ASCII", + "length": 6, + "strPad": "H5T_STR_NULLTERM", + } 
typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - if six.PY3: - self.assertEqual(dt.name, 'bytes48') - else: - self.assertEqual(dt.name, 'string48') - self.assertEqual(dt.kind, 'S') + self.assertEqual(dt.name, "bytes48") + self.assertEqual(dt.kind, "S") self.assertEqual(typeSize, 6) - def testCreateVLenStringType(self): - typeItem = { 'class': 'H5T_STRING', 'charSet': 'H5T_CSET_ASCII', 'length': 'H5T_VARIABLE' } + typeItem = { + "class": "H5T_STRING", + "charSet": "H5T_CSET_ASCII", + "length": "H5T_VARIABLE", + } typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'object') - self.assertEqual(dt.kind, 'O') + self.assertEqual(dt.name, "object") + self.assertEqual(dt.kind, "O") self.assertEqual(check_dtype(vlen=dt), bytes) - self.assertEqual(typeSize, 'H5T_VARIABLE') - - + self.assertEqual(typeSize, "H5T_VARIABLE") def testCreateVLenUTF8Type(self): - typeItem = { 'class': 'H5T_STRING', 'charSet': 'H5T_CSET_UTF8', 'length': 'H5T_VARIABLE' } + typeItem = { + "class": "H5T_STRING", + "charSet": "H5T_CSET_UTF8", + "length": "H5T_VARIABLE", + } typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'object') - self.assertEqual(dt.kind, 'O') - self.assertEqual(check_dtype(vlen=dt), six.text_type) - self.assertEqual(typeSize, 'H5T_VARIABLE') + self.assertEqual(dt.name, "object") + self.assertEqual(dt.kind, "O") + self.assertEqual(check_dtype(vlen=dt), str) + self.assertEqual(typeSize, "H5T_VARIABLE") def testCreateVLenDataType(self): - typeItem = {'class': 'H5T_VLEN', 'base': 'H5T_STD_I32BE'} + typeItem = {"class": "H5T_VLEN", "base": "H5T_STD_I32BE"} typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'object') - self.assertEqual(dt.kind, 'O') - self.assertEqual(typeSize, 'H5T_VARIABLE') + self.assertEqual(dt.name, "object") + self.assertEqual(dt.kind, "O") + 
self.assertEqual(typeSize, "H5T_VARIABLE") def testCreateOpaqueType(self): - typeItem = {'class': 'H5T_OPAQUE', 'size': 200} + typeItem = {"class": "H5T_OPAQUE", "size": 200} typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'void1600') - self.assertEqual(dt.kind, 'V') + self.assertEqual(dt.name, "void1600") + self.assertEqual(dt.kind, "V") self.assertEqual(typeSize, 200) + def testCreateEnumType(self): + typeItem = { + "class": "H5T_ENUM", + "base": {"base": "H5T_STD_I16LE", "class": "H5T_INTEGER"}, + "members": [ + {"name": "GAS", "value": 2}, + {"name": "LIQUID", "value": 1}, + {"name": "PLASMA", "value": 3}, + {"name": "SOLID", "value": 0}, + ], + } + + typeSize = hdf5dtype.getItemSize(typeItem) + self.assertEqual(typeSize, 2) + dt = hdf5dtype.createDataType(typeItem) + self.assertEqual(dt.name, "int16") + self.assertEqual(dt.kind, "i") + mapping = check_dtype(enum=dt) + self.assertTrue(isinstance(mapping, dict)) + self.assertEqual(mapping["SOLID"], 0) + self.assertEqual(mapping["LIQUID"], 1) + self.assertEqual(mapping["GAS"], 2) + self.assertEqual(mapping["PLASMA"], 3) + + def testCreateBoolType(self): + typeItem = { + "class": "H5T_ENUM", + "base": {"base": "H5T_STD_I8LE", "class": "H5T_INTEGER"}, + "members": [{"name": "TRUE", "value": 1}, {"name": "FALSE", "value": 0}], + } + + typeSize = hdf5dtype.getItemSize(typeItem) + self.assertEqual(typeSize, 1) + dt = hdf5dtype.createDataType(typeItem) + self.assertEqual(dt.name, "bool") + self.assertEqual(dt.kind, "b") + def testCreateCompoundType(self): typeItem = { - 'class': 'H5T_COMPOUND', 'fields': - [{'name': 'temp', 'type': 'H5T_IEEE_F32LE'}, - {'name': 'pressure', 'type': 'H5T_IEEE_F32LE'}, - {'name': 'location', 'type': { - 'length': 'H5T_VARIABLE', - 'charSet': 'H5T_CSET_ASCII', - 'class': 'H5T_STRING', - 'strPad': 'H5T_STR_NULLTERM'}}, - {'name': 'wind', 'type': 'H5T_STD_I16LE'}] + "class": "H5T_COMPOUND", + "fields": [ + {"name": "temp", 
"type": "H5T_IEEE_F32LE"}, + {"name": "pressure", "type": "H5T_IEEE_F32LE"}, + { + "name": "location", + "type": { + "length": "H5T_VARIABLE", + "charSet": "H5T_CSET_ASCII", + "class": "H5T_STRING", + "strPad": "H5T_STR_NULLTERM", + }, + }, + {"name": "wind", "type": "H5T_STD_I16LE"}, + ], } typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'void144') - self.assertEqual(dt.kind, 'V') + self.assertEqual(dt.name, "void144") + self.assertEqual(dt.kind, "V") self.assertEqual(len(dt.fields), 4) dtLocation = dt[2] - self.assertEqual(dtLocation.name, 'object') - self.assertEqual(dtLocation.kind, 'O') + self.assertEqual(dtLocation.name, "object") + self.assertEqual(dtLocation.kind, "O") self.assertEqual(check_dtype(vlen=dtLocation), bytes) - self.assertEqual(typeSize, 'H5T_VARIABLE') + self.assertEqual(typeSize, "H5T_VARIABLE") def testCreateCompoundOfCompoundType(self): - typeItem = {'class': 'H5T_COMPOUND', 'fields': - [{'name': 'field1', 'type': {'class': 'H5T_COMPOUND', 'fields': - [{'name': 'x', 'type': {'class': 'H5T_FLOAT', 'base': 'H5T_IEEE_F32LE'}}, - {'name': 'y', 'type': {'class': 'H5T_FLOAT', 'base': 'H5T_IEEE_F32LE'}}]}}, - {'name': 'field2', 'type': {'class': 'H5T_COMPOUND', 'fields': - [{'name': 'a', 'type': {'class': 'H5T_FLOAT', 'base': 'H5T_IEEE_F32LE'}}, - {'name': 'b', 'type': {'class': 'H5T_FLOAT', 'base': 'H5T_IEEE_F32LE'}}, - {'name': 'c', 'type': {'class': 'H5T_FLOAT', 'base': 'H5T_IEEE_F32LE'}}]}}]} - dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'void160') - self.assertEqual(dt.kind, 'V') + typeItem = { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "field1", + "type": { + "class": "H5T_COMPOUND", + "fields": [ + { + "name": "x", + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE", + }, + }, + { + "name": "y", + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE", + }, + }, + ], + }, + }, + { + "name": "field2", + "type": { + "class": 
"H5T_COMPOUND", + "fields": [ + { + "name": "a", + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE", + }, + }, + { + "name": "b", + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE", + }, + }, + { + "name": "c", + "type": { + "class": "H5T_FLOAT", + "base": "H5T_IEEE_F32LE", + }, + }, + ], + }, + }, + ], + } + dt = hdf5dtype.createDataType(typeItem) + self.assertEqual(dt.name, "void160") + self.assertEqual(dt.kind, "V") self.assertEqual(len(dt.fields), 2) dt_field1 = dt[0] - self.assertEqual(dt_field1.name, 'void64') - self.assertEqual(dt_field1.kind, 'V') + self.assertEqual(dt_field1.name, "void64") + self.assertEqual(dt_field1.kind, "V") self.assertEqual(len(dt_field1.fields), 2) dt_field2 = dt[1] - self.assertEqual(dt_field2.name, 'void96') - self.assertEqual(dt_field2.kind, 'V') + self.assertEqual(dt_field2.name, "void96") + self.assertEqual(dt_field2.kind, "V") self.assertEqual(len(dt_field2.fields), 3) - def testCreateCompoundTypeUnicodeFields(self): typeItem = { - 'class': 'H5T_COMPOUND', 'fields': - [{'name': u'temp', 'type': 'H5T_IEEE_F32LE'}, - {'name': u'pressure', 'type': 'H5T_IEEE_F32LE'}, - {'name': u'wind', 'type': 'H5T_STD_I16LE'}] + "class": "H5T_COMPOUND", + "fields": [ + {"name": u"temp", "type": "H5T_IEEE_F32LE"}, + {"name": u"pressure", "type": "H5T_IEEE_F32LE"}, + {"name": u"wind", "type": "H5T_STD_I16LE"}, + ], } typeSize = hdf5dtype.getItemSize(typeItem) - dt = hdf5dtype.createDataType(typeItem) - self.assertEqual(dt.name, 'void80') - self.assertEqual(dt.kind, 'V') + dt = hdf5dtype.createDataType(typeItem) + self.assertEqual(dt.name, "void80") + self.assertEqual(dt.kind, "V") self.assertEqual(len(dt.fields), 3) self.assertEqual(typeSize, 10) def testCreateArrayType(self): - typeItem = {'class': 'H5T_ARRAY', - 'base': 'H5T_STD_I64LE', - 'dims': (3, 5) } + typeItem = {"class": "H5T_ARRAY", "base": "H5T_STD_I64LE", "dims": (3, 5)} typeSize = hdf5dtype.getItemSize(typeItem) dt = hdf5dtype.createDataType(typeItem) - 
self.assertEqual(dt.name, 'void960') - self.assertEqual(dt.kind, 'V') + self.assertEqual(dt.name, "void960") + self.assertEqual(dt.kind, "V") self.assertEqual(typeSize, 120) def testCreateArrayIntegerType(self): - typeItem = {'class': 'H5T_INTEGER', - 'base': 'H5T_STD_I64LE', - 'dims': (3, 5) } - + typeItem = {"class": "H5T_INTEGER", "base": "H5T_STD_I64LE", "dims": (3, 5)} + try: hdf5dtype.createDataType(typeItem) - self.assertTrue(False) # expected exception - dims used with none array type + self.assertTrue( + False + ) # expected exception - dims used with none array type except TypeError: - pass # should get exception - + pass # should get exception + + def testCreateVlenObjRefType(self): + typeItem = { + "class": "H5T_VLEN", + "base": {"class": "H5T_REFERENCE", "base": "H5T_STD_REF_OBJ"}, + } + dt = hdf5dtype.createDataType(typeItem) + self.assertEqual(dt.name, "object") + self.assertEqual(dt.kind, "O") + self.assertTrue(check_dtype(ref=dt) is None) + dt_base = check_dtype(vlen=dt) + self.assertTrue(dt_base is not None) + self.assertTrue(check_dtype(ref=dt_base) is Reference) def testCreateCompoundArrayType(self): typeItem = { "class": "H5T_COMPOUND", "fields": [ + {"type": {"base": "H5T_STD_I8LE", "class": "H5T_INTEGER"}, "name": "a"}, { "type": { - "base": "H5T_STD_I8LE", - "class": "H5T_INTEGER" - }, - "name": "a" - }, - { - "type": { - "dims": [ - 10 - ], + "dims": [10], "base": { "length": 1, "charSet": "H5T_CSET_ASCII", "class": "H5T_STRING", - "strPad": "H5T_STR_NULLPAD" + "strPad": "H5T_STR_NULLPAD", }, - "class": "H5T_ARRAY" + "class": "H5T_ARRAY", }, - "name": "b" - } - ] + "name": "b", + }, + ], } typeSize = hdf5dtype.getItemSize(typeItem) - dt = hdf5dtype.createDataType(typeItem) + dt = hdf5dtype.createDataType(typeItem) self.assertEqual(len(dt.fields), 2) - self.assertTrue('a' in dt.fields.keys()) - self.assertTrue('b' in dt.fields.keys()) + self.assertTrue("a" in dt.fields.keys()) + self.assertTrue("b" in dt.fields.keys()) 
self.assertEqual(typeSize, 11) - -if __name__ == '__main__': - #setup test files +if __name__ == "__main__": + # setup test files unittest.main() diff --git a/testall.py b/testall.py index 3525dd9..881ecd2 100755 --- a/testall.py +++ b/testall.py @@ -12,41 +12,42 @@ import os import sys +import shutil import h5py -unit_tests = ( 'hdf5dtypeTest', 'hdf5dbTest' ) -integ_tests = ( 'h5tojson_test', 'jsontoh5_test' ) -print(h5py.version.info) +unit_tests = ("hdf5dtypeTest", "hdf5dbTest") +integ_tests = ("h5tojson_test", "jsontoh5_test") + # verify the hdf5 lib version is recent -hdf5_version = h5py.version.hdf5_version_tuple -if hdf5_version[1] < 8: - sys.exit("Need hdf5 lib 1.8 or later") -if hdf5_version[1] == 8 and hdf5_version[2] < 4: - sys.exit("Need hdf5 lib 1.8.4 or later") +if h5py.version.hdf5_version_tuple < (1, 10, 4): + print(h5py.version.info) + sys.exit("Need HDF5 library 1.10.4 or later") + # verify we have a recent version of h5py +if h5py.version.version_tuple < (3, 0, 0): + print(h5py.version.info) + sys.exit("Need h5py version 3.0 or later") -h5py_version = h5py.version.version_tuple -if h5py_version[0] != 2 or h5py_version[1] < 5: - sys.exit("Need h5py version 2.5 or later") - -# -# # Run all hdf5-json tests # Run this script before running any integ tests -# -os.chdir('test/unit') for file_name in unit_tests: print(file_name) - rc = os.system('python ' + file_name + '.py') + rc = os.system("python test/unit/" + file_name + ".py") if rc != 0: - sys.exit("Failed") + sys.exit("FAILED") +shutil.rmtree("./out", ignore_errors=True) +os.remove("hdf5dbtest.log") - -os.chdir('../integ') +os.chdir("test/integ") for file_name in integ_tests: print(file_name) - rc = os.system('python ' + file_name + '.py') + rc = os.system("python " + file_name + ".py") if rc != 0: - sys.exit("failed") -os.chdir('..') -print("Done!") + sys.exit("FAILED") +shutil.rmtree("./h5_out", ignore_errors=True) +shutil.rmtree("./json_out", ignore_errors=True) +os.remove("h5tojson.log") 
+os.remove("jsontoh5.log") + +os.chdir("..") +print("Testing suite: Success!") diff --git a/versioneer.py b/versioneer.py new file mode 100644 index 0000000..e283ecb --- /dev/null +++ b/versioneer.py @@ -0,0 +1,2123 @@ +# Version: 0.20 + +"""The Versioneer - like a rocketeer, but for versions. + +The Versioneer +============== + +* like a rocketeer, but for versions! +* https://github.com/python-versioneer/python-versioneer +* Brian Warner +* License: Public Domain +* Compatible with: Python 3.6, 3.7, 3.8, 3.9 and pypy3 +* [![Latest Version][pypi-image]][pypi-url] +* [![Build Status][travis-image]][travis-url] + +This is a tool for managing a recorded version number in distutils-based +python projects. The goal is to remove the tedious and error-prone "update +the embedded version string" step from your release process. Making a new +release should be as easy as recording a new tag in your version-control +system, and maybe making new tarballs. + + +## Quick Install + +* `pip install versioneer` to somewhere in your $PATH +* add a `[versioneer]` section to your setup.cfg (see [Install](INSTALL.md)) +* run `versioneer install` in your source tree, commit the results +* Verify version information with `python setup.py version` + +## Version Identifiers + +Source trees come from a variety of places: + +* a version-control system checkout (mostly used by developers) +* a nightly tarball, produced by build automation +* a snapshot tarball, produced by a web-based VCS browser, like github's + "tarball from tag" feature +* a release tarball, produced by "setup.py sdist", distributed through PyPI + +Within each source tree, the version identifier (either a string or a number, +this tool is format-agnostic) can come from a variety of places: + +* ask the VCS tool itself, e.g. 
"git describe" (for checkouts), which knows + about recent "tags" and an absolute revision-id +* the name of the directory into which the tarball was unpacked +* an expanded VCS keyword ($Id$, etc) +* a `_version.py` created by some earlier build step + +For released software, the version identifier is closely related to a VCS +tag. Some projects use tag names that include more than just the version +string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool +needs to strip the tag prefix to extract the version identifier. For +unreleased software (between tags), the version identifier should provide +enough information to help developers recreate the same tree, while also +giving them an idea of roughly how old the tree is (after version 1.2, before +version 1.3). Many VCS systems can report a description that captures this, +for example `git describe --tags --dirty --always` reports things like +"0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the +0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has +uncommitted changes). + +The version identifier is used for multiple purposes: + +* to allow the module to self-identify its version: `myproject.__version__` +* to choose a name and prefix for a 'setup.py sdist' tarball + +## Theory of Operation + +Versioneer works by adding a special `_version.py` file into your source +tree, where your `__init__.py` can import it. This `_version.py` knows how to +dynamically ask the VCS tool for version information at import time. + +`_version.py` also contains `$Revision$` markers, and the installation +process marks `_version.py` to have this marker rewritten with a tag name +during the `git archive` command. As a result, generated tarballs will +contain enough information to get the proper version. 
+ +To allow `setup.py` to compute a version too, a `versioneer.py` is added to +the top level of your source tree, next to `setup.py` and the `setup.cfg` +that configures it. This overrides several distutils/setuptools commands to +compute the version when invoked, and changes `setup.py build` and `setup.py +sdist` to replace `_version.py` with a small static file that contains just +the generated version data. + +## Installation + +See [INSTALL.md](./INSTALL.md) for detailed installation instructions. + +## Version-String Flavors + +Code which uses Versioneer can learn about its version string at runtime by +importing `_version` from your main `__init__.py` file and running the +`get_versions()` function. From the "outside" (e.g. in `setup.py`), you can +import the top-level `versioneer.py` and run `get_versions()`. + +Both functions return a dictionary with different flavors of version +information: + +* `['version']`: A condensed version string, rendered using the selected + style. This is the most commonly used value for the project's version + string. The default "pep440" style yields strings like `0.11`, + `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section + below for alternative styles. + +* `['full-revisionid']`: detailed revision identifier. For Git, this is the + full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". + +* `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the + commit date in ISO 8601 format. This will be None if the date is not + available. + +* `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that + this is only accurate if run in a VCS checkout, otherwise it is likely to + be False or None + +* `['error']`: if the version string could not be computed, this will be set + to a string describing the problem, otherwise it will be None. It may be + useful to throw an exception in setup.py if this is set, to avoid e.g. 
+ creating tarballs with a version string of "unknown". + +Some variants are more useful than others. Including `full-revisionid` in a +bug report should allow developers to reconstruct the exact code being tested +(or indicate the presence of local changes that should be shared with the +developers). `version` is suitable for display in an "about" box or a CLI +`--version` output: it can be easily compared against release notes and lists +of bugs fixed in various releases. + +The installer adds the following text to your `__init__.py` to place a basic +version in `YOURPROJECT.__version__`: + + from ._version import get_versions + __version__ = get_versions()['version'] + del get_versions + +## Styles + +The setup.cfg `style=` configuration controls how the VCS information is +rendered into a version string. + +The default style, "pep440", produces a PEP440-compliant string, equal to the +un-prefixed tag name for actual releases, and containing an additional "local +version" section with more detail for in-between builds. For Git, this is +TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags +--dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the +tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and +that this commit is two revisions ("+2") beyond the "0.11" tag. For released +software (exactly equal to a known tag), the identifier will only contain the +stripped tag, e.g. "0.11". + +Other styles are available. See [details.md](details.md) in the Versioneer +source tree for descriptions. + +## Debugging + +Versioneer tries to avoid fatal errors: if something goes wrong, it will tend +to return a version of "0+unknown". To investigate the problem, run `setup.py +version`, which will run the version-lookup code in a verbose mode, and will +display the full contents of `get_versions()` (including the `error` string, +which may help identify what went wrong). 
+
+## Known Limitations
+
+Some situations are known to cause problems for Versioneer. This details the
+most significant ones. More can be found on Github
+[issues page](https://github.com/python-versioneer/python-versioneer/issues).
+
+### Subprojects
+
+Versioneer has limited support for source trees in which `setup.py` is not in
+the root directory (e.g. `setup.py` and `.git/` are *not* siblings). There are
+two common reasons why `setup.py` might not be in the root:
+
+* Source trees which contain multiple subprojects, such as
+  [Buildbot](https://github.com/buildbot/buildbot), which contains both
+  "master" and "slave" subprojects, each with their own `setup.py`,
+  `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI
+  distributions (and upload multiple independently-installable tarballs).
+* Source trees whose main purpose is to contain a C library, but which also
+  provide bindings to Python (and perhaps other languages) in subdirectories.
+
+Versioneer will look for `.git` in parent directories, and most operations
+should get the right version string. However `pip` and `setuptools` have bugs
+and implementation details which frequently cause `pip install .` from a
+subproject directory to fail to find a correct version string (so it usually
+defaults to `0+unknown`).
+
+`pip install --editable .` should work correctly. `setup.py install` might
+work too.
+
+Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in
+some later version.
+
+[Bug #38](https://github.com/python-versioneer/python-versioneer/issues/38) is tracking
+this issue. The discussion in
+[PR #61](https://github.com/python-versioneer/python-versioneer/pull/61) describes the
+issue from the Versioneer side in more detail.
+[pip PR#3176](https://github.com/pypa/pip/pull/3176) and
+[pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve
+pip to let Versioneer work correctly.
+ +Versioneer-0.16 and earlier only looked for a `.git` directory next to the +`setup.cfg`, so subprojects were completely unsupported with those releases. + +### Editable installs with setuptools <= 18.5 + +`setup.py develop` and `pip install --editable .` allow you to install a +project into a virtualenv once, then continue editing the source code (and +test) without re-installing after every change. + +"Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a +convenient way to specify executable scripts that should be installed along +with the python package. + +These both work as expected when using modern setuptools. When using +setuptools-18.5 or earlier, however, certain operations will cause +`pkg_resources.DistributionNotFound` errors when running the entrypoint +script, which must be resolved by re-installing the package. This happens +when the install happens with one version, then the egg_info data is +regenerated while a different version is checked out. Many setup.py commands +cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into +a different virtualenv), so this can be surprising. + +[Bug #83](https://github.com/python-versioneer/python-versioneer/issues/83) describes +this one, but upgrading to a newer version of setuptools should probably +resolve it. + + +## Updating Versioneer + +To upgrade your project to a new release of Versioneer, do the following: + +* install the new Versioneer (`pip install -U versioneer` or equivalent) +* edit `setup.cfg`, if necessary, to include any new configuration settings + indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. +* re-run `versioneer install` in your source tree, to replace + `SRC/_version.py` +* commit any changed files + +## Future Directions + +This tool is designed to make it easily extended to other version-control +systems: all VCS-specific components are in separate directories like +src/git/ . 
The top-level `versioneer.py` script is assembled from these +components by running make-versioneer.py . In the future, make-versioneer.py +will take a VCS name as an argument, and will construct a version of +`versioneer.py` that is specific to the given VCS. It might also take the +configuration arguments that are currently provided manually during +installation by editing setup.py . Alternatively, it might go the other +direction and include code from all supported VCS systems, reducing the +number of intermediate scripts. + +## Similar projects + +* [setuptools_scm](https://github.com/pypa/setuptools_scm/) - a non-vendored build-time + dependency +* [minver](https://github.com/jbweston/miniver) - a lightweight reimplementation of + versioneer +* [versioningit](https://github.com/jwodder/versioningit) - a PEP 518-based setuptools + plugin + +## License + +To make Versioneer easier to embed, all its code is dedicated to the public +domain. The `_version.py` that it creates is also in the public domain. +Specifically, both are released under the Creative Commons "Public Domain +Dedication" license (CC0-1.0), as described in +https://creativecommons.org/publicdomain/zero/1.0/ . + +[pypi-image]: https://img.shields.io/pypi/v/versioneer.svg +[pypi-url]: https://pypi.python.org/pypi/versioneer/ +[travis-image]: +https://img.shields.io/travis/com/python-versioneer/python-versioneer.svg +[travis-url]: https://travis-ci.com/github/python-versioneer/python-versioneer + +""" + +import configparser +import errno +import json +import os +import re +import subprocess +import sys + + +class VersioneerConfig: # pylint: disable=too-few-public-methods # noqa + """Container for Versioneer configuration parameters.""" + + +def get_root(): + """Get the project root directory. + + We require that all commands are run from the project root, i.e. the + directory that contains setup.py, setup.cfg, and versioneer.py . 
+ """ + root = os.path.realpath(os.path.abspath(os.getcwd())) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + # allow 'python path/to/setup.py COMMAND' + root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) + setup_py = os.path.join(root, "setup.py") + versioneer_py = os.path.join(root, "versioneer.py") + if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): + err = ( + "Versioneer was unable to run the project root directory. " + "Versioneer requires setup.py to be executed from " + "its immediate directory (like 'python setup.py COMMAND'), " + "or in a way that lets it use sys.argv[0] to find the root " + "(like 'python path/to/setup.py COMMAND')." + ) + raise VersioneerBadRootError(err) + try: + # Certain runtime workflows (setup.py install/develop in a setuptools + # tree) execute all dependencies in a single python process, so + # "versioneer" may be imported multiple times, and python's shared + # module-import table will cache the first one. So we can't use + # os.path.dirname(__file__), as that will find whichever + # versioneer.py was first imported, even in later projects. + my_path = os.path.realpath(os.path.abspath(__file__)) + me_dir = os.path.normcase(os.path.splitext(my_path)[0]) + vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) + if me_dir != vsr_dir: + print( + "Warning: build in %s is using versioneer.py from %s" + % (os.path.dirname(my_path), versioneer_py) + ) + except NameError: + pass + return root + + +def get_config_from_root(root): + """Read the project setup.cfg file to determine Versioneer config.""" + # This might raise EnvironmentError (if setup.cfg is missing), or + # configparser.NoSectionError (if it lacks a [versioneer] section), or + # configparser.NoOptionError (if it lacks "VCS="). 
See the docstring at + # the top of versioneer.py for instructions on writing your setup.cfg . + setup_cfg = os.path.join(root, "setup.cfg") + parser = configparser.ConfigParser() + with open(setup_cfg, "r") as cfg_file: + parser.read_file(cfg_file) + VCS = parser.get("versioneer", "VCS") # mandatory + + # Dict-like interface for non-mandatory entries + section = parser["versioneer"] + + # pylint:disable=attribute-defined-outside-init # noqa + cfg = VersioneerConfig() + cfg.VCS = VCS + cfg.style = section.get("style", "") + cfg.versionfile_source = section.get("versionfile_source") + cfg.versionfile_build = section.get("versionfile_build") + cfg.tag_prefix = section.get("tag_prefix") + if cfg.tag_prefix in ("''", '""'): + cfg.tag_prefix = "" + cfg.parentdir_prefix = section.get("parentdir_prefix") + cfg.verbose = section.get("verbose") + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +# these dictionaries contain VCS-specific tools +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + HANDLERS.setdefault(vcs, {})[method] = f + return f + + return decorate + + +# pylint:disable=too-many-arguments,consider-using-with # noqa +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen( + [command] + args, + cwd=cwd, + env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr else None), + ) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %s" % 
dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %s" % (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %s (error)" % dispcmd) + print("stdout was %s" % stdout) + return None, process.returncode + return stdout, process.returncode + + +LONG_VERSION_PY[ + "git" +] = r''' +# This file helps to compute a version number in source trees obtained from +# git-archive tarball (such as those provided by githubs download-from-tag +# feature). Distribution tarballs (built by setup.py sdist) and build +# directories (produced by setup.py build) will contain a much shorter file +# that just contains the computed version number. + +# This file is released into the public domain. Generated by +# versioneer-0.20 (https://github.com/python-versioneer/python-versioneer) + +"""Git implementation of _version.py.""" + +import errno +import os +import re +import subprocess +import sys + + +def get_keywords(): + """Get the keywords needed to look up the version information.""" + # these strings will be replaced by git during git-archive. + # setup.py/versioneer.py will grep for the variable names, so they must + # each be defined on a line of their own. _version.py will just call + # get_keywords(). 
+ git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" + git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" + git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" + keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} + return keywords + + +class VersioneerConfig: # pylint: disable=too-few-public-methods + """Container for Versioneer configuration parameters.""" + + +def get_config(): + """Create, populate and return the VersioneerConfig() object.""" + # these strings are filled in when 'setup.py versioneer' creates + # _version.py + cfg = VersioneerConfig() + cfg.VCS = "git" + cfg.style = "%(STYLE)s" + cfg.tag_prefix = "%(TAG_PREFIX)s" + cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" + cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" + cfg.verbose = False + return cfg + + +class NotThisMethod(Exception): + """Exception raised if a method is not valid for the current scenario.""" + + +LONG_VERSION_PY = {} +HANDLERS = {} + + +def register_vcs_handler(vcs, method): # decorator + """Create decorator to mark a method as the handler of a VCS.""" + def decorate(f): + """Store f in HANDLERS[vcs][method].""" + if vcs not in HANDLERS: + HANDLERS[vcs] = {} + HANDLERS[vcs][method] = f + return f + return decorate + + +# pylint:disable=too-many-arguments,consider-using-with # noqa +def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, + env=None): + """Call the given command(s).""" + assert isinstance(commands, list) + process = None + for command in commands: + try: + dispcmd = str([command] + args) + # remember shell=False, so use git.cmd on windows, not just git + process = subprocess.Popen([command] + args, cwd=cwd, env=env, + stdout=subprocess.PIPE, + stderr=(subprocess.PIPE if hide_stderr + else None)) + break + except EnvironmentError: + e = sys.exc_info()[1] + if e.errno == errno.ENOENT: + continue + if verbose: + print("unable to run %%s" %% dispcmd) + print(e) + return None, None + else: + if verbose: + print("unable to find command, tried %%s" 
%% (commands,)) + return None, None + stdout = process.communicate()[0].strip().decode() + if process.returncode != 0: + if verbose: + print("unable to run %%s (error)" %% dispcmd) + print("stdout was %%s" %% stdout) + return None, process.returncode + return stdout, process.returncode + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return {"version": dirname[len(parentdir_prefix):], + "full-revisionid": None, + "dirty": False, "error": None, "date": None} + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print("Tried directories %%s but none started with prefix %%s" %% + (str(rootdirs), parentdir_prefix)) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG):] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %%d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r'\d', r)} + if verbose: + print("discarding '%%s', no digits" %% ",".join(refs - tags)) + if verbose: + print("likely tags: %%s" %% ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix):] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r'\d', r): + continue + if verbose: + print("picking %%s" %% r) + return {"version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": None, + "date": date} + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return {"version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, "error": "no suitable tags", "date": None} + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, + hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %%s not under git control" %% root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner(GITS, ["describe", "--tags", "--dirty", + "--always", "--long", + "--match", "%%s*" %% tag_prefix], + cwd=root) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], + cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. 
+ branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[:git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = ("unable to parse git-describe output: '%%s'" + %% describe_out) + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%%s' doesn't start with prefix '%%s'" + print(fmt %% (full_tag, tag_prefix)) + pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" + %% (full_tag, tag_prefix)) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix):] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. 
+ date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%%d.g%%s" %% (pieces["distance"], + pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post0.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post0.dev%%d" %% pieces["distance"] + else: + # exception #1 + rendered = "0.post0.dev%%d" %% pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%%d" %% pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%%s" %% pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0]
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"] or pieces["dirty"]:
+            rendered += ".post%%d" %% pieces["distance"]
+            if pieces["dirty"]:
+                rendered += ".dev0"
+    else:
+        # exception #1
+        rendered = "0.post%%d" %% pieces["distance"]
+        if pieces["dirty"]:
+            rendered += ".dev0"
+    return rendered
+
+
+def render_git_describe(pieces):
+    """TAG[-DISTANCE-gHEX][-dirty].
+
+    Like 'git describe --tags --dirty --always'.
+
+    Exceptions:
+    1: no tags. HEX[-dirty]  (note: no 'g' prefix)
+    """
+    if pieces["closest-tag"]:
+        rendered = pieces["closest-tag"]
+        if pieces["distance"]:
+            rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"])
+    else:
+        # exception #1
+        rendered = pieces["short"]
+    if pieces["dirty"]:
+        rendered += "-dirty"
+    return rendered
+
+
+def render_git_describe_long(pieces):
+    """TAG-DISTANCE-gHEX[-dirty].
+
+    Like 'git describe --tags --dirty --always --long'.
+    The distance/hash is unconditional.
+
+    Exceptions:
+    1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return {"version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None} + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%%s'" %% style) + + return {"version": rendered, "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], "error": None, + "date": pieces.get("date")} + + +def get_versions(): + """Get version information or return default if unable to do so.""" + # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have + # __file__, we can work backwards from there to the root. Some + # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which + # case we can only use expanded keywords. 
+ + cfg = get_config() + verbose = cfg.verbose + + try: + return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, + verbose) + except NotThisMethod: + pass + + try: + root = os.path.realpath(__file__) + # versionfile_source is the relative path from the top of the source + # tree (where the .git directory might live) to this file. Invert + # this to find the root from __file__. + for _ in cfg.versionfile_source.split('/'): + root = os.path.dirname(root) + except NameError: + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to find root of source tree", + "date": None} + + try: + pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) + return render(pieces, cfg.style) + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + except NotThisMethod: + pass + + return {"version": "0+unknown", "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", "date": None} +''' + + +@register_vcs_handler("git", "get_keywords") +def git_get_keywords(versionfile_abs): + """Extract version information from the given file.""" + # the code embedded in _version.py can just fetch the value of these + # keywords. When used from setup.py, we don't want to import _version.py, + # so we do it with a regexp instead. This function is not used from + # _version.py. 
+ keywords = {} + try: + with open(versionfile_abs, "r") as fobj: + for line in fobj: + if line.strip().startswith("git_refnames ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["refnames"] = mo.group(1) + if line.strip().startswith("git_full ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["full"] = mo.group(1) + if line.strip().startswith("git_date ="): + mo = re.search(r'=\s*"(.*)"', line) + if mo: + keywords["date"] = mo.group(1) + except EnvironmentError: + pass + return keywords + + +@register_vcs_handler("git", "keywords") +def git_versions_from_keywords(keywords, tag_prefix, verbose): + """Get version information from git keywords.""" + if "refnames" not in keywords: + raise NotThisMethod("Short version file found") + date = keywords.get("date") + if date is not None: + # Use only the last line. Previous lines may contain GPG signature + # information. + date = date.splitlines()[-1] + + # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant + # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 + # -like" string, which we must then edit to make compliant), because + # it's been around since git-1.5.3, and it's too difficult to + # discover which version we're using, or to work around using an + # older one. + date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + refnames = keywords["refnames"].strip() + if refnames.startswith("$Format"): + if verbose: + print("keywords are unexpanded, not using") + raise NotThisMethod("unexpanded keywords, not a git-archive tarball") + refs = {r.strip() for r in refnames.strip("()").split(",")} + # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of + # just "foo-1.0". If we see a "tag: " prefix, prefer those. + TAG = "tag: " + tags = {r[len(TAG) :] for r in refs if r.startswith(TAG)} + if not tags: + # Either we're using git < 1.8.3, or there really are no tags. We use + # a heuristic: assume all version tags have a digit. 
The old git %d + # expansion behaves like git log --decorate=short and strips out the + # refs/heads/ and refs/tags/ prefixes that would let us distinguish + # between branches and tags. By ignoring refnames without digits, we + # filter out many common branch names like "release" and + # "stabilization", as well as "HEAD" and "master". + tags = {r for r in refs if re.search(r"\d", r)} + if verbose: + print("discarding '%s', no digits" % ",".join(refs - tags)) + if verbose: + print("likely tags: %s" % ",".join(sorted(tags))) + for ref in sorted(tags): + # sorting will prefer e.g. "2.0" over "2.0rc1" + if ref.startswith(tag_prefix): + r = ref[len(tag_prefix) :] + # Filter out refs that exactly match prefix or that don't start + # with a number once the prefix is stripped (mostly a concern + # when prefix is '') + if not re.match(r"\d", r): + continue + if verbose: + print("picking %s" % r) + return { + "version": r, + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": None, + "date": date, + } + # no suitable tags, so version is "0+unknown", but full hex is still there + if verbose: + print("no suitable tags, using unknown + full revision id") + return { + "version": "0+unknown", + "full-revisionid": keywords["full"].strip(), + "dirty": False, + "error": "no suitable tags", + "date": None, + } + + +@register_vcs_handler("git", "pieces_from_vcs") +def git_pieces_from_vcs(tag_prefix, root, verbose, runner=run_command): + """Get version from 'git describe' in the root of the source tree. + + This only gets called if the git-archive 'subst' keywords were *not* + expanded, and _version.py hasn't already been rewritten with a short + version string, meaning we're inside a checked out source tree. 
+ """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + + _, rc = runner(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) + if rc != 0: + if verbose: + print("Directory %s not under git control" % root) + raise NotThisMethod("'git rev-parse --git-dir' returned error") + + # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] + # if there isn't one, this yields HEX[-dirty] (no NUM) + describe_out, rc = runner( + GITS, + [ + "describe", + "--tags", + "--dirty", + "--always", + "--long", + "--match", + "%s*" % tag_prefix, + ], + cwd=root, + ) + # --long was added in git-1.5.5 + if describe_out is None: + raise NotThisMethod("'git describe' failed") + describe_out = describe_out.strip() + full_out, rc = runner(GITS, ["rev-parse", "HEAD"], cwd=root) + if full_out is None: + raise NotThisMethod("'git rev-parse' failed") + full_out = full_out.strip() + + pieces = {} + pieces["long"] = full_out + pieces["short"] = full_out[:7] # maybe improved later + pieces["error"] = None + + branch_name, rc = runner(GITS, ["rev-parse", "--abbrev-ref", "HEAD"], cwd=root) + # --abbrev-ref was added in git-1.6.3 + if rc != 0 or branch_name is None: + raise NotThisMethod("'git rev-parse --abbrev-ref' returned error") + branch_name = branch_name.strip() + + if branch_name == "HEAD": + # If we aren't exactly on a branch, pick a branch which represents + # the current commit. If all else fails, we are on a branchless + # commit. + branches, rc = runner(GITS, ["branch", "--contains"], cwd=root) + # --contains was added in git-1.5.4 + if rc != 0 or branches is None: + raise NotThisMethod("'git branch --contains' returned error") + branches = branches.split("\n") + + # Remove the first line if we're running detached + if "(" in branches[0]: + branches.pop(0) + + # Strip off the leading "* " from the list of branches. 
+ branches = [branch[2:] for branch in branches] + if "master" in branches: + branch_name = "master" + elif not branches: + branch_name = None + else: + # Pick the first branch that is returned. Good or bad. + branch_name = branches[0] + + pieces["branch"] = branch_name + + # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] + # TAG might have hyphens. + git_describe = describe_out + + # look for -dirty suffix + dirty = git_describe.endswith("-dirty") + pieces["dirty"] = dirty + if dirty: + git_describe = git_describe[: git_describe.rindex("-dirty")] + + # now we have TAG-NUM-gHEX or HEX + + if "-" in git_describe: + # TAG-NUM-gHEX + mo = re.search(r"^(.+)-(\d+)-g([0-9a-f]+)$", git_describe) + if not mo: + # unparseable. Maybe git-describe is misbehaving? + pieces["error"] = "unable to parse git-describe output: '%s'" % describe_out + return pieces + + # tag + full_tag = mo.group(1) + if not full_tag.startswith(tag_prefix): + if verbose: + fmt = "tag '%s' doesn't start with prefix '%s'" + print(fmt % (full_tag, tag_prefix)) + pieces["error"] = "tag '%s' doesn't start with prefix '%s'" % ( + full_tag, + tag_prefix, + ) + return pieces + pieces["closest-tag"] = full_tag[len(tag_prefix) :] + + # distance: number of commits since tag + pieces["distance"] = int(mo.group(2)) + + # commit: short hex revision ID + pieces["short"] = mo.group(3) + + else: + # HEX: no tags + pieces["closest-tag"] = None + count_out, rc = runner(GITS, ["rev-list", "HEAD", "--count"], cwd=root) + pieces["distance"] = int(count_out) # total number of commits + + # commit date: see ISO-8601 comment in git_versions_from_keywords() + date = runner(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() + # Use only the last line. Previous lines may contain GPG signature + # information. 
+ date = date.splitlines()[-1] + pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) + + return pieces + + +def do_vcs_install(manifest_in, versionfile_source, ipy): + """Git-specific installation logic for Versioneer. + + For Git, this means creating/changing .gitattributes to mark _version.py + for export-subst keyword substitution. + """ + GITS = ["git"] + if sys.platform == "win32": + GITS = ["git.cmd", "git.exe"] + files = [manifest_in, versionfile_source] + if ipy: + files.append(ipy) + try: + my_path = __file__ + if my_path.endswith(".pyc") or my_path.endswith(".pyo"): + my_path = os.path.splitext(my_path)[0] + ".py" + versioneer_file = os.path.relpath(my_path) + except NameError: + versioneer_file = "versioneer.py" + files.append(versioneer_file) + present = False + try: + with open(".gitattributes", "r") as fobj: + for line in fobj: + if line.strip().startswith(versionfile_source): + if "export-subst" in line.strip().split()[1:]: + present = True + break + except EnvironmentError: + pass + if not present: + with open(".gitattributes", "a+") as fobj: + fobj.write(f"{versionfile_source} export-subst\n") + files.append(".gitattributes") + run_command(GITS, ["add", "--"] + files) + + +def versions_from_parentdir(parentdir_prefix, root, verbose): + """Try to determine the version from the parent directory name. + + Source tarballs conventionally unpack into a directory that includes both + the project name and a version string. 
We will also support searching up + two directory levels for an appropriately named parent directory + """ + rootdirs = [] + + for _ in range(3): + dirname = os.path.basename(root) + if dirname.startswith(parentdir_prefix): + return { + "version": dirname[len(parentdir_prefix) :], + "full-revisionid": None, + "dirty": False, + "error": None, + "date": None, + } + rootdirs.append(root) + root = os.path.dirname(root) # up a level + + if verbose: + print( + "Tried directories %s but none started with prefix %s" + % (str(rootdirs), parentdir_prefix) + ) + raise NotThisMethod("rootdir doesn't start with parentdir_prefix") + + +SHORT_VERSION_PY = """ +# This file was generated by 'versioneer.py' (0.20) from +# revision-control system data, or from the parent directory name of an +# unpacked source archive. Distribution tarballs contain a pre-generated copy +# of this file. + +import json + +version_json = ''' +%s +''' # END VERSION_JSON + + +def get_versions(): + return json.loads(version_json) +""" + + +def versions_from_file(filename): + """Try to determine the version from _version.py if present.""" + try: + with open(filename) as f: + contents = f.read() + except EnvironmentError: + raise NotThisMethod("unable to read _version.py") + mo = re.search( + r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) + if not mo: + mo = re.search( + r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S + ) + if not mo: + raise NotThisMethod("no version_json in _version.py") + return json.loads(mo.group(1)) + + +def write_to_version_file(filename, versions): + """Write the given version number to the given _version.py file.""" + os.unlink(filename) + contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) + with open(filename, "w") as f: + f.write(SHORT_VERSION_PY % contents) + + print("set %s to '%s'" % (filename, versions["version"])) + + +def plus_or_dot(pieces): + """Return a + if we don't already have one, 
else return a .""" + if "+" in pieces.get("closest-tag", ""): + return "." + return "+" + + +def render_pep440(pieces): + """Build up version string, with post-release "local version identifier". + + Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you + get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty + + Exceptions: + 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_branch(pieces): + """TAG[[.dev0]+DISTANCE.gHEX[.dirty]] . + + The ".dev0" means not master branch. Note that .dev0 sorts backwards + (a feature branch will appear "older" than the master branch). + + Exceptions: + 1: no tags. 0[.dev0]+untagged.DISTANCE.gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0" + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_pre(pieces): + """TAG[.post0.devDISTANCE] -- No -dirty. + + Exceptions: + 1: no tags. 
0.post0.devDISTANCE + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += ".post0.dev%d" % pieces["distance"] + else: + # exception #1 + rendered = "0.post0.dev%d" % pieces["distance"] + return rendered + + +def render_pep440_post(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX] . + + The ".dev0" means dirty. Note that .dev0 sorts backwards + (a dirty tree will appear "older" than the corresponding clean one), + but you shouldn't be releasing software with -dirty anyways. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + return rendered + + +def render_pep440_post_branch(pieces): + """TAG[.postDISTANCE[.dev0]+gHEX[.dirty]] . + + The ".dev0" means not master branch. + + Exceptions: + 1: no tags. 0.postDISTANCE[.dev0]+gHEX[.dirty] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += plus_or_dot(pieces) + rendered += "g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["branch"] != "master": + rendered += ".dev0" + rendered += "+g%s" % pieces["short"] + if pieces["dirty"]: + rendered += ".dirty" + return rendered + + +def render_pep440_old(pieces): + """TAG[.postDISTANCE[.dev0]] . + + The ".dev0" means dirty. + + Exceptions: + 1: no tags. 
0.postDISTANCE[.dev0] + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"] or pieces["dirty"]: + rendered += ".post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + else: + # exception #1 + rendered = "0.post%d" % pieces["distance"] + if pieces["dirty"]: + rendered += ".dev0" + return rendered + + +def render_git_describe(pieces): + """TAG[-DISTANCE-gHEX][-dirty]. + + Like 'git describe --tags --dirty --always'. + + Exceptions: + 1: no tags. HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + if pieces["distance"]: + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render_git_describe_long(pieces): + """TAG-DISTANCE-gHEX[-dirty]. + + Like 'git describe --tags --dirty --always -long'. + The distance/hash is unconditional. + + Exceptions: + 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) + """ + if pieces["closest-tag"]: + rendered = pieces["closest-tag"] + rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) + else: + # exception #1 + rendered = pieces["short"] + if pieces["dirty"]: + rendered += "-dirty" + return rendered + + +def render(pieces, style): + """Render the given version pieces into the requested style.""" + if pieces["error"]: + return { + "version": "unknown", + "full-revisionid": pieces.get("long"), + "dirty": None, + "error": pieces["error"], + "date": None, + } + + if not style or style == "default": + style = "pep440" # the default + + if style == "pep440": + rendered = render_pep440(pieces) + elif style == "pep440-branch": + rendered = render_pep440_branch(pieces) + elif style == "pep440-pre": + rendered = render_pep440_pre(pieces) + elif style == "pep440-post": + rendered = render_pep440_post(pieces) + elif style == "pep440-post-branch": + rendered = render_pep440_post_branch(pieces) + elif style == "pep440-old": + rendered = render_pep440_old(pieces) + elif style == "git-describe": + rendered = render_git_describe(pieces) + elif style == "git-describe-long": + rendered = render_git_describe_long(pieces) + else: + raise ValueError("unknown style '%s'" % style) + + return { + "version": rendered, + "full-revisionid": pieces["long"], + "dirty": pieces["dirty"], + "error": None, + "date": pieces.get("date"), + } + + +class VersioneerBadRootError(Exception): + """The project root directory is unknown or missing key files.""" + + +def get_versions(verbose=False): + """Get the project version from whatever source is available. + + Returns dict with two keys: 'version' and 'full'. 
+ """ + if "versioneer" in sys.modules: + # see the discussion in cmdclass.py:get_cmdclass() + del sys.modules["versioneer"] + + root = get_root() + cfg = get_config_from_root(root) + + assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" + handlers = HANDLERS.get(cfg.VCS) + assert handlers, "unrecognized VCS '%s'" % cfg.VCS + verbose = verbose or cfg.verbose + assert ( + cfg.versionfile_source is not None + ), "please set versioneer.versionfile_source" + assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" + + versionfile_abs = os.path.join(root, cfg.versionfile_source) + + # extract version from first of: _version.py, VCS command (e.g. 'git + # describe'), parentdir. This is meant to work for developers using a + # source checkout, for users of a tarball created by 'setup.py sdist', + # and for users of a tarball/zipball created by 'git archive' or github's + # download-from-tag feature or the equivalent in other VCSes. + + get_keywords_f = handlers.get("get_keywords") + from_keywords_f = handlers.get("keywords") + if get_keywords_f and from_keywords_f: + try: + keywords = get_keywords_f(versionfile_abs) + ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) + if verbose: + print("got version from expanded keyword %s" % ver) + return ver + except NotThisMethod: + pass + + try: + ver = versions_from_file(versionfile_abs) + if verbose: + print("got version from file %s %s" % (versionfile_abs, ver)) + return ver + except NotThisMethod: + pass + + from_vcs_f = handlers.get("pieces_from_vcs") + if from_vcs_f: + try: + pieces = from_vcs_f(cfg.tag_prefix, root, verbose) + ver = render(pieces, cfg.style) + if verbose: + print("got version from VCS %s" % ver) + return ver + except NotThisMethod: + pass + + try: + if cfg.parentdir_prefix: + ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) + if verbose: + print("got version from parentdir %s" % ver) + return ver + except NotThisMethod: + pass + + if verbose: + 
print("unable to compute version") + + return { + "version": "0+unknown", + "full-revisionid": None, + "dirty": None, + "error": "unable to compute version", + "date": None, + } + + +def get_version(): + """Get the short version string for this project.""" + return get_versions()["version"] + + +def get_cmdclass(cmdclass=None): + """Get the custom setuptools/distutils subclasses used by Versioneer. + + If the package uses a different cmdclass (e.g. one from numpy), it + should be provide as an argument. + """ + if "versioneer" in sys.modules: + del sys.modules["versioneer"] + # this fixes the "python setup.py develop" case (also 'install' and + # 'easy_install .'), in which subdependencies of the main project are + # built (using setup.py bdist_egg) in the same python process. Assume + # a main project A and a dependency B, which use different versions + # of Versioneer. A's setup.py imports A's Versioneer, leaving it in + # sys.modules by the time B's setup.py is executed, causing B to run + # with the wrong versioneer. Setuptools wraps the sub-dep builds in a + # sandbox that restores sys.modules to it's pre-build state, so the + # parent is protected against the child's "import versioneer". By + # removing ourselves from sys.modules here, before the child build + # happens, we protect the child from the parent's versioneer too. 
+ # Also see https://github.com/python-versioneer/python-versioneer/issues/52 + + cmds = {} if cmdclass is None else cmdclass.copy() + + # we add "version" to both distutils and setuptools + from distutils.core import Command + + class cmd_version(Command): + description = "report generated version string" + user_options = [] + boolean_options = [] + + def initialize_options(self): + pass + + def finalize_options(self): + pass + + def run(self): + vers = get_versions(verbose=True) + print("Version: %s" % vers["version"]) + print(" full-revisionid: %s" % vers.get("full-revisionid")) + print(" dirty: %s" % vers.get("dirty")) + print(" date: %s" % vers.get("date")) + if vers["error"]: + print(" error: %s" % vers["error"]) + + cmds["version"] = cmd_version + + # we override "build_py" in both distutils and setuptools + # + # most invocation pathways end up running build_py: + # distutils/build -> build_py + # distutils/install -> distutils/build ->.. + # setuptools/bdist_wheel -> distutils/install ->.. + # setuptools/bdist_egg -> distutils/install_lib -> build_py + # setuptools/install -> bdist_egg ->.. + # setuptools/develop -> ? + # pip install: + # copies source tree to a tempdir before running egg_info/etc + # if .git isn't copied too, 'git describe' will fail + # then does setup.py bdist_wheel, or sometimes setup.py install + # setup.py egg_info -> ? 
+ + # we override different "build_py" commands for both environments + if "build_py" in cmds: + _build_py = cmds["build_py"] + elif "setuptools" in sys.modules: + from setuptools.command.build_py import build_py as _build_py + else: + from distutils.command.build_py import build_py as _build_py + + class cmd_build_py(_build_py): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_py.run(self) + # now locate _version.py in the new build/ directory and replace + # it with an updated value + if cfg.versionfile_build: + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_py"] = cmd_build_py + + if "build_ext" in cmds: + _build_ext = cmds["build_ext"] + elif "setuptools" in sys.modules: + from setuptools.command.build_ext import build_ext as _build_ext + else: + from distutils.command.build_ext import build_ext as _build_ext + + class cmd_build_ext(_build_ext): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + _build_ext.run(self) + if self.inplace: + # build_ext --inplace will only build extensions in + # build/lib<..> dir with no _version.py to write to. + # As in place builds will already have a _version.py + # in the module dir, we do not need to write one. + return + # now locate _version.py in the new build/ directory and replace + # it with an updated value + target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + cmds["build_ext"] = cmd_build_ext + + if "cx_Freeze" in sys.modules: # cx_freeze enabled? + from cx_Freeze.dist import build_exe as _build_exe + + # nczeczulin reports that py2exe won't like the pep440-style string + # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. 
+ # setup(console=[{ + # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION + # "product_version": versioneer.get_version(), + # ... + + class cmd_build_exe(_build_exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _build_exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["build_exe"] = cmd_build_exe + del cmds["build_py"] + + if "py2exe" in sys.modules: # py2exe enabled? + from py2exe.distutils_buildexe import py2exe as _py2exe + + class cmd_py2exe(_py2exe): + def run(self): + root = get_root() + cfg = get_config_from_root(root) + versions = get_versions() + target_versionfile = cfg.versionfile_source + print("UPDATING %s" % target_versionfile) + write_to_version_file(target_versionfile, versions) + + _py2exe.run(self) + os.unlink(target_versionfile) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + cmds["py2exe"] = cmd_py2exe + + # we override different "sdist" commands for both environments + if "sdist" in cmds: + _sdist = cmds["sdist"] + elif "setuptools" in sys.modules: + from setuptools.command.sdist import sdist as _sdist + else: + from distutils.command.sdist import sdist as _sdist + + class cmd_sdist(_sdist): + def run(self): + versions = get_versions() + # pylint:disable=attribute-defined-outside-init # noqa + 
self._versioneer_generated_versions = versions + # unless we update this, the command will keep using the old + # version + self.distribution.metadata.version = versions["version"] + return _sdist.run(self) + + def make_release_tree(self, base_dir, files): + root = get_root() + cfg = get_config_from_root(root) + _sdist.make_release_tree(self, base_dir, files) + # now locate _version.py in the new base_dir directory + # (remembering that it may be a hardlink) and replace it with an + # updated value + target_versionfile = os.path.join(base_dir, cfg.versionfile_source) + print("UPDATING %s" % target_versionfile) + write_to_version_file( + target_versionfile, self._versioneer_generated_versions + ) + + cmds["sdist"] = cmd_sdist + + return cmds + + +CONFIG_ERROR = """ +setup.cfg is missing the necessary Versioneer configuration. You need +a section like: + + [versioneer] + VCS = git + style = pep440 + versionfile_source = src/myproject/_version.py + versionfile_build = myproject/_version.py + tag_prefix = + parentdir_prefix = myproject- + +You will also need to edit your setup.py to use the results: + + import versioneer + setup(version=versioneer.get_version(), + cmdclass=versioneer.get_cmdclass(), ...) + +Please read the docstring in ./versioneer.py for configuration instructions, +edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. +""" + +SAMPLE_CONFIG = """ +# See the docstring in versioneer.py for instructions. Note that you must +# re-run 'versioneer.py setup' after changing this section, and commit the +# resulting files. + +[versioneer] +#VCS = git +#style = pep440 +#versionfile_source = +#versionfile_build = +#tag_prefix = +#parentdir_prefix = + +""" + +OLD_SNIPPET = """ +from ._version import get_versions +__version__ = get_versions()['version'] +del get_versions +""" + +INIT_PY_SNIPPET = """ +from . 
import {0} +__version__ = {0}.get_versions()['version'] +""" + + +def do_setup(): + """Do main VCS-independent setup function for installing Versioneer.""" + root = get_root() + try: + cfg = get_config_from_root(root) + except ( + EnvironmentError, + configparser.NoSectionError, + configparser.NoOptionError, + ) as e: + if isinstance(e, (EnvironmentError, configparser.NoSectionError)): + print("Adding sample versioneer config to setup.cfg", file=sys.stderr) + with open(os.path.join(root, "setup.cfg"), "a") as f: + f.write(SAMPLE_CONFIG) + print(CONFIG_ERROR, file=sys.stderr) + return 1 + + print(" creating %s" % cfg.versionfile_source) + with open(cfg.versionfile_source, "w") as f: + LONG = LONG_VERSION_PY[cfg.VCS] + f.write( + LONG + % { + "DOLLAR": "$", + "STYLE": cfg.style, + "TAG_PREFIX": cfg.tag_prefix, + "PARENTDIR_PREFIX": cfg.parentdir_prefix, + "VERSIONFILE_SOURCE": cfg.versionfile_source, + } + ) + + ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") + if os.path.exists(ipy): + try: + with open(ipy, "r") as f: + old = f.read() + except EnvironmentError: + old = "" + module = os.path.splitext(os.path.basename(cfg.versionfile_source))[0] + snippet = INIT_PY_SNIPPET.format(module) + if OLD_SNIPPET in old: + print(" replacing boilerplate in %s" % ipy) + with open(ipy, "w") as f: + f.write(old.replace(OLD_SNIPPET, snippet)) + elif snippet not in old: + print(" appending to %s" % ipy) + with open(ipy, "a") as f: + f.write(snippet) + else: + print(" %s unmodified" % ipy) + else: + print(" %s doesn't exist, ok" % ipy) + ipy = None + + # Make sure both the top-level "versioneer.py" and versionfile_source + # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so + # they'll be copied into source distributions. Pip won't be able to + # install the package without this. 
+ manifest_in = os.path.join(root, "MANIFEST.in") + simple_includes = set() + try: + with open(manifest_in, "r") as f: + for line in f: + if line.startswith("include "): + for include in line.split()[1:]: + simple_includes.add(include) + except EnvironmentError: + pass + # That doesn't cover everything MANIFEST.in can do + # (http://docs.python.org/2/distutils/sourcedist.html#commands), so + # it might give some false negatives. Appending redundant 'include' + # lines is safe, though. + if "versioneer.py" not in simple_includes: + print(" appending 'versioneer.py' to MANIFEST.in") + with open(manifest_in, "a") as f: + f.write("include versioneer.py\n") + else: + print(" 'versioneer.py' already in MANIFEST.in") + if cfg.versionfile_source not in simple_includes: + print( + " appending versionfile_source ('%s') to MANIFEST.in" + % cfg.versionfile_source + ) + with open(manifest_in, "a") as f: + f.write("include %s\n" % cfg.versionfile_source) + else: + print(" versionfile_source already in MANIFEST.in") + + # Make VCS-specific changes. For git, this means creating/changing + # .gitattributes to mark _version.py for export-subst keyword + # substitution. + do_vcs_install(manifest_in, cfg.versionfile_source, ipy) + return 0 + + +def scan_setup_py(): + """Validate the contents of setup.py against Versioneer's expectations.""" + found = set() + setters = False + errors = 0 + with open("setup.py", "r") as f: + for line in f.readlines(): + if "import versioneer" in line: + found.add("import") + if "versioneer.get_cmdclass()" in line: + found.add("cmdclass") + if "versioneer.get_version()" in line: + found.add("get_version") + if "versioneer.VCS" in line: + setters = True + if "versioneer.versionfile_source" in line: + setters = True + if len(found) != 3: + print("") + print("Your setup.py appears to be missing some important items") + print("(but I might be wrong). 
Please make sure it has something") + print("roughly like the following:") + print("") + print(" import versioneer") + print(" setup( version=versioneer.get_version(),") + print(" cmdclass=versioneer.get_cmdclass(), ...)") + print("") + errors += 1 + if setters: + print("You should remove lines like 'versioneer.VCS = ' and") + print("'versioneer.versionfile_source = ' . This configuration") + print("now lives in setup.cfg, and should be removed from setup.py") + print("") + errors += 1 + return errors + + +if __name__ == "__main__": + cmd = sys.argv[1] + if cmd == "setup": + errors = do_setup() + errors += scan_setup_py() + if errors: + sys.exit(1)