commit 38518fb (1 parent: 1d49530)

* add optoDAS support
* remove dead code, add entry point
* add data category as DAS

Showing 9 changed files with 185 additions and 5 deletions.
@@ -0,0 +1,9 @@
"""
Support for OptoDAS format.

This is used by the OptoDAS interrogator made by Alcatel Submarine Networks.
More info here: https://web.asn.com/
"""
from __future__ import annotations

from .core import OptoDASV8
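A minimal usage sketch (the file path below is only a placeholder): once this subpackage is registered through dascore's entry points, generic functions such as dc.spool should detect the format from the file contents and dispatch to OptoDASV8.

import dascore as dc

# Hypothetical path to an OptoDAS v8 HDF5 file.
path = "example_optodas_file.hdf5"

# The spool lazily wraps the file; indexing returns a Patch.
spool = dc.spool(path)
patch = spool[0]
print(patch.attrs.file_format, patch.dims)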
@@ -0,0 +1,66 @@
"""IO module for reading OptoDAS data."""
from __future__ import annotations

import numpy as np

import dascore as dc
from dascore.constants import opt_timeable_types
from dascore.io import FiberIO
from dascore.utils.hdf5 import H5Reader
from dascore.utils.models import UnitQuantity, UTF8Str

from .utils import _get_opto_das_attrs, _get_opto_das_version_str, _read_opto_das


class OptoDASPatchAttrs(dc.PatchAttrs):
    """Patch attrs for OptoDAS."""

    gauge_length: float = np.nan
    gauge_length_units: UnitQuantity | None = None
    schema_version: UTF8Str = ""


class OptoDASV8(FiberIO):
    """Support for OptoDAS version 8."""

    name = "OptoDAS"
    preferred_extensions = ("hdf5", "h5")
    version = "8"

    def get_format(self, resource: H5Reader) -> tuple[str, str] | bool:
        """
        Return the format name and version if the file contains OptoDAS
        version 8 data, else return False.

        Parameters
        ----------
        resource
            A path to the file which may contain OptoDAS data.
        """
        version_str = _get_opto_das_version_str(resource)
        if version_str:
            return self.name, version_str
        return False

    def scan(self, resource: H5Reader) -> list[dc.PatchAttrs]:
        """Scan an OptoDAS file, return summary information about its contents."""
        file_version = _get_opto_das_version_str(resource)
        extras = {
            "path": resource.filename,
            "file_format": self.name,
            "file_version": str(file_version),
        }
        attrs = _get_opto_das_attrs(resource)
        attrs.update(extras)
        return [OptoDASPatchAttrs(**attrs)]

    def read(
        self,
        resource: H5Reader,
        time: tuple[opt_timeable_types, opt_timeable_types] | None = None,
        distance: tuple[float | None, float | None] | None = None,
        **kwargs,
    ) -> dc.BaseSpool:
        """Read an OptoDAS file and return a spool of patches."""
        patches = _read_opto_das(
            resource, time=time, distance=distance, attr_cls=OptoDASPatchAttrs
        )
        return dc.spool(patches)
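As a rough sketch of the metadata path (the filename is again hypothetical), dc.scan only reads attributes, so the extra fields declared on OptoDASPatchAttrs are available without loading the data array:

import dascore as dc

# Hypothetical path to an OptoDAS v8 file.
path = "example_optodas_file.hdf5"

# scan() returns one attrs object per patch in the file.
for attrs in dc.scan(path):
    print(attrs.file_format, attrs.file_version)  # e.g. "OptoDAS", "8"
    print(attrs.gauge_length, attrs.gauge_length_units)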
@@ -0,0 +1,89 @@
"""Utilities for OptoDAS."""
from __future__ import annotations

import dascore as dc
import dascore.core
from dascore.core.coords import get_coord
from dascore.utils.hdf5 import unpack_scalar_h5_dataset
from dascore.utils.misc import unbyte

# --- Getting format/version


def _get_opto_das_version_str(hdf_fi) -> str:
    """Return the version string for an OptoDAS file."""
    # define a few root attrs that act as a "fingerprint"
    expected_attrs = (
        "acqSpec",
        "header",
        "cableSpec",
        "data",
        "fileVersion",
    )
    if not all([x in hdf_fi for x in expected_attrs]):
        return ""
    version_str = str(unbyte(hdf_fi["fileVersion"][()]))
    return version_str


def _get_coord_manager(header):
    """Build a coordinate manager from the dimension ranges and spacing."""
    dims = tuple(unbyte(x) for x in header["dimensionNames"])
    units = tuple(unbyte(x) for x in header["dimensionUnits"])

    coords = {}
    for index, (dim, unit) in enumerate(zip(dims, units)):
        crange = header["dimensionRanges"][f"dimension{index}"]
        step = unpack_scalar_h5_dataset(crange["unitScale"])

        # special case for time.
        if dim == "time":
            step = dc.to_timedelta64(step)
            t1 = dc.to_datetime64(unpack_scalar_h5_dataset(header["time"]))
            start = t1 + unpack_scalar_h5_dataset(crange["min"]) * step
            stop = t1 + (unpack_scalar_h5_dataset(crange["max"]) + 1) * step
        else:
            # The min/max values appear to be int ranges so we need to
            # multiply by step.
            start = unpack_scalar_h5_dataset(crange["min"]) * step
            stop = (unpack_scalar_h5_dataset(crange["max"]) + 1) * step

        coords[dim] = get_coord(min=start, max=stop, step=step, units=unit)
    return dascore.core.get_coord_manager(coords=coords, dims=dims)


def _get_attr_dict(header):
    """Map header info to DAS attrs."""
    attr_map = {
        "gaugeLength": "gauge_length",
        "unit": "data_units",
        "instrument": "instrument_id",
        "experiment": "acquisition_id",
    }
    out = {"data_category": "DAS"}
    for head_name, attr_name in attr_map.items():
        value = header[head_name]
        if hasattr(value, "shape"):
            value = unpack_scalar_h5_dataset(value)
        out[attr_name] = unbyte(value)
    return out


def _get_opto_das_attrs(fi) -> dict:
    """Scan an OptoDAS file, return metadata."""
    header = fi["header"]
    cm = _get_coord_manager(header)
    attrs = _get_attr_dict(header)
    attrs["coords"] = cm
    return attrs


def _read_opto_das(fi, distance=None, time=None, attr_cls=dc.PatchAttrs):
    """Read the OptoDAS values into a patch."""
    attrs = _get_opto_das_attrs(fi)
    data_node = fi["data"]
    coords = attrs.pop("coords")
    cm, data = coords.select(array=data_node, distance=distance, time=time)
    attrs["coords"] = cm.to_summary_dict()
    attrs["dims"] = cm.dims
    return [dc.Patch(data=data, coords=cm, attrs=attr_cls(**attrs))]
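To make the fingerprint check concrete, here is a rough sketch (using h5py, with an illustrative filename and illustrative values) of the minimal root layout that _get_opto_das_version_str looks for; real OptoDAS files carry much more structure under header and data.

import h5py
import numpy as np

# Write a throwaway file containing only the root keys used as a fingerprint.
with h5py.File("fake_optodas.hdf5", "w") as fi:
    fi.create_group("acqSpec")
    fi.create_group("cableSpec")
    fi.create_group("header")
    fi.create_dataset("data", data=np.zeros((10, 4), dtype=np.float32))
    fi.create_dataset("fileVersion", data=8)

# The version check defined above should now report "8"
# (assuming unbyte passes non-bytes values through unchanged).
with h5py.File("fake_optodas.hdf5", "r") as fi:
    assert _get_opto_das_version_str(fi) == "8"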