Commit fab72ab (1 parent: 71b8f49)
Showing 5 changed files with 199 additions and 1 deletion.
@@ -0,0 +1,9 @@
"""
Support for Febus format.

This is used by the Febus DAS interrogator.
More info here: https://www.febus-optics.com/en/
"""
from __future__ import annotations

from .core import Febus2
@@ -0,0 +1,82 @@
"""IO module for reading Febus data."""
from __future__ import annotations

import numpy as np

import dascore as dc
from dascore.constants import opt_timeable_types
from dascore.io import FiberIO
from dascore.utils.hdf5 import H5Reader
from dascore.utils.models import UnitQuantity, UTF8Str

from .utils import _get_febus_attrs, _get_febus_version_str, _read_febus


class FebusPatchAttrs(dc.PatchAttrs):
    """
    Patch attrs for febus.

    Attributes
    ----------
    source
        The source designation.
    zone
        The zone designation.
    """

    gauge_length: float = np.NaN
    pulse_width: float = np.NaN
    source: str = ""
    zone: str = ""

    gauge_length_units: UnitQuantity | None = None
    schema_version: UTF8Str = ""


class Febus2(FiberIO):
    """
    Support for Febus version 2.

    This should cover all 2.* versions of the format (maybe).
    """

    name = "febus"
    preferred_extensions = ("hdf5", "h5")
    version = "2"

    def get_format(self, resource: H5Reader) -> tuple[str, str] | bool:
        """
        Return True if the file contains febus version 2 data, else False.

        Parameters
        ----------
        resource
            A path to the file which may contain febus data.
        """
        version_str = _get_febus_version_str(resource)
        if version_str:
            return self.name, version_str

    def scan(self, resource: H5Reader) -> list[dc.PatchAttrs]:
        """Scan a febus file, return summary information about the file's contents."""
        file_version = _get_febus_version_str(resource)
        extras = {
            "path": resource.filename,
            "file_format": self.name,
            "file_version": str(file_version),
        }
        attrs = _get_febus_attrs(resource)
        attrs.update(extras)
        return [FebusPatchAttrs(**attrs)]

    def read(
        self,
        resource: H5Reader,
        time: tuple[opt_timeable_types, opt_timeable_types] | None = None,
        distance: tuple[float | None, float | None] | None = None,
        **kwargs,
    ) -> dc.BaseSpool:
        """Read a febus spool of patches."""
        patches = _read_febus(
            resource, time=time, distance=distance, attr_cls=FebusPatchAttrs
        )
        return dc.spool(patches)
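
For context, a minimal usage sketch of how this reader would typically be exercised through dascore's top-level API rather than by instantiating Febus2 directly. The file path below is hypothetical, and the example assumes a local Febus 2.x HDF5 file that dascore can dispatch to this FiberIO via get_format.

import dascore as dc

# Hypothetical path to a Febus 2.x HDF5 file (illustration only).
path = "example_febus_file.h5"

# Summary attrs as produced by Febus2.scan.
print(dc.scan(path))

# Lazy spool backed by the file; indexing reads the patch via Febus2.read.
spool = dc.spool(path)
patch = spool[0]
print(patch.attrs.gauge_length)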
@@ -0,0 +1,105 @@
"""Utilities for Febus."""
from __future__ import annotations

import dascore as dc
import dascore.core
from dascore.core.coords import get_coord
from dascore.utils.hdf5 import unpack_scalar_h5_dataset
from dascore.utils.misc import unbyte

# --- Getting format/version


def _get_febus_version_str(hdf_fi) -> str:
    """Return the version string for a febus file."""
    # Define a few root attrs that act as a "fingerprint";
    # all Febus DAS files have root groups that start with "fa".
    inst_keys = sorted(hdf_fi.keys())
    expected_source_attrs = {
        "AmpliPower",
        "Hostname",
        "WholeExtent",
        "SamplingRate",
    }
    # iterate instrument keys
    is_febus = all([x.startswith("fa") for x in inst_keys])
    # Version 1, or what I think is version one (e.g. Valencia PubDAS data),
    # did not include a Version attr in the Source dataset, so we use that
    # as the default.
    version = "1"
    for inst_key in inst_keys:
        inst = hdf_fi[inst_key]
        source_keys = set(inst.keys())
        is_febus = is_febus and all(x.startswith("Source") for x in source_keys)
        for source_key in source_keys:
            source = inst[source_key]
            # If the version is set in a Source, use that version.
            # Hopefully this is the file version...
            version = source.attrs.get("Version", version)
            is_febus = is_febus and expected_source_attrs.issubset(set(source.attrs))
    if is_febus:
        return version
    return ""


def _get_coord_manager(header):
    """Get the distance ranges and spacing."""
    dims = tuple(unbyte(x) for x in header["dimensionNames"])
    units = tuple(unbyte(x) for x in header["dimensionUnits"])

    coords = {}
    for index, (dim, unit) in enumerate(zip(dims, units)):
        crange = header["dimensionRanges"][f"dimension{index}"]
        step = unpack_scalar_h5_dataset(crange["unitScale"])

        # Special case for time.
        if dim == "time":
            step = dc.to_timedelta64(step)
            t1 = dc.to_datetime64(unpack_scalar_h5_dataset(header["time"]))
            start = t1 + unpack_scalar_h5_dataset(crange["min"]) * step
            stop = t1 + (unpack_scalar_h5_dataset(crange["max"]) + 1) * step
        else:
            # The min/max values appear to be int ranges so we need to
            # multiply by step.
            start = unpack_scalar_h5_dataset(crange["min"]) * step
            stop = (unpack_scalar_h5_dataset(crange["max"]) + 1) * step

        coords[dim] = get_coord(min=start, max=stop, step=step, units=unit)
    return dascore.core.get_coord_manager(coords=coords, dims=dims)


def _get_attr_dict(header):
    """Map header info to DAS attrs."""
    attr_map = {
        "gaugeLength": "gauge_length",
        "unit": "data_units",
        "instrument": "instrument_id",
        "experiment": "acquisition_id",
    }
    out = {"data_category": "DAS"}
    for head_name, attr_name in attr_map.items():
        value = header[head_name]
        if hasattr(value, "shape"):
            value = unpack_scalar_h5_dataset(value)
        out[attr_name] = unbyte(value)
    return out


def _get_febus_attrs(fi) -> dict:
    """Scan a febus file, return metadata."""
    header = fi["header"]
    cm = _get_coord_manager(header)
    attrs = _get_attr_dict(header)
    attrs["coords"] = cm
    return attrs


def _read_febus(fi, distance=None, time=None, attr_cls=dc.PatchAttrs):
    """Read the febus values into a patch."""
    attrs = _get_febus_attrs(fi)
    data_node = fi["data"]
    coords = attrs.pop("coords")
    cm, data = coords.select(array=data_node, distance=distance, time=time)
    attrs["coords"] = cm.to_summary_dict()
    attrs["dims"] = cm.dims
    return [dc.Patch(data=data, coords=cm, attrs=attr_cls(**attrs))]
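
For orientation, a hedged sketch of the HDF5 layout that _get_febus_version_str fingerprints, written with plain h5py (the reader passed in above behaves like an h5py.File). The file path and group names are illustrative assumptions inferred from the checks in the function, not taken from a real file.

import h5py

# Assumed layout: root groups named like "fa1" hold "Source1", "Source2", ...
# groups whose attrs include AmpliPower, Hostname, WholeExtent and
# SamplingRate, plus an optional Version attr (absent in what appears to be
# version 1 files).
with h5py.File("example_febus.h5", "r") as fi:  # hypothetical file
    for inst_key in fi:                          # e.g. "fa1"
        for source_key in fi[inst_key]:          # e.g. "Source1"
            attrs = fi[inst_key][source_key].attrs
            print(inst_key, source_key, attrs.get("Version", "1"))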