diff --git a/dascore/core/attrs.py b/dascore/core/attrs.py
index a8a5f677..43dd2f57 100644
--- a/dascore/core/attrs.py
+++ b/dascore/core/attrs.py
@@ -77,7 +77,24 @@ def _get_dims(data_dict):
class PatchAttrs(DascoreBaseModel):
- """The expected attributes for a Patch."""
+ """
+ The expected attributes for a Patch.
+
+ The default attributes are:
+ ```{python}
+ #| echo: false
+
+ import dascore as dc
+ from IPython.display import Markdown
+
+ df_str = (
+ dc.PatchAttrs.get_summary_df()
+ .reset_index()
+ .to_markdown(index=False, stralign="center")
+ )
+ Markdown(df_str)
+ ```
+ """
model_config = ConfigDict(
title="Patch Summary",
diff --git a/dascore/core/patch.py b/dascore/core/patch.py
index 272deec0..7ca91ba7 100644
--- a/dascore/core/patch.py
+++ b/dascore/core/patch.py
@@ -25,6 +25,8 @@ class Patch:
"""
A Class for managing data and metadata.
+ See the [patch tutorial](/tutorial/patch.qmd) for examples.
+
Parameters
----------
data
diff --git a/dascore/core/spool.py b/dascore/core/spool.py
index cb3fe783..7c87d049 100644
--- a/dascore/core/spool.py
+++ b/dascore/core/spool.py
@@ -587,9 +587,9 @@ def _load_patch(self, kwargs) -> Self:
@singledispatch
def spool(obj: Path | str | BaseSpool | Sequence[PatchType], **kwargs) -> BaseSpool:
"""
- Create a spool from some data source.
+ Create a spool from a data source.
- This function is used to load data from many different sources.
+ This is the main function for loading in DASCore.
Parameters
----------
diff --git a/dascore/examples.py b/dascore/examples.py
index a66dca24..c699cb06 100644
--- a/dascore/examples.py
+++ b/dascore/examples.py
@@ -22,26 +22,53 @@
@register_func(EXAMPLE_PATCHES, key="random_das")
-def _random_patch(
+def random_patch(
*,
- starttime="2017-09-18",
- start_distance=0,
+ time_min="2017-09-18",
+ time_step=to_timedelta64(1 / 250),
+ time_array=None,
+ distance_min=0,
+ distance_step=1,
+ dist_array=None,
network="",
station="",
tag="random",
shape=(300, 2_000),
- time_step=to_timedelta64(1 / 250),
- distance_step=1,
- time_array=None,
- dist_array=None,
):
- """Generate a random DAS Patch."""
+ """
+ Generate a random DAS Patch.
+
+ Parameters
+ ----------
+ time_min
+ The time the patch starts.
+ time_step
+ The step between time samples.
+ time_array
+        If not None, an array for the time coordinate; `time_min` and
+        `time_step` will not be used.
+ distance_min
+ The start of the distance coordinate.
+ distance_step
+ The spacing between distance samples.
+ dist_array
+        If not None, an array of distance values; `distance_min` and
+        `distance_step` will not be used.
+ network
+ The network code.
+ station
+ The station designation.
+ tag
+        The patch tag.
+    shape
+        The shape of the data array.
+ """
# get input data
rand = np.random.RandomState(13)
array = rand.random(shape)
# create attrs
- t1 = np.atleast_1d(np.datetime64(starttime))[0]
- d1 = np.atleast_1d(start_distance)
+ t1 = np.atleast_1d(np.datetime64(time_min))[0]
+ d1 = np.atleast_1d(distance_min)
attrs = dict(
distance_step=distance_step,
time_step=to_timedelta64(time_step),
@@ -81,9 +108,16 @@ def _random_patch(
@register_func(EXAMPLE_PATCHES, key="patch_with_null")
-def _patch_with_null():
- """Create a patch which has nullish values."""
- patch = _random_patch()
+def patch_with_null(**kwargs):
+ """
+ A patch which has nullish values.
+
+ Parameters
+ ----------
+ **kwargs
+ Parameters passed to [`random_patch`](`dascore.examples.random_patch`).
+ """
+ patch = random_patch(**kwargs)
data = np.array(patch.data)
data[data > 0.9] = np.NaN
# also set the first row and column to NaN
@@ -92,15 +126,36 @@ def _patch_with_null():
return patch.new(data=data)
+@register_func(EXAMPLE_PATCHES, key="random_patch_with_lat_lon")
+def random_patch_lat_lon(**kwargs):
+ """
+ Create a patch with latitude/longitude coords on distance dim.
+
+ Parameters
+ ----------
+ **kwargs
+ Parameters passed to [`random_patch`](`dascore.examples.random_patch`).
+ """
+ patch = random_patch(**kwargs)
+ dist = patch.coords.get_array("distance")
+ lat = np.arange(0, len(dist)) * 0.001 - 109.857952
+ lon = np.arange(0, len(dist)) * 0.001 + 41.544654
+ # add a single coord
+ out = patch.update_coords(latitude=("distance", lat), longitude=("distance", lon))
+ return out
+
+
@register_func(EXAMPLE_PATCHES, key="wacky_dim_coords_patch")
-def _wacky_dim_coord_patch():
- """Creates a patch with one Monotonic and one Array coord."""
+def wacky_dim_coord_patch():
+ """
+ A patch with one Monotonic and one Array coord.
+ """
shape = (100, 1_000)
# distance is neither monotonic nor evenly sampled.
dist_ar = np.random.random(100) + np.arange(100) * 0.3
# time is monotonic, not evenly sampled.
time_ar = dc.to_datetime64(np.cumsum(np.random.random(1_000)))
- patch = _random_patch(shape=shape, dist_array=dist_ar, time_array=time_ar)
+ patch = random_patch(shape=shape, dist_array=dist_ar, time_array=time_ar)
# check attrs
attrs = patch.attrs
assert pd.isnull(attrs.coords["time"].step)
@@ -109,7 +164,7 @@ def _wacky_dim_coord_patch():
@register_func(EXAMPLE_PATCHES, key="sin_wav")
-def _sin_wave_patch(
+def sin_wave_patch(
sample_rate=44100,
frequency: Sequence[float] | float = 100.0,
time_min="2020-01-01",
@@ -118,16 +173,15 @@ def _sin_wave_patch(
amplitude=10,
):
"""
- Return a Patch composed of simple 1 second sin waves.
-
- This is useful for debugging output to audio formats.
+ A Patch composed of sine waves.
Parameters
----------
sample_rate
The sample rate in Hz.
frequency
- The frequency of the sin wave.
+        The frequency of the sine wave. If a sequence is provided, multiple
+        sine waves will be generated, one at each frequency.
time_min
The start time in the metadata.
channel_count
@@ -155,37 +209,19 @@ def _sin_wave_patch(
return patch
-@register_func(EXAMPLE_PATCHES, key="random_patch_with_lat_lon")
-def _random_patch_lat_lon():
- """Create a patch with latitude/longitude coords on distance dim."""
- random_patch = get_example_patch("random_das")
- dist = random_patch.coords.get_array("distance")
- lat = np.arange(0, len(dist)) * 0.001 - 109.857952
- lon = np.arange(0, len(dist)) * 0.001 + 41.544654
- # add a single coord
- out = random_patch.update_coords(
- latitude=("distance", lat), longitude=("distance", lon)
- )
- return out
-
-
@register_func(EXAMPLE_PATCHES, key="example_event_1")
-def _example_event_1():
+def example_event_1():
"""
- Returns an example of a passive event recorded by DAS.
-
- This event is from @stanvek2022fracture.
+ An induced event recorded on a borehole fiber from @stanvek2022fracture.
"""
path = fetch("example_dasdae_event_1.h5")
return dc.spool(path)[0]
@register_func(EXAMPLE_PATCHES, key="example_event_2")
-def _example_event_2():
+def example_event_2():
"""
- The same as example event 1, but units are set and filtered.
-
- This event is from @stanvek2022fracture.
+ [`example_event_1`](`dascore.examples.example_event_1`) with pre-processing.
"""
path = fetch("example_dasdae_event_1.h5")
patch = dc.spool(path)[0].update_attrs(data_type="strain_rate")
@@ -202,7 +238,7 @@ def _example_event_2():
@register_func(EXAMPLE_PATCHES, key="ricker_moveout")
-def _ricker_moveout(
+def ricker_moveout(
frequency=15,
peak_time=0.25,
duration=1.5,
@@ -213,7 +249,7 @@ def _ricker_moveout(
velocity=100,
):
"""
- Return a patch which has a ricker wavelet. with moveout.
+ A patch of a ricker wavelet with some apparent velocity.
Parameters
----------
@@ -264,12 +300,19 @@ def _ricker(time, delay):
return dc.Patch(data=data, coords=coords, dims=dims)
+@register_func(EXAMPLE_PATCHES, key="dispersion_event")
+def dispersion_event():
+ """
+ A synthetic shot record that exhibits dispersion.
+ """
+ path = fetch("dispersion_event.h5")
+ return dc.spool(path)[0]
+
+
@register_func(EXAMPLE_SPOOLS, key="random_das")
-def _random_spool(
- time_gap=0, length=3, starttime=np.datetime64("2020-01-03"), **kwargs
-):
+def random_spool(time_gap=0, length=3, time_min=np.datetime64("2020-01-03"), **kwargs):
"""
- Generate several random patches in the spool.
+ Several random patches in the spool.
Parameters
----------
@@ -278,43 +321,43 @@ def _random_spool(
number to create overlap.
length
The number of patches to generate.
- starttime
- The starttime of the first patch. Subsequent patches have startimes
- after the endtime of the previous patch, plus the time_gap.
+ time_min
+ The start time of the first patch. Subsequent patches have start times
+ after the end time of the previous patch, plus the time_gap.
**kwargs
- Passed to the [_random_patch](`dasocre.examples._random_patch`) function.
+        Passed to the [`random_patch`](`dascore.examples.random_patch`) function.
"""
out = []
for _ in range(length):
- patch = _random_patch(starttime=starttime, **kwargs)
+ patch = random_patch(time_min=time_min, **kwargs)
out.append(patch)
diff = to_timedelta64(time_gap) + patch.attrs.coords["time"].step
- starttime = patch.attrs["time_max"] + diff
+ time_min = patch.attrs["time_max"] + diff
return dc.spool(out)
@register_func(EXAMPLE_SPOOLS, key="diverse_das")
-def _diverse_spool():
+def diverse_spool():
"""
- Create a spool with a diverse set of patches for testing.
+ A spool with a diverse set of patch metadata for testing.
There are various gaps, tags, station names, etc.
"""
- spool_no_gaps = _random_spool()
- spool_no_gaps_different_network = _random_spool(network="das2")
- spool_big_gaps = _random_spool(time_gap=np.timedelta64(1, "s"), station="big_gaps")
- spool_overlaps = _random_spool(
+ spool_no_gaps = random_spool()
+ spool_no_gaps_different_network = random_spool(network="das2")
+ spool_big_gaps = random_spool(time_gap=np.timedelta64(1, "s"), station="big_gaps")
+ spool_overlaps = random_spool(
time_gap=-np.timedelta64(10, "ms"), station="overlaps"
)
time_step = spool_big_gaps[0].attrs.coords["time"].step
dt = to_timedelta64(time_step / np.timedelta64(1, "s"))
- spool_small_gaps = _random_spool(time_gap=dt, station="smallg")
- spool_way_late = _random_spool(
- length=1, starttime=np.datetime64("2030-01-01"), station="wayout"
+ spool_small_gaps = random_spool(time_gap=dt, station="smallg")
+ spool_way_late = random_spool(
+ length=1, time_min=np.datetime64("2030-01-01"), station="wayout"
)
- spool_new_tag = _random_spool(tag="some_tag", length=1)
- spool_way_early = _random_spool(
- length=1, starttime=np.datetime64("1989-05-04"), station="wayout"
+ spool_new_tag = random_spool(tag="some_tag", length=1)
+ spool_way_early = random_spool(
+ length=1, time_min=np.datetime64("1989-05-04"), station="wayout"
)
all_spools = [
@@ -331,13 +374,6 @@ def _diverse_spool():
return dc.spool([y for x in all_spools for y in x])
-@register_func(EXAMPLE_PATCHES, key="dispersion_event")
-def _dispersion_event():
- """Returns an example of a synthetic shot record that exhibits dispersion."""
- path = fetch("dispersion_event.h5")
- return dc.spool(path)[0]
-
-
def spool_to_directory(spool, path=None, file_format="DASDAE", extention="hdf5"):
"""
Write out each patch in a spool to a directory.
@@ -366,7 +402,16 @@ def get_example_patch(example_name="random_das", **kwargs) -> dc.Patch:
Load an example Patch.
Options are:
- {examples}
+ ```{python}
+ #| echo: false
+ #| output: asis
+ from dascore.examples import EXAMPLE_PATCHES
+
+ from dascore.utils.docs import objs_to_doc_df
+
+ df = objs_to_doc_df(EXAMPLE_PATCHES)
+ print(df.to_markdown(index=False, stralign="center"))
+ ```
Parameters
----------
@@ -377,7 +422,8 @@ def get_example_patch(example_name="random_das", **kwargs) -> dc.Patch:
Raises
------
- UnknownExample if unregistered patch is requested.
+    [`UnknownExampleError`](`dascore.examples.UnknownExampleError`) if
+    an unregistered patch is requested.
"""
if example_name not in EXAMPLE_PATCHES:
msg = (
@@ -393,8 +439,17 @@ def get_example_spool(example_name="random_das", **kwargs) -> dc.BaseSpool:
"""
Load an example Spool.
- Options are:
- {examples}
+ Supported example spools are:
+ ```{python}
+ #| echo: false
+ #| output: asis
+ from dascore.examples import EXAMPLE_SPOOLS
+
+ from dascore.utils.docs import objs_to_doc_df
+
+ df = objs_to_doc_df(EXAMPLE_SPOOLS)
+ print(df.to_markdown(index=False, stralign="center"))
+ ```
Parameters
----------
diff --git a/dascore/io/core.py b/dascore/io/core.py
index dbaa44f3..30e9468b 100644
--- a/dascore/io/core.py
+++ b/dascore/io/core.py
@@ -477,6 +477,9 @@ def read(
"""
Read a fiber file.
+ For most cases, [`dascore.spool`](`dascore.spool`) is preferable to
+ this function.
+
Parameters
----------
path
@@ -492,6 +495,20 @@ def read(
An optional tuple of distances.
*kwargs
All kwargs are passed to the format-specific read functions.
+
+ Notes
+ -----
+ Unlike [`spool`](`dascore.spool`) this function reads the entire file
+ into memory.
+
+ Examples
+ --------
+ >>> import dascore as dc
+ >>> from dascore.utils.downloader import fetch
+ >>>
+ >>> file_path = fetch("prodml_2.1.h5")
+ >>>
+ >>> patch = dc.read(file_path)
"""
with IOResourceManager(path) as man:
if not file_format or not file_version:
@@ -539,6 +556,14 @@ def scan_to_df(
exclude
A sequence of strings to exclude from the analysis.
+ Examples
+ --------
+ >>> import dascore as dc
+ >>> from dascore.utils.downloader import fetch
+ >>>
+ >>> file_path = fetch("prodml_2.1.h5")
+ >>>
+ >>> df = dc.scan_to_df(file_path)
"""
info = scan(
path=path,
@@ -587,6 +612,15 @@ def scan(
-------
A list of [`PatchAttrs`](`dascore.core.attrs.PatchAttrs`) or subclasses
which may have extra fields.
+
+ Examples
+ --------
+ >>> import dascore as dc
+ >>> from dascore.utils.downloader import fetch
+ >>>
+ >>> file_path = fetch("prodml_2.1.h5")
+ >>>
+ >>> attr_list = dc.scan(file_path)
"""
out = []
for patch_source in _iterate_scan_inputs(path):
@@ -644,6 +678,14 @@ def get_format(
------
dascore.exceptions.UnknownFiberFormat - Could not determine the fiber format.
+ Examples
+ --------
+ >>> import dascore as dc
+ >>> from dascore.utils.downloader import fetch
+ >>>
+ >>> file_path = fetch("prodml_2.1.h5")
+ >>>
+ >>> file_format, file_version = dc.get_format(file_path)
"""
with IOResourceManager(path) as man:
path = man.source
@@ -697,11 +739,24 @@ def write(
The string indicating the format to write.
file_version
Optionally specify the version of the file, else use the latest
- version.
+ version for the format.
Raises
------
- dascore.exceptions.UnknownFiberFormat - Could not determine the fiber format.
+    [`UnknownFiberFormatError`](`dascore.exceptions.UnknownFiberFormatError`)
+    - Could not determine the fiber format.
+
+ Examples
+ --------
+ >>> from pathlib import Path
+ >>> import dascore as dc
+ >>>
+ >>> patch = dc.get_example_patch()
+ >>> path = Path("output.h5")
+ >>> _ = dc.write(patch, path, "dasdae")
+ >>>
+ >>> assert path.exists()
+ >>> path.unlink()
"""
formatter = FiberIO.manager.get_fiberio(file_format, file_version)
if not isinstance(patch_or_spool, dc.BaseSpool):
diff --git a/dascore/transform/fft.py b/dascore/transform/fft.py
index d3b5a6f1..428d1deb 100644
--- a/dascore/transform/fft.py
+++ b/dascore/transform/fft.py
@@ -1,7 +1,6 @@
"""
-Module for Fourier transforms.
-
-See the [FFT note](dascore.org/notes/fft_notes.html) details.
+Deprecated module for Fourier transforms. Use
+[fourier](`dascore.transform.fourier`) instead.
"""
from __future__ import annotations
diff --git a/dascore/transform/fourier.py b/dascore/transform/fourier.py
index 5a1a6b2e..7d78df04 100644
--- a/dascore/transform/fourier.py
+++ b/dascore/transform/fourier.py
@@ -1,7 +1,7 @@
"""
Module for Fourier transforms.
-See the [FFT note](dascore.org/notes/fft_notes.html) for discussion on the
+See the [FFT note](/notes/dft_notes.qmd) for discussion on the
implementation.
"""
from __future__ import annotations
diff --git a/dascore/units.py b/dascore/units.py
index fd946bbe..dd34b096 100644
--- a/dascore/units.py
+++ b/dascore/units.py
@@ -38,7 +38,13 @@ def get_registry():
@cache
def get_unit(value) -> Unit:
- """Convert a value to a pint unit."""
+ """
+ Convert a value to a pint unit.
+
+ Usually quantities, generated with
+ [`get_quantity`](`dascore.units.get_quantity`), are easy to work
+ with.
+ """
if isinstance(value, Quantity):
assert value.magnitude == 1.0
value = value.units
@@ -55,7 +61,20 @@ def _str_to_quant(qunat_str):
def get_quantity(value: str_or_none) -> Quantity | None:
- """Convert a value to a pint quantity."""
+ """
+ Convert a value to a pint quantity.
+
+ Parameters
+ ----------
+ value
+ The value to convert to a quantity.
+
+ Examples
+ --------
+ >>> import dascore as dc
+ >>> meters = dc.get_quantity("m")
+ >>> accel = dc.get_quantity("m/s^2")
+ """
value = unbyte(value)
if value is None or value is ... or value == "":
return None
diff --git a/dascore/utils/docs.py b/dascore/utils/docs.py
index 3d900295..f1f05d10 100644
--- a/dascore/utils/docs.py
+++ b/dascore/utils/docs.py
@@ -1,10 +1,17 @@
"""Utilities for documentation."""
from __future__ import annotations
+import inspect
+import os
import textwrap
from collections.abc import Sequence
+from pathlib import Path
from typing import Any
+import pandas as pd
+
+import dascore as dc
+
def format_dtypes(dtype_dict: dict[str, Any]) -> str:
"""
@@ -87,3 +94,22 @@ def _wrap(func):
return func
return _wrap
+
+
+def objs_to_doc_df(doc_dict, cross_reference=True):
+ """
+ Convert a dictionary of documentable entities to a pandas dataframe.
+ """
+ out = {}
+ base = Path(dc.__file__).parent.parent
+ for key, obj in doc_dict.items():
+ if cross_reference:
+ path = Path(inspect.getfile(obj)).relative_to(base)
+ name = obj.__name__
+ address = str(path).replace(".py", "").replace(os.sep, ".")
+ key = f"[`{key}`](`{address + '.' + name}`)"
+ doc = str(getattr(obj, "__func__", obj).__doc__).strip()
+ out[key] = doc.splitlines()[0]
+ df = pd.Series(out).to_frame().reset_index()
+ df.columns = ["Name", "Description"]
+ return df
diff --git a/docs/filters/fill_links.py b/docs/filters/fill_links.py
index 719e8e72..960bcdad 100644
--- a/docs/filters/fill_links.py
+++ b/docs/filters/fill_links.py
@@ -99,6 +99,3 @@ def main(raw_data=None):
if __name__ == "__main__":
main() # uncomment before running quarto so IO works.
-
-
-# this is helpful for debugging while quarto is running.
diff --git a/docs/index.qmd b/docs/index.qmd
index cd857ca0..ea003ce5 100644
--- a/docs/index.qmd
+++ b/docs/index.qmd
@@ -1,24 +1,9 @@
---
-title: DASCore
execute:
warning: false
---
-A python library for distributed fiber optic sensing.
-
-[](https://codecov.io/gh/dasdae/dascore)
-[](https://pypi.python.org/pypi/dascore)
-[](https://pypi.python.org/pypi/dascore)
-[](https://pypi.org/project/dascore/)
-[](https://github.com/conda-forge/dascore-feedstock)
-[](https://zenodo.org/badge/latestdoi/422627477)
-[](https://www.gnu.org/licenses/lgpl.html)
-
-
-[Code](https://github.com/DASDAE/dascore)
-
-Documentation [[stable](https://dascore.org), [development](https://dascore.netlify.app/)]
-
+{{< include ../readme.md >}}
:::{.callout minimal="true"}
Version-specific documentation builds are attached to the [release pages](https://github.com/DASDAE/dascore/releases).
@@ -39,9 +24,13 @@ DASCore is a foundational package of the [DAS Data Analysis Ecosystem (DASDAE)](
# Supported file formats
```{python}
#| echo: false
+#| output: asis
import pandas as pd
from dascore.io.core import FiberIO
-FiberIO.get_supported_io_table().replace(True, value='✅').replace(False, value='❌')
+out_str = FiberIO.get_supported_io_table().replace(True, value='✅').replace(False, value='❌').to_markdown(index=False, stralign="center")
+
+out_str += '\n: {.striped}'
+print(out_str)
```
# Introductory usage
diff --git a/docs/styles.css b/docs/styles.css
index 1289c45b..f80bd3f1 100644
--- a/docs/styles.css
+++ b/docs/styles.css
@@ -1,6 +1,9 @@
/* css styles */
-/* Table stuff */
+body {
+ font-size: 18px;
+}
+
table {
border-collapse: collapse;
border: 2px solid rgb(200, 200, 200);
@@ -12,7 +15,7 @@ table {
}
caption {
- padding: 1px;
+ padding: 10px;
caption-side: top;
color: #666;
text-align: center;
@@ -20,27 +23,32 @@ caption {
font-size: small;
}
-td, th {
- border: 1px solid rgb(190, 190, 190);
+td {
+ border: 0.5px solid rgb(190, 190, 190);
padding: 10px 20px;
+ text-align: left;
+ vertical-align: middle;
+ /*white-space: pre;*/
}
-th {
- background-color: rgb(235, 235, 235);
- text-align: center;
+/*Try not to let first column wrap.*/
+td:first-child {
+ white-space: nowrap;
}
-td {
+th {
+ border: 0.5px solid rgb(190, 190, 190);
+ padding: 10px 20px;
text-align: center;
}
-caption {
- padding: 10px;
-}
+/*This makes every other row striped. */
+
+
-tbody tr:nth-child(even) {
- background-color: rgba(210, 209, 209, 0.4);
+tbody tr:nth-child(odd) {
+ background-color: rgba(210, 209, 209, 0.2);
}
@@ -51,7 +59,7 @@ tbody tr:nth-child(even) {
}
.def_block {
- background-color: rgba(210, 209, 209, 0.4);
+ background-color: rgba(210, 209, 209, 0.3);
margin: 1em;
box-sizing: content-box;
padding-left: 10px;
diff --git a/docs/tutorial/patch.qmd b/docs/tutorial/patch.qmd
index 3b8dd760..6e502abd 100644
--- a/docs/tutorial/patch.qmd
+++ b/docs/tutorial/patch.qmd
@@ -26,15 +26,7 @@ pa1 = dc.get_example_patch("random_das")
pa2 = dc.get_example_patch("example_event_1")
```
-Other supported example patches are:
-```{python}
-#| code-fold: true
-from pprint import pprint
-
-import dascore as dc
-
-pprint(sorted(dc.examples.EXAMPLE_PATCHES))
-```
+See [`get_example_patch`](`dascore.examples.get_example_patch`) for supported patches.
## Load a file
@@ -166,15 +158,24 @@ Most of the other `CoordManager` features are primarily used internally by DASCo
## Attrs
-The metadata stored in [`Patch.attrs`](`dascore.core.attrs.PatchAttrs`) is a [pydantic model](https://docs.pydantic.dev/usage/models/) which enforces a schema and provides validation. `PatchAttrs.get_summary_df` generates a table of the attribute descriptions:
+The metadata stored in [`Patch.attrs`](`dascore.core.attrs.PatchAttrs`) is a [pydantic model](https://docs.pydantic.dev/usage/models/) which enforces a schema and provides validation. [`PatchAttrs.get_summary_df`](`dascore.utils.models.DascoreBaseModel.get_summary_df`) generates a table of the attribute descriptions:
+
```{python}
-#| code-fold: true
+#| echo: false
+
import dascore as dc
+from IPython.display import Markdown
-dc.PatchAttrs.get_summary_df()
+df_str = (
+ dc.PatchAttrs.get_summary_df()
+ .reset_index()
+ .to_markdown(index=False, stralign="center")
+)
+Markdown(df_str)
```
+
Specific data formats may also add attributes (e.g. "gauge_length", "pulse_width"), but this depends on the parser.
## String representation
diff --git a/pyproject.toml b/pyproject.toml
index 634bdeb0..50de5f74 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -68,6 +68,7 @@ extras = [
docs = [
"jinja2",
"ipywidgets",
+ "tabulate",
]
test = [
diff --git a/readme.md b/readme.md
index 83184a1f..e5097806 100644
--- a/readme.md
+++ b/readme.md
@@ -2,8 +2,8 @@
A python library for distributed fiber optic sensing.
-[](https://pypi.python.org/pypi/dascore)
[](https://codecov.io/gh/dasdae/dascore)
+[](https://pypi.python.org/pypi/dascore)
[](https://pypi.python.org/pypi/dascore)
[](https://pypi.org/project/dascore/)
[](https://github.com/conda-forge/dascore-feedstock)
diff --git a/scripts/_render_api.py b/scripts/_render_api.py
index 1f9224d8..80559c23 100644
--- a/scripts/_render_api.py
+++ b/scripts/_render_api.py
@@ -236,7 +236,9 @@ def parse_sections(self, docstr):
return out
def style_parameters(self, param_str):
- """Style the parameters block."""
+ """
+ Style the parameters block.
+ """
lines = param_str.split("\n")
# parameters dont have spaces at the start
param_start = [num for num, x in enumerate(lines) if not x.startswith(" ")]
@@ -245,8 +247,16 @@ def style_parameters(self, param_str):
param_desc = []
for ind_num, ind in enumerate(param_start[:-1]):
key = lines[ind].strip()
- vals = [x.strip() for x in lines[ind + 1 : param_start[ind_num + 1]]]
- param_desc.append((key, "\n".join(vals)))
+ # get the number of indents (usually 4)
+ in_char = (len(lines[1]) - len(lines[1].lstrip())) * " "
+ desc_lines = lines[ind + 1 : param_start[ind_num + 1]]
+ vals = [
+ # strip out the first indentation line
+ (x[len(in_char) :] if x.startswith(in_char) and in_char else x)
+ for x in desc_lines
+ ]
+ # breakpoint()
+            param_desc.append((key, "\n".join(vals)))
table = pd.DataFrame(param_desc, columns=["Parameter", "Description"])
return build_table(table)
diff --git a/scripts/_templates/table.html b/scripts/_templates/table.html
index 4501b3c0..555a3a3e 100644
--- a/scripts/_templates/table.html
+++ b/scripts/_templates/table.html
@@ -1,5 +1,5 @@
{# A template for making tables. #}
-