Commit

fix typos, add to docs
d-chambers committed Nov 6, 2024
1 parent cc99802 commit 2746f5e
Showing 2 changed files with 20 additions and 31 deletions.
29 changes: 5 additions & 24 deletions dascore/proc/aggregate.py
@@ -4,7 +4,6 @@

from collections.abc import Callable, Sequence
from functools import partial
- from typing import Literal

import numpy as np

@@ -43,8 +42,11 @@
dim_reduce
How to reduce the dimensional coordinate associated with the
aggregated axis. Can be the name of any valid aggregator, a callable,
or "empty" - which returns and empty coord, or "squeeze" which drops
the coordinate.
"empty" - which returns and empty coord, or "squeeze" which drops
the coordinate. For dimensions with datetime or timedelta datatypes,
if the operation fails it will automatically be applied to the
coordinates converted to floats then the output converted back
to the appropriate time type.
"""

AGG_NOTES = """
@@ -54,27 +56,6 @@
and more details.
"""

COORD_MODE_DOC_STR = """
coord_mode
Controls the behavior of the aggregated coordinate.
Options are:
empty - empty the coordinate values but keep it in output.
squeeze - remove the aggregated dimension.
min - keep the min value of the aggregated dimension.
mean - keep the mean value of the aggregated dimension.
If the operation fails and the coords are a time-type it will be tried
again converting to, then back, from floats. This can cause a small
loss of precision.
"""

_COORD_MODE_TYPE_HINT = Literal[
"empty",
"squeeze",
"min",
"mean",
"max",
]


def _get_new_coord(coord, dim_reduce):
"""Get the new coordinate."""
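The `dim_reduce` options documented above can be illustrated roughly as follows. This is a minimal sketch, not code from the commit: it assumes `dc.get_example_patch()` and that the aggregation method accepts `dim`, `method`, and `dim_reduce` keywords as the updated docstring suggests.

```python
# Minimal sketch of the dim_reduce behavior documented above.
# Assumes dascore's example patch and an `aggregate` method taking
# `dim`, `method`, and `dim_reduce` keywords; keyword names are assumptions.
import dascore as dc

patch = dc.get_example_patch()

# Keep the mean of the collapsed "time" coordinate.
agg_mean = patch.aggregate(dim="time", method="mean", dim_reduce="mean")

# "empty" keeps the aggregated coordinate but empties its values;
# "squeeze" drops the aggregated coordinate entirely.
agg_empty = patch.aggregate(dim="time", method="mean", dim_reduce="empty")
agg_squeezed = patch.aggregate(dim="time", method="mean", dim_reduce="squeeze")

# "time" is a datetime coordinate, so aggregators such as "mean" are applied
# to float-converted values and the result is converted back to a time type.
```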
22 changes: 15 additions & 7 deletions dascore/utils/patch.py
@@ -201,8 +201,8 @@ def patch_function(
None - Function call is not recorded in history attribute.
validate_call
If True, use pydantic to validate the function call. This can save
- quite a lot of code in validation checks, does have some overhead.
- See: https://docs.pydantic.dev/latest/api/validate_call/.
+ quite a lot of code in validation checks, but does have some overhead.
+ See [validate_call](https://docs.pydantic.dev/latest/api/validate_call/).
Examples
--------
@@ -221,7 +221,7 @@ def patch_function(
... # called "data_type" or its values is not equal to "DAS".
>>>
>>> # 3. A patch method which does type checking on inputs.
- >>> # The `Field` allows enforces various properties (like ranges)
+ >>> # The `Field` instance can require various data properties (like ranges)
>>> from typing_extensions import Annotated, Literal
>>> from pydantic import Field
>>> @dc.patch_function(validate_call=True)
@@ -230,7 +230,7 @@ def patch_function(
... int_le_10_ge_1: int = Field(ge=1, le=10, default=1),
... option: Literal["min", "max", None] = None,
... ):
- ... pass
+ ... ...
Notes
-----
@@ -264,9 +264,10 @@ def _func(patch, *args, **kwargs):
out = out.update_attrs(history=hist)
return out

- # attach original function
+ # attach original function. Although we want to encourage raw_function
+ # for consistency with pydantic, we leave this to not break old code.
_func.func = getattr(func, "raw_function", func)
- # matches pydantic behavior.
+ # matches pydantic naming.
_func.raw_function = getattr(func, "raw_function", func)
_func.__wrapped__ = func

@@ -486,7 +487,14 @@ def get_dim_axis_value(
allow_multiple
If True, allow multiple dimensions to be selected.
allow_extra
- If True, do not raise if extra kwargs found.
+ If True, do not raise an error if extra args or kwargs are found.
+ Returns
+ -------
+ Returns a tuple of:
+ ((dim, axis, value), (dim, axis, value), ...)
+ to support retrieving multiple values from the same inputs. If a dim name
+ is found in args, its corresponding value is `None`.
Examples
--------
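To show how the `validate_call` and `raw_function` pieces above fit together, here is a small usage sketch based on the doctest in this diff. The exact error and calling behavior come from pydantic's `validate_call` and are assumptions, not something demonstrated in the commit itself.

```python
# Usage sketch for patch_function(validate_call=True), based on the doctest
# above; the ValidationError behavior comes from pydantic, not this commit.
import dascore as dc
from pydantic import Field, ValidationError
from typing_extensions import Literal


@dc.patch_function(validate_call=True)
def a_func(
    patch,
    int_le_10_ge_1: int = Field(ge=1, le=10, default=1),
    option: Literal["min", "max", None] = None,
):
    return patch


patch = dc.get_example_patch()
a_func(patch, int_le_10_ge_1=5)  # in range, validation passes

try:
    a_func(patch, int_le_10_ge_1=50)  # out of range, rejected by pydantic
except ValidationError as exc:
    print(exc)

# The undecorated function stays reachable via raw_function (see the diff).
original = a_func.raw_function
```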
