rename patch.new to patch.update
d-chambers committed Jan 4, 2024
1 parent c9e9e99 commit 51a411e
Showing 16 changed files with 67 additions and 38 deletions.
10 changes: 6 additions & 4 deletions dascore/core/attrs.py
@@ -456,10 +456,12 @@ def merge_compatible_coords_attrs(
Merge the coordinates and attributes of patches or raise if incompatible.
The rules for compatibility are:
- All attrs must be equal other than history.
- Patches must share the same dimensions, in the same order
- All dimensional coordinates must be strictly equal
- If patches share a non-dimensional coordinate they must be equal.
- All attrs must be equal other than history.
- Patches must share the same dimensions, in the same order
- All dimensional coordinates must be strictly equal
- If patches share a non-dimensional coordinate they must be equal.
Any coordinates or attributes contained by a single patch will be included
in the output.
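A minimal sketch of how these rules might be exercised, assuming `merge_compatible_coords_attrs` accepts two patches and returns the merged coordinates and attributes (the call signature and return value are not shown in this diff and are assumptions here):

```python
import dascore as dc
from dascore.core.attrs import merge_compatible_coords_attrs

# Two patches that differ only in history satisfy the rules above.
pa1 = dc.get_example_patch()
pa2 = pa1.update_attrs(history=["an_extra_processing_step"])

# Hypothetical usage: the merge succeeds here, but would raise if, e.g.,
# the patches had unequal dimensional coordinates.
coords, attrs = merge_compatible_coords_attrs(pa1, pa2)
```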
4 changes: 3 additions & 1 deletion dascore/core/patch.py
@@ -175,7 +175,9 @@ def size(self) -> tuple[int, ...]:

# --- basic patch functionality.

new = dascore.proc.new
update = dascore.proc.update
# Before 0.1.0 update was called new, this is for backwards compatibility.
new = dascore.proc.update
equals = dascore.proc.equals
update_attrs = dascore.proc.update_attrs
assert_has_coords = dascore.proc.assert_has_coords
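Because `new` is now simply bound to `dascore.proc.update`, both spellings should produce the same result; a short sketch of what the backwards-compatible alias implies (using `dc.get_example_patch` purely for illustration):

```python
import dascore as dc

pa = dc.get_example_patch()

# Both calls route through dascore.proc.update, so the results are equivalent.
updated = pa.update(data=pa.data * 2)
legacy = pa.new(data=pa.data * 2)  # pre-0.1.0 spelling kept for compatibility
assert updated.equals(legacy)
```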
2 changes: 1 addition & 1 deletion dascore/proc/basic.py
@@ -139,7 +139,7 @@ def equals(self: PatchType, other: Any, only_required_attrs=True) -> bool:
return np.equal(self.data, other.data).all()


def new(
def update(
self: PatchType,
data: ArrayLike | np.ndarray | None = None,
coords: None | dict[str | Sequence[str], ArrayLike] | CoordManager = None,
4 changes: 2 additions & 2 deletions dascore/proc/rolling.py
@@ -112,7 +112,7 @@ def apply(self, function):
out = self._pad_roll_array(raw)
new_coords = self.get_coords()
attrs = self._get_attrs_with_apply_history(function)
return self.patch.new(data=out, coords=new_coords, attrs=attrs)
return self.patch.update(data=out, coords=new_coords, attrs=attrs)

def mean(self):
"""Apply mean to moving window."""
@@ -168,7 +168,7 @@ def _repack_patch(self, df, attrs=None):
if len(data.shape) != len(self.patch.data.shape):
data = np.squeeze(data)
coords = self.get_coords()
return self.patch.new(data=data, coords=coords, attrs=attrs)
return self.patch.update(data=data, coords=coords, attrs=attrs)

def _call_rolling_func(self, name, *args, **kwargs):
"""Helper function for calling a rolling function."""
2 changes: 1 addition & 1 deletion dascore/transform/strain.py
@@ -39,4 +39,4 @@ def velocity_to_strain_rate(
new_attrs = patch.attrs.update(
data_type="strain_rate", gauge_length=step * gauge_multiple
)
return patch.new(attrs=new_attrs)
return patch.update(attrs=new_attrs)
45 changes: 37 additions & 8 deletions dascore/utils/time.py
@@ -24,13 +24,28 @@ def to_datetime64(obj: timeable_types | np.ndarray):
This function accepts a wide range of inputs and returns something
of the same shape, but converted to numpy's datetime64 representation.
Parameters
----------
obj
An object to convert to a datetime64. If a string is passed, it
should conform to [ISO 8601](https://en.wikipedia.org/wiki/ISO_8601).
Floats and integers are interpreted as seconds from Jan 1st, 1970.
Arrays and Series of floats or strings are also supported.
Examples
--------
>>> # Convert an [iso 8601](https://en.wikipedia.org/wiki/ISO_8601) string
>>> import numpy as np
>>> import dascore as dc
>>> time = dc.to_datetime64('2017-09-17T12:11:01.23212')
>>> # Convert a timestamp (float)
>>> dt = dc.to_datetime64(631152000.0)
>>>
>>> # Convert an iso 8601 string to datetime64
>>> dt_1 = dc.to_datetime64('2017-09-17T12:11:01.23212')
>>>
>>> # Convert a time stamp (seconds from 1970) to datetime64
>>> dt_2 = dc.to_datetime64(631152000.0)
>>>
>>> # Convert an array of time stamps to datetime64
>>> timestamp_array = np.random.uniform(1704400000, 1704900000, size=10)
>>> dt_array = dc.to_datetime64(timestamp_array)
"""
if pd.isnull(obj):
return np.datetime64("NaT")
@@ -124,13 +139,27 @@ def to_timedelta64(obj: float | np.ndarray | str | timedelta):
This function accepts a wide range of inputs and returns something
of the same shape, but converted to numpy's timedelta64 representation.
Parameters
----------
obj
An object to convert to timedelta64. Can be a float, str or array of
such. Floats are interpreted as seconds and strings must conform to
the output style of timedeltas (e.g. str(time_delta)).
Examples
--------
>>> # Convert a float to seconds
>>> import dascore as dc
>>> d_time_1 = dc.to_timedelta64(10.1232)
>>> # also works on negative numbers
>>> d_time_2 = dc.to_datetime64(-10.5)
>>>
>>> # Convert a float to timedelta64 representing seconds.
>>> td_1 = dc.to_timedelta64(10.1232)
>>>
>>> # This also works on negative numbers.
>>> td_2 = dc.to_timedelta64(-10.5)
>>>
>>> # Convert a string to timedelta64
>>> td_str = "1000000000 nanoseconds"
>>> td_3 = dc.to_timedelta64(td_str)
"""
if pd.isnull(obj):
return np.timedelta64("NaT")
5 changes: 1 addition & 4 deletions docs/styles.css
@@ -44,11 +44,8 @@ th {


/*This makes every other row striped. */



tbody tr:nth-child(odd) {
background-color: rgba(210, 209, 209, 0.2);
background-color: rgba(210, 209, 209, 0.35);
}


13 changes: 6 additions & 7 deletions docs/tutorial/patch.qmd
@@ -282,24 +282,24 @@ patch.viz.waterfall(show=True, scale=0.2);

Because patches should be treated as immutable objects, they can't be modified with normal attribute assignment. However, DASCore provides several methods that return new patches with modifications.
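For example, direct assignment is rejected (a small sketch; the exact exception type raised is an assumption and may differ):

```{python}
import dascore as dc
import numpy as np

pa = dc.get_example_patch()
try:
    # Direct mutation is not the supported way to change a patch.
    pa.data = np.zeros_like(pa.data)
except Exception as exc:
    print(f"Assignment rejected: {type(exc).__name__}")
```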

## New
## Update

[`Patch.new`](`dascore.core.patch.Patch.new`) uses the Patch as a template and returns a new Patch with one or more aspects modified.
[`Patch.update`](`dascore.core.patch.Patch.update`) uses the `Patch` instance as a template and returns a new `Patch` instance with one or more aspects modified.

```{python}
import dascore as dc
pa = dc.get_example_patch()
# Create a copy of patch with new data but coords and attrs stay the same.
new_data_patch = pa.new(data=pa.data * 10)
new_data_patch = pa.update(data=pa.data * 10)
# Completely replace the attributes.
new_data_patch = pa.new(attrs=dict(station="TMU"))
new_data_patch = pa.update(attrs=dict(station="TMU"))
```

## Update attrs

[`Patch.update_attrs`](`dascore.core.patch.Patch.update_attrs`) is for making changes to the attrs (metadata) while keeping the unaffected metadata (`Patch.new` completely replaces the old attrs).
[`Patch.update_attrs`](`dascore.core.patch.Patch.update_attrs`) is for making changes to the attrs (metadata) while keeping the unaffected metadata (`Patch.update` would completely replace the old attrs).

```{python}
import dascore as dc
@@ -351,7 +351,6 @@ new_time = pa.coords.min('time') + one_second
new = pa.update_coords(time_min=new_time)
```


### Adding coordinates

Commonly, additional coordinates, such as latitude/longitude, are attached to a particular dimension such as distance. It is also possible to include coordinates that are not associated with any dimensions.
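As a sketch of both cases (the `("distance", values)` form for attaching a coordinate to a dimension is assumed here; the detached `(None, values)` form appears in the diff below):

```{python}
import dascore as dc
import numpy as np

patch = dc.get_example_patch()
dist = patch.coords.get_array("distance")
lat = np.linspace(40.0, 40.1, len(dist))

# Attach latitude to the distance dimension (assumed ("dim", values) form).
patch_with_lat = patch.update_coords(latitude=("distance", lat))

# A coordinate not associated with any dimension uses None for the dimension.
patch_detached_lat = patch.update_coords(latitude=(None, lat))
```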
@@ -406,7 +405,7 @@ patch_detached_lat = patch.update_coords(latitude=(None, lat))

## Dropping coordinates

Non-dimensional coordinates can be dropped using `Patch.drop_coords`. Dimensional coordinates, however, cannot be dropped doing so would force the patch data to become degenerate.
Non-dimensional coordinates can be dropped using [`Patch.drop_coords`](`dascore.proc.coords.drop_coords`). Dimensional coordinates, however, cannot be dropped because doing so would force the patch data to become degenerate.

```{python}
import dascore as dc
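A brief sketch of the drop, reusing the detached `latitude` coordinate from the previous section and assuming `drop_coords` takes coordinate names as positional arguments:

```{python}
import dascore as dc
import numpy as np

patch = dc.get_example_patch()
lat = np.linspace(40.0, 40.1, len(patch.coords.get_array("distance")))
patch_with_lat = patch.update_coords(latitude=(None, lat))

# Drop the non-dimensional coordinate; dimensional coordinates cannot be
# dropped because that would leave the patch data degenerate.
dropped = patch_with_lat.drop_coords("latitude")
assert "latitude" not in dropped.coords.coord_map
```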
6 changes: 3 additions & 3 deletions tests/test_core/test_patch.py
@@ -218,7 +218,7 @@ def test_erase_history(self, random_patch):
assert patch.attrs.history
new_attrs = dict(patch.attrs)
new_attrs["history"] = []
new_patch = patch.new(attrs=new_attrs)
new_patch = patch.update(attrs=new_attrs)
assert not new_patch.attrs.history

def test_new_coord_dict_order(self, random_patch):
@@ -238,7 +238,7 @@ def test_new_coord_dict_order(self, random_patch):
def test_attrs_preserved_when_not_specified(self, random_patch):
"""If attrs is not passed to new, old attrs should remain."""
pa = random_patch.update_attrs(network="bob", tag="2", station="10")
new_1 = pa.new(data=pa.data * 10)
new_1 = pa.update(data=pa.data * 10)
assert new_1.attrs == pa.attrs

def test_new_dims_renames_dims(self, random_patch):
@@ -368,7 +368,7 @@ def test_one_coord_not_equal(self, wacky_dim_patch):
coord_array[20:30] *= 0.9
assert not np.allclose(coord_array, coords.get_array("distance"))
new_patch = patch.update_coords(distance=coord_array)
new = patch.new(coords=new_patch.coords)
new = patch.update(coords=new_patch.coords)
assert new != patch

def test_other_types(self, random_patch):
2 changes: 1 addition & 1 deletion tests/test_core/test_spool.py
@@ -484,7 +484,7 @@ def test_changed_memory_spool(self, random_patch):
# create new patch with cleared history
new_attrs = dict(patch.attrs)
new_attrs["history"] = []
new_patch = patch.new(attrs=new_attrs)
new_patch = patch.update(attrs=new_attrs)
assert not new_patch.attrs.history
# add new patch (w/ no history) to spool, get first patch out.
spool = dc.spool([new_patch])
2 changes: 1 addition & 1 deletion tests/test_proc/test_basic.py
@@ -331,7 +331,7 @@ def patch_3d_with_null(self, range_patch_3d):
nans = [(1, 1, 1), (0, 9, 9)]
for nan_ind in nans:
data[nan_ind] = np.NaN
patch = range_patch_3d.new(data=data)
patch = range_patch_3d.update(data=data)
return patch

def test_drop_time_any(self, patch_with_null):
2 changes: 1 addition & 1 deletion tests/test_proc/test_correlate.py
@@ -27,7 +27,7 @@ def corr_patch(self):
time_axis = patch.dims.index("time")
data = patch.data
norm = np.linalg.norm(data, axis=time_axis, keepdims=True)
return patch.new(data=data / norm)
return patch.update(data=data / norm)

@pytest.fixture(scope="session")
def ricker_moveout_patch(self):
2 changes: 1 addition & 1 deletion tests/test_proc/test_proc_coords.py
@@ -42,7 +42,7 @@ def test_data_sorted_correctly(self, wacky_dim_patch):
for dim, array, ind in zip(dims, array_list, (1, 0)):
coord = patch.coords.coord_map[dim]
arg_sort = np.argsort(coord.values)
new = patch.new(data=array).sort_coords(dim)
new = patch.update(data=array).sort_coords(dim)
data_along_slice = np.take(new.data, 0, ind)
assert np.all(np.equal(arg_sort, data_along_slice))

2 changes: 1 addition & 1 deletion tests/test_proc/test_taper.py
@@ -22,7 +22,7 @@ def patch_ones(random_patch):
def time_tapered_patch(request, patch_ones):
"""Return a tapered trace."""
# first get a patch with all ones for easy testing
patch = patch_ones.new(data=np.ones_like(patch_ones.data))
patch = patch_ones.update(data=np.ones_like(patch_ones.data))
out = taper(patch, time=0.05, window_type=request.param)
return out

2 changes: 1 addition & 1 deletion tests/test_transform/test_differentiate.py
@@ -74,7 +74,7 @@ def test_all_axis(self, random_patch):

def test_uneven_spacing(self, wacky_dim_patch):
"""Ensure we can diff over uneven dimensions."""
patch = wacky_dim_patch.new(data=np.ones_like(wacky_dim_patch.data))
patch = wacky_dim_patch.update(data=np.ones_like(wacky_dim_patch.data))
out = differentiate(patch, "time")
# very occasionally, numpy outputs a few nan values from grad when
# coordinate spacing is provided. I am still trying to figure out
2 changes: 1 addition & 1 deletion tests/test_viz/test_waterfall.py
@@ -40,7 +40,7 @@ def patch_random_start(event_patch_1):
coords["time"] = time + random_starttime
attrs["time_min"] = coords["time"].min()
attrs["time_max"] = coords["time"].max()
patch = event_patch_1.new(attrs=attrs, coords=coords)
patch = event_patch_1.update(attrs=attrs, coords=coords)
return patch

