Fix non-nanosecond casting behavior for expand_dims (#8782)
spencerkclark committed Feb 27, 2024
1 parent dfdd631 commit 2983c53
Showing 4 changed files with 35 additions and 4 deletions.
5 changes: 5 additions & 0 deletions doc/whats-new.rst
@@ -42,6 +42,11 @@ Bug fixes
 - The default ``freq`` parameter in :py:meth:`xr.date_range` and :py:meth:`xr.cftime_range` is
   set to ``'D'`` only if ``periods``, ``start``, or ``end`` are ``None`` (:issue:`8770`, :pull:`8774`).
   By `Roberto Chang <https://github.com/rjavierch>`_.
+- Ensure that non-nanosecond precision :py:class:`numpy.datetime64` and
+  :py:class:`numpy.timedelta64` values are cast to nanosecond precision values
+  when used in :py:meth:`DataArray.expand_dims` and
+  :py:meth:`Dataset.expand_dims` (:pull:`8781`). By `Spencer
+  Clark <https://github.com/spencerkclark>`_.

Documentation
~~~~~~~~~~~~~
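As a minimal sketch of the whats-new entry above (assuming pandas >= 2.0 and this xarray version), a second-precision datetime64 coordinate passed to ``expand_dims`` is now cast to nanosecond precision, with a warning, instead of passing through un-cast; this mirrors the regression test added in this commit:

import numpy as np
import xarray as xr

# Sketch of the documented behavior: the second-precision coordinate value
# is cast to nanosecond precision when the new dimension is created.
ds = xr.Dataset().expand_dims({"time": [np.datetime64("2018-01-01", "s")]})
print(ds.time.dtype)  # datetime64[ns]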
10 changes: 6 additions & 4 deletions xarray/core/variable.py
@@ -218,10 +218,12 @@ def _possibly_convert_datetime_or_timedelta_index(data):
    this in version 2.0.0, in xarray we will need to make sure we are ready to
    handle non-nanosecond precision datetimes or timedeltas in our code
    before allowing such values to pass through unchanged."""
-    if isinstance(data, (pd.DatetimeIndex, pd.TimedeltaIndex)):
-        return _as_nanosecond_precision(data)
-    else:
-        return data
+    if isinstance(data, PandasIndexingAdapter):
+        if isinstance(data.array, (pd.DatetimeIndex, pd.TimedeltaIndex)):
+            data = PandasIndexingAdapter(_as_nanosecond_precision(data.array))
+    elif isinstance(data, (pd.DatetimeIndex, pd.TimedeltaIndex)):
+        data = _as_nanosecond_precision(data)
+    return data


def as_compatible_data(
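The old check only matched a bare pandas index, but ``expand_dims`` hands this helper data that is already wrapped in a ``PandasIndexingAdapter``, so the nanosecond cast was skipped; the new branch inspects the adapter's ``.array``. A minimal sketch of the root cause (``PandasIndexingAdapter`` is internal API, and pandas >= 2.0 is assumed for the second-precision index):

import pandas as pd
from xarray.core.indexing import PandasIndexingAdapter

# The index arrives wrapped, so only its .array attribute is a DatetimeIndex.
wrapped = PandasIndexingAdapter(pd.date_range("2018-01-01", periods=3).astype("datetime64[s]"))
print(isinstance(wrapped, pd.DatetimeIndex))        # False -- old code returned it unchanged
print(isinstance(wrapped.array, pd.DatetimeIndex))  # True  -- new code casts wrapped.array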
8 changes: 8 additions & 0 deletions xarray/tests/test_dataset.py
@@ -60,6 +60,7 @@
    requires_cupy,
    requires_dask,
    requires_numexpr,
+    requires_pandas_version_two,
    requires_pint,
    requires_scipy,
    requires_sparse,
@@ -3446,6 +3447,13 @@ def test_expand_dims_kwargs_python36plus(self) -> None:
        )
        assert_identical(other_way_expected, other_way)

+    @requires_pandas_version_two
+    def test_expand_dims_non_nanosecond_conversion(self) -> None:
+        # Regression test for https://github.com/pydata/xarray/issues/7493#issuecomment-1953091000
+        with pytest.warns(UserWarning, match="non-nanosecond precision"):
+            ds = Dataset().expand_dims({"time": [np.datetime64("2018-01-01", "s")]})
+        assert ds.time.dtype == np.dtype("datetime64[ns]")
+
    def test_set_index(self) -> None:
        expected = create_test_multiindex()
        mindex = expected["x"].to_index()
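The whats-new entry also names ``DataArray.expand_dims``; the committed test exercises the ``Dataset`` path. A hypothetical companion check for the DataArray path (not part of this commit's test suite, assuming pandas >= 2.0) would look like:

import numpy as np
import xarray as xr

# The same casting path is hit when expanding a DataArray with a
# second-precision datetime64 coordinate.
da = xr.DataArray([1.0, 2.0], dims="x")
expanded = da.expand_dims({"time": [np.datetime64("2018-01-01", "s")]})
assert expanded.time.dtype == np.dtype("datetime64[ns]")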
16 changes: 16 additions & 0 deletions xarray/tests/test_variable.py
@@ -3011,3 +3011,19 @@ def test_pandas_two_only_timedelta_conversion_warning() -> None:
    var = Variable(["time"], data)

    assert var.dtype == np.dtype("timedelta64[ns]")


+@requires_pandas_version_two
+@pytest.mark.parametrize(
+    ("index", "dtype"),
+    [
+        (pd.date_range("2000", periods=1), "datetime64"),
+        (pd.timedelta_range("1", periods=1), "timedelta64"),
+    ],
+    ids=lambda x: f"{x}",
+)
+def test_pandas_indexing_adapter_non_nanosecond_conversion(index, dtype) -> None:
+    data = PandasIndexingAdapter(index.astype(f"{dtype}[s]"))
+    with pytest.warns(UserWarning, match="non-nanosecond precision"):
+        var = Variable(["time"], data)
+    assert var.dtype == np.dtype(f"{dtype}[ns]")
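Both new tests carry the ``requires_pandas_version_two`` marker because only pandas >= 2.0 can represent a non-nanosecond resolution in a ``DatetimeIndex`` or ``TimedeltaIndex``; on older pandas the second-precision input cannot even be constructed. A small sketch of that assumption:

import pandas as pd

# date_range always yields nanosecond precision; casting it down to seconds
# is only available on pandas >= 2.0.
idx = pd.date_range("2000", periods=1)
print(idx.dtype)                          # datetime64[ns]
print(idx.astype("datetime64[s]").dtype)  # datetime64[s] on pandas >= 2.0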
