Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Modernize type annotations #21

Merged
merged 2 commits into from
Aug 21, 2023
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
62 changes: 32 additions & 30 deletions src/tophu/io.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,11 @@
from __future__ import annotations

import mmap
import os
import textwrap
from dataclasses import dataclass
from pathlib import Path
from typing import Optional, Protocol, Tuple, Union, overload, runtime_checkable
from typing import Protocol, overload, runtime_checkable

import h5py
import numpy as np
Expand Down Expand Up @@ -39,13 +41,13 @@ class DatasetReader(Protocol):
dtype: np.dtype
"""numpy.dtype : Data-type of the array's elements."""

shape: Tuple[int, ...]
shape: tuple[int, ...]
"""tuple of int : Tuple of array dimensions."""

ndim: int
"""int : Number of array dimensions."""

def __getitem__(self, key: Tuple[slice, ...], /) -> ArrayLike:
def __getitem__(self, key: tuple[slice, ...], /) -> ArrayLike:
"""Read a block of data."""
...

Expand All @@ -68,18 +70,18 @@ class DatasetWriter(Protocol):
dtype: np.dtype
"""numpy.dtype : Data-type of the array's elements."""

shape: Tuple[int, ...]
shape: tuple[int, ...]
"""tuple of int : Tuple of array dimensions."""

ndim: int
"""int : Number of array dimensions."""

def __setitem__(self, key: Tuple[slice, ...], value: np.ndarray, /) -> None:
def __setitem__(self, key: tuple[slice, ...], value: np.ndarray, /) -> None:
"""Write a block of data."""
...


def _create_or_extend_file(filepath: Union[str, os.PathLike], size: int) -> None:
def _create_or_extend_file(filepath: str | os.PathLike, size: int) -> None:
"""
Create a file with the specified size or extend an existing file to the same size.

Expand Down Expand Up @@ -126,13 +128,13 @@ class BinaryFile(DatasetReader, DatasetWriter):
filepath: Path
"""pathlib.Path : The file path."""

shape: Tuple[int, ...]
shape: tuple[int, ...]
dtype: np.dtype

def __init__(
self,
filepath: Union[str, os.PathLike],
shape: Tuple[int, ...],
filepath: str | os.PathLike,
shape: tuple[int, ...],
dtype: DTypeLike,
):
"""
Expand Down Expand Up @@ -175,7 +177,7 @@ def ndim(self) -> int: # type: ignore[override]
def __array__(self) -> np.ndarray:
return self[:,]

def __getitem__(self, key: Tuple[slice, ...], /) -> np.ndarray:
def __getitem__(self, key: tuple[slice, ...], /) -> np.ndarray:
with self.filepath.open("rb") as f:
# Memory-map the entire file.
with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) as mm:
Expand All @@ -187,7 +189,7 @@ def __getitem__(self, key: Tuple[slice, ...], /) -> np.ndarray:
del arr
return data

def __setitem__(self, key: Tuple[slice, ...], value: np.ndarray, /) -> None:
def __setitem__(self, key: tuple[slice, ...], value: np.ndarray, /) -> None:
with self.filepath.open("r+b") as f:
# Memory-map the entire file.
with mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_WRITE) as mm:
Expand Down Expand Up @@ -224,16 +226,16 @@ class HDF5Dataset(DatasetReader, DatasetWriter):
datapath: str
"""str : The path to the dataset within the file."""

chunks: Optional[Tuple[int, ...]]
chunks: tuple[int, ...] | None
"""
tuple of int : Tuple giving the chunk shape, or None if chunked storage is not used.
"""

shape: Tuple[int, ...]
shape: tuple[int, ...]
dtype: np.dtype

@overload
def __init__(self, filepath: Union[str, os.PathLike], datapath: str): # noqa: D418
def __init__(self, filepath: str | os.PathLike, datapath: str): # noqa: D418
"""
Construct a new `HDF5Dataset` object from an existing dataset.

Expand All @@ -249,9 +251,9 @@ def __init__(self, filepath: Union[str, os.PathLike], datapath: str): # noqa: D
@overload
def __init__(
self,
filepath: Union[str, os.PathLike],
filepath: str | os.PathLike,
datapath: str,
shape: Tuple[int, ...],
shape: tuple[int, ...],
dtype: DTypeLike,
**kwargs,
): # noqa: D418
Expand Down Expand Up @@ -340,19 +342,19 @@ def ndim(self) -> int: # type: ignore[override]
def __array__(self) -> np.ndarray:
return self[:,]

def __getitem__(self, key: Tuple[slice, ...], /) -> np.ndarray:
def __getitem__(self, key: tuple[slice, ...], /) -> np.ndarray:
    """Read a block of data from the HDF5 dataset at `datapath`."""
    # Open read-only for the duration of the access; the file handle is
    # released as soon as the block has been read.
    with h5py.File(self.filepath, "r") as hf:
        return hf[self.datapath][key]

def __setitem__(self, key: Tuple[slice, ...], value: np.ndarray, /) -> None:
def __setitem__(self, key: tuple[slice, ...], value: np.ndarray, /) -> None:
    """Write a block of data to the HDF5 dataset at `datapath`."""
    # Open in read/write mode ("r+": file must already exist); the handle is
    # closed as soon as the write completes.
    with h5py.File(self.filepath, "r+") as hf:
        hf[self.datapath][key] = value


def _check_contains_single_band(
dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter]
dataset: rasterio.io.DatasetReader | rasterio.io.DatasetWriter,
) -> None:
"""
Validate that the supplied dataset contains a single raster band.
Expand All @@ -374,7 +376,7 @@ def _check_contains_single_band(


def _check_valid_band(
dataset: Union[rasterio.io.DatasetReader, rasterio.io.DatasetWriter],
dataset: rasterio.io.DatasetReader | rasterio.io.DatasetWriter,
band: int,
) -> None:
"""
Expand Down Expand Up @@ -438,18 +440,18 @@ class RasterBand(DatasetReader, DatasetWriter):
coordinate reference system.
"""

shape: Tuple[int, int]
shape: tuple[int, int]
dtype: np.dtype

# TODO: `chunks` & `nodata` attributes

@overload
def __init__(
self,
filepath: Union[str, os.PathLike],
filepath: str | os.PathLike,
*,
band: Optional[int] = None,
driver: Optional[str] = None,
band: int | None = None,
driver: str | None = None,
): # noqa: D418
"""
Construct a new `RasterBand` object.
Expand All @@ -470,14 +472,14 @@ def __init__(
@overload
def __init__(
self,
filepath: Union[str, os.PathLike],
filepath: str | os.PathLike,
width: int,
height: int,
dtype: DTypeLike,
*,
driver: Optional[str] = None,
crs: Optional[Union[str, dict, rasterio.crs.CRS]] = None,
transform: Optional[rasterio.transform.Affine] = None,
driver: str | None = None,
crs: str | dict | rasterio.crs.CRS | None = None,
transform: rasterio.transform.Affine | None = None,
): # noqa: D418
"""
Construct a new `RasterBand` object.
Expand Down Expand Up @@ -595,7 +597,7 @@ def ndim(self) -> int: # type: ignore[override]
def __array__(self) -> np.ndarray:
return self[:, :]

def __getitem__(self, key: Tuple[slice, ...], /) -> np.ndarray:
def __getitem__(self, key: tuple[slice, ...], /) -> np.ndarray:
with rasterio.io.DatasetReader(
self.filepath,
driver=self.driver,
Expand All @@ -607,7 +609,7 @@ def __getitem__(self, key: Tuple[slice, ...], /) -> np.ndarray:
)
return dataset.read(self.band, window=window)

def __setitem__(self, key: Tuple[slice, ...], value: np.ndarray, /) -> None:
def __setitem__(self, key: tuple[slice, ...], value: np.ndarray, /) -> None:
with rasterio.io.DatasetWriter(
self.filepath,
"r+",
Expand Down
10 changes: 5 additions & 5 deletions src/tophu/multilook.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
from __future__ import annotations

import warnings
from typing import Iterable, SupportsInt, Tuple, Union, cast
from collections.abc import Iterable
from typing import Tuple, cast

import dask.array as da
import numpy as np
Expand All @@ -11,10 +14,7 @@
]


IntOrInts = Union[SupportsInt, Iterable[SupportsInt]]


def multilook(arr: da.Array, nlooks: IntOrInts) -> da.Array:
def multilook(arr: da.Array, nlooks: int | Iterable[int]) -> da.Array:
"""
Multilook an array by simple averaging.

Expand Down
25 changes: 13 additions & 12 deletions src/tophu/multiscale.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
from __future__ import annotations

import warnings
from typing import Optional, Tuple

import dask.array as da
import numpy as np
Expand All @@ -20,7 +21,7 @@

def lowpass_filter_and_multilook(
arr: da.Array,
downsample_factor: Tuple[int, int],
downsample_factor: tuple[int, int],
*,
shape_factor: float = 1.5,
overhang: float = 0.5,
Expand Down Expand Up @@ -204,14 +205,14 @@ def coarse_unwrap(
coherence: da.Array,
nlooks: float,
unwrap: UnwrapCallback,
downsample_factor: Tuple[int, int],
downsample_factor: tuple[int, int],
*,
do_lowpass_filter: bool = True,
shape_factor: float = 1.5,
overhang: float = 0.5,
ripple: float = 0.01,
attenuation: float = 40.0,
) -> Tuple[da.Array, da.Array]:
) -> tuple[da.Array, da.Array]:
"""
Estimate coarse unwrapped phase by unwrapping a downsampled interferogram.

Expand Down Expand Up @@ -429,14 +430,14 @@ def _multiscale_unwrap(
coherence: da.Array,
nlooks: float,
unwrap: UnwrapCallback,
downsample_factor: Tuple[int, int],
downsample_factor: tuple[int, int],
*,
do_lowpass_filter: bool = True,
shape_factor: float = 1.5,
overhang: float = 0.5,
ripple: float = 0.01,
attenuation: float = 40.0,
) -> Tuple[da.Array, da.Array]:
) -> tuple[da.Array, da.Array]:
"""
Perform 2-D phase unwrapping using a multi-resolution approach.

Expand Down Expand Up @@ -557,10 +558,10 @@ def _multiscale_unwrap(


def get_tile_dims(
shape: Tuple[int, ...],
ntiles: Tuple[int, ...],
snap_to: Optional[Tuple[int, ...]] = None,
) -> Tuple[int, ...]:
shape: tuple[int, ...],
ntiles: tuple[int, ...],
snap_to: tuple[int, ...] | None = None,
) -> tuple[int, ...]:
"""
Get tile dimensions of an array partitioned into tiles.

Expand Down Expand Up @@ -622,8 +623,8 @@ def multiscale_unwrap(
coherence: DatasetReader,
nlooks: float,
unwrap: UnwrapCallback,
downsample_factor: Tuple[int, int],
ntiles: Tuple[int, int],
downsample_factor: tuple[int, int],
ntiles: tuple[int, int],
*,
do_lowpass_filter: bool = True,
shape_factor: float = 1.5,
Expand Down
28 changes: 13 additions & 15 deletions src/tophu/tile.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,7 @@
from __future__ import annotations

import itertools
from typing import Iterable, Iterator, Optional, SupportsInt, Tuple, Union
from collections.abc import Iterable, Iterator

import numpy as np

Expand All @@ -10,10 +12,6 @@
]


IntOrInts = Union[SupportsInt, Iterable[SupportsInt]]
NDSlice = Tuple[slice, ...]


class TiledPartition:
"""
A partitioning of an N-dimensional array into tiles.
Expand All @@ -31,10 +29,10 @@ class TiledPartition:

def __init__(
self,
shape: IntOrInts,
ntiles: IntOrInts,
overlap: Optional[IntOrInts] = None,
snap_to: Optional[IntOrInts] = None,
shape: int | Iterable[int],
ntiles: int | Iterable[int],
overlap: int | Iterable[int] | None = None,
snap_to: int | Iterable[int] | None = None,
):
"""
Construct a new `TiledPartition` object.
Expand Down Expand Up @@ -111,32 +109,32 @@ def __init__(
self._tiledims = tiledims

@property
def ntiles(self) -> Tuple[int, ...]:
def ntiles(self) -> tuple[int, ...]:
    """tuple of int : Number of tiles along each array axis."""
    # NOTE(review): presumably assigned during __init__ (not visible in this
    # view); returned as-is rather than copied.
    return self._ntiles

@property
def tiledims(self) -> Tuple[int, ...]:
def tiledims(self) -> tuple[int, ...]:
    """
    tuple of int : Shape of a typical tile. The last tile along each axis may be
    smaller.
    """
    # The tuple() conversion returns a plain tuple and decouples the result
    # from the internal `_tiledims` buffer (likely a NumPy array — see the
    # elementwise arithmetic in `overlap`), so callers cannot mutate state.
    return tuple(self._tiledims)

@property
def strides(self) -> Tuple[int, ...]:
def strides(self) -> tuple[int, ...]:
    """
    tuple of int : Step size between the start of adjacent tiles along each
    axis.
    """
    # Convert to a plain tuple so the internal `_strides` buffer is not
    # exposed to (or mutable by) callers.
    return tuple(self._strides)

@property
def overlap(self) -> Tuple[int, ...]:
def overlap(self) -> tuple[int, ...]:
    """tuple of int : Overlap between adjacent tiles along each axis."""
    # Per-axis difference between tile extent and stride: adjacent tiles
    # overlap by (tile size - step size) samples along each axis.
    return tuple(self._tiledims - self._strides)

def __getitem__(self, index: IntOrInts) -> NDSlice:
def __getitem__(self, index: int | Iterable[int]) -> tuple[slice, ...]:
"""
Access a tile.

Expand Down Expand Up @@ -171,7 +169,7 @@ def wrap_index(i: int, n: int) -> int:

return tuple([slice(a, b) for (a, b) in zip(start, stop)])

def __iter__(self) -> Iterator[NDSlice]:
def __iter__(self) -> Iterator[tuple[slice, ...]]:
"""
Iterate over tiles in arbitrary order.

Expand Down
Loading