
Commit

Merge branch 'development' into tweak-contributor-doc
JessicaS11 committed Aug 26, 2024
2 parents 19e5a5b + e491c84 commit 59d4355
Showing 21 changed files with 574 additions and 553 deletions.
35 changes: 35 additions & 0 deletions .all-contributorsrc
@@ -470,6 +470,41 @@
"contributions": [
"ideas"
]
},
{
"login": "cclauss",
"name": "Christian Clauss",
"avatar_url": "https://avatars.githubusercontent.com/u/3709715?v=4",
"profile": "https://github.com/cclauss",
"contributions": [
"maintenance",
"review",
"tool"
]
},
{
"login": "jrenrut",
"name": "Jeremy Turner",
"avatar_url": "https://avatars.githubusercontent.com/u/42989238?v=4",
"profile": "https://github.com/jrenrut",
"contributions": [
"bug",
"code"
]
},
{
"login": "mfisher87",
"name": "Matt Fisher",
"avatar_url": "https://avatars.githubusercontent.com/u/3608264?v=4",
"profile": "https://mfisher87.github.io/",
"contributions": [
"bug",
"code",
"doc",
"maintenance",
"test",
"infra"
]
}
],
"contributorsPerLine": 7,
23 changes: 14 additions & 9 deletions CONTRIBUTORS.rst

Large diffs are not rendered by default.

26 changes: 26 additions & 0 deletions README.rst
@@ -85,6 +85,32 @@ Alternatively, you can also install icepyx using `pip <https://pip.pypa.io/en/st
More detailed instructions for installing `icepyx` can be found at
https://icepyx.readthedocs.io/en/latest/getting_started/install.html

Quick Start
-----------

.. code-block:: python

    import icepyx as ipx
    query = ipx.Query(
        # Collection short name
        "ATL06",
        # Bounding box
        [-55, 68, -48, 71],
        # Time bounds
        ['2019-02-20','2019-02-28'],
    )
    query.download_granules('/tmp/icepyx')

.. code-block:: bash

    $ ls -1 /tmp/icepyx/
    processed_ATL06_20190221121851_08410203_006_02.h5
    processed_ATL06_20190222010344_08490205_006_02.h5
    processed_ATL06_20190225121032_09020203_006_02.h5
    processed_ATL06_20190226005526_09100205_006_02.h5
See the examples below for more things `icepyx` can do!
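Already have granules on disk? They can also be opened with `icepyx`'s `Read` class; a minimal sketch (the beam and variable names below are illustrative choices for ATL06, not required values):

.. code-block:: python

    reader = ipx.Read('/tmp/icepyx')
    # choose a beam and a few land-ice variables, then load them into an xarray Dataset
    reader.vars.append(beam_list=['gt1l'], var_list=['h_li', 'latitude', 'longitude'])
    ds = reader.load()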


Examples (Jupyter Notebooks)
----------------------------
495 changes: 244 additions & 251 deletions doc/source/user_guide/documentation/classes_dev_uml.svg
387 changes: 190 additions & 197 deletions doc/source/user_guide/documentation/classes_user_uml.svg
7 changes: 2 additions & 5 deletions doc/sphinxext/announce.py
@@ -77,10 +77,7 @@ def get_authors(revision_range):
# e.g. v1.0.1|HEAD
maybe_tag, head = cur_release.split("|")
assert head == "HEAD"
if maybe_tag in this_repo.tags:
cur_release = maybe_tag
else:
cur_release = head
cur_release = maybe_tag if maybe_tag in this_repo.tags else head
revision_range = f"{lst_release}..{cur_release}"

# authors, in current release and previous to current release.
@@ -125,7 +122,7 @@ def get_authors(revision_range):
# pre.discard("Homu")

# Append '+' to new authors.
authors = [s + " +" for s in cur - pre] + [s for s in cur & pre]
authors = [s + " +" for s in cur - pre] + list(cur & pre)
authors.sort()
return authors
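A toy sketch of the set arithmetic above (hypothetical author names): contributors who appear only in the current release range get a trailing " +".

.. code-block:: python

    cur = {"Alice", "Bob", "Carol"}  # authors in the current release range
    pre = {"Bob"}                    # authors seen in previous releases
    authors = [s + " +" for s in cur - pre] + list(cur & pre)
    authors.sort()
    print(authors)  # ['Alice +', 'Bob', 'Carol +']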

16 changes: 8 additions & 8 deletions icepyx/core/APIformatting.py
@@ -73,7 +73,7 @@ def _fmt_readable_granules(dset, **kwds):
# list of readable granule names
readable_granule_list = []
# if querying either by 91-day orbital cycle or RGT
if "cycles" in kwargs.keys() or "tracks" in kwargs.keys():
if "cycles" in kwargs or "tracks" in kwargs:
# default character wildcards for cycles and tracks
kwargs.setdefault("cycles", ["??"])
kwargs.setdefault("tracks", ["????"])
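Several changes in this file replace `key in d.keys()` with the equivalent, idiomatic `key in d`. A small self-contained illustration with a toy dict (not icepyx data):

.. code-block:: python

    kwargs = {"cycles": ["01"]}
    # both membership tests are equivalent; the bare `in` avoids building a keys view
    assert ("cycles" in kwargs) == ("cycles" in kwargs.keys())
    # setdefault only fills in a value when the key is absent
    kwargs.setdefault("tracks", ["????"])
    print(kwargs)  # {'cycles': ['01'], 'tracks': ['????']}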
@@ -110,7 +110,7 @@ def _fmt_var_subset_list(vdict):
"""

subcover = ""
for vn in vdict.keys():
for vn in vdict:
vpaths = vdict[vn]
for vpath in vpaths:
subcover += "/" + vpath + ","
@@ -298,9 +298,9 @@ def _check_valid_keys(self):
# if self._wanted == None:
# raise ValueError("No desired parameter list was passed")

val_list = list(set(val for lis in self.poss_keys.values() for val in lis))
val_list = list({val for lis in self.poss_keys.values() for val in lis})

for key in self.fmted_keys.keys():
for key in self.fmted_keys:
assert key in val_list, (
"An invalid key (" + key + ") was passed. Please remove it using `del`"
)
@@ -317,7 +317,7 @@ def check_req_values(self):
), "You cannot call this function for your parameter type"
reqkeys = self.poss_keys[self._reqtype]

if all(keys in self.fmted_keys.keys() for keys in reqkeys):
if all(keys in self.fmted_keys for keys in reqkeys):
assert all(
self.fmted_keys.get(key, -9999) != -9999 for key in reqkeys
), "One of your formatted parameters is missing a value"
@@ -337,7 +337,7 @@ def check_values(self):
spatial_keys = self.poss_keys["spatial"]

# not the most robust check, but better than nothing...
if any(keys in self._fmted_keys.keys() for keys in spatial_keys):
if any(keys in self._fmted_keys for keys in spatial_keys):
assert any(
self.fmted_keys.get(key, -9999) != -9999 for key in spatial_keys
), "One of your formatted parameters is missing a value"
@@ -410,13 +410,13 @@ def build_params(self, **kwargs):
opt_keys = self.poss_keys["optional"]

for key in opt_keys:
if key == "Coverage" and key in kwargs.keys():
if key == "Coverage" and key in kwargs:
# DevGoal: make there be an option along the lines of Coverage=default, which will get the default variables for that product without the user having to input is2obj.build_wanted_wanted_var_list as their input value for using the Coverage kwarg
self._fmted_keys.update(
{key: _fmt_var_subset_list(kwargs[key])}
)
elif (key == "temporal" or key == "time") and (
"start" in kwargs.keys() and "end" in kwargs.keys()
"start" in kwargs and "end" in kwargs
):
self._fmted_keys.update(
_fmt_temporal(kwargs["start"], kwargs["end"], key)
8 changes: 3 additions & 5 deletions icepyx/core/auth.py
@@ -107,11 +107,9 @@ def set_s3_creds():

# Only generate s3login_credentials the first time credentials are accessed, or if an hour
# has passed since the last login
if self._s3login_credentials is None:
set_s3_creds()
elif (datetime.datetime.now() - self._s3_initial_ts) >= datetime.timedelta(
hours=1
):
if self._s3login_credentials is None or (
datetime.datetime.now() - self._s3_initial_ts
) >= datetime.timedelta(hours=1):
set_s3_creds()
return self._s3login_credentials
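A stripped-down sketch of the combined refresh condition, using stand-in names rather than the icepyx attributes: credentials are fetched on first use or once they are at least an hour old.

.. code-block:: python

    import datetime

    class CredCache:
        def __init__(self, fetch):
            self._fetch = fetch          # callable that returns fresh credentials
            self._creds = None
            self._ts = None

        def get(self):
            if self._creds is None or (
                datetime.datetime.now() - self._ts
            ) >= datetime.timedelta(hours=1):
                self._creds = self._fetch()
                self._ts = datetime.datetime.now()
            return self._creds

    cache = CredCache(lambda: {"accessKeyId": "example"})  # hypothetical fetcher
    print(cache.get())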

9 changes: 4 additions & 5 deletions icepyx/core/granules.py
@@ -75,14 +75,13 @@ def gran_IDs(grans, ids=False, cycles=False, tracks=False, dates=False, cloud=Fa
if cloud is True:
try:
for link in gran["links"]:
if link["href"].startswith("s3") and link["href"].endswith(
(".h5", "nc")
):
gran_s3urls.append(link["href"])
href = link["href"]
if href.startswith("s3") and href.endswith((".h5", "nc")):
gran_s3urls.append(href)
except KeyError:
pass

if any([param is True for param in [cycles, tracks, dates]]):
if any(param is True for param in [cycles, tracks, dates]):
# PRD: ICESat-2 product
# HEM: Sea Ice Hemisphere flag
# YY,MM,DD,HH,MN,SS: Year, Month, Day, Hour, Minute, Second
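Two small idioms appear in this hunk: `str.endswith` accepts a tuple of suffixes, and `any()` takes a generator directly. A toy illustration (the URL is hypothetical):

.. code-block:: python

    href = "s3://example-bucket/ATL06_example.h5"  # hypothetical link, for illustration
    if href.startswith("s3") and href.endswith((".h5", "nc")):
        print("cloud-hosted granule:", href)

    cycles, tracks, dates = False, True, False
    print(any(param is True for param in [cycles, tracks, dates]))  # True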
18 changes: 8 additions & 10 deletions icepyx/core/query.py
@@ -126,7 +126,7 @@ def __init__(
**kwargs,
):
# validate & init spatial extent
if "xdateline" in kwargs.keys():
if "xdateline" in kwargs:
self._spatial = spat.Spatial(spatial_extent, xdateline=kwargs["xdateline"])
else:
self._spatial = spat.Spatial(spatial_extent)
@@ -637,7 +637,7 @@ def subsetparams(self, **kwargs):
else:
# If the user has supplied a subset list of variables, append the
# icepyx required variables to the Coverage dict
if "Coverage" in kwargs.keys():
if "Coverage" in kwargs:
var_list = [
"orbit_info/sc_orient",
"orbit_info/sc_orient_time",
@@ -653,7 +653,7 @@
]
# Add any variables from var_list to Coverage that are not already included
for var in var_list:
if var not in kwargs["Coverage"].keys():
if var not in kwargs["Coverage"]:
kwargs["Coverage"][var.split("/")[-1]] = [var]

if self._subsetparams is None:
@@ -741,9 +741,7 @@ def granules(self):
<icepyx.core.granules.Granules at [location]>
"""

if not hasattr(self, "_granules"):
self._granules = Granules()
elif self._granules is None:
if not hasattr(self, "_granules") or self._granules is None:
self._granules = Granules()

return self._granules
@@ -869,8 +867,8 @@ def show_custom_options(self, dictview=False):
]

try:
all(key in self._cust_options.keys() for key in keys)
except AttributeError or KeyError:
all(key in self._cust_options for key in keys)
except (AttributeError, KeyError):
self._cust_options = is2ref._get_custom_options(
self.session, self.product, self._version
)
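The exception-handling change is a genuine bug fix: `except AttributeError or KeyError` evaluates the `or` first and only ever catches `AttributeError`. The tuple form catches both, as this small sketch shows:

.. code-block:: python

    try:
        {}["missing"]           # raises KeyError
    except (AttributeError, KeyError):
        print("caught")         # reached only because the tuple includes KeyError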
@@ -999,7 +997,7 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs):
if self._reqparams._reqtype == "search":
self._reqparams._reqtype = "download"

if "email" in self._reqparams.fmted_keys.keys() or email is False:
if "email" in self._reqparams.fmted_keys or email is False:
self._reqparams.build_params(**self._reqparams.fmted_keys)
elif email is True:
user_profile = self.auth.get_user_profile()
@@ -1022,7 +1020,7 @@ def order_granules(self, verbose=False, subset=True, email=False, **kwargs):
self.granules

# Place multiple orders, one per granule, if readable_granule_name is used.
if "readable_granule_name[]" in self.CMRparams.keys():
if "readable_granule_name[]" in self.CMRparams:
gran_name_list = self.CMRparams["readable_granule_name[]"]
tempCMRparams = self.CMRparams
if len(gran_name_list) > 1:
14 changes: 7 additions & 7 deletions icepyx/core/read.py
@@ -120,7 +120,7 @@ def _parse_source(data_source, glob_kwargs={}) -> list:
# if data_source is a directory glob search the directory and assign to _filelist
data_source = os.path.join(data_source, "*")
filelist = glob.glob(data_source, **glob_kwargs)
elif isinstance(data_source, str) or isinstance(data_source, Path):
elif isinstance(data_source, (Path, str)):
if data_source.startswith("s3"):
# if the string is an s3 path put it in the _filelist without globbing
filelist = [data_source]
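`isinstance` likewise accepts a tuple of types, so one check now covers both `Path` and `str` sources. A toy illustration (the paths are hypothetical):

.. code-block:: python

    from pathlib import Path

    for source in ["s3://example-bucket/ATL06_example.h5", Path("/tmp/icepyx")]:
        print(isinstance(source, (Path, str)))  # True, True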
@@ -661,11 +661,11 @@ def _build_dataset_template(self, file):
"""

is2ds = xr.Dataset(
coords=dict(
gran_idx=[np.uint64(999999)],
source_file=(["gran_idx"], [file]),
),
attrs=dict(data_product=self.product),
coords={
"gran_idx": [np.uint64(999999)],
"source_file": (["gran_idx"], [file]),
},
attrs={"data_product": self.product},
)
return is2ds
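The template above now uses dict literals instead of `dict()` calls; behavior is unchanged. A self-contained sketch of the resulting placeholder dataset (the product and file names are illustrative):

.. code-block:: python

    import numpy as np
    import xarray as xr

    is2ds = xr.Dataset(
        coords={
            "gran_idx": [np.uint64(999999)],
            "source_file": (["gran_idx"], ["example_ATL06.h5"]),  # hypothetical file
        },
        attrs={"data_product": "ATL06"},
    )
    print(is2ds)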

@@ -740,7 +740,7 @@ def _build_single_file_dataset(self, file, groups_list):
"ATL23",
]:
wanted_grouponly_set = set(wanted_groups_tiered[0])
wanted_groups_list = list(sorted(wanted_grouponly_set))
wanted_groups_list = sorted(wanted_grouponly_set)
if len(wanted_groups_list) == 1:
is2ds = self._read_single_grp(file, grp_path=wanted_groups_list[0])
else:
7 changes: 2 additions & 5 deletions icepyx/core/spatial.py
@@ -496,7 +496,7 @@ def __init__(self, spatial_extent, **kwarg):
self._spatial_ext = [float(i) for i in arrpoly]

# check for cross dateline keyword submission
if "xdateline" in kwarg.keys():
if "xdateline" in kwarg:
self._xdateln = kwarg["xdateline"]
assert self._xdateln in [
True,
@@ -542,10 +542,7 @@ def extent_as_gdf(self):
"""

# TODO: test this
if hasattr(self, "_xdateln"):
xdateln = self._xdateln
else:
xdateln = None
xdateln = self._xdateln if hasattr(self, "_xdateln") else None

if not hasattr(self, "_gdf_spat"):
if self._geom_file is not None:
19 changes: 6 additions & 13 deletions icepyx/core/variables.py
@@ -84,10 +84,7 @@ def __init__(
self._path = val.check_s3bucket(path)

# Set up auth
if self._path.startswith("s3"):
auth = self.auth
else:
auth = None
auth = self.auth if self._path.startswith("s3") else None
# Read the product and version from the file
self._product = is2ref.extract_product(self._path, auth=auth)
self._version = is2ref.extract_version(self._path, auth=auth)
@@ -111,11 +108,7 @@

@property
def path(self):
if self._path:
path = self._path
else:
path = None
return path
return self._path if self._path else None

@property
def product(self):
@@ -271,7 +264,7 @@ def parse_var_list(varlist, tiered=True, tiered_vars=False):
for vn in varlist:
vpath, vkey = os.path.split(vn)
# print('path '+ vpath + ', key '+vkey)
if vkey not in vgrp.keys():
if vkey not in vgrp:
vgrp[vkey] = [vn]
else:
vgrp[vkey].append(vn)
@@ -321,7 +314,7 @@ def _check_valid_lists(
# check if the list of variables, if specified, are available in the product
if var_list is not None:
for var_id in var_list:
if var_id not in vgrp.keys():
if var_id not in vgrp:
err_msg_varid = "Invalid variable name: " + var_id + ". "
err_msg_varid = err_msg_varid + "Please select from this list: "
err_msg_varid = err_msg_varid + ", ".join(vgrp.keys())
@@ -514,9 +507,9 @@ def append(self, defaults=False, var_list=None, beam_list=None, keyword_list=Non
)

# update the data object variables
for vkey in final_vars.keys():
for vkey in final_vars:
# add all matching keys and paths for new variables
if vkey not in self.wanted.keys():
if vkey not in self.wanted:
self.wanted[vkey] = final_vars[vkey]
else:
for vpath in final_vars[vkey]:
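A toy sketch of the merge that `append` performs on the wanted-variables dict (the variable paths are illustrative ATL06 paths): new keys are copied over whole, and existing keys only gain paths they do not already hold.

.. code-block:: python

    wanted = {"h_li": ["gt1l/land_ice_segments/h_li"]}
    final_vars = {
        "h_li": ["gt1l/land_ice_segments/h_li", "gt2l/land_ice_segments/h_li"],
        "latitude": ["gt1l/land_ice_segments/latitude"],
    }
    for vkey in final_vars:
        if vkey not in wanted:
            wanted[vkey] = final_vars[vkey]
        else:
            for vpath in final_vars[vkey]:
                if vpath not in wanted[vkey]:
                    wanted[vkey].append(vpath)
    print(wanted)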
2 changes: 1 addition & 1 deletion icepyx/quest/dataset_scripts/__init__.py
@@ -1 +1 @@
from .dataset import *
from .dataset import * # noqa: F403
