From 21729164128525ba8cb006808a39c42fb6506f5c Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Wed, 3 Aug 2022 15:07:33 +0200
Subject: [PATCH 1/2] Adding support for netcdf_decodewarning to toggle xarray
 decode warning

---
 parcels/field.py           | 29 ++++++++++++++++++++---------
 parcels/fieldfilebuffer.py |  8 +++++---
 2 files changed, 25 insertions(+), 12 deletions(-)

diff --git a/parcels/field.py b/parcels/field.py
index bf26a8f95..4e0cf2dfd 100644
--- a/parcels/field.py
+++ b/parcels/field.py
@@ -192,6 +192,7 @@ def __init__(self, name, data, lon=None, lat=None, depth=None, time=None, grid=N
         self.dataFiles = np.append(self.dataFiles, self.dataFiles[0])
         self._field_fb_class = kwargs.pop('FieldFileBuffer', None)
         self.netcdf_engine = kwargs.pop('netcdf_engine', 'netcdf4')
+        self.netcdf_decodewarning = kwargs.pop('netcdf_decodewarning', True)
         self.loaded_time_indices = []
         self.creation_log = kwargs.pop('creation_log', '')
         self.chunksize = kwargs.pop('chunksize', None)
@@ -229,7 +230,8 @@ def get_dim_filenames(cls, filenames, dim):
             return filenames

     @staticmethod
-    def collect_timeslices(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine):
+    def collect_timeslices(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine,
+                           netcdf_decodewarning=True):
         if timestamps is not None:
             dataFiles = []
             for findex in range(len(data_filenames)):
@@ -241,7 +243,8 @@ def collect_timeslices(timestamps, data_filenames, _grid_fb_class, dimensions, i
             timeslices = []
             dataFiles = []
             for fname in data_filenames:
-                with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine) as filebuffer:
+                with _grid_fb_class(fname, dimensions, indices, netcdf_engine=netcdf_engine,
+                                    netcdf_decodewarning=netcdf_decodewarning) as filebuffer:
                     ftime = filebuffer.time
                     timeslices.append(ftime)
                     dataFiles.append([fname] * len(ftime))
@@ -267,7 +270,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None,

         :param filenames: list of filenames to read for the field. filenames can be a list [files] or
                a dictionary {dim:[files]} (if lon, lat, depth and/or data not stored in same files as data)
-               In the latetr case, time values are in filenames[data]
+               In the latter case, time values are in filenames[data]
         :param variable: Tuple mapping field name to variable name in the NetCDF file.
         :param dimensions: Dictionary mapping variable names for the relevant dimensions in the NetCDF file
         :param indices: dictionary mapping indices for each dimension to read from file.
@@ -293,6 +296,9 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None,
         :param gridindexingtype: The type of gridindexing. Either 'nemo' (default) or 'mitgcm' are supported.
                See also the Grid indexing documentation on oceanparcels.org
         :param chunksize: size of the chunks in dask loading
+        :param netcdf_decodewarning: boolean whether to show a warning if there is a problem decoding the netcdf files.
+               Default is True, but in some cases where these warnings are expected, it may be useful to silence them
+               by setting netcdf_decodewarning=False.
For usage examples see the following tutorial: @@ -328,6 +334,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, depth_filename = depth_filename[0] netcdf_engine = kwargs.pop('netcdf_engine', 'netcdf4') + netcdf_decodewarning = kwargs.pop('netcdf_decodewarning', True) indices = {} if indices is None else indices.copy() for ind in indices: @@ -349,7 +356,8 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, _grid_fb_class = NetcdfFileBuffer - with _grid_fb_class(lonlat_filename, dimensions, indices, netcdf_engine) as filebuffer: + with _grid_fb_class(lonlat_filename, dimensions, indices, netcdf_engine, + netcdf_decodewarning=netcdf_decodewarning) as filebuffer: lon, lat = filebuffer.lonlat indices = filebuffer.indices # Check if parcels_mesh has been explicitly set in file @@ -357,7 +365,8 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, mesh = filebuffer.dataset.attrs['parcels_mesh'] if 'depth' in dimensions: - with _grid_fb_class(depth_filename, dimensions, indices, netcdf_engine, interp_method=interp_method) as filebuffer: + with _grid_fb_class(depth_filename, dimensions, indices, netcdf_engine, interp_method=interp_method, + netcdf_decodewarning=netcdf_decodewarning) as filebuffer: filebuffer.name = filebuffer.parse_name(variable[1]) if dimensions['depth'] == 'not_yet_set': depth = filebuffer.depth_dimensions @@ -380,7 +389,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, # across multiple files time, time_origin, timeslices, dataFiles = cls.collect_timeslices(timestamps, data_filenames, _grid_fb_class, dimensions, - indices, netcdf_engine) + indices, netcdf_engine, netcdf_decodewarning) grid = Grid.create_grid(lon, lat, depth, time, time_origin=time_origin, mesh=mesh) grid.timeslices = timeslices kwargs['dataFiles'] = dataFiles @@ -388,7 +397,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, # ==== means: the field has a shared grid, but may have different data files, so we need to collect the # ==== correct file time series again. 
_, _, _, dataFiles = cls.collect_timeslices(timestamps, data_filenames, _grid_fb_class, - dimensions, indices, netcdf_engine) + dimensions, indices, netcdf_engine, netcdf_decodewarning) kwargs['dataFiles'] = dataFiles chunksize = kwargs.get('chunksize', None) @@ -421,7 +430,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, for tslice, fname in zip(grid.timeslices, data_filenames): with _field_fb_class(fname, dimensions, indices, netcdf_engine, interp_method=interp_method, data_full_zdim=data_full_zdim, - chunksize=chunksize) as filebuffer: + chunksize=chunksize, netcdf_decodewarning=netcdf_decodewarning) as filebuffer: # If Field.from_netcdf is called directly, it may not have a 'data' dimension # In that case, assume that 'name' is the data dimension filebuffer.name = filebuffer.parse_name(variable[1]) @@ -462,6 +471,7 @@ def from_netcdf(cls, filenames, variable, dimensions, indices=None, grid=None, kwargs['indices'] = indices kwargs['time_periodic'] = time_periodic kwargs['netcdf_engine'] = netcdf_engine + kwargs['netcdf_decodewarning'] = netcdf_decodewarning return cls(variable, data, grid=grid, timestamps=timestamps, allow_time_extrapolation=allow_time_extrapolation, interp_method=interp_method, **kwargs) @@ -1387,7 +1397,8 @@ def computeTimeChunk(self, data, tindex): data_full_zdim=self.data_full_zdim, chunksize=self.chunksize, rechunk_callback_fields=rechunk_callback_fields, - chunkdims_name_map=self.netcdf_chunkdims_name_map) + chunkdims_name_map=self.netcdf_chunkdims_name_map, + netcdf_decodewarning=self.netcdf_decodewarning) filebuffer.__enter__() time_data = filebuffer.time time_data = g.time_origin.reltime(time_data) diff --git a/parcels/fieldfilebuffer.py b/parcels/fieldfilebuffer.py index 149060af5..6e665a19f 100644 --- a/parcels/fieldfilebuffer.py +++ b/parcels/fieldfilebuffer.py @@ -31,6 +31,7 @@ class NetcdfFileBuffer(_FileBuffer): def __init__(self, *args, **kwargs): self.lib = np self.netcdf_engine = kwargs.pop('netcdf_engine', 'netcdf4') + self.netcdf_decodewarning = kwargs.pop('netcdf_decodewarning', True) super(NetcdfFileBuffer, self).__init__(*args, **kwargs) def __enter__(self): @@ -41,9 +42,10 @@ def __enter__(self): self.dataset = xr.open_dataset(str(self.filename), decode_cf=True, engine=self.netcdf_engine) self.dataset['decoded'] = True except: - logger.warning_once("File %s could not be decoded properly by xarray (version %s).\n " - "It will be opened with no decoding. Filling values might be wrongly parsed." - % (self.filename, xr.__version__)) + if self.netcdf_decodewarning: + logger.warning_once("File %s could not be decoded properly by xarray (version %s).\n " + "It will be opened with no decoding. Filling values might be wrongly parsed." 
+                                    % (self.filename, xr.__version__))
             self.dataset = xr.open_dataset(str(self.filename), decode_cf=False, engine=self.netcdf_engine)
             self.dataset['decoded'] = False
         for inds in self.indices.values():

From 43ec15b5e8901f654c296556d2c13d519fd362c2 Mon Sep 17 00:00:00 2001
From: Erik van Sebille
Date: Wed, 3 Aug 2022 15:10:32 +0200
Subject: [PATCH 2/2] Updating notebooks with netcdf_decodewarning

---
 parcels/examples/documentation_indexing.ipynb | 33 ++++---
 parcels/examples/tutorial_timestamps.ipynb    | 90 ++++++++++++++-----
 2 files changed, 86 insertions(+), 37 deletions(-)

diff --git a/parcels/examples/documentation_indexing.ipynb b/parcels/examples/documentation_indexing.ipynb
index 96dfc556e..e9f2e107a 100644
--- a/parcels/examples/documentation_indexing.ipynb
+++ b/parcels/examples/documentation_indexing.ipynb
@@ -65,7 +65,11 @@
  {
   "cell_type": "code",
   "execution_count": 2,
-  "metadata": {},
+  "metadata": {
+   "tags": [
+    "raises-exception"
+   ]
+  },
   "outputs": [
    {
     "ename": "ValueError",
@@ -123,19 +127,7 @@
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
-  "outputs": [
-   {
-    "name": "stderr",
-    "output_type": "stream",
-    "text": [
-     "WARNING: File NemoNorthSeaORCA025-N006_data/coordinates.nc could not be decoded properly by xarray (version 0.15.1).\n",
-     " It will be opened with no decoding. Filling values might be wrongly parsed.\n",
-     "WARNING: Casting lon data to np.float32\n",
-     "WARNING: Casting lat data to np.float32\n",
-     "WARNING: Trying to initialize a shared grid with different chunking sizes - action prohibited. Replacing requested field_chunksize with grid's master chunksize.\n"
-    ]
-   }
-  ],
+  "outputs": [],
   "source": [
    "mesh_mask = data_path + 'coordinates.nc'\n",
    "\n",
    " 'V': {'lon': 'glamf', 'lat': 'gphif', 'depth': 'depthw', 'time': 'time_counter'},\n",
    " 'W': {'lon': 'glamf', 'lat': 'gphif', 'depth': 'depthw', 'time': 'time_counter'}}\n",
    "\n",
-   "fieldsetC = FieldSet.from_nemo(filenames, variables, dimensions)"
+   "fieldsetC = FieldSet.from_nemo(filenames, variables, dimensions, netcdf_decodewarning=False)"
   ]
  },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "Note, by the way, that we used `netcdf_decodewarning=False` in the `FieldSet.from_nemo()` call above. This is to silence an expected warning because the time dimension in the `coordinates.nc` file can't be decoded by `xarray`."
] }, { @@ -221,7 +220,7 @@ ], "metadata": { "kernelspec": { - "display_name": "Python 3", + "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, @@ -235,7 +234,7 @@ "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", - "version": "3.8.5" + "version": "3.8.13" } }, "nbformat": 4, diff --git a/parcels/examples/tutorial_timestamps.ipynb b/parcels/examples/tutorial_timestamps.ipynb index 338e8d491..1bae6f1d0 100644 --- a/parcels/examples/tutorial_timestamps.ipynb +++ b/parcels/examples/tutorial_timestamps.ipynb @@ -31,17 +31,69 @@ "cell_type": "markdown", "metadata": {}, "source": [ - "The following cell would give an error, since the calendar of the World Ocean Atlas data is in \"months since 1955-01-01 00:00:00\"" + "The following cell raises an error, since the calendar of the World Ocean Atlas data is in \"months since 1955-01-01 00:00:00\"" ] }, { "cell_type": "code", "execution_count": 2, - "metadata": {}, - "outputs": [], + "metadata": { + "tags": [ + "raises-exception" + ] + }, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + "WARNING: File WOA_data/woa18_decav_t10_04.nc could not be decoded properly by xarray (version 0.20.1).\n", + " It will be opened with no decoding. Filling values might be wrongly parsed.\n" + ] + }, + { + "ename": "RuntimeError", + "evalue": "Xarray could not convert the calendar. If youre using from_netcdf, try using the timestamps keyword in the construction of your Field. See also the tutorial at https://nbviewer.jupyter.org/github/OceanParcels/parcels/blob/master/parcels/examples/tutorial_timestamps.ipynb", + "output_type": "error", + "traceback": [ + "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m", + "\u001b[0;31mKeyError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:240\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[0;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 239\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 240\u001b[0m dates \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_datetime_with_pandas\u001b[49m\u001b[43m(\u001b[49m\u001b[43mflat_num_dates\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 241\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mKeyError\u001b[39;00m, OutOfBoundsDatetime, \u001b[38;5;167;01mOverflowError\u001b[39;00m):\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:186\u001b[0m, in \u001b[0;36m_decode_datetime_with_pandas\u001b[0;34m(flat_num_dates, units, calendar)\u001b[0m\n\u001b[1;32m 185\u001b[0m delta, ref_date \u001b[38;5;241m=\u001b[39m _unpack_netcdf_time_units(units)\n\u001b[0;32m--> 186\u001b[0m delta \u001b[38;5;241m=\u001b[39m \u001b[43m_netcdf_to_numpy_timeunit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdelta\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 187\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:83\u001b[0m, in \u001b[0;36m_netcdf_to_numpy_timeunit\u001b[0;34m(units)\u001b[0m\n\u001b[1;32m 82\u001b[0m units \u001b[38;5;241m=\u001b[39m 
\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00munits\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124ms\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[0;32m---> 83\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43m{\u001b[49m\n\u001b[1;32m 84\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mnanoseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mns\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 85\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmicroseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mus\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 86\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mmilliseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mms\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 87\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mseconds\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43ms\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 88\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mminutes\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mm\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 89\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mhours\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mh\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 90\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mdays\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[38;5;124;43mD\u001b[39;49m\u001b[38;5;124;43m\"\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 91\u001b[0m \u001b[43m\u001b[49m\u001b[43m}\u001b[49m\u001b[43m[\u001b[49m\u001b[43munits\u001b[49m\u001b[43m]\u001b[49m\n", + "\u001b[0;31mKeyError\u001b[0m: 'months'", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:153\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[0;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 152\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 153\u001b[0m result \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_datetime\u001b[49m\u001b[43m(\u001b[49m\u001b[43mexample_value\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 154\u001b[0m 
\u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mException\u001b[39;00m:\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:242\u001b[0m, in \u001b[0;36mdecode_cf_datetime\u001b[0;34m(num_dates, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 241\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m (\u001b[38;5;167;01mKeyError\u001b[39;00m, OutOfBoundsDatetime, \u001b[38;5;167;01mOverflowError\u001b[39;00m):\n\u001b[0;32m--> 242\u001b[0m dates \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_datetime_with_cftime\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 243\u001b[0m \u001b[43m \u001b[49m\u001b[43mflat_num_dates\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mastype\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mfloat\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\n\u001b[1;32m 244\u001b[0m \u001b[43m \u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 246\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m (\n\u001b[1;32m 247\u001b[0m dates[np\u001b[38;5;241m.\u001b[39mnanargmin(num_dates)]\u001b[38;5;241m.\u001b[39myear \u001b[38;5;241m<\u001b[39m \u001b[38;5;241m1678\u001b[39m\n\u001b[1;32m 248\u001b[0m \u001b[38;5;129;01mor\u001b[39;00m dates[np\u001b[38;5;241m.\u001b[39mnanargmax(num_dates)]\u001b[38;5;241m.\u001b[39myear \u001b[38;5;241m>\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m2262\u001b[39m\n\u001b[1;32m 249\u001b[0m ):\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:174\u001b[0m, in \u001b[0;36m_decode_datetime_with_cftime\u001b[0;34m(num_dates, units, calendar)\u001b[0m\n\u001b[1;32m 172\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mModuleNotFoundError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mNo module named \u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mcftime\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m 173\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m np\u001b[38;5;241m.\u001b[39masarray(\n\u001b[0;32m--> 174\u001b[0m \u001b[43mcftime\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mnum2date\u001b[49m\u001b[43m(\u001b[49m\u001b[43mnum_dates\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43monly_use_cftime_datetimes\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43;01mTrue\u001b[39;49;00m\u001b[43m)\u001b[49m\n\u001b[1;32m 175\u001b[0m )\n", + "File \u001b[0;32msrc/cftime/_cftime.pyx:498\u001b[0m, in \u001b[0;36mcftime._cftime.num2date\u001b[0;34m()\u001b[0m\n", + "File \u001b[0;32msrc/cftime/_cftime.pyx:99\u001b[0m, in \u001b[0;36mcftime._cftime._dateparse\u001b[0;34m()\u001b[0m\n", + "\u001b[0;31mValueError\u001b[0m: 'months since' units only allowed for '360_day' calendar", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mValueError\u001b[0m Traceback (most recent call last)", + "File \u001b[0;32m~/Codes/parcels/parcels/tools/converters.py:233\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[0;34m(ds, time)\u001b[0m\n\u001b[1;32m 232\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 233\u001b[0m da2 \u001b[38;5;241m=\u001b[39m 
\u001b[43mxr\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecode_cf\u001b[49m\u001b[43m(\u001b[49m\u001b[43mda2\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 234\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m:\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:646\u001b[0m, in \u001b[0;36mdecode_cf\u001b[0;34m(obj, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 644\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcan only decode Dataset or DataStore objects\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 646\u001b[0m \u001b[38;5;28mvars\u001b[39m, attrs, coord_names \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_variables\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 647\u001b[0m \u001b[43m \u001b[49m\u001b[38;5;28;43mvars\u001b[39;49m\u001b[43m,\u001b[49m\n\u001b[1;32m 648\u001b[0m \u001b[43m \u001b[49m\u001b[43mattrs\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 649\u001b[0m \u001b[43m \u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 650\u001b[0m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 651\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 652\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_coords\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 653\u001b[0m \u001b[43m \u001b[49m\u001b[43mdrop_variables\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdrop_variables\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 654\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 655\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 656\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 657\u001b[0m ds \u001b[38;5;241m=\u001b[39m Dataset(\u001b[38;5;28mvars\u001b[39m, attrs\u001b[38;5;241m=\u001b[39mattrs)\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:512\u001b[0m, in \u001b[0;36mdecode_cf_variables\u001b[0;34m(variables, attributes, concat_characters, mask_and_scale, decode_times, decode_coords, drop_variables, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 506\u001b[0m stack_char_dim \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 507\u001b[0m concat_characters\n\u001b[1;32m 508\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m v\u001b[38;5;241m.\u001b[39mdtype \u001b[38;5;241m==\u001b[39m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mS1\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 509\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m v\u001b[38;5;241m.\u001b[39mndim \u001b[38;5;241m>\u001b[39m \u001b[38;5;241m0\u001b[39m\n\u001b[1;32m 510\u001b[0m \u001b[38;5;129;01mand\u001b[39;00m stackable(v\u001b[38;5;241m.\u001b[39mdims[\u001b[38;5;241m-\u001b[39m\u001b[38;5;241m1\u001b[39m])\n\u001b[1;32m 511\u001b[0m )\n\u001b[0;32m--> 512\u001b[0m new_vars[k] \u001b[38;5;241m=\u001b[39m \u001b[43mdecode_cf_variable\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m 513\u001b[0m \u001b[43m \u001b[49m\u001b[43mk\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 514\u001b[0m \u001b[43m 
\u001b[49m\u001b[43mv\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 515\u001b[0m \u001b[43m \u001b[49m\u001b[43mconcat_characters\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mconcat_characters\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 516\u001b[0m \u001b[43m \u001b[49m\u001b[43mmask_and_scale\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mmask_and_scale\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 517\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_times\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_times\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 518\u001b[0m \u001b[43m \u001b[49m\u001b[43mstack_char_dim\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mstack_char_dim\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 519\u001b[0m \u001b[43m \u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 520\u001b[0m \u001b[43m \u001b[49m\u001b[43mdecode_timedelta\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdecode_timedelta\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 521\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 522\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m decode_coords \u001b[38;5;129;01min\u001b[39;00m [\u001b[38;5;28;01mTrue\u001b[39;00m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcoordinates\u001b[39m\u001b[38;5;124m\"\u001b[39m, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mall\u001b[39m\u001b[38;5;124m\"\u001b[39m]:\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/conventions.py:360\u001b[0m, in \u001b[0;36mdecode_cf_variable\u001b[0;34m(name, var, concat_characters, mask_and_scale, decode_times, decode_endianness, stack_char_dim, use_cftime, decode_timedelta)\u001b[0m\n\u001b[1;32m 359\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m decode_times:\n\u001b[0;32m--> 360\u001b[0m var \u001b[38;5;241m=\u001b[39m \u001b[43mtimes\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mCFDatetimeCoder\u001b[49m\u001b[43m(\u001b[49m\u001b[43muse_cftime\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdecode\u001b[49m\u001b[43m(\u001b[49m\u001b[43mvar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mname\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mname\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 362\u001b[0m dimensions, data, attributes, encoding \u001b[38;5;241m=\u001b[39m variables\u001b[38;5;241m.\u001b[39munpack_for_decoding(var)\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:526\u001b[0m, in \u001b[0;36mCFDatetimeCoder.decode\u001b[0;34m(self, variable, name)\u001b[0m\n\u001b[1;32m 525\u001b[0m calendar \u001b[38;5;241m=\u001b[39m pop_to(attrs, encoding, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mcalendar\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[0;32m--> 526\u001b[0m dtype \u001b[38;5;241m=\u001b[39m \u001b[43m_decode_cf_datetime_dtype\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43munits\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mcalendar\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43muse_cftime\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 527\u001b[0m transform \u001b[38;5;241m=\u001b[39m partial(\n\u001b[1;32m 528\u001b[0m decode_cf_datetime,\n\u001b[1;32m 529\u001b[0m 
units\u001b[38;5;241m=\u001b[39munits,\n\u001b[1;32m 530\u001b[0m calendar\u001b[38;5;241m=\u001b[39mcalendar,\n\u001b[1;32m 531\u001b[0m use_cftime\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39muse_cftime,\n\u001b[1;32m 532\u001b[0m )\n", + "File \u001b[0;32m/opt/anaconda3/envs/py3_parcels/lib/python3.8/site-packages/xarray/coding/times.py:163\u001b[0m, in \u001b[0;36m_decode_cf_datetime_dtype\u001b[0;34m(data, units, calendar, use_cftime)\u001b[0m\n\u001b[1;32m 158\u001b[0m msg \u001b[38;5;241m=\u001b[39m (\n\u001b[1;32m 159\u001b[0m \u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124munable to decode time units \u001b[39m\u001b[38;5;132;01m{\u001b[39;00munits\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m with \u001b[39m\u001b[38;5;132;01m{\u001b[39;00mcalendar_msg\u001b[38;5;132;01m!r}\u001b[39;00m\u001b[38;5;124m. Try \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 160\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mopening your dataset with decode_times=False or installing cftime \u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 161\u001b[0m \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mif it is not installed.\u001b[39m\u001b[38;5;124m\"\u001b[39m\n\u001b[1;32m 162\u001b[0m )\n\u001b[0;32m--> 163\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m(msg)\n\u001b[1;32m 164\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n", + "\u001b[0;31mValueError\u001b[0m: unable to decode time units 'months since 1955-01-01 00:00:00' with 'the default calendar'. Try opening your dataset with decode_times=False or installing cftime if it is not installed.", + "\nDuring handling of the above exception, another exception occurred:\n", + "\u001b[0;31mRuntimeError\u001b[0m Traceback (most recent call last)", + "Input \u001b[0;32mIn [2]\u001b[0m, in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[0;32m----> 1\u001b[0m tempfield \u001b[38;5;241m=\u001b[39m \u001b[43mField\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfrom_netcdf\u001b[49m\u001b[43m(\u001b[49m\u001b[43mglob\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mWOA_data/woa18_decav_*_04.nc\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m)\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mt_an\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\n\u001b[1;32m 2\u001b[0m \u001b[43m \u001b[49m\u001b[43m{\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlon\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlon\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlat\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mlat\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m:\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m}\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Codes/parcels/parcels/field.py:390\u001b[0m, in \u001b[0;36mField.from_netcdf\u001b[0;34m(cls, filenames, variable, dimensions, indices, 
grid, mesh, timestamps, allow_time_extrapolation, time_periodic, deferred_load, **kwargs)\u001b[0m\n\u001b[1;32m 385\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mMultiple files given but no time dimension specified\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m 387\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m grid \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m 388\u001b[0m \u001b[38;5;66;03m# Concatenate time variable to determine overall dimension\u001b[39;00m\n\u001b[1;32m 389\u001b[0m \u001b[38;5;66;03m# across multiple files\u001b[39;00m\n\u001b[0;32m--> 390\u001b[0m time, time_origin, timeslices, dataFiles \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mcls\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcollect_timeslices\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtimestamps\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdata_filenames\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 391\u001b[0m \u001b[43m \u001b[49m\u001b[43m_grid_fb_class\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdimensions\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m 392\u001b[0m \u001b[43m \u001b[49m\u001b[43mindices\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnetcdf_engine\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mnetcdf_decodewarning\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 393\u001b[0m grid \u001b[38;5;241m=\u001b[39m Grid\u001b[38;5;241m.\u001b[39mcreate_grid(lon, lat, depth, time, time_origin\u001b[38;5;241m=\u001b[39mtime_origin, mesh\u001b[38;5;241m=\u001b[39mmesh)\n\u001b[1;32m 394\u001b[0m grid\u001b[38;5;241m.\u001b[39mtimeslices \u001b[38;5;241m=\u001b[39m timeslices\n", + "File \u001b[0;32m~/Codes/parcels/parcels/field.py:248\u001b[0m, in \u001b[0;36mField.collect_timeslices\u001b[0;34m(timestamps, data_filenames, _grid_fb_class, dimensions, indices, netcdf_engine, netcdf_decodewarning)\u001b[0m\n\u001b[1;32m 245\u001b[0m \u001b[38;5;28;01mfor\u001b[39;00m fname \u001b[38;5;129;01min\u001b[39;00m data_filenames:\n\u001b[1;32m 246\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m _grid_fb_class(fname, dimensions, indices, netcdf_engine\u001b[38;5;241m=\u001b[39mnetcdf_engine,\n\u001b[1;32m 247\u001b[0m netcdf_decodewarning\u001b[38;5;241m=\u001b[39mnetcdf_decodewarning) \u001b[38;5;28;01mas\u001b[39;00m filebuffer:\n\u001b[0;32m--> 248\u001b[0m ftime \u001b[38;5;241m=\u001b[39m \u001b[43mfilebuffer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtime\u001b[49m\n\u001b[1;32m 249\u001b[0m timeslices\u001b[38;5;241m.\u001b[39mappend(ftime)\n\u001b[1;32m 250\u001b[0m dataFiles\u001b[38;5;241m.\u001b[39mappend([fname] \u001b[38;5;241m*\u001b[39m \u001b[38;5;28mlen\u001b[39m(ftime))\n", + "File \u001b[0;32m~/Codes/parcels/parcels/fieldfilebuffer.py:171\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 169\u001b[0m \u001b[38;5;129m@property\u001b[39m\n\u001b[1;32m 170\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mtime\u001b[39m(\u001b[38;5;28mself\u001b[39m):\n\u001b[0;32m--> 171\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtime_access\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n", + "File \u001b[0;32m~/Codes/parcels/parcels/fieldfilebuffer.py:181\u001b[0m, in \u001b[0;36mNetcdfFileBuffer.time_access\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 178\u001b[0m 
\u001b[38;5;28;01mreturn\u001b[39;00m np\u001b[38;5;241m.\u001b[39marray([\u001b[38;5;28;01mNone\u001b[39;00m])\n\u001b[1;32m 180\u001b[0m time_da \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m]]\n\u001b[0;32m--> 181\u001b[0m \u001b[43mconvert_xarray_time_units\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtime_da\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdimensions\u001b[49m\u001b[43m[\u001b[49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[38;5;124;43mtime\u001b[39;49m\u001b[38;5;124;43m'\u001b[39;49m\u001b[43m]\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m 182\u001b[0m time \u001b[38;5;241m=\u001b[39m np\u001b[38;5;241m.\u001b[39marray([time_da[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m]]\u001b[38;5;241m.\u001b[39mdata]) \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mlen\u001b[39m(time_da\u001b[38;5;241m.\u001b[39mshape) \u001b[38;5;241m==\u001b[39m \u001b[38;5;241m0\u001b[39m \u001b[38;5;28;01melse\u001b[39;00m np\u001b[38;5;241m.\u001b[39marray(time_da[\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdimensions[\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtime\u001b[39m\u001b[38;5;124m'\u001b[39m]])\n\u001b[1;32m 183\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(time[\u001b[38;5;241m0\u001b[39m], datetime\u001b[38;5;241m.\u001b[39mdatetime):\n", + "File \u001b[0;32m~/Codes/parcels/parcels/tools/converters.py:235\u001b[0m, in \u001b[0;36mconvert_xarray_time_units\u001b[0;34m(ds, time)\u001b[0m\n\u001b[1;32m 233\u001b[0m da2 \u001b[38;5;241m=\u001b[39m xr\u001b[38;5;241m.\u001b[39mdecode_cf(da2)\n\u001b[1;32m 234\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mValueError\u001b[39;00m:\n\u001b[0;32m--> 235\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mXarray could not convert the calendar. If you\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mre using from_netcdf, \u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 236\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mtry using the timestamps keyword in the construction of your Field. \u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 237\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mSee also the tutorial at https://nbviewer.jupyter.org/github/OceanParcels/\u001b[39m\u001b[38;5;124m'\u001b[39m\n\u001b[1;32m 238\u001b[0m \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mparcels/blob/master/parcels/examples/tutorial_timestamps.ipynb\u001b[39m\u001b[38;5;124m'\u001b[39m)\n\u001b[1;32m 239\u001b[0m ds[time] \u001b[38;5;241m=\u001b[39m da2[time]\n", + "\u001b[0;31mRuntimeError\u001b[0m: Xarray could not convert the calendar. If youre using from_netcdf, try using the timestamps keyword in the construction of your Field. 
See also the tutorial at https://nbviewer.jupyter.org/github/OceanParcels/parcels/blob/master/parcels/examples/tutorial_timestamps.ipynb"
+    ]
+   }
+  ],
   "source": [
-   "# tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an', \n",
-   "# {'lon': 'lon', 'lat': 'lat', 'time': 'time'})"
+   "tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an', \n",
+   " {'lon': 'lon', 'lat': 'lat', 'time': 'time'})"
   ]
  },
  {
@@ -71,20 +123,11 @@
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
-  "outputs": [
-   {
-    "name": "stderr",
-    "output_type": "stream",
-    "text": [
-     "WARNING: File WOA_data/woa18_decav_t10_04.nc could not be decoded properly by xarray (version 0.12.1).\n",
-     " It will be opened with no decoding. Filling values might be wrongly parsed.\n",
-     "WARNING: Casting depth data to np.float32\n"
-    ]
-   }
-  ],
+  "outputs": [],
   "source": [
-   "tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an', \n",
-   " {'lon': 'lon', 'lat': 'lat', 'time': 'time'}, \n",
+   "tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an',\n",
+   " {'lon': 'lon', 'lat': 'lat', 'time': 'time'},\n",
+   " netcdf_decodewarning=False,\n",
    " timestamps=timestamps)"
   ]
  },
@@ -94,11 +137,18 @@
   "source": [
    "Note, by the way, that adding the `time_periodic=True` argument to `Field.from_netcdf()` will also mean that the climatology can be cycled for multiple years."
   ]
+ },
+ {
+  "cell_type": "markdown",
+  "metadata": {},
+  "source": [
+   "Furthermore, note that we used `netcdf_decodewarning=False` in the `Field.from_netcdf()` call above. This is to silence an expected warning because the time dimension in the WOA data files can't be decoded by `xarray`."
+  ]
  }
 ],
 "metadata": {
  "kernelspec": {
-  "display_name": "Python 3",
+  "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
@@ -112,7 +162,7 @@
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
-  "version": "3.6.8"
+  "version": "3.8.13"
  }
 },
 "nbformat": 4,
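
A quick usage illustration of the new keyword (an editor's sketch, not part of the patches above): it assumes the WOA example data used in tutorial_timestamps.ipynb is available locally, and builds `timestamps` the way that tutorial does; only the `Field.from_netcdf()` call itself is taken from the patched notebook.

    import numpy as np
    from glob import glob
    from parcels import Field

    # One timestamp per monthly WOA file; the 'months since 1955-01-01' calendar
    # cannot be decoded by xarray, so the times are supplied explicitly.
    timestamps = np.expand_dims(
        np.array([np.datetime64('2001-%.2d-15' % m) for m in range(1, 13)]), axis=1)

    # netcdf_decodewarning=False only silences the expected decode warning;
    # the files are still opened with decode_cf=False, exactly as before.
    tempfield = Field.from_netcdf(glob('WOA_data/woa18_decav_*_04.nc'), 't_an',
                                  {'lon': 'lon', 'lat': 'lat', 'time': 'time'},
                                  timestamps=timestamps,
                                  netcdf_decodewarning=False)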