From ed75a4a22d367a5631c203667cad6a1456159bc4 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 17:06:10 -0400
Subject: [PATCH 01/11] cli: Initial ISH-Lite getter

---
 melodies_monet/_cli.py | 144 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 144 insertions(+)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 28d43a99..04cc1963 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -471,6 +471,150 @@ def get_airnow(
             ds.to_netcdf(dst / out_name)
 
 
+@app.command()
+def get_ish_lite(
+    start_date: str = typer.Option(..., "-s", "--start-date", help=f"Start date. {_DATE_FMT_NOTE}"),
+    end_date: str = typer.Option(..., "-e", "--end-date", help=f"End date. {_DATE_FMT_NOTE} {_DATE_END_NOTE}"),
+    out_name: str = typer.Option(None, "-o",
+        help=(
+            "Output file name (or full/relative path). "
+            "By default the name is generated like 'ISH-Lite_<start-date>_<end-date>.nc'."
+        )
+    ),
+    dst: Path = typer.Option(".", "-d", "--dst", help=(
+            "Destination directory (to control output location "
+            "if using default output file name)."
+        )
+    ),
+    compress: bool = typer.Option(True, help=(
+            "If true, pack float to int and apply compression using zlib with complevel 7. "
+            "This can take time if the dataset is large, but can lead to "
+            "significant space savings."
+        )
+    ),
+    num_workers: int = typer.Option(1, "-n", "--num-workers", help="Number of download workers."),
+    verbose: bool = typer.Option(False),
+    debug: bool = typer.Option(
+        False, "--debug/", help="Print more messages (including full tracebacks)."
+    ),
+):
+    """Download ISH-Lite data using monetio and reformat for MM usage.
+
+    Note that the data are stored in yearly files by site, so the runtime
+    mostly depends on the number of unique years that your date range includes.
+    """
+    import warnings
+
+    import monetio as mio
+    import pandas as pd
+
+    from .util.write_util import write_ncf
+
+    global DEBUG
+
+    DEBUG = debug
+
+    typer.echo(HEADER)
+
+    start_date = pd.Timestamp(start_date)
+    end_date = pd.Timestamp(end_date)
+    dates = pd.date_range(start_date, end_date, freq="H")
+    if verbose:
+        print("Dates:")
+        print(dates)
+
+    # Set destination and file name
+    fmt = r"%Y%m%d"
+    if out_name is None:
+        out_name = f"ISH-Lite_{start_date:{fmt}}_{end_date:{fmt}}.nc"
+    else:
+        p = Path(out_name)
+        if p.name == out_name:
+            # `out_name` is just the file name
+            out_name = p.name
+        else:
+            # `out_name` has path
+            if dst != Path("."):
+                typer.echo(f"warning: overriding `dst` setting {dst.as_posix()!r} with `out_name` {p.as_posix()!r}")
+            dst = p.parent
+            out_name = p.name
+
+    with _timer("Fetching data with monetio"):
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore",
+                message="The (error|warn)_bad_lines argument has been deprecated"
+            )
+            df = mio.ish_lite.add_data(
+                dates,
+                site="72224400358",
+                resample=False,
+                n_procs=num_workers,
+                verbose=verbose,
+            )
+
+    with _timer("Forming xarray Dataset"):
+        df = df.dropna(subset=["latitude", "longitude"])
+
+        site_vns = [
+            "siteid",
+            "usaf",
+            "wban",
+            "latitude",
+            "longitude",
+            "country",
+            "state",
+        ]
+        # NOTE: time_local not included since it varies in time as well
+
+        ds_site = (
+            df[site_vns]
+            .groupby("siteid")
+            .first()
+            .to_xarray()
+            .swap_dims(siteid="x")
+        )
+
+        # TODO: units?
+        units = {}
+
+        cols = list(df.columns)
+        ds = (
+            df[cols]
+            .set_index(["time", "siteid"])
+            .to_xarray()
+            .swap_dims(siteid="x")
+            .drop_vars(site_vns)
+            .merge(ds_site)
+            .set_coords(["latitude", "longitude"])
+            .assign(x=range(ds_site.dims["x"]))
+        )
+
+        # Add units
+        for k, u in units.items():
+            vn = k
+            ds[vn].attrs.update(units=u)
+
+        # # TODO: Fill in local time array
+        # # (in the df, not all sites have rows for all times, so we have NaTs at this point)
+        # if not daily:
+        #     ds["time_local"] = ds.time + ds.utcoffset.astype("timedelta64[h]")
+
+        # Expand
+        ds = (
+            ds
+            .expand_dims("y")
+            .transpose("time", "y", "x")
+        )
+
+    with _timer("Writing netCDF file"):
+        if compress:
+            write_ncf(ds, dst / out_name, verbose=verbose)
+        else:
+            ds.to_netcdf(dst / out_name)
+
+
 cli = app
 
 _typer_click_object = typer.main.get_command(app)  # for sphinx-click in docs

From eb6a16d79898adad178c6807b955bea3a0aa7296 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 17:44:31 -0400
Subject: [PATCH 02/11] Compute local time for ISH-Lite

needed for MM time series, e.g.
---
 melodies_monet/_cli.py | 34 ++++++++++++++++++++++++++++++----
 1 file changed, 30 insertions(+), 4 deletions(-)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 04cc1963..bc554416 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -553,6 +553,33 @@ def get_ish_lite(
                 verbose=verbose,
             )
 
+    with _timer("Computing UTC offset for selected ISH-Lite sites"):
+        import datetime
+
+        from timezonefinder import TimezoneFinder
+        from pytz import timezone, utc
+
+        tf = TimezoneFinder(in_memory=True)
+        ref_date = datetime.datetime(2022, 1, 1, 0, 0)
+
+        def get_utc_offset(*, lat, lon):
+            s = tf.timezone_at(lng=lon, lat=lat)
+            assert s is not None
+
+            tz_target = timezone(s)
+            ref_date_tz_target = tz_target.localize(ref_date)
+            ref_date_utc = utc.localize(ref_date)
+            uo_h = (ref_date_utc - ref_date_tz_target).total_seconds() / 3600
+
+            return uo_h
+
+
+        locs = df[["siteid", "latitude", "longitude"]].groupby("siteid").first().reset_index()
+        locs["utcoffset"] = locs.apply(lambda r: get_utc_offset(lat=r.latitude, lon=r.longitude), axis="columns")
+
+        df = df.merge(locs[["siteid", "utcoffset"]], on="siteid", how="left")
+
+
     with _timer("Forming xarray Dataset"):
         df = df.dropna(subset=["latitude", "longitude"])
 
@@ -595,10 +622,9 @@ def get_ish_lite(
             vn = k
             ds[vn].attrs.update(units=u)
 
-        # # TODO: Fill in local time array
-        # # (in the df, not all sites have rows for all times, so we have NaTs at this point)
-        # if not daily:
-        #     ds["time_local"] = ds.time + ds.utcoffset.astype("timedelta64[h]")
+        # Fill in local time array
+        # (in the df, not all sites have rows for all times, so we have NaTs at this point)
+        ds["time_local"] = ds.time + (ds.utcoffset * 60).astype("timedelta64[m]")
 
         # Expand
         ds = (

From 743b68fddfce28b4e0956fccbe3ced62702088e1 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 17:53:05 -0400
Subject: [PATCH 03/11] Dask progress bars if verbose

---
 melodies_monet/_cli.py | 10 ++++++++++
 1 file changed, 10 insertions(+)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index bc554416..897b74c8 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -349,6 +349,11 @@ def get_airnow(
 
     DEBUG = debug
 
+    if verbose:
+        from dask.diagnostics import ProgressBar
+
+        ProgressBar().register()
+
     typer.echo(HEADER)
 
     start_date = pd.Timestamp(start_date)
@@ -514,6 +519,11 @@ def get_ish_lite(
 
     DEBUG = debug
 
+    if verbose:
+        from dask.diagnostics import ProgressBar
+
+        ProgressBar().register()
+
     typer.echo(HEADER)
 
     start_date = pd.Timestamp(start_date)

From 36ae41a861998a4673ca960d5c9d4766b8bae638 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 18:14:33 -0400
Subject: [PATCH 04/11] Add country or state selection for ISH-Lite CLI

---
 melodies_monet/_cli.py | 14 ++++++++++++--
 1 file changed, 12 insertions(+), 2 deletions(-)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 897b74c8..b9ff91e0 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -480,6 +480,13 @@ def get_ish_lite(
     start_date: str = typer.Option(..., "-s", "--start-date", help=f"Start date. {_DATE_FMT_NOTE}"),
     end_date: str = typer.Option(..., "-e", "--end-date", help=f"End date. {_DATE_FMT_NOTE} {_DATE_END_NOTE}"),
+    country: str = typer.Option(None, "--country",
+        help=(
+            "Two-letter country code (e.g., in order of site count, "
+            "US, RS, CA, AS, BR, IN, CH, NO, JA, UK, FR, ...)."
+        )
+    ),
+    state: str = typer.Option(None, "--state", help="Two-letter state code (e.g., MD, ...)."),
     out_name: str = typer.Option(None, "-o",
         help=(
             "Output file name (or full/relative path). "
             "By default the name is generated like 'ISH-Lite_<start-date>_<end-date>.nc'."
         )
     ),
@@ -506,7 +513,9 @@ def get_ish_lite(
     """Download ISH-Lite data using monetio and reformat for MM usage.
 
     Note that the data are stored in yearly files by site, so the runtime
-    mostly depends on the number of unique years that your date range includes.
+    mostly depends on the number of unique years that your date range includes,
+    as well as any site selection narrowing.
+    You can use --country or --state to select groups of sites.
     """
     import warnings
 
@@ -557,7 +566,8 @@ def get_ish_lite(
             )
             df = mio.ish_lite.add_data(
                 dates,
-                site="72224400358",
+                state=state,
+                country=country,
                 resample=False,
                 n_procs=num_workers,
                 verbose=verbose,

From 7359005efed5be1e641e8d42d61eefcde4c0974d Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 18:22:53 -0400
Subject: [PATCH 05/11] Other site vars

---
 melodies_monet/_cli.py | 18 ++++++++++++++++--
 1 file changed, 16 insertions(+), 2 deletions(-)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index b9ff91e0..1cc8619d 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -603,14 +603,28 @@ def get_utc_offset(*, lat, lon):
     with _timer("Forming xarray Dataset"):
         df = df.dropna(subset=["latitude", "longitude"])
 
+        df = df.rename(
+            columns={
+                "station name": "station_name",
+                "elev(m)": "elevation",
+            },
+            errors="ignore",
+        )
+
         site_vns = [
             "siteid",
-            "usaf",
-            "wban",
             "latitude",
             "longitude",
             "country",
             "state",
+            "station_name",
+            "usaf",
+            "wban",
+            "icao",
+            "elevation",
+            "utcoffset",
+            "begin",
+            "end",
         ]
         # NOTE: time_local not included since it varies in time as well

From b5ef20d5a722eadf2c343150cd19f3fa42d4d368 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 19 Apr 2023 18:26:34 -0400
Subject: [PATCH 06/11] notes

---
 melodies_monet/_cli.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 1cc8619d..bc82b44b 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -408,7 +408,7 @@ def get_airnow(
         "state_name",
         "epa_region",
     ]
-    # NOTE: time_local not included since it varies in time as well
+    # NOTE: time_local not included since it varies in time as well as by site
 
     if daily:
         site_vns.remove("utcoffset")  # not present in the daily data product
@@ -516,6 +516,7 @@ def get_ish_lite(
     mostly depends on the number of unique years that your date range includes,
     as well as any site selection narrowing.
     You can use --country or --state to select groups of sites.
+    ISH-Lite is an hourly product.
     """
     import warnings
 
@@ -626,7 +627,7 @@ def get_utc_offset(*, lat, lon):
             "begin",
             "end",
         ]
-        # NOTE: time_local not included since it varies in time as well
+        # NOTE: time_local not included since it varies in time as well as by site
 
         ds_site = (
             df[site_vns]

From 4ef3f68d84cf5b66b6863fda05479478650d1c5a Mon Sep 17 00:00:00 2001
From: zmoon
Date: Thu, 20 Apr 2023 10:12:28 -0400
Subject: [PATCH 07/11] Initial ISH cli

---
 melodies_monet/_cli.py | 208 +++++++++++++++++++++++++++++++++++++++++
 1 file changed, 208 insertions(+)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index bc82b44b..7a8e21e9 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -675,6 +675,214 @@ def get_utc_offset(*, lat, lon):
             ds.to_netcdf(dst / out_name)
 
 
+@app.command()
+def get_ish(
+    start_date: str = typer.Option(..., "-s", "--start-date", help=f"Start date. {_DATE_FMT_NOTE}"),
+    end_date: str = typer.Option(..., "-e", "--end-date", help=f"End date. {_DATE_FMT_NOTE} {_DATE_END_NOTE}"),
+    freq: str = typer.Option("H", "-f", "--freq", help=(
+            "Frequency to resample to. "
+            "Mean is used to reduce the time groups (as opposed to nearest, e.g.)."
+        )
+    ),
+    country: str = typer.Option(None, "--country",
+        help=(
+            "Two-letter country code (e.g., in order of site count, "
+            "US, RS, CA, AS, BR, IN, CH, NO, JA, UK, FR, ...)."
+        )
+    ),
+    state: str = typer.Option(None, "--state", help="Two-letter state code (e.g., MD, ...)."),
+    out_name: str = typer.Option(None, "-o",
+        help=(
+            "Output file name (or full/relative path). "
+            "By default the name is generated like 'ISH_<start-date>_<end-date>.nc'."
+        )
+    ),
+    dst: Path = typer.Option(".", "-d", "--dst", help=(
+            "Destination directory (to control output location "
+            "if using default output file name)."
+        )
+    ),
+    compress: bool = typer.Option(True, help=(
+            "If true, pack float to int and apply compression using zlib with complevel 7. "
+            "This can take time if the dataset is large, but can lead to "
+            "significant space savings."
+        )
+    ),
+    num_workers: int = typer.Option(1, "-n", "--num-workers", help="Number of download workers."),
+    verbose: bool = typer.Option(False),
+    debug: bool = typer.Option(
+        False, "--debug/", help="Print more messages (including full tracebacks)."
+    ),
+):
+    """Download ISH data using monetio and reformat for MM usage.
+
+    Note that the data are stored in yearly files by site, so the runtime
+    mostly depends on the number of unique years that your date range includes,
+    as well as any site selection narrowing.
+    You can use --country or --state to select groups of sites.
+    Time resolution may be sub-hourly, depending on site,
+    thus we resample to hourly by default.
+    """
+    import warnings
+
+    import monetio as mio
+    import pandas as pd
+
+    from .util.write_util import write_ncf
+
+    global DEBUG
+
+    DEBUG = debug
+
+    if verbose:
+        from dask.diagnostics import ProgressBar
+
+        ProgressBar().register()
+
+    typer.echo(HEADER)
+
+    start_date = pd.Timestamp(start_date)
+    end_date = pd.Timestamp(end_date)
+    dates = pd.date_range(start_date, end_date, freq="H")
+    if verbose:
+        print("Dates:")
+        print(dates)
+
+    # Set destination and file name
+    fmt = r"%Y%m%d"
+    if out_name is None:
+        out_name = f"ISH_{start_date:{fmt}}_{end_date:{fmt}}.nc"
+    else:
+        p = Path(out_name)
+        if p.name == out_name:
+            # `out_name` is just the file name
+            out_name = p.name
+        else:
+            # `out_name` has path
+            if dst != Path("."):
+                typer.echo(f"warning: overriding `dst` setting {dst.as_posix()!r} with `out_name` {p.as_posix()!r}")
+            dst = p.parent
+            out_name = p.name
+
+    with _timer("Fetching data with monetio"), _ignore_pandas_numeric_only_futurewarning():
+        with warnings.catch_warnings():
+            warnings.filterwarnings(
+                "ignore",
+                message="The (error|warn)_bad_lines argument has been deprecated"
+            )
+            df = mio.ish.add_data(
+                dates,
+                state=state,
+                country=country,
+                resample=True,
+                window=freq,
+                n_procs=num_workers,
+                verbose=verbose,
+            )
+
+    with _timer("Computing UTC offset for selected ISH sites"):
+        import datetime
+
+        from timezonefinder import TimezoneFinder
+        from pytz import timezone, utc
+
+        tf = TimezoneFinder(in_memory=True)
+        ref_date = datetime.datetime(2022, 1, 1, 0, 0)
+
+        def get_utc_offset(*, lat, lon):
+            s = tf.timezone_at(lng=lon, lat=lat)
+            assert s is not None
+
+            tz_target = timezone(s)
+            ref_date_tz_target = tz_target.localize(ref_date)
+            ref_date_utc = utc.localize(ref_date)
+            uo_h = (ref_date_utc - ref_date_tz_target).total_seconds() / 3600
+
+            return uo_h
+
+
+        locs = df[["siteid", "latitude", "longitude"]].groupby("siteid").first().reset_index()
+        locs["utcoffset"] = locs.apply(lambda r: get_utc_offset(lat=r.latitude, lon=r.longitude), axis="columns")
+
+        df = df.merge(locs[["siteid", "utcoffset"]], on="siteid", how="left")
+
+
+    with _timer("Forming xarray Dataset"):
+        df = (
+            df.dropna(subset=["latitude", "longitude"])
+            .rename(
+                columns={
+                    "station name": "station_name",
+                    "elev(m)": "elevation",
+                },
+                errors="ignore",
+            )
+            .drop(columns=["elev"], errors="ignore")  # keep just elevation from the site meta file
+        )
+
+        site_vns = [
+            "siteid",
+            "latitude",
+            "longitude",
+            "country",
+            "state",
+            "station_name",
+            "usaf",
+            "wban",
+            "icao",
+            "elevation",
+            "utcoffset",
+            "begin",
+            "end",
+        ]
+        # NOTE: time_local not included since it varies in time as well as by site
+
+        ds_site = (
+            df[site_vns]
+            .groupby("siteid")
+            .first()
+            .to_xarray()
+            .swap_dims(siteid="x")
+        )
+
+        # TODO: units?
+        units = {}
+
+        cols = list(df.columns)
+        ds = (
+            df[cols]
+            .set_index(["time", "siteid"])
+            .to_xarray()
+            .swap_dims(siteid="x")
+            .drop_vars(site_vns)
+            .merge(ds_site)
+            .set_coords(["latitude", "longitude"])
+            .assign(x=range(ds_site.dims["x"]))
+        )
+
+        # Add units
+        for k, u in units.items():
+            vn = k
+            ds[vn].attrs.update(units=u)
+
+        # Fill in local time array
+        # (in the df, not all sites have rows for all times, so we have NaTs at this point)
+        ds["time_local"] = ds.time + (ds.utcoffset * 60).astype("timedelta64[m]")
+
+        # Expand
+        ds = (
+            ds
+            .expand_dims("y")
+            .transpose("time", "y", "x")
+        )
+
+    with _timer("Writing netCDF file"):
+        if compress:
+            write_ncf(ds, dst / out_name, verbose=verbose)
+        else:
+            ds.to_netcdf(dst / out_name)
+
+
 cli = app

From b8b16e7cb80bbe6cf0e4e139c8d77b471994f2a4 Mon Sep 17 00:00:00 2001
From: zmoon
Date: Wed, 17 Jan 2024 13:11:23 -0700
Subject: [PATCH 08/11] Add --box option for get-ish-lite + test

The only difference between mine and the file Jordan shared with me was
for trying to get a box option to work
---
 melodies_monet/_cli.py                    | 15 ++++++++++++++-
 melodies_monet/tests/test_get_data_cli.py | 16 ++++++++++++++++
 2 files changed, 30 insertions(+), 1 deletion(-)

diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py
index 7a8e21e9..7c5d5b91 100644
--- a/melodies_monet/_cli.py
+++ b/melodies_monet/_cli.py
@@ -19,6 +19,8 @@
     )
     raise SystemExit(1)
 
+from typing import Tuple
+
 DEBUG = False
 INFO_COLOR = typer.colors.CYAN
 ERROR_COLOR = typer.colors.BRIGHT_RED
@@ -487,6 +489,13 @@ def get_ish_lite(
         )
     ),
     state: str = typer.Option(None, "--state", help="Two-letter state code (e.g., MD, ...)."),
+    box: Tuple[float, float, float, float] = typer.Option((None, None, None, None), "--box",
+        help=(
+            "Bounding box for site selection. "
+            "[latmin, lonmin, latmax, lonmax] in [-180, 180) format. "
+            "Can't be used if specifying country or state."
+        )
+    ),
     out_name: str = typer.Option(None, "-o",
         help=(
             "Output file name (or full/relative path). "
             "By default the name is generated like 'ISH-Lite_<start-date>_<end-date>.nc'."
         )
     ),
@@ -515,7 +524,7 @@ def get_ish_lite(
     Note that the data are stored in yearly files by site, so the runtime
     mostly depends on the number of unique years that your date range includes,
     as well as any site selection narrowing.
-    You can use --country or --state to select groups of sites.
+    You can use --country or --state or --box to select groups of sites.
     ISH-Lite is an hourly product.
""" import warnings @@ -543,6 +552,9 @@ def get_ish_lite( print("Dates:") print(dates) + if box == (None, None, None, None): + box = None + # Set destination and file name fmt = r"%Y%m%d" if out_name is None: @@ -567,6 +579,7 @@ def get_ish_lite( ) df = mio.ish_lite.add_data( dates, + box=box, state=state, country=country, resample=False, diff --git a/melodies_monet/tests/test_get_data_cli.py b/melodies_monet/tests/test_get_data_cli.py index 2b3e55e2..19e1dde9 100644 --- a/melodies_monet/tests/test_get_data_cli.py +++ b/melodies_monet/tests/test_get_data_cli.py @@ -82,3 +82,19 @@ def test_get_airnow_comp(tmp_path): ds[vn] = ds[vn].where(~ ((ds[vn] == 0) & (ds0[vn] != 0))) # assert (np.abs((ds[vn] - ds0[vn]) / ds0[vn]).to_series().dropna() < 2e-6).all() assert (np.abs(ds[vn] - ds0[vn]).to_series().dropna() < 3e-7).all() + + +def test_get_ish_lite_box(tmp_path): + fn = "x.nc" + cmd = [ + "melodies-monet", "get-ish-lite", + "-s", "2023-01-01", "-e", "2023-01-01 23:00", + "--box", "39.5", "-105.75", "40.5", "-104.75", + "--dst", tmp_path.as_posix(), "-o", fn, + ] + subprocess.run(cmd, check=True) + + ds = xr.open_dataset(tmp_path / fn) + + assert ds.time.size == 24 + assert np.unique(ds.state) == ["CO"] From f867d1797e14957540b61959184593b22885cd5e Mon Sep 17 00:00:00 2001 From: zmoon Date: Wed, 17 Jan 2024 13:26:39 -0700 Subject: [PATCH 09/11] Add --box option for get-ish as well it can time out in read though, even for this small subset --- melodies_monet/_cli.py | 15 +++++++++++++-- melodies_monet/tests/test_get_data_cli.py | 16 ++++++++++++++++ 2 files changed, 29 insertions(+), 2 deletions(-) diff --git a/melodies_monet/_cli.py b/melodies_monet/_cli.py index 7c5d5b91..af0b7656 100644 --- a/melodies_monet/_cli.py +++ b/melodies_monet/_cli.py @@ -492,7 +492,7 @@ def get_ish_lite( box: Tuple[float, float, float, float] = typer.Option((None, None, None, None), "--box", help=( "Bounding box for site selection. " - "[latmin, lonmin, latmax, lonmax] in [-180, 180) format. " + "(latmin, lonmin, latmax, lonmax) in [-180, 180) format. " "Can't be used if specifying country or state." ) ), @@ -704,6 +704,13 @@ def get_ish( ) ), state: str = typer.Option(None, "--state", help="Two-letter state code (e.g., MD, ...)."), + box: Tuple[float, float, float, float] = typer.Option((None, None, None, None), "--box", + help=( + "Bounding box for site selection. " + "(latmin, lonmin, latmax, lonmax) in [-180, 180) format. " + "Can't be used if specifying country or state." + ) + ), out_name: str = typer.Option(None, "-o", help=( "Output file name (or full/relative path). " @@ -732,7 +739,7 @@ def get_ish( Note that the data are stored in yearly files by site, so the runtime mostly depends on the number of unique years that your date range includes, as well as any site selection narrowing. - You can use --country or --state to select groups of sites. + You can use --country or --state or --box to select groups of sites. Time resolution may be sub-hourly, depending on site, thus we resample to hourly by default. 
""" @@ -761,6 +768,9 @@ def get_ish( print("Dates:") print(dates) + if box == (None, None, None, None): + box = None + # Set destination and file name fmt = r"%Y%m%d" if out_name is None: @@ -785,6 +795,7 @@ def get_ish( ) df = mio.ish.add_data( dates, + box=box, state=state, country=country, resample=True, diff --git a/melodies_monet/tests/test_get_data_cli.py b/melodies_monet/tests/test_get_data_cli.py index 19e1dde9..73e794e6 100644 --- a/melodies_monet/tests/test_get_data_cli.py +++ b/melodies_monet/tests/test_get_data_cli.py @@ -98,3 +98,19 @@ def test_get_ish_lite_box(tmp_path): assert ds.time.size == 24 assert np.unique(ds.state) == ["CO"] + + +def test_get_ish_box(tmp_path): + fn = "x.nc" + cmd = [ + "melodies-monet", "get-ish", + "-s", "2023-01-01", "-e", "2023-01-01 23:00", + "--box", "39.5", "-105.75", "40.5", "-104.75", + "--dst", tmp_path.as_posix(), "-o", fn, + ] + subprocess.run(cmd, check=True) + + ds = xr.open_dataset(tmp_path / fn) + + assert ds.time.size == 24 + assert np.unique(ds.state) == ["CO"] From 7bd69cb86054f92b16bd8526df71247943a7c0f8 Mon Sep 17 00:00:00 2001 From: zmoon Date: Thu, 18 Jan 2024 12:32:04 -0700 Subject: [PATCH 10/11] Update anaconda-miniconda link --- docs/appendix/machine-specific-install.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/appendix/machine-specific-install.rst b/docs/appendix/machine-specific-install.rst index f4aa435b..236155b8 100644 --- a/docs/appendix/machine-specific-install.rst +++ b/docs/appendix/machine-specific-install.rst @@ -60,7 +60,7 @@ environment for running and developing MELODIES MONET. * You will need a NOAA HPC account to access the RDHPCS wiki link above. * Both Anaconda/Miniconda will work well for MELODIES MONET. See - `conda instructions `__ + `conda instructions `__ to determine, which is the best option for you. * Pick a directory for your download and run the following wget command with From 91d8a3e494c85236984344670d0749e54932ba8e Mon Sep 17 00:00:00 2001 From: zmoon Date: Thu, 18 Jan 2024 12:33:15 -0700 Subject: [PATCH 11/11] Add timezonefinder to env for computing UTC offsets for ISH sites --- docs/environment-docs.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/environment-docs.yml b/docs/environment-docs.yml index 7fa4cbce..f0d0d7c6 100644 --- a/docs/environment-docs.yml +++ b/docs/environment-docs.yml @@ -16,6 +16,7 @@ dependencies: # # Extras - pooch + - timezonefinder - typer - wrf-python # for WRF-Chem reader in monetio #