From 11a478781037d8d31b0a583f4cb4baa0f8341547 Mon Sep 17 00:00:00 2001 From: Romain Beucher Date: Mon, 2 Dec 2024 22:55:38 +1000 Subject: [PATCH 01/36] Fix 2593 Change log INFO to DEBUG (#2600) --- esmvalcore/_task.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index 66018c2789..27a6b83d14 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -392,7 +392,7 @@ def _initialize_cmd(self): script_file = Path(script).expanduser().absolute() err_msg = f"Cannot execute script '{script}' ({script_file})" if not script_file.is_file(): - logger.info( + logger.debug( "No local diagnostic script found. Attempting to load the script from the base repository." ) # Check if esmvaltool package is available From 3a57194dc5a6949f4418bc31a5311dc4eaa73acb Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Tue, 3 Dec 2024 12:23:09 +0100 Subject: [PATCH 02/36] Save all files in a task at the same time to avoid recomputing intermediate results (#2522) Co-authored-by: Valeriu Predoi --- doc/quickstart/configure.rst | 54 +++- environment.yml | 1 + esmvalcore/_recipe/recipe.py | 7 +- esmvalcore/config/_config_validators.py | 1 + .../configurations/defaults/logging.yml | 2 + esmvalcore/preprocessor/__init__.py | 29 ++- esmvalcore/preprocessor/_dask_progress.py | 242 ++++++++++++++++++ esmvalcore/preprocessor/_io.py | 61 +++-- pyproject.toml | 1 + .../integration/preprocessor/_io/test_save.py | 65 +++-- .../preprocessor/test_preprocessing_task.py | 2 +- tests/integration/recipe/test_recipe.py | 2 + tests/unit/config/test_config.py | 1 + tests/unit/preprocessor/test_dask_progress.py | 51 ++++ .../preprocessor/test_preprocessor_file.py | 1 + tests/unit/recipe/test_recipe.py | 16 +- 16 files changed, 480 insertions(+), 56 deletions(-) create mode 100644 esmvalcore/config/configurations/defaults/logging.yml create mode 100644 esmvalcore/preprocessor/_dask_progress.py create mode 100644 tests/unit/preprocessor/test_dask_progress.py diff --git a/doc/quickstart/configure.rst b/doc/quickstart/configure.rst index c65fdbd1c5..baf4dd2998 100644 --- a/doc/quickstart/configure.rst +++ b/doc/quickstart/configure.rst @@ -129,8 +129,8 @@ More information about this can be found :ref:`here `. .. _config_options: -Configuration options -===================== +Top level configuration options +=============================== Note: the following entries use Python syntax. For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's @@ -170,6 +170,8 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's | ``log_level`` | Log level of the console (``debug``, | :obj:`str` | ``info`` | | | ``info``, ``warning``, ``error``) | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| ``logging`` | :ref:`config-logging` | :obj:`dict` | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_datasets`` | Maximum number of datasets to use, see | :obj:`int` | ``None`` (all datasets from recipe) | | | :ref:`running` | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ @@ -269,6 +271,54 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's will be downloaded; otherwise, local data will be used. +.. 
_config-logging: + +Logging configuration +===================== + +Configure what information is logged and how it is presented in the ``logging`` +section. + +.. note:: + + Not all logging configuration is available here yet, see :issue:`2596`. + +Configuration file example: + +.. code:: yaml + + logging: + log_progress_interval: 10s + +will log progress of Dask computations every 10 seconds instead of showing a +progress bar. + +Command line example: + +.. code:: bash + + esmvaltool run --logging='{"log_progress_interval": "1m"}' recipe_example.yml + + +will log progress of Dask computations every minute instead of showing a +progress bar. + +Available options: + ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| Option | Description | Type | Default value | ++===============================+========================================+=============================+========================================+ +| ``log_progress_interval`` | When running computations with Dask, | :obj:`str` or :obj:`float` | 0 | +| | log progress every | | | +| | ``log_progress_interval`` instead of | | | +| | showing a progress bar. The value can | | | +| | be specified in the format accepted by | | | +| | :func:`dask.utils.parse_timedelta`. A | | | +| | negative value disables any progress | | | +| | reporting. A progress bar is only | | | +| | shown if ``max_parallel_tasks: 1``. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ + .. _config-dask: Dask configuration diff --git a/environment.yml b/environment.yml index 7a4b6e2201..321b0484c5 100644 --- a/environment.yml +++ b/environment.yml @@ -40,6 +40,7 @@ dependencies: - python-stratify >=0.3 - pyyaml - requests + - rich - scipy >=1.6 - shapely >=2.0.0 - yamale diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index 55e789d6f4..8d4809ffa0 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -220,7 +220,10 @@ def _get_default_settings(dataset): settings["remove_supplementary_variables"] = {} # Configure saving cubes to file - settings["save"] = {"compress": session["compress_netcdf"]} + settings["save"] = { + "compress": session["compress_netcdf"], + "compute": False, + } if facets["short_name"] != facets["original_short_name"]: settings["save"]["alias"] = facets["short_name"] @@ -381,6 +384,8 @@ def _get_downstream_settings(step, order, products): if key in remaining_steps: if all(p.settings.get(key, object()) == value for p in products): settings[key] = value + # Set the compute argument to the save step. 
+ settings["save"] = {"compute": some_product.settings["save"]["compute"]} return settings diff --git a/esmvalcore/config/_config_validators.py b/esmvalcore/config/_config_validators.py index 0722b346b5..b12ed08204 100644 --- a/esmvalcore/config/_config_validators.py +++ b/esmvalcore/config/_config_validators.py @@ -332,6 +332,7 @@ def validate_extra_facets_dir(value): "exit_on_warning": validate_bool, "extra_facets_dir": validate_extra_facets_dir, "log_level": validate_string, + "logging": validate_dict, "max_datasets": validate_int_positive_or_none, "max_parallel_tasks": validate_int_or_none, "max_years": validate_int_positive_or_none, diff --git a/esmvalcore/config/configurations/defaults/logging.yml b/esmvalcore/config/configurations/defaults/logging.yml new file mode 100644 index 0000000000..d1cd1948f2 --- /dev/null +++ b/esmvalcore/config/configurations/defaults/logging.yml @@ -0,0 +1,2 @@ +logging: + log_progress_interval: 0. diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 851aae49f0..2c956aa0ad 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -9,6 +9,7 @@ from pprint import pformat from typing import Any, Iterable +from dask.delayed import Delayed from iris.cube import Cube from .._provenance import TrackedFile @@ -25,6 +26,7 @@ ) from ._compare_with_refs import bias, distance_metric from ._cycles import amplitude +from ._dask_progress import _compute_with_progress from ._derive import derive from ._detrend import detrend from ._io import ( @@ -428,6 +430,9 @@ def preprocess( ) ) + if step == "save": + return result + items = [] for item in result: if isinstance(item, (PreprocessorFile, Cube, str, Path)): @@ -536,22 +541,24 @@ def cubes(self): def cubes(self, value): self._cubes = value - def save(self): + def save(self) -> Delayed | None: """Save cubes to disk.""" - preprocess( + return preprocess( self._cubes, "save", input_files=self._input_files, **self.settings["save"], - ) + )[0] - def close(self): + def close(self) -> Delayed | None: """Close the file.""" + result = None if self._cubes is not None: self._update_attributes() - self.save() + result = self.save() self._cubes = None self.save_provenance() + return result def _update_attributes(self): """Update product attributes from cube metadata.""" @@ -693,7 +700,7 @@ def _initialize_products(self, products): for product in products: product.initialize_provenance(self.activity) - def _run(self, _): + def _run(self, _) -> list[str]: """Run the preprocessor.""" self._initialize_product_provenance() @@ -703,6 +710,7 @@ def _run(self, _): blocks = get_step_blocks(steps, self.order) saved = set() + delayeds = [] for block in blocks: logger.debug("Running block %s", block) if block[0] in MULTI_MODEL_FUNCTIONS: @@ -718,14 +726,19 @@ def _run(self, _): product.apply(step, self.debug) if block == blocks[-1]: product.cubes # noqa: B018 pylint: disable=pointless-statement - product.close() + delayed = product.close() + delayeds.append(delayed) saved.add(product.filename) for product in self.products: if product.filename not in saved: product.cubes # noqa: B018 pylint: disable=pointless-statement - product.close() + delayed = product.close() + delayeds.append(delayed) + logger.info("Computing and saving data for task %s", self.name) + delayeds = [d for d in delayeds if d is not None] + _compute_with_progress(delayeds, description=self.name) metadata_files = write_metadata( self.products, self.write_ncl_interface ) diff --git 
a/esmvalcore/preprocessor/_dask_progress.py b/esmvalcore/preprocessor/_dask_progress.py new file mode 100644 index 0000000000..bcfc3380d5 --- /dev/null +++ b/esmvalcore/preprocessor/_dask_progress.py @@ -0,0 +1,242 @@ +"""Progress bars for use with Dask.""" + +from __future__ import annotations + +import contextlib +import datetime +import logging +import threading +import time +from collections.abc import Iterable + +import dask.diagnostics +import distributed +import rich.progress +from dask.delayed import Delayed + +from esmvalcore.config import CFG + +logger = logging.getLogger(__name__) + + +class RichProgressBar(dask.diagnostics.Callback): + """Progress bar using `rich` for the Dask default scheduler.""" + + # Disable warnings about design choices that have been made in the base class. + # pylint: disable=method-hidden,super-init-not-called,too-few-public-methods,unused-argument,useless-suppression + + # Adapted from https://github.com/dask/dask/blob/0f3e5ff6e642e7661b3f855bfd192a6f6fb83b49/dask/diagnostics/progress.py#L32-L153 + def __init__(self): + self.progress = rich.progress.Progress( + rich.progress.TaskProgressColumn(), + rich.progress.BarColumn(bar_width=80), + rich.progress.MofNCompleteColumn(), + rich.progress.TimeElapsedColumn(), + redirect_stdout=False, + redirect_stderr=False, + ) + self.task = self.progress.add_task(description="progress") + self._dt = 0.1 + self._state = None + self._running = False + self._timer = None + + def _start(self, dsk): + self._state = None + # Start background thread + self._running = True + self._timer = threading.Thread(target=self._timer_func) + self._timer.daemon = True + self._timer.start() + + def _start_state(self, dsk, state): + self.progress.start() + total = sum( + len(state[k]) for k in ["ready", "waiting", "running", "finished"] + ) + self.progress.update(self.task, total=total) + + def _pretask(self, key, dsk, state): + self._state = state + + def _finish(self, dsk, state, errored): + self._running = False + self._timer.join() + self._draw_bar() + self.progress.stop() + + def _timer_func(self): + """Background thread for updating the progress bar.""" + while self._running: + self._draw_bar() + time.sleep(self._dt) + + def _draw_bar(self): + state = self._state + completed = len(state["finished"]) if state else 0 + self.progress.update(self.task, completed=completed) + + +class RichDistributedProgressBar( + distributed.diagnostics.progressbar.TextProgressBar +): + """Progress bar using `rich` for the Dask distributed scheduler.""" + + # Disable warnings about design choices that have been made in the base class. 
+ # pylint: disable=too-few-public-methods,unused-argument,useless-suppression + + def __init__(self, keys) -> None: + self.progress = rich.progress.Progress( + rich.progress.TaskProgressColumn(), + rich.progress.BarColumn(bar_width=80), + rich.progress.MofNCompleteColumn(), + rich.progress.TimeElapsedColumn(), + redirect_stdout=False, + redirect_stderr=False, + ) + self.progress.start() + self.task_id = self.progress.add_task(description="progress") + super().__init__(keys) + + def _draw_bar(self, remaining, all, **kwargs): # pylint: disable=redefined-builtin + completed = all - remaining + self.progress.update(self.task_id, completed=completed, total=all) + + def _draw_stop(self, **kwargs): + if kwargs.get("status") == "finished": + self._draw_bar(remaining=0, all=self.progress.tasks[0].total) + self.progress.stop() + + +class ProgressLogger(dask.diagnostics.ProgressBar): + """Progress logger for the Dask default scheduler.""" + + # Disable warnings about design choices that have been made in the base class. + # pylint: disable=too-few-public-methods,unused-argument,useless-suppression + + def __init__( + self, + log_interval: str | float = "1s", + description: str = "", + ) -> None: + self._desc = f"{description} " if description else description + self._log_interval = dask.utils.parse_timedelta( + log_interval, default="s" + ) + self._prev_elapsed = 0.0 + interval = dask.utils.parse_timedelta("1s", default="s") + super().__init__(dt=interval) + self._file = None + + def _draw_bar(self, frac: float, elapsed: float) -> None: + if (elapsed - self._prev_elapsed) < self._log_interval and frac != 1.0: + return + self._prev_elapsed = elapsed + pbar = "#" * int(self._width * frac) + percent = int(100 * frac) + elapsed_fmt = dask.utils.format_time(elapsed) + desc_width = 30 + msg = ( + f"{self._desc:<{desc_width}}[{pbar:<{self._width}}] | " + f"{percent:3}% Completed | {elapsed_fmt}" + ) + logger.info(msg) + + +class DistributedProgressLogger( + distributed.diagnostics.progressbar.TextProgressBar +): + """Progress logger for the Dask distributed scheduler.""" + + # Disable warnings about design choices that have been made in the base class. 
+ # pylint: disable=too-few-public-methods,unused-argument,useless-suppression + + def __init__( + self, + keys, + log_interval: str | float = "1s", + description: str = "", + ) -> None: + self._desc = f"{description} " if description else description + self._log_interval = dask.utils.parse_timedelta( + log_interval, default="s" + ) + self._prev_elapsed = 0.0 + super().__init__(keys, interval="1s") + + def _draw_bar( + self, + remaining: int, + all: int, # pylint: disable=redefined-builtin + **kwargs, + ) -> None: + frac = (1 - remaining / all) if all else 1.0 + if ( + self.elapsed - self._prev_elapsed + ) < self._log_interval and frac != 1.0: + return + self._prev_elapsed = self.elapsed + pbar = "#" * int(self.width * frac) + percent = int(100 * frac) + elapsed = dask.utils.format_time(self.elapsed) + desc_width = 30 + msg = ( + f"{self._desc:<{desc_width}}[{pbar:<{self.width}}] | " + f"{percent:3}% Completed | {elapsed}" + ) + logger.info(msg) + + def _draw_stop(self, **kwargs): + pass + + +def _compute_with_progress( + delayeds: Iterable[Delayed], + description: str, +) -> None: + """Compute delayeds while displaying a progress bar.""" + use_distributed = True + try: + distributed.get_client() + except ValueError: + use_distributed = False + + log_progress_interval = CFG["logging"]["log_progress_interval"] + if isinstance(log_progress_interval, (str, datetime.timedelta)): + log_progress_interval = dask.utils.parse_timedelta( + log_progress_interval + ) + + if CFG["max_parallel_tasks"] != 1 and log_progress_interval == 0.0: + # Enable progress logging if `max_parallel_tasks` > 1 to avoid clutter. + log_progress_interval = 10.0 + + # There are three possible options, depending on the value of + # CFG["log_progress_interval"]: + # < 0: no progress reporting + # = 0: show progress bar + # > 0: log progress at this interval + if log_progress_interval < 0.0: + dask.compute(delayeds) + elif use_distributed: + futures = dask.persist(delayeds) + futures = distributed.client.futures_of(futures) + if log_progress_interval == 0.0: + RichDistributedProgressBar(futures) + else: + DistributedProgressLogger( + futures, + log_interval=log_progress_interval, + description=description, + ) + dask.compute(futures) + else: + if log_progress_interval == 0.0: + ctx: contextlib.AbstractContextManager = RichProgressBar() + else: + ctx = ProgressLogger( + description=description, + log_interval=log_progress_interval, + ) + with ctx: + dask.compute(delayeds) diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index 83f4d9bae5..eccac411f2 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -5,6 +5,7 @@ import copy import logging import os +from collections.abc import Sequence from itertools import groupby from pathlib import Path from typing import NamedTuple, Optional @@ -17,6 +18,7 @@ import numpy as np import yaml from cf_units import suppress_errors +from dask.delayed import Delayed from iris.cube import CubeList from esmvalcore.cmor.check import CheckLevels @@ -405,19 +407,25 @@ def concatenate(cubes, check_level=CheckLevels.DEFAULT): def save( - cubes, filename, optimize_access="", compress=False, alias="", **kwargs -): + cubes: Sequence[iris.cube.Cube], + filename: Path | str, + optimize_access: str = "", + compress: bool = False, + alias: str = "", + compute: bool = True, + **kwargs, +) -> Delayed | None: """Save iris cubes to file. 
Parameters ---------- - cubes: iterable of iris.cube.Cube + cubes: Data cubes to be saved - filename: str + filename: Name of target file - optimize_access: str + optimize_access: Set internal NetCDF chunking to favour a reading scheme Values can be map or timeseries, which improve performance when @@ -426,16 +434,30 @@ def save( case the better performance will be avhieved by loading all the values in that coordinate at a time - compress: bool, optional + compress: Use NetCDF internal compression. - alias: str, optional + alias: Var name to use when saving instead of the one in the cube. + compute : bool, default=True + Default is ``True``, meaning complete the file immediately, and return ``None``. + + When ``False``, create the output file but don't write any lazy array content to + its variables, such as lazy cube data or aux-coord points and bounds. + Instead return a :class:`dask.delayed.Delayed` which, when computed, will + stream all the lazy content via :meth:`dask.store`, to complete the file. + Several such data saves can be performed in parallel, by passing a list of them + into a :func:`dask.compute` call. + + **kwargs: + See :func:`iris.fileformats.netcdf.saver.save` for additional + keyword arguments. + Returns ------- - str - filename + :class:`dask.delayed.Delayed` or :obj:`None` + A delayed object that can be used to save the data in the cube. Raises ------ @@ -445,6 +467,9 @@ def save( if not cubes: raise ValueError(f"Cannot save empty cubes '{cubes}'") + if Path(filename).suffix.lower() == ".nc": + kwargs["compute"] = compute + # Rename some arguments kwargs["target"] = filename kwargs["zlib"] = compress @@ -462,7 +487,7 @@ def save( cubes, filename, ) - return filename + return None for cube in cubes: logger.debug( @@ -480,13 +505,11 @@ def save( elif optimize_access == "timeseries": dims = set(cube.coord_dims("time")) else: - dims = tuple() - for coord_dims in ( - cube.coord_dims(dimension) - for dimension in optimize_access.split(" ") - ): - dims += coord_dims - dims = set(dims) + dims = { + dim + for coord_name in optimize_access.split(" ") + for dim in cube.coord_dims(coord_name) + } kwargs["chunksizes"] = tuple( length if index in dims else 1 @@ -512,9 +535,7 @@ def save( category=UserWarning, module="iris", ) - iris.save(cubes, **kwargs) - - return filename + return iris.save(cubes, **kwargs) def _get_debug_filename(filename, step): diff --git a/pyproject.toml b/pyproject.toml index 1c30bd95ee..61de91ae25 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -61,6 +61,7 @@ dependencies = [ "pybtex", "pyyaml", "requests", + "rich", "scipy>=1.6", "scitools-iris>=3.11", # 3.11 first to support Numpy 2 and Python 3.13 "shapely>=2.0.0", diff --git a/tests/integration/preprocessor/_io/test_save.py b/tests/integration/preprocessor/_io/test_save.py index 0e4f6b4366..20278fb155 100644 --- a/tests/integration/preprocessor/_io/test_save.py +++ b/tests/integration/preprocessor/_io/test_save.py @@ -1,9 +1,13 @@ """Integration tests for :func:`esmvalcore.preprocessor.save`.""" +import logging +import re + import iris import netCDF4 import numpy as np import pytest +from dask.delayed import Delayed from iris.coords import DimCoord from iris.cube import Cube, CubeList @@ -59,32 +63,51 @@ def _check_chunks(path, expected_chunks): def test_save(cube, filename): """Test save.""" - path = save([cube], filename) - loaded_cube = iris.load_cube(path) + delayed = save([cube], filename) + assert delayed is None + loaded_cube = iris.load_cube(filename) + _compare_cubes(cube, loaded_cube) + 
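# One way the new ``compute=False`` mode of ``esmvalcore.preprocessor.save``
# can be used (a minimal sketch; ``cube_a``, ``cube_b`` and the target paths
# are hypothetical placeholders for cubes holding lazy data):
#
#     import dask
#
#     from esmvalcore.preprocessor import save
#
#     # Create both output files, but defer writing the lazy array content.
#     delayed_a = save([cube_a], "a.nc", compute=False)
#     delayed_b = save([cube_b], "b.nc", compute=False)
#
#     # Stream the lazy data to both files in a single Dask computation, so
#     # intermediate results shared between the cubes are only computed once.
#     dask.compute([delayed_a, delayed_b])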
+ +def test_delayed_save(cube, filename): + """Test save.""" + delayed = save([cube], filename, compute=False) + assert isinstance(delayed, Delayed) + delayed.compute() + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) +def test_save_noop(cube, filename, caplog): + """Test save.""" + cube.data = cube.lazy_data() + save([cube], filename) + with caplog.at_level(logging.DEBUG): + save([cube], filename) + assert re.findall("Not saving cubes .* to avoid data loss.", caplog.text) + + def test_save_create_parent_dir(cube, tmp_path): filename = tmp_path / "preproc" / "something" / "test.nc" - path = save([cube], filename) - loaded_cube = iris.load_cube(path) + save([cube], filename) + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) def test_save_alias(cube, filename): """Test save.""" - path = save([cube], filename, alias="alias") - loaded_cube = iris.load_cube(path) + save([cube], filename, alias="alias") + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) assert loaded_cube.var_name == "alias" def test_save_zlib(cube, filename): """Test save.""" - path = save([cube], filename, compress=True) - loaded_cube = iris.load_cube(path) + save([cube], filename, compress=True) + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) - with netCDF4.Dataset(path, "r") as handler: + with netCDF4.Dataset(filename, "r") as handler: sample_filters = handler.variables["sample"].filters() assert sample_filters["zlib"] is True assert sample_filters["shuffle"] is True @@ -106,32 +129,32 @@ def test_fail_without_filename(cube): def test_save_optimized_map(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access="map") - loaded_cube = iris.load_cube(path) + save([cube], filename, optimize_access="map") + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) - _check_chunks(path, [2, 2, 1]) + _check_chunks(filename, [2, 2, 1]) def test_save_optimized_timeseries(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access="timeseries") - loaded_cube = iris.load_cube(path) + save([cube], filename, optimize_access="timeseries") + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) - _check_chunks(path, [1, 1, 2]) + _check_chunks(filename, [1, 1, 2]) def test_save_optimized_lat(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access="latitude") - loaded_cube = iris.load_cube(path) + save([cube], filename, optimize_access="latitude") + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) expected_chunks = [2, 1, 1] - _check_chunks(path, expected_chunks) + _check_chunks(filename, expected_chunks) def test_save_optimized_lon_time(cube, filename): """Test save.""" - path = save([cube], filename, optimize_access="longitude time") - loaded_cube = iris.load_cube(path) + save([cube], filename, optimize_access="longitude time") + loaded_cube = iris.load_cube(filename) _compare_cubes(cube, loaded_cube) - _check_chunks(path, [1, 2, 2]) + _check_chunks(filename, [1, 2, 2]) diff --git a/tests/integration/preprocessor/test_preprocessing_task.py b/tests/integration/preprocessor/test_preprocessing_task.py index 6b3023f1d2..5b74a94cda 100644 --- a/tests/integration/preprocessor/test_preprocessing_task.py +++ b/tests/integration/preprocessor/test_preprocessing_task.py @@ -24,7 +24,7 @@ def test_load_save_task(tmp_path): [ PreprocessorFile( filename=tmp_path / "tas_out.nc", - settings={}, + settings={"save": {"compute": 
False}}, datasets=[dataset], ), ] diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index f486db1657..90b4985a6e 100644 --- a/tests/integration/recipe/test_recipe.py +++ b/tests/integration/recipe/test_recipe.py @@ -110,6 +110,7 @@ def _get_default_settings_for_chl(save_filename): "save": { "compress": False, "filename": save_filename, + "compute": False, }, } return defaults @@ -693,6 +694,7 @@ def test_default_fx_preprocessor(tmp_path, patched_datafinder, session): "save": { "compress": False, "filename": product.filename, + "compute": False, }, } assert product.settings == defaults diff --git a/tests/unit/config/test_config.py b/tests/unit/config/test_config.py index 194724a317..44b8d6ee3e 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test_config.py @@ -191,6 +191,7 @@ def test_load_default_config(cfg_default, monkeypatch): "exit_on_warning": False, "extra_facets_dir": [], "log_level": "info", + "logging": {"log_progress_interval": 0.0}, "max_datasets": None, "max_parallel_tasks": None, "max_years": None, diff --git a/tests/unit/preprocessor/test_dask_progress.py b/tests/unit/preprocessor/test_dask_progress.py new file mode 100644 index 0000000000..6712272386 --- /dev/null +++ b/tests/unit/preprocessor/test_dask_progress.py @@ -0,0 +1,51 @@ +"""Test :mod:`esmvalcore.preprocessor._dask_progress`.""" + +import logging +import time + +import dask +import distributed +import pytest + +from esmvalcore.preprocessor import _dask_progress + + +@pytest.mark.parametrize("use_distributed", [False, True]) +@pytest.mark.parametrize("interval", [-1, 0.0, 0.2]) +def test_compute_with_progress( + capsys, + caplog, + monkeypatch, + use_distributed, + interval, +): + caplog.set_level(logging.INFO) + if use_distributed: + client = distributed.Client(n_workers=1, threads_per_worker=1) + else: + client = None + + monkeypatch.setitem(_dask_progress.CFG, "max_parallel_tasks", 1) + monkeypatch.setitem( + _dask_progress.CFG["logging"], + "log_progress_interval", + f"{interval}s" if interval > 0 else interval, + ) + + def func(delay: float) -> None: + time.sleep(delay) + + delayeds = [dask.delayed(func)(0.11)] + _dask_progress._compute_with_progress(delayeds, description="test") + if interval == 0.0: + # Assert that some progress bar has been written to stdout. + progressbar = capsys.readouterr().out + else: + # Assert that some progress bar has been logged. 
+ progressbar = caplog.text + if interval < 0.0: + assert not progressbar + else: + assert "100%" in progressbar + if client is not None: + client.shutdown() diff --git a/tests/unit/preprocessor/test_preprocessor_file.py b/tests/unit/preprocessor/test_preprocessor_file.py index d386dbc1e6..6e845e8aea 100644 --- a/tests/unit/preprocessor/test_preprocessor_file.py +++ b/tests/unit/preprocessor/test_preprocessor_file.py @@ -162,4 +162,5 @@ def test_save(mock_preprocess): mock.call( mock.sentinel.cubes, "save", input_files=mock.sentinel.input_files ), + mock.call().__getitem__(0), ] diff --git a/tests/unit/recipe/test_recipe.py b/tests/unit/recipe/test_recipe.py index 9934f02d3b..5acc625c8d 100644 --- a/tests/unit/recipe/test_recipe.py +++ b/tests/unit/recipe/test_recipe.py @@ -243,7 +243,10 @@ def test_multi_model_filename_full(): def test_update_multiproduct_multi_model_statistics(): """Test ``_update_multiproduct``.""" - settings = {"multi_model_statistics": {"statistics": ["mean", "std_dev"]}} + settings = { + "multi_model_statistics": {"statistics": ["mean", "std_dev"]}, + "save": {"compute": False}, + } common_attributes = { "project": "CMIP6", "diagnostic": "d", @@ -358,6 +361,7 @@ def test_update_multiproduct_multi_model_statistics_percentile(): {"operator": "percentile", "percent": 95.0}, ] }, + "save": {"compute": False}, } common_attributes = { "project": "CMIP6", @@ -468,7 +472,8 @@ def test_update_multiproduct_multi_model_statistics_percentile(): def test_update_multiproduct_ensemble_statistics(): """Test ``_update_multiproduct``.""" settings = { - "ensemble_statistics": {"statistics": ["median"], "span": "full"} + "ensemble_statistics": {"statistics": ["median"], "span": "full"}, + "save": {"compute": False}, } common_attributes = { "dataset": "CanESM2", @@ -539,6 +544,7 @@ def test_update_multiproduct_ensemble_statistics_percentile(): ], "span": "full", }, + "save": {"compute": False}, } common_attributes = { @@ -773,7 +779,11 @@ def test_get_default_settings(mocker): settings = _recipe._get_default_settings(dataset) assert settings == { "remove_supplementary_variables": {}, - "save": {"compress": False, "alias": "sic"}, + "save": { + "compress": False, + "alias": "sic", + "compute": False, + }, } From 0f9e8884ef4d6c0ba8e5fb78b4988ba0f26a9185 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 3 Dec 2024 14:46:53 +0000 Subject: [PATCH 03/36] [pre-commit.ci] pre-commit autoupdate (#2604) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cea5c5c2cc..8b7004b93d 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.0" + rev: "v0.8.1" hooks: - id: ruff args: [--fix] From 4c36a0c7be2a48b32e1394e6e3212c9f7913a2f7 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Tue, 3 Dec 2024 18:18:48 +0100 Subject: [PATCH 04/36] Avoid a crash when there is a timeout when shutting down the Dask cluster (#2580) --- esmvalcore/config/_dask.py | 9 ++++++++- tests/unit/config/test_dask.py | 7 +++++-- 2 files changed, 13 insertions(+), 3 deletions(-) diff --git a/esmvalcore/config/_dask.py b/esmvalcore/config/_dask.py index effd33058f..4de51e4aef 100644 --- 
a/esmvalcore/config/_dask.py +++ b/esmvalcore/config/_dask.py @@ -80,4 +80,11 @@ def get_distributed_client(): if client is not None: client.close() if cluster is not None: - cluster.close() + try: + cluster.close() + except TimeoutError: + logger.warning( + "Timeout while trying to shut down the cluster at %s, " + "you may want to check it was stopped.", + cluster.scheduler_address, + ) diff --git a/tests/unit/config/test_dask.py b/tests/unit/config/test_dask.py index e965c90a2e..8efc305023 100644 --- a/tests/unit/config/test_dask.py +++ b/tests/unit/config/test_dask.py @@ -37,7 +37,8 @@ def test_get_distributed_client_external(mocker, tmp_path, warn_unused_args): mock_client.close.assert_called() -def test_get_distributed_client_slurm(mocker, tmp_path): +@pytest.mark.parametrize("shutdown_timeout", [False, True]) +def test_get_distributed_client_slurm(mocker, tmp_path, shutdown_timeout): cfg = { "cluster": { "type": "dask_jobqueue.SLURMCluster", @@ -66,10 +67,12 @@ def test_get_distributed_client_slurm(mocker, tmp_path): create_autospec=True, return_value=mock_module, ) + mock_cluster = mock_cluster_cls.return_value + if shutdown_timeout: + mock_cluster.close.side_effect = TimeoutError with _dask.get_distributed_client() as client: assert client is mock_client mock_client.close.assert_called() - mock_cluster = mock_cluster_cls.return_value _dask.Client.assert_called_with(address=mock_cluster.scheduler_address) args = {k: v for k, v in cfg["cluster"].items() if k != "type"} mock_cluster_cls.assert_called_with(**args) From 65c7b28e883d24eb2471cf34a215553b8895a3c5 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Fri, 6 Dec 2024 12:41:49 +0100 Subject: [PATCH 05/36] Add support for native ERA5 data in GRIB format (#2178) Co-authored-by: Valeriu Predoi Co-authored-by: Bouwe Andela Co-authored-by: Bettina Gier --- doc/quickstart/configure.rst | 2 +- doc/quickstart/find_data.rst | 109 ++- esmvalcore/_provenance.py | 8 +- esmvalcore/_recipe/recipe.py | 30 + esmvalcore/cmor/_fixes/fix.py | 2 + esmvalcore/cmor/_fixes/native6/era5.py | 186 ++++- esmvalcore/config-developer.yml | 2 + .../configurations/defaults/config-user.yml | 4 +- .../config/extra_facets/native6-era5.yml | 196 ++++++ esmvalcore/preprocessor/_io.py | 9 +- pyproject.toml | 2 +- .../cmor/_fixes/native6/test_era5.py | 666 ++++++++++++++---- tests/integration/cmor/test_fix.py | 30 +- tests/integration/conftest.py | 38 +- .../integration/preprocessor/_io/test_load.py | 20 + tests/integration/recipe/test_recipe.py | 113 +++ tests/sample_data/iris-sample-data/LICENSE | 10 + .../iris-sample-data/polar_stereo.grib2 | Bin 0 -> 25934 bytes tests/unit/provenance/test_trackedfile.py | 66 +- 19 files changed, 1296 insertions(+), 197 deletions(-) create mode 100644 esmvalcore/config/extra_facets/native6-era5.yml create mode 100644 tests/sample_data/iris-sample-data/LICENSE create mode 100644 tests/sample_data/iris-sample-data/polar_stereo.grib2 diff --git a/doc/quickstart/configure.rst b/doc/quickstart/configure.rst index baf4dd2998..78ce5dcea2 100644 --- a/doc/quickstart/configure.rst +++ b/doc/quickstart/configure.rst @@ -974,7 +974,7 @@ infrastructure. The following example illustrates the concept. .. _extra-facets-example-1: .. 
code-block:: yaml - :caption: Extra facet example file `native6-era5.yml` + :caption: Extra facet example file `native6-era5-example.yml` ERA5: Amon: diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index b7708fd95f..d93f114f21 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -107,18 +107,27 @@ The following native reanalysis/observational datasets are supported under the To use these datasets, put the files containing the data in the directory that you have :ref:`configured ` for the ``rootpath`` of the ``native6`` project, in a subdirectory called -``Tier{tier}/{dataset}/{version}/{frequency}/{short_name}``. +``Tier{tier}/{dataset}/{version}/{frequency}/{short_name}`` (assuming you are +using the ``default`` DRS for ``native6``). Replace the items in curly braces by the values used in the variable/dataset definition in the :ref:`recipe `. -Below is a list of native reanalysis/observational datasets currently -supported. -.. _read_native_era5: +.. _read_native_era5_nc: -ERA5 -^^^^ +ERA5 (in netCDF format downloaded from the CDS) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +ERA5 data can be downloaded from the Copernicus Climate Data Store (CDS) using +the convenient tool `era5cli `__. +For example for monthly data, place the files in the +``/Tier3/ERA5/version/mon/pr`` subdirectory of your ``rootpath`` that you have +configured for the ``native6`` project (assuming you are using the ``default`` +DRS for ``native6``). -- Supported variables: ``cl``, ``clt``, ``evspsbl``, ``evspsblpot``, ``mrro``, ``pr``, ``prsn``, ``ps``, ``psl``, ``ptype``, ``rls``, ``rlds``, ``rsds``, ``rsdt``, ``rss``, ``uas``, ``vas``, ``tas``, ``tasmax``, ``tasmin``, ``tdps``, ``ts``, ``tsn`` (``E1hr``/``Amon``), ``orog`` (``fx``) +- Supported variables: ``cl``, ``clt``, ``evspsbl``, ``evspsblpot``, ``mrro``, + ``pr``, ``prsn``, ``ps``, ``psl``, ``ptype``, ``rls``, ``rlds``, ``rsds``, + ``rsdt``, ``rss``, ``uas``, ``vas``, ``tas``, ``tasmax``, ``tasmin``, + ``tdps``, ``ts``, ``tsn`` (``E1hr``/``Amon``), ``orog`` (``fx``). - Tier: 3 .. note:: According to the description of Evapotranspiration and potential Evapotranspiration on the Copernicus page @@ -131,6 +140,85 @@ ERA5 of both liquid and solid phases to vapor (from underlying surface and vegetation)." Therefore, the ERA5 (and ERA5-Land) CMORizer switches the signs of ``evspsbl`` and ``evspsblpot`` to be compatible with the CMOR standard used e.g. by the CMIP models. +.. _read_native_era5_grib: + +ERA5 (in GRIB format available on DKRZ's Levante or downloaded from the CDS) +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +ERA5 data in monthly, daily, and hourly resolution is `available on Levante +`__ +in its native GRIB format. + +.. note:: + ERA5 data in its native GRIB format can also be downloaded from the + `Copernicus Climate Data Store (CDS) + `__. + For example, hourly data on pressure levels is available `here + `__. + Reading self-downloaded ERA5 data in GRIB format is experimental and likely + requires additional setup from the user like setting up the proper directory + structure for the input files and/or creating a custom :ref:`DRS + `. + +To read these data with ESMValCore, use the :ref:`rootpath +` ``/pool/data/ERA5`` with :ref:`DRS +` ``DKRZ-ERA5-GRIB`` in your configuration, for example: + +.. code-block:: yaml + + rootpath: + ... + native6: + /pool/data/ERA5: DKRZ-ERA5-GRIB + ... 
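For example, with this configuration, the directory and file-name templates
described below, and the default extra facets for ERA5 in GRIB format, monthly
near-surface air temperature (``tas``, ``Amon``) would be looked up under a
pattern like::

    /pool/data/ERA5/E5/sf/an/1M/167/E5sf00_1M_*_167.grb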
+ +The `naming conventions +`__ +for input directories and files for native ERA5 data in GRIB format on Levante +are + +* input directories: ``{family}/{level}/{type}/{tres}/{grib_id}`` +* input files: ``{family}{level}{typeid}_{tres}_*_{grib_id}.grb`` + +All of these facets have reasonable defaults preconfigured in the corresponding +:ref:`extra facets` file, which is available here: +:download:`native6-era5.yml +`. +If necessary, these facets can be overwritten in the recipe. + +Thus, example dataset entries could look like this: + +.. code-block:: yaml + + datasets: + - {project: native6, dataset: ERA5, timerange: '2000/2001', + short_name: tas, mip: Amon} + - {project: native6, dataset: ERA5, timerange: '2000/2001', + short_name: cl, mip: Amon, tres: 1H, frequency: 1hr} + - {project: native6, dataset: ERA5, timerange: '2000/2001', + short_name: ta, mip: Amon, type: fc, typeid: '12'} + +The native ERA5 output in GRIB format is stored on a `reduced Gaussian grid +`__. +By default, these data are regridded to a regular 0.25°x0.25° grid as +`recommended by the ECMWF +`__ +using bilinear interpolation. + +To disable this, you can use the facet ``automatic_regrid: false`` in the +recipe: + +.. code-block:: yaml + + datasets: + - {project: native6, dataset: ERA5, timerange: '2000/2001', + short_name: tas, mip: Amon, automatic_regrid: false} + +- Supported variables: ``albsn``, ``cl``, ``cli``, ``clt``, ``clw``, ``hur``, + ``hus``, ``o3``, ``prw``, ``ps``, ``psl``, ``rainmxrat27``, ``sftlf``, + ``snd``, ``snowmxrat27``, ``ta``, ``tas``, ``tdps``, ``toz``, ``ts``, ``ua``, + ``uas``, ``va``, ``vas``, ``wap``, ``zg``. + .. _read_native_mswep: MSWEP @@ -140,7 +228,10 @@ MSWEP - Supported frequencies: ``mon``, ``day``, ``3hr``. - Tier: 3 -For example for monthly data, place the files in the ``/Tier3/MSWEP/version/mon/pr`` subdirectory of your ``native6`` project location. +For example for monthly data, place the files in the +``/Tier3/MSWEP/version/mon/pr`` subdirectory of your ``rootpath`` that you have +configured for the ``native6`` project (assuming you are using the ``default`` +DRS for ``native6``). .. note:: For monthly data (``V220``), the data must be postfixed with the date, i.e. rename ``global_monthly_050deg.nc`` to ``global_monthly_050deg_197901-201710.nc`` @@ -642,6 +733,8 @@ first discuss the ``drs`` parameter: as we've seen in the previous section, the DRS as a standard is used for both file naming conventions and for directory structures. +.. 
_config_option_drs: + Explaining ``drs: CMIP5:`` or ``drs: CMIP6:`` --------------------------------------------- Whereas ESMValCore will by default use the CMOR standard for file naming (please diff --git a/esmvalcore/_provenance.py b/esmvalcore/_provenance.py index 25ad81f5ba..89b5822c27 100644 --- a/esmvalcore/_provenance.py +++ b/esmvalcore/_provenance.py @@ -4,6 +4,7 @@ import logging import os from functools import total_ordering +from pathlib import Path from netCDF4 import Dataset from PIL import Image @@ -209,9 +210,10 @@ def _initialize_entity(self): """Initialize the entity representing the file.""" if self.attributes is None: self.attributes = {} - with Dataset(self.filename, "r") as dataset: - for attr in dataset.ncattrs(): - self.attributes[attr] = dataset.getncattr(attr) + if "nc" in Path(self.filename).suffix: + with Dataset(self.filename, "r") as dataset: + for attr in dataset.ncattrs(): + self.attributes[attr] = dataset.getncattr(attr) attributes = { "attribute:" + str(k).replace(" ", "_"): str(v) diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index 8d4809ffa0..9c5aa74553 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -37,6 +37,7 @@ PreprocessorFile, ) from esmvalcore.preprocessor._area import _update_shapefile_path +from esmvalcore.preprocessor._io import GRIB_FORMATS from esmvalcore.preprocessor._multimodel import _get_stat_identifier from esmvalcore.preprocessor._regrid import ( _spec_to_latlonvals, @@ -230,6 +231,34 @@ def _get_default_settings(dataset): return settings +def _add_dataset_specific_settings(dataset: Dataset, settings: dict) -> None: + """Add dataset-specific settings.""" + project = dataset.facets["project"] + dataset_name = dataset.facets["dataset"] + file_suffixes = [Path(file.name).suffix for file in dataset.files] + + # Automatic regridding for native ERA5 data in GRIB format if regridding + # step is not already present (can be disabled with facet + # automatic_regrid=False) + if all( + [ + project == "native6", + dataset_name == "ERA5", + any(grib_format in file_suffixes for grib_format in GRIB_FORMATS), + "regrid" not in settings, + dataset.facets.get("automatic_regrid", True), + ] + ): + # Settings recommended by ECMWF + # (https://confluence.ecmwf.int/display/CKB/ERA5%3A+What+is+the+spatial+reference#heading-Interpolation) + settings["regrid"] = {"target_grid": "0.25x0.25", "scheme": "linear"} + logger.debug( + "Automatically regrid native6 ERA5 data in GRIB format with the " + "settings %s", + settings["regrid"], + ) + + def _exclude_dataset(settings, facets, step): """Exclude dataset from specific preprocessor step if requested.""" exclude = { @@ -546,6 +575,7 @@ def _get_preprocessor_products( _apply_preprocessor_profile(settings, profile) _update_multi_dataset_settings(dataset.facets, settings) _update_preproc_functions(settings, dataset, datasets, missing_vars) + _add_dataset_specific_settings(dataset, settings) check.preprocessor_supplementaries(dataset, settings) input_datasets = _get_input_datasets(dataset) missing = _check_input_files(input_datasets) diff --git a/esmvalcore/cmor/_fixes/fix.py b/esmvalcore/cmor/_fixes/fix.py index 4d3e297e3a..9a229b2dc4 100644 --- a/esmvalcore/cmor/_fixes/fix.py +++ b/esmvalcore/cmor/_fixes/fix.py @@ -845,6 +845,8 @@ def _fix_time_bounds(self, cube: Cube, cube_coord: Coord) -> None: """Fix time bounds.""" times = {"time", "time1", "time2", "time3"} key = times.intersection(self.vardef.coordinates) + if not key: # cube has time, but CMOR variable 
does not + return cmor = self.vardef.coordinates[" ".join(key)] if cmor.must_have_bounds == "yes" and not cube_coord.has_bounds(): cube_coord.bounds = get_time_bounds(cube_coord, self.frequency) diff --git a/esmvalcore/cmor/_fixes/native6/era5.py b/esmvalcore/cmor/_fixes/native6/era5.py index 85b570c57d..2214238557 100644 --- a/esmvalcore/cmor/_fixes/native6/era5.py +++ b/esmvalcore/cmor/_fixes/native6/era5.py @@ -5,12 +5,16 @@ import iris import numpy as np +from iris.util import reverse -from esmvalcore.iris_helpers import date2num, safe_convert_units - -from ...table import CMOR_TABLES -from ..fix import Fix -from ..shared import add_scalar_height_coord +from esmvalcore.cmor._fixes.fix import Fix +from esmvalcore.cmor._fixes.shared import add_scalar_height_coord +from esmvalcore.cmor.table import CMOR_TABLES +from esmvalcore.iris_helpers import ( + date2num, + has_unstructured_grid, + safe_convert_units, +) logger = logging.getLogger(__name__) @@ -24,7 +28,11 @@ def get_frequency(cube): time.convert_units("days since 1850-1-1 00:00:00.0") if len(time.points) == 1: - if cube.long_name != "Geopotential": + acceptable_long_names = ( + "Geopotential", + "Percentage of the Grid Cell Occupied by Land (Including Lakes)", + ) + if cube.long_name not in acceptable_long_names: raise ValueError( "Unable to infer frequency of cube " f"with length 1 time dimension: {cube}" @@ -32,9 +40,11 @@ def get_frequency(cube): return "fx" interval = time.points[1] - time.points[0] + if interval - 1 / 24 < 1e-4: return "hourly" - + if interval - 1.0 < 1e-4: + return "daily" return "monthly" @@ -52,6 +62,11 @@ def fix_accumulated_units(cube): cube.units = cube.units * "d-1" elif get_frequency(cube) == "hourly": cube.units = cube.units * "h-1" + elif get_frequency(cube) == "daily": + raise NotImplementedError( + f"Fixing of accumulated units of cube " + f"{cube.summary(shorten=True)} is not implemented for daily data" + ) return cube @@ -76,6 +91,27 @@ def divide_by_gravity(cube): return cube +class Albsn(Fix): + """Fixes for albsn.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + # Invalid input cube units (ignored on load) were '0-1' + cube.units = "1" + return cubes + + +class Cli(Fix): + """Fixes for cli.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg kg-1" + return cubes + + class Clt(Fix): """Fixes for clt.""" @@ -89,6 +125,16 @@ def fix_metadata(self, cubes): return cubes +class Clw(Fix): + """Fixes for clw.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg kg-1" + return cubes + + class Cl(Fix): """Fixes for cl.""" @@ -136,6 +182,16 @@ def fix_metadata(self, cubes): return cubes +class Hus(Fix): + """Fixes for hus.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg kg-1" + return cubes + + class Mrro(Fix): """Fixes for mrro.""" @@ -149,6 +205,20 @@ def fix_metadata(self, cubes): return cubes +class O3(Fix): + """Fixes for o3.""" + + def fix_metadata(self, cubes): + """Convert mass mixing ratios to mole fractions.""" + for cube in cubes: + # Original units are kg kg-1. 
Convert these to molar mixing ratios, + # which is almost identical to mole fraction for small amounts of + # substances (which we have here) + cube.data = cube.core_data() * 28.9644 / 47.9982 + cube.units = "mol mol-1" + return cubes + + class Orog(Fix): """Fixes for orography.""" @@ -194,6 +264,26 @@ def fix_metadata(self, cubes): return cubes +class Prw(Fix): + """Fixes for prw.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg m-2" + return cubes + + +class Ps(Fix): + """Fixes for ps.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "Pa" + return cubes + + class Ptype(Fix): """Fixes for ptype.""" @@ -205,6 +295,16 @@ def fix_metadata(self, cubes): return cubes +class Rainmxrat27(Fix): + """Fixes for rainmxrat27.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg kg-1" + return cubes + + class Rlds(Fix): """Fixes for Rlds.""" @@ -321,6 +421,27 @@ def fix_metadata(self, cubes): return cubes +class Sftlf(Fix): + """Fixes for sftlf.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + # Invalid input cube units (ignored on load) were '0-1' + cube.units = "1" + return cubes + + +class Snowmxrat27(Fix): + """Fixes for snowmxrat27.""" + + def fix_metadata(self, cubes): + """Fix metadata.""" + for cube in cubes: + cube.units = "kg kg-1" + return cubes + + class Tasmax(Fix): """Fixes for tasmax.""" @@ -341,6 +462,22 @@ def fix_metadata(self, cubes): return cubes +class Toz(Fix): + """Fixes for toz.""" + + def fix_metadata(self, cubes): + """Convert 'kg m-2' to 'm'.""" + for cube in cubes: + # Original units are kg m-2. Convert these to m here. + # 1 DU = 0.4462 mmol m-2 = 21.415 mg m-2 = 2.1415e-5 kg m-2 + # (assuming O3 molar mass of 48 g mol-1) + # Since 1 mm of pure O3 layer is defined as 100 DU + # --> 1m ~ 2.1415 kg m-2 + cube.data = cube.core_data() / 2.1415 + cube.units = "m" + return cubes + + class Zg(Fix): """Fixes for Geopotential.""" @@ -356,21 +493,13 @@ class AllVars(Fix): def _fix_coordinates(self, cube): """Fix coordinates.""" - # Fix coordinate increasing direction - slices = [] - for coord in cube.coords(): - if coord.var_name in ("latitude", "pressure_level"): - slices.append(slice(None, None, -1)) - else: - slices.append(slice(None)) - cube = cube[tuple(slices)] - # Add scalar height coordinates if "height2m" in self.vardef.dimensions: add_scalar_height_coord(cube, 2.0) if "height10m" in self.vardef.dimensions: add_scalar_height_coord(cube, 10.0) + # Fix coord metadata for coord_def in self.vardef.coordinates.values(): axis = coord_def.axis # ERA5 uses regular pressure level coordinate. 
In case the cmor @@ -383,7 +512,7 @@ def _fix_coordinates(self, cube): coord = cube.coord(axis=axis) if axis == "T": coord.convert_units("days since 1850-1-1 00:00:00.0") - if axis == "Z": + if axis in ("X", "Y", "Z"): coord.convert_units(coord_def.units) coord.standard_name = coord_def.standard_name coord.var_name = coord_def.out_name @@ -394,10 +523,25 @@ def _fix_coordinates(self, cube): and len(coord.core_points()) > 1 and coord_def.must_have_bounds == "yes" ): - coord.guess_bounds() + # Do not guess bounds for lat and lon on unstructured grids + if not ( + coord.name() in ("latitude", "longitude") + and has_unstructured_grid(cube) + ): + coord.guess_bounds() self._fix_monthly_time_coord(cube) + # Fix coordinate increasing direction + if cube.coords("latitude") and not has_unstructured_grid(cube): + lat = cube.coord("latitude") + if lat.points[0] > lat.points[-1]: + cube = reverse(cube, "latitude") + if cube.coords("air_pressure"): + plev = cube.coord("air_pressure") + if plev.points[0] < plev.points[-1]: + cube = reverse(cube, "air_pressure") + return cube @staticmethod @@ -426,16 +570,18 @@ def fix_metadata(self, cubes): if self.vardef.standard_name: cube.standard_name = self.vardef.standard_name cube.long_name = self.vardef.long_name - cube = self._fix_coordinates(cube) cube = safe_convert_units(cube, self.vardef.units) - cube.data = cube.core_data().astype("float32") year = datetime.datetime.now().year cube.attributes["comment"] = ( "Contains modified Copernicus Climate Change " f"Service Information {year}" ) + if "GRIB_PARAM" in cube.attributes: + cube.attributes["GRIB_PARAM"] = str( + cube.attributes["GRIB_PARAM"] + ) fixed_cubes.append(cube) diff --git a/esmvalcore/config-developer.yml b/esmvalcore/config-developer.yml index c81324142a..faa009ec8f 100644 --- a/esmvalcore/config-developer.yml +++ b/esmvalcore/config-developer.yml @@ -99,8 +99,10 @@ native6: cmor_strict: false input_dir: default: 'Tier{tier}/{dataset}/{version}/{frequency}/{short_name}' + DKRZ-ERA5-GRIB: '{family}/{level}/{type}/{tres}/{grib_id}' input_file: default: '*.nc' + DKRZ-ERA5-GRIB: '{family}{level}{typeid}_{tres}_*_{grib_id}.grb' output_file: '{project}_{dataset}_{type}_{version}_{mip}_{short_name}' cmor_type: 'CMIP6' cmor_default_table_prefix: 'CMIP6_' diff --git a/esmvalcore/config/configurations/defaults/config-user.yml b/esmvalcore/config/configurations/defaults/config-user.yml index 39cffb67fb..a666875542 100644 --- a/esmvalcore/config/configurations/defaults/config-user.yml +++ b/esmvalcore/config/configurations/defaults/config-user.yml @@ -196,7 +196,9 @@ drs: # /work/bd0854/DATA/ESMValTool2/OBS: default # /work/bd0854/DATA/ESMValTool2/download: ESGF # ana4mips: /work/bd0854/DATA/ESMValTool2/OBS -# native6: /work/bd0854/DATA/ESMValTool2/RAWOBS +# native6: +# /work/bd0854/DATA/ESMValTool2/RAWOBS: default +# /pool/data/ERA5: DKRZ-ERA5-GRIB # RAWOBS: /work/bd0854/DATA/ESMValTool2/RAWOBS #drs: # ana4mips: default diff --git a/esmvalcore/config/extra_facets/native6-era5.yml b/esmvalcore/config/extra_facets/native6-era5.yml new file mode 100644 index 0000000000..4ab1915da9 --- /dev/null +++ b/esmvalcore/config/extra_facets/native6-era5.yml @@ -0,0 +1,196 @@ +# Extra facets for native6 ERA5 data in GRIB format +# +# See +# https://docs.dkrz.de/doc/dataservices/finding_and_accessing_data/era_data/index.html#file-and-directory-names +# for details on these facets. + +# Notes: +# - All facets can also be specified in the recipes. The values given here are +# only defaults. 
+ +# A complete list of supported keys is given in the documentation (see +# ESMValCore/doc/quickstart/find_data.rst). +--- + +ERA5: + + # Settings for all variables of all MIPs + '*': + '*': + automatic_regrid: true + family: E5 + type: an + typeid: '00' + version: v1 + + # Variable-specific settings + albsn: + level: sf + grib_id: '032' + cl: + level: pl + grib_id: '248' + cli: + level: pl + grib_id: '247' + clt: + level: sf + grib_id: '164' + clw: + level: pl + grib_id: '246' + hur: + level: pl + grib_id: '157' + hus: + level: pl + grib_id: '133' + o3: + level: pl + grib_id: '203' + prw: + level: sf + grib_id: '137' + ps: + level: sf + grib_id: '134' + psl: + level: sf + grib_id: '151' + rainmxrat27: + level: pl + grib_id: '075' + sftlf: + level: sf + grib_id: '172' + siconc: + level: sf + grib_id: '031' + siconca: + level: sf + grib_id: '031' + snd: + level: sf + grib_id: '141' + snowmxrat27: + level: pl + grib_id: '076' + ta: + level: pl + grib_id: '130' + tas: + level: sf + grib_id: '167' + tdps: + level: sf + grib_id: '168' + tos: + level: sf + grib_id: '034' + toz: + level: sf + grib_id: '206' + ts: + level: sf + grib_id: '235' + ua: + level: pl + grib_id: '131' + uas: + level: sf + grib_id: '165' + va: + level: pl + grib_id: '132' + vas: + level: sf + grib_id: '166' + wap: + level: pl + grib_id: '135' + zg: + level: pl + grib_id: '129' + + # MIP-specific settings + AERday: + '*': + tres: 1D + AERhr: + '*': + tres: 1H + AERmon: + '*': + tres: 1M + AERmonZ: + '*': + tres: 1M + Amon: + '*': + tres: 1M + CFday: + '*': + tres: 1D + CFmon: + '*': + tres: 1M + day: + '*': + tres: 1D + E1hr: + '*': + tres: 1H + E1hrClimMon: + '*': + tres: 1H + Eday: + '*': + tres: 1D + EdayZ: + '*': + tres: 1D + Efx: + '*': + tres: IV + Emon: + '*': + tres: 1M + EmonZ: + '*': + tres: 1M + fx: + '*': + tres: IV + IfxAnt: + '*': + tres: IV + IfxGre: + '*': + tres: IV + ImonAnt: + '*': + tres: 1M + ImonGre: + '*': + tres: 1M + LImon: + '*': + tres: 1M + Lmon: + '*': + tres: 1M + Oday: + '*': + tres: 1D + Ofx: + '*': + tres: IV + Omon: + '*': + tres: 1M + SIday: + '*': + tres: 1D + SImon: + '*': + tres: 1M diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index eccac411f2..0851e1d37e 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -39,6 +39,7 @@ "reference_dataset", "alternative_dataset", } +GRIB_FORMATS = (".grib2", ".grib", ".grb2", ".grb", ".gb2", ".gb") iris.FUTURE.save_split_attrs = True @@ -142,7 +143,13 @@ def load( # warnings.filterwarnings # (see https://github.com/SciTools/cf-units/issues/240) with suppress_errors(): - raw_cubes = iris.load_raw(file, callback=_load_callback) + # GRIB files need to be loaded with iris.load, otherwise we will + # get separate (lat, lon) slices for each time step, pressure + # level, etc. 
+ if file.suffix in GRIB_FORMATS: + raw_cubes = iris.load(file, callback=_load_callback) + else: + raw_cubes = iris.load_raw(file, callback=_load_callback) logger.debug("Done with loading %s", file) if not raw_cubes: diff --git a/pyproject.toml b/pyproject.toml index 61de91ae25..4ce0f4c303 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -168,7 +168,7 @@ enable_error_code = [ # Configure linters [tool.codespell] -skip = "*.ipynb,esmvalcore/config/extra_facets/ipslcm-mappings.yml" +skip = "*.ipynb,esmvalcore/config/extra_facets/ipslcm-mappings.yml,tests/sample_data/iris-sample-data/LICENSE" ignore-words-list = "vas,hist,oce" [tool.ruff] diff --git a/tests/integration/cmor/_fixes/native6/test_era5.py b/tests/integration/cmor/_fixes/native6/test_era5.py index 60460138a9..b65acabbff 100644 --- a/tests/integration/cmor/_fixes/native6/test_era5.py +++ b/tests/integration/cmor/_fixes/native6/test_era5.py @@ -2,16 +2,19 @@ import datetime -import iris +import dask.array as da import numpy as np import pytest from cf_units import Unit +from iris.coords import AuxCoord, DimCoord +from iris.cube import Cube, CubeList from esmvalcore.cmor._fixes.fix import Fix, GenericFix from esmvalcore.cmor._fixes.native6.era5 import ( AllVars, Evspsbl, Zg, + fix_accumulated_units, get_frequency, ) from esmvalcore.cmor.fix import fix_metadata @@ -40,12 +43,12 @@ def test_get_zg_fix(): def test_get_frequency_hourly(): """Test cubes with hourly frequency.""" - time = iris.coords.DimCoord( + time = DimCoord( [0, 1, 2], standard_name="time", units=Unit("hours since 1900-01-01"), ) - cube = iris.cube.Cube( + cube = Cube( [1, 6, 3], var_name="random_var", dim_coords_and_dims=[(time, 0)], @@ -55,14 +58,31 @@ def test_get_frequency_hourly(): assert get_frequency(cube) == "hourly" +def test_get_frequency_daily(): + """Test cubes with daily frequency.""" + time = DimCoord( + [0, 1, 2], + standard_name="time", + units=Unit("days since 1900-01-01"), + ) + cube = Cube( + [1, 6, 3], + var_name="random_var", + dim_coords_and_dims=[(time, 0)], + ) + assert get_frequency(cube) == "daily" + cube.coord("time").convert_units("hours since 1850-1-1 00:00:00.0") + assert get_frequency(cube) == "daily" + + def test_get_frequency_monthly(): """Test cubes with monthly frequency.""" - time = iris.coords.DimCoord( + time = DimCoord( [0, 31, 59], standard_name="time", units=Unit("hours since 1900-01-01"), ) - cube = iris.cube.Cube( + cube = Cube( [1, 6, 3], var_name="random_var", dim_coords_and_dims=[(time, 0)], @@ -74,27 +94,50 @@ def test_get_frequency_monthly(): def test_get_frequency_fx(): """Test cubes with time invariant frequency.""" - cube = iris.cube.Cube(1.0, long_name="Cube without time coordinate") + cube = Cube(1.0, long_name="Cube without time coordinate") assert get_frequency(cube) == "fx" - time = iris.coords.DimCoord( + + time = DimCoord( 0, standard_name="time", units=Unit("hours since 1900-01-01"), ) - cube = iris.cube.Cube( + cube = Cube( [1], var_name="cube_with_length_1_time_coord", long_name="Geopotential", dim_coords_and_dims=[(time, 0)], ) assert get_frequency(cube) == "fx" + + cube.long_name = ( + "Percentage of the Grid Cell Occupied by Land (Including Lakes)" + ) + assert get_frequency(cube) == "fx" + cube.long_name = "Not geopotential" with pytest.raises(ValueError): get_frequency(cube) +def test_fix_accumulated_units_fail(): + """Test `fix_accumulated_units`.""" + time = DimCoord( + [0, 1, 2], + standard_name="time", + units=Unit("days since 1900-01-01"), + ) + cube = Cube( + [1, 6, 3], + var_name="random_var", 
+ dim_coords_and_dims=[(time, 0)], + ) + with pytest.raises(NotImplementedError): + fix_accumulated_units(cube) + + def _era5_latitude(): - return iris.coords.DimCoord( + return DimCoord( np.array([90.0, 0.0, -90.0]), standard_name="latitude", long_name="latitude", @@ -104,7 +147,7 @@ def _era5_latitude(): def _era5_longitude(): - return iris.coords.DimCoord( + return DimCoord( np.array([0, 180, 359.75]), standard_name="longitude", long_name="longitude", @@ -117,11 +160,15 @@ def _era5_longitude(): def _era5_time(frequency): if frequency == "invariant": timestamps = [788928] # hours since 1900 at 1 january 1990 + elif frequency == "daily": + timestamps = [788940, 788964, 788988] elif frequency == "hourly": timestamps = [788928, 788929, 788930] elif frequency == "monthly": timestamps = [788928, 789672, 790344] - return iris.coords.DimCoord( + else: + raise NotImplementedError(f"Invalid frequency {frequency}") + return DimCoord( np.array(timestamps, dtype="int32"), standard_name="time", long_name="time", @@ -137,7 +184,7 @@ def _era5_plev(): 1000, ] ) - return iris.coords.DimCoord( + return DimCoord( values, long_name="pressure", units=Unit("millibars"), @@ -153,7 +200,7 @@ def _era5_data(frequency): def _cmor_latitude(): - return iris.coords.DimCoord( + return DimCoord( np.array([-90.0, 0.0, 90.0]), standard_name="latitude", long_name="Latitude", @@ -164,7 +211,7 @@ def _cmor_latitude(): def _cmor_longitude(): - return iris.coords.DimCoord( + return DimCoord( np.array([0, 180, 359.75]), standard_name="longitude", long_name="Longitude", @@ -184,14 +231,22 @@ def _cmor_time(mip, bounds=None, shifted=False): timestamps -= 1 / 48 if bounds is not None: bounds = [[t - 1 / 48, t + 1 / 48] for t in timestamps] - elif mip == "Amon": + elif mip == "Eday": + timestamps = np.array([51134.5, 51135.5, 51136.5]) + if bounds is not None: + bounds = np.array( + [[51134.0, 51135.0], [51135.0, 51136.0], [51136.0, 51137.0]] + ) + elif "mon" in mip: timestamps = np.array([51149.5, 51179.0, 51208.5]) if bounds is not None: bounds = np.array( [[51134.0, 51165.0], [51165.0, 51193.0], [51193.0, 51224.0]] ) + else: + raise NotImplementedError() - return iris.coords.DimCoord( + return DimCoord( np.array(timestamps, dtype=float), standard_name="time", long_name="time", @@ -202,7 +257,7 @@ def _cmor_time(mip, bounds=None, shifted=False): def _cmor_aux_height(value): - return iris.coords.AuxCoord( + return AuxCoord( value, long_name="height", standard_name="height", @@ -219,7 +274,7 @@ def _cmor_plev(): 100.0, ] ) - return iris.coords.DimCoord( + return DimCoord( values, long_name="pressure", standard_name="air_pressure", @@ -235,10 +290,97 @@ def _cmor_data(mip): return np.arange(27).reshape(3, 3, 3)[:, ::-1, :] +def era5_2d(frequency): + if frequency == "monthly": + time = DimCoord( + [-31, 0, 31], standard_name="time", units="days since 1850-01-01" + ) + else: + time = _era5_time(frequency) + cube = Cube( + _era5_data(frequency), + long_name=None, + var_name=None, + units="unknown", + dim_coords_and_dims=[ + (time, 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def era5_3d(frequency): + cube = Cube( + np.ones((3, 2, 3, 3)), + long_name=None, + var_name=None, + units="unknown", + dim_coords_and_dims=[ + (_era5_time(frequency), 0), + (_era5_plev(), 1), + (_era5_latitude(), 2), + (_era5_longitude(), 3), + ], + ) + return CubeList([cube]) + + +def cmor_2d(mip, short_name): + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + if "mon" 
in mip: + time = DimCoord( + [-15.5, 15.5, 45.0], + bounds=[[-31.0, 0.0], [0.0, 31.0], [31.0, 59.0]], + standard_name="time", + long_name="time", + var_name="time", + units="days since 1850-01-01", + ) + else: + time = _cmor_time(mip, bounds=True) + cube = Cube( + _cmor_data(mip).astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) + + +def cmor_3d(mip, short_name): + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + cube = Cube( + np.ones((3, 2, 3, 3)), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (_cmor_time(mip, bounds=True), 0), + (_cmor_plev(), 1), + (_cmor_latitude(), 2), + (_cmor_longitude(), 3), + ], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) + + def cl_era5_monthly(): time = _era5_time("monthly") data = np.ones((3, 2, 3, 3)) - cube = iris.cube.Cube( + cube = Cube( data, long_name="Percentage Cloud Cover", var_name="cl", @@ -250,7 +392,7 @@ def cl_era5_monthly(): (_era5_longitude(), 3), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def cl_cmor_amon(): @@ -259,7 +401,7 @@ def cl_cmor_amon(): time = _cmor_time("Amon", bounds=True) data = np.ones((3, 2, 3, 3)) data = data * 100.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -273,12 +415,12 @@ def cl_cmor_amon(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def clt_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="cloud cover fraction", var_name="cloud_cover", @@ -289,7 +431,7 @@ def clt_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def clt_cmor_e1hr(): @@ -297,7 +439,7 @@ def clt_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "clt") time = _cmor_time("E1hr", bounds=True) data = _cmor_data("E1hr") * 100 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -310,12 +452,12 @@ def clt_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def evspsbl_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly") * -1.0, long_name="total evapotranspiration", var_name="e", @@ -326,7 +468,7 @@ def evspsbl_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def evspsbl_cmor_e1hr(): @@ -334,7 +476,7 @@ def evspsbl_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "evspsbl") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") * 1000 / 3600.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -347,12 +489,12 @@ def evspsbl_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def evspsblpot_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly") * -1.0, long_name="potential evapotranspiration", var_name="epot", @@ -363,7 +505,7 
@@ def evspsblpot_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def evspsblpot_cmor_e1hr(): @@ -371,7 +513,7 @@ def evspsblpot_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "evspsblpot") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") * 1000 / 3600.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -384,12 +526,12 @@ def evspsblpot_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def mrro_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="runoff", var_name="runoff", @@ -400,7 +542,7 @@ def mrro_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def mrro_cmor_e1hr(): @@ -408,7 +550,7 @@ def mrro_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "mrro") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") * 1000 / 3600.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -421,12 +563,20 @@ def mrro_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) + + +def o3_era5_monthly(): + cube = era5_3d("monthly")[0] + cube = cube[:, ::-1, ::-1, :] # test if correct order of plev and lat stay + cube.data = cube.data.astype("float32") + cube.data *= 47.9982 / 28.9644 + return CubeList([cube]) def orog_era5_hourly(): time = _era5_time("invariant") - cube = iris.cube.Cube( + cube = Cube( _era5_data("invariant"), long_name="geopotential height", var_name="zg", @@ -437,14 +587,14 @@ def orog_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def orog_cmor_fx(): cmor_table = CMOR_TABLES["native6"] vardef = cmor_table.get_variable("fx", "orog") data = _cmor_data("fx") / 9.80665 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -453,12 +603,12 @@ def orog_cmor_fx(): dim_coords_and_dims=[(_cmor_latitude(), 0), (_cmor_longitude(), 1)], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def pr_era5_monthly(): time = _era5_time("monthly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("monthly"), long_name="total_precipitation", var_name="tp", @@ -469,7 +619,7 @@ def pr_era5_monthly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def pr_cmor_amon(): @@ -477,7 +627,7 @@ def pr_cmor_amon(): vardef = cmor_table.get_variable("Amon", "pr") time = _cmor_time("Amon", bounds=True) data = _cmor_data("Amon") * 1000.0 / 3600.0 / 24.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -490,12 +640,12 @@ def pr_cmor_amon(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def pr_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="total_precipitation", var_name="tp", @@ -506,7 +656,7 @@ def pr_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def pr_cmor_e1hr(): @@ -514,7 +664,7 @@ def pr_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "pr") time = 
_cmor_time("E1hr", bounds=True, shifted=True) data = _cmor_data("E1hr") * 1000.0 / 3600.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -527,12 +677,12 @@ def pr_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def prsn_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="snow", var_name="snow", @@ -543,7 +693,7 @@ def prsn_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def prsn_cmor_e1hr(): @@ -551,7 +701,7 @@ def prsn_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "prsn") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") * 1000 / 3600.0 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -564,12 +714,12 @@ def prsn_cmor_e1hr(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def ptype_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="snow", var_name="snow", @@ -580,7 +730,7 @@ def ptype_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def ptype_cmor_e1hr(): @@ -588,7 +738,7 @@ def ptype_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "ptype") time = _cmor_time("E1hr", shifted=False, bounds=True) data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -602,12 +752,12 @@ def ptype_cmor_e1hr(): ) cube.coord("latitude").long_name = "latitude" cube.coord("longitude").long_name = "longitude" - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rlds_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="surface thermal radiation downwards", var_name="ssrd", @@ -618,7 +768,7 @@ def rlds_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rlds_cmor_e1hr(): @@ -626,7 +776,87 @@ def rlds_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "rlds") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") / 3600 - cube = iris.cube.Cube( + cube = Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT, "positive": "down"}, + ) + return CubeList([cube]) + + +def rlns_era5_hourly(): + freq = "hourly" + cube = Cube( + _era5_data(freq), + long_name=None, + var_name=None, + units="J m**-2", + dim_coords_and_dims=[ + (_era5_time(freq), 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def rlns_cmor_e1hr(): + mip = "E1hr" + short_name = "rlns" + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + time = _cmor_time(mip, shifted=True, bounds=True) + data = _cmor_data(mip) / 3600 + cube = Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + 
(_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT, "positive": "down"}, + ) + cube.coord("latitude").long_name = "latitude" # from custom table + cube.coord("longitude").long_name = "longitude" # from custom table + return CubeList([cube]) + + +def rlus_era5_hourly(): + freq = "hourly" + cube = Cube( + _era5_data(freq), + long_name=None, + var_name=None, + units="J m**-2", + dim_coords_and_dims=[ + (_era5_time(freq), 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def rlus_cmor_e1hr(): + mip = "E1hr" + short_name = "rlus" + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + time = _cmor_time(mip, shifted=True, bounds=True) + data = _cmor_data(mip) / 3600 + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -637,17 +867,14 @@ def rlds_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={ - "comment": COMMENT, - "positive": "down", - }, + attributes={"comment": COMMENT, "positive": "up"}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rls_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="runoff", var_name="runoff", @@ -658,7 +885,7 @@ def rls_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rls_cmor_e1hr(): @@ -666,7 +893,7 @@ def rls_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "rls") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -677,17 +904,14 @@ def rls_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={ - "comment": COMMENT, - "positive": "down", - }, + attributes={"comment": COMMENT, "positive": "down"}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rsds_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="solar_radiation_downwards", var_name="rlwd", @@ -698,7 +922,7 @@ def rsds_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rsds_cmor_e1hr(): @@ -706,7 +930,87 @@ def rsds_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "rsds") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") / 3600 - cube = iris.cube.Cube( + cube = Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT, "positive": "down"}, + ) + return CubeList([cube]) + + +def rsns_era5_hourly(): + freq = "hourly" + cube = Cube( + _era5_data(freq), + long_name=None, + var_name=None, + units="J m**-2", + dim_coords_and_dims=[ + (_era5_time(freq), 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def rsns_cmor_e1hr(): + mip = "E1hr" + short_name = "rsns" + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + time = _cmor_time(mip, shifted=True, bounds=True) + data = _cmor_data(mip) / 3600 + cube = Cube( + data.astype("float32"), + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + 
dim_coords_and_dims=[ + (time, 0), + (_cmor_latitude(), 1), + (_cmor_longitude(), 2), + ], + attributes={"comment": COMMENT, "positive": "down"}, + ) + cube.coord("latitude").long_name = "latitude" # from custom table + cube.coord("longitude").long_name = "longitude" # from custom table + return CubeList([cube]) + + +def rsus_era5_hourly(): + freq = "hourly" + cube = Cube( + _era5_data(freq), + long_name=None, + var_name=None, + units="J m**-2", + dim_coords_and_dims=[ + (_era5_time(freq), 0), + (_era5_latitude(), 1), + (_era5_longitude(), 2), + ], + ) + return CubeList([cube]) + + +def rsus_cmor_e1hr(): + mip = "E1hr" + short_name = "rsus" + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable(mip, short_name) + time = _cmor_time(mip, shifted=True, bounds=True) + data = _cmor_data(mip) / 3600 + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -717,17 +1021,14 @@ def rsds_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={ - "comment": COMMENT, - "positive": "down", - }, + attributes={"comment": COMMENT, "positive": "up"}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rsdt_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="thermal_radiation_downwards", var_name="strd", @@ -738,7 +1039,7 @@ def rsdt_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rsdt_cmor_e1hr(): @@ -746,7 +1047,7 @@ def rsdt_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "rsdt") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") / 3600 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -757,17 +1058,14 @@ def rsdt_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={ - "comment": COMMENT, - "positive": "down", - }, + attributes={"comment": COMMENT, "positive": "down"}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rss_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="net_solar_radiation", var_name="ssr", @@ -778,7 +1076,7 @@ def rss_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def rss_cmor_e1hr(): @@ -786,7 +1084,7 @@ def rss_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "rss") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") / 3600 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -797,17 +1095,43 @@ def rss_cmor_e1hr(): (_cmor_latitude(), 1), (_cmor_longitude(), 2), ], - attributes={ - "comment": COMMENT, - "positive": "down", - }, + attributes={"comment": COMMENT, "positive": "down"}, + ) + return CubeList([cube]) + + +def sftlf_era5(): + cube = Cube( + np.ones((3, 3)), + long_name=None, + var_name=None, + units="unknown", + dim_coords_and_dims=[ + (_era5_latitude(), 0), + (_era5_longitude(), 1), + ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) + + +def sftlf_cmor_fx(): + cmor_table = CMOR_TABLES["native6"] + vardef = cmor_table.get_variable("fx", "sftlf") + cube = Cube( + np.ones((3, 3)).astype("float32") * 100.0, + long_name=vardef.long_name, + var_name=vardef.short_name, + standard_name=vardef.standard_name, + units=Unit(vardef.units), + dim_coords_and_dims=[(_cmor_latitude(), 
0), (_cmor_longitude(), 1)], + attributes={"comment": COMMENT}, + ) + return CubeList([cube]) def tas_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="2m_temperature", var_name="t2m", @@ -818,7 +1142,7 @@ def tas_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tas_cmor_e1hr(): @@ -826,7 +1150,7 @@ def tas_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "tas") time = _cmor_time("E1hr") data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -840,12 +1164,12 @@ def tas_cmor_e1hr(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(2.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tas_era5_monthly(): time = _era5_time("monthly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("monthly"), long_name="2m_temperature", var_name="t2m", @@ -856,7 +1180,7 @@ def tas_era5_monthly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tas_cmor_amon(): @@ -864,7 +1188,7 @@ def tas_cmor_amon(): vardef = cmor_table.get_variable("Amon", "tas") time = _cmor_time("Amon", bounds=True) data = _cmor_data("Amon") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -878,13 +1202,20 @@ def tas_cmor_amon(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(2.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) + + +def toz_era5_monthly(): + cube = era5_2d("monthly")[0] + cube.data = cube.data.astype("float32") + cube.data *= 2.1415 + return CubeList([cube]) def zg_era5_monthly(): time = _era5_time("monthly") data = np.ones((3, 2, 3, 3)) - cube = iris.cube.Cube( + cube = Cube( data, long_name="geopotential height", var_name="zg", @@ -896,7 +1227,7 @@ def zg_era5_monthly(): (_era5_longitude(), 3), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def zg_cmor_amon(): @@ -905,7 +1236,7 @@ def zg_cmor_amon(): time = _cmor_time("Amon", bounds=True) data = np.ones((3, 2, 3, 3)) data = data / 9.80665 - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -919,12 +1250,12 @@ def zg_cmor_amon(): ], attributes={"comment": COMMENT}, ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tasmax_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="maximum 2m temperature", var_name="mx2t", @@ -935,7 +1266,7 @@ def tasmax_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tasmax_cmor_e1hr(): @@ -943,7 +1274,7 @@ def tasmax_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "tasmax") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -957,12 +1288,12 @@ def tasmax_cmor_e1hr(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(2.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tasmin_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="minimum 2m temperature", var_name="mn2t", @@ -973,7 +1304,7 @@ def tasmin_era5_hourly(): (_era5_longitude(), 2), 
], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def tasmin_cmor_e1hr(): @@ -981,7 +1312,7 @@ def tasmin_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "tasmin") time = _cmor_time("E1hr", shifted=True, bounds=True) data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -995,12 +1326,12 @@ def tasmin_cmor_e1hr(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(2.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def uas_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="10m_u_component_of_wind", var_name="u10", @@ -1011,7 +1342,7 @@ def uas_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def uas_cmor_e1hr(): @@ -1019,7 +1350,7 @@ def uas_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "uas") time = _cmor_time("E1hr") data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -1033,12 +1364,12 @@ def uas_cmor_e1hr(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(10.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def vas_era5_hourly(): time = _era5_time("hourly") - cube = iris.cube.Cube( + cube = Cube( _era5_data("hourly"), long_name="10m_v_component_of_wind", var_name="v10", @@ -1049,7 +1380,7 @@ def vas_era5_hourly(): (_era5_longitude(), 2), ], ) - return iris.cube.CubeList([cube]) + return CubeList([cube]) def vas_cmor_e1hr(): @@ -1057,7 +1388,7 @@ def vas_cmor_e1hr(): vardef = cmor_table.get_variable("E1hr", "vas") time = _cmor_time("E1hr") data = _cmor_data("E1hr") - cube = iris.cube.Cube( + cube = Cube( data.astype("float32"), long_name=vardef.long_name, var_name=vardef.short_name, @@ -1071,14 +1402,17 @@ def vas_cmor_e1hr(): attributes={"comment": COMMENT}, ) cube.add_aux_coord(_cmor_aux_height(10.0)) - return iris.cube.CubeList([cube]) + return CubeList([cube]) VARIABLES = [ pytest.param(a, b, c, d, id=c + "_" + d) for (a, b, c, d) in [ + (era5_2d("daily"), cmor_2d("Eday", "albsn"), "albsn", "Eday"), (cl_era5_monthly(), cl_cmor_amon(), "cl", "Amon"), + (era5_3d("monthly"), cmor_3d("Amon", "cli"), "cli", "Amon"), (clt_era5_hourly(), clt_cmor_e1hr(), "clt", "E1hr"), + (era5_3d("monthly"), cmor_3d("Amon", "clw"), "clw", "Amon"), (evspsbl_era5_hourly(), evspsbl_cmor_e1hr(), "evspsbl", "E1hr"), ( evspsblpot_era5_hourly(), @@ -1086,21 +1420,43 @@ def vas_cmor_e1hr(): "evspsblpot", "E1hr", ), + (era5_3d("monthly"), cmor_3d("Amon", "hus"), "hus", "Amon"), (mrro_era5_hourly(), mrro_cmor_e1hr(), "mrro", "E1hr"), + (o3_era5_monthly(), cmor_3d("Amon", "o3"), "o3", "Amon"), (orog_era5_hourly(), orog_cmor_fx(), "orog", "fx"), (pr_era5_monthly(), pr_cmor_amon(), "pr", "Amon"), (pr_era5_hourly(), pr_cmor_e1hr(), "pr", "E1hr"), (prsn_era5_hourly(), prsn_cmor_e1hr(), "prsn", "E1hr"), + (era5_2d("monthly"), cmor_2d("Amon", "prw"), "prw", "Amon"), + (era5_2d("monthly"), cmor_2d("Amon", "ps"), "ps", "Amon"), (ptype_era5_hourly(), ptype_cmor_e1hr(), "ptype", "E1hr"), + ( + era5_3d("monthly"), + cmor_3d("Emon", "rainmxrat27"), + "rainmxrat27", + "Emon", + ), (rlds_era5_hourly(), rlds_cmor_e1hr(), "rlds", "E1hr"), + (rlns_era5_hourly(), rlns_cmor_e1hr(), "rlns", "E1hr"), + (rlus_era5_hourly(), rlus_cmor_e1hr(), "rlus", "E1hr"), (rls_era5_hourly(), rls_cmor_e1hr(), "rls", "E1hr"), 
(rsds_era5_hourly(), rsds_cmor_e1hr(), "rsds", "E1hr"), + (rsns_era5_hourly(), rsns_cmor_e1hr(), "rsns", "E1hr"), + (rsus_era5_hourly(), rsus_cmor_e1hr(), "rsus", "E1hr"), (rsdt_era5_hourly(), rsdt_cmor_e1hr(), "rsdt", "E1hr"), (rss_era5_hourly(), rss_cmor_e1hr(), "rss", "E1hr"), + (sftlf_era5(), sftlf_cmor_fx(), "sftlf", "fx"), + ( + era5_3d("monthly"), + cmor_3d("Emon", "snowmxrat27"), + "snowmxrat27", + "Emon", + ), (tas_era5_hourly(), tas_cmor_e1hr(), "tas", "E1hr"), (tas_era5_monthly(), tas_cmor_amon(), "tas", "Amon"), (tasmax_era5_hourly(), tasmax_cmor_e1hr(), "tasmax", "E1hr"), (tasmin_era5_hourly(), tasmin_cmor_e1hr(), "tasmin", "E1hr"), + (toz_era5_monthly(), cmor_2d("AERmon", "toz"), "toz", "AERmon"), (uas_era5_hourly(), uas_cmor_e1hr(), "uas", "E1hr"), (vas_era5_hourly(), vas_cmor_e1hr(), "vas", "E1hr"), (zg_era5_monthly(), zg_cmor_amon(), "zg", "Amon"), @@ -1139,3 +1495,61 @@ def test_cmorization(era5_cubes, cmor_cubes, var, mip): for coord in fixed_cube.coords(): print(coord) assert fixed_cube == cmor_cube + + +@pytest.fixture +def unstructured_grid_cubes(): + """Sample cubes with unstructured grid.""" + time = DimCoord( + [0.0, 31.0], standard_name="time", units="days since 1950-01-01" + ) + lat = AuxCoord( + [1.0, 1.0, -1.0, -1.0], standard_name="latitude", units="degrees_north" + ) + lon = AuxCoord( + [179.0, 180.0, 180.0, 179.0], + standard_name="longitude", + units="degrees_east", + ) + cube = Cube( + da.from_array([[0.0, 1.0, 2.0, 3.0], [0.0, 0.0, 0.0, 0.0]]), + standard_name="air_temperature", + units="K", + dim_coords_and_dims=[(time, 0)], + aux_coords_and_dims=[(lat, 1), (lon, 1)], + attributes={"GRIB_PARAM": (1, 1)}, + ) + return CubeList([cube]) + + +def test_unstructured_grid(unstructured_grid_cubes): + """Test processing unstructured data.""" + fixed_cubes = fix_metadata( + unstructured_grid_cubes, + "tas", + "native6", + "era5", + "Amon", + ) + + assert len(fixed_cubes) == 1 + fixed_cube = fixed_cubes[0] + + assert fixed_cube.shape == (2, 4) + + assert fixed_cube.coords("time", dim_coords=True) + assert fixed_cube.coord_dims("time") == (0,) + + assert fixed_cube.coords("latitude", dim_coords=False) + assert fixed_cube.coord_dims("latitude") == (1,) + lat = fixed_cube.coord("latitude") + np.testing.assert_allclose(lat.points, [1, 1, -1, -1]) + assert lat.bounds is None + + assert fixed_cube.coords("longitude", dim_coords=False) + assert fixed_cube.coord_dims("longitude") == (1,) + lon = fixed_cube.coord("longitude") + np.testing.assert_allclose(lon.points, [179, 180, 180, 179]) + assert lon.bounds is None + + assert fixed_cube.attributes["GRIB_PARAM"] == "(1, 1)" diff --git a/tests/integration/cmor/test_fix.py b/tests/integration/cmor/test_fix.py index 4af47a44d7..43b9419f64 100644 --- a/tests/integration/cmor/test_fix.py +++ b/tests/integration/cmor/test_fix.py @@ -418,9 +418,6 @@ def test_fix_metadata_amon_ta_wrong_lat_units(self): with pytest.raises(CMORCheckError): cmor_check_metadata(fixed_cube, project, mip, short_name) - print(self.mock_debug.mock_calls) - print(self.mock_warning.mock_calls) - assert self.mock_debug.call_count == 3 assert self.mock_warning.call_count == 9 @@ -867,3 +864,30 @@ def test_fix_data_amon_tas(self): assert self.mock_debug.call_count == 0 assert self.mock_warning.call_count == 0 + + def test_fix_metadata_no_time_in_table(self): + """Test ``fix_data``.""" + short_name = "sftlf" + project = "CMIP6" + dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" + mip = "fx" + cube = self.cubes_2d_latlon[0][0] + cube.units = "%" + cube.data = 
da.full(cube.shape, 1.0, dtype=cube.dtype) + + fixed_cubes = fix_metadata( + [cube], + short_name, + project, + dataset, + mip, + ) + + assert len(fixed_cubes) == 1 + fixed_cube = fixed_cubes[0] + assert fixed_cube.has_lazy_data() + + cmor_check_metadata(fixed_cube, project, mip, short_name) + + assert self.mock_debug.call_count == 3 + assert self.mock_warning.call_count == 6 diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py index e32e3ca3fa..f6251a8bb0 100644 --- a/tests/integration/conftest.py +++ b/tests/integration/conftest.py @@ -100,21 +100,35 @@ def _get_files(root_path, facets, tracking_id): return files, globs -@pytest.fixture -def patched_datafinder(tmp_path, monkeypatch): - def tracking_ids(i=0): - while True: - yield i - i += 1 +def _tracking_ids(i=0): + while True: + yield i + i += 1 - tracking_id = tracking_ids() + +def _get_find_files_func(path: Path, suffix: str = ".nc"): + tracking_id = _tracking_ids() def find_files(*, debug: bool = False, **facets): - files, file_globs = _get_files(tmp_path, facets, tracking_id) + files, file_globs = _get_files(path, facets, tracking_id) + files = [f.with_suffix(suffix) for f in files] + file_globs = [g.with_suffix(suffix) for g in file_globs] if debug: return files, file_globs return files + return find_files + + +@pytest.fixture +def patched_datafinder(tmp_path, monkeypatch): + find_files = _get_find_files_func(tmp_path) + monkeypatch.setattr(esmvalcore.local, "find_files", find_files) + + +@pytest.fixture +def patched_datafinder_grib(tmp_path, monkeypatch): + find_files = _get_find_files_func(tmp_path, suffix=".grib") monkeypatch.setattr(esmvalcore.local, "find_files", find_files) @@ -129,13 +143,7 @@ def patched_failing_datafinder(tmp_path, monkeypatch): Otherwise, return files just like `patched_datafinder`. 
""" - - def tracking_ids(i=0): - while True: - yield i - i += 1 - - tracking_id = tracking_ids() + tracking_id = _tracking_ids() def find_files(*, debug: bool = False, **facets): files, file_globs = _get_files(tmp_path, facets, tracking_id) diff --git a/tests/integration/preprocessor/_io/test_load.py b/tests/integration/preprocessor/_io/test_load.py index 4c76ba2651..e776b9caa2 100644 --- a/tests/integration/preprocessor/_io/test_load.py +++ b/tests/integration/preprocessor/_io/test_load.py @@ -4,12 +4,14 @@ import tempfile import unittest import warnings +from pathlib import Path import iris import numpy as np from iris.coords import DimCoord from iris.cube import Cube, CubeList +import esmvalcore from esmvalcore.preprocessor._io import load @@ -52,6 +54,24 @@ def test_load(self): (cube.coord("latitude").points == np.array([1, 2])).all() ) + def test_load_grib(self): + """Test loading a grib file.""" + grib_path = Path( + Path(esmvalcore.__file__).parents[1], + "tests", + "sample_data", + "iris-sample-data", + "polar_stereo.grib2", + ) + cubes = load(grib_path) + + assert len(cubes) == 1 + cube = cubes[0] + assert cube.standard_name == "air_temperature" + assert cube.units == "K" + assert cube.shape == (200, 247) + assert "source_file" in cube.attributes + def test_callback_remove_attributes(self): """Test callback remove unwanted attributes.""" attributes = ("history", "creation_date", "tracking_id", "comment") diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index 90b4985a6e..3077901c33 100644 --- a/tests/integration/recipe/test_recipe.py +++ b/tests/integration/recipe/test_recipe.py @@ -3392,3 +3392,116 @@ def test_invalid_interpolate(tmp_path, patched_datafinder, session): get_recipe(tmp_path, content, session) assert str(exc.value) == INITIALIZATION_ERROR_MSG assert exc.value.failed_tasks[0].message == msg + + +def test_automatic_regrid_era5_nc(tmp_path, patched_datafinder, session): + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + tas: + mip: Amon + timerange: '20000101/20001231' + additional_datasets: + - {project: native6, dataset: ERA5, tier: 3} + scripts: null + """) + recipe = get_recipe(tmp_path, content, session) + + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert len(task.products) == 1 + product = task.products.pop() + + assert "regrid" not in product.settings + + +def test_automatic_regrid_era5_grib( + tmp_path, patched_datafinder_grib, session +): + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + tas: + mip: Amon + timerange: '20000101/20001231' + additional_datasets: + - {project: native6, dataset: ERA5, tier: 3} + scripts: null + """) + recipe = get_recipe(tmp_path, content, session) + + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert len(task.products) == 1 + product = task.products.pop() + + assert "regrid" in product.settings + assert product.settings["regrid"] == { + "target_grid": "0.25x0.25", + "scheme": "linear", + } + + +def test_automatic_no_regrid_era5_grib( + tmp_path, patched_datafinder_grib, session +): + content = dedent(""" + diagnostics: + diagnostic_name: + variables: + tas: + mip: Amon + timerange: '20000101/20001231' + additional_datasets: + - {project: native6, dataset: ERA5, tier: 3, automatic_regrid: false} + scripts: null + """) + recipe = get_recipe(tmp_path, content, session) + + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert len(task.products) == 1 + product = task.products.pop() + + assert 
"regrid" not in product.settings + + +def test_automatic_already_regrid_era5_grib( + tmp_path, patched_datafinder_grib, session +): + content = dedent(""" + preprocessors: + test_automatic_regrid_era5: + regrid: + target_grid: 1x1 + scheme: nearest + + diagnostics: + diagnostic_name: + variables: + tas: + preprocessor: test_automatic_regrid_era5 + mip: Amon + timerange: '20000101/20001231' + additional_datasets: + - {project: native6, dataset: ERA5, tier: 3} + scripts: null + """) + recipe = get_recipe(tmp_path, content, session) + + assert len(recipe.tasks) == 1 + task = recipe.tasks.pop() + + assert len(task.products) == 1 + product = task.products.pop() + + assert "regrid" in product.settings + assert product.settings["regrid"] == { + "target_grid": "1x1", + "scheme": "nearest", + } diff --git a/tests/sample_data/iris-sample-data/LICENSE b/tests/sample_data/iris-sample-data/LICENSE new file mode 100644 index 0000000000..6ab33c6548 --- /dev/null +++ b/tests/sample_data/iris-sample-data/LICENSE @@ -0,0 +1,10 @@ +Data in this directory is taken from iris-sample-data (https://github.com/SciTools/iris-sample-data). + +It is licensed under the following UK's Open Government Licence (https://www.nationalarchives.gov.uk/doc/open-government-licence/version/3/): + + +(c) British Crown copyright, 2018. + +You may use and re-use the information featured in this repository (not including logos) free of charge in any format or medium, under the terms of the Open Government Licence. We encourage users to establish hypertext links to this website. + +Any email enquiries regarding the use and re-use of this information resource should be sent to: psi@nationalarchives.gsi.gov.uk. diff --git a/tests/sample_data/iris-sample-data/polar_stereo.grib2 b/tests/sample_data/iris-sample-data/polar_stereo.grib2 new file mode 100644 index 0000000000000000000000000000000000000000..ab02a2d13fe920e78498d11f07291890729c344b GIT binary patch literal 25934 zcmY(q1FSGSur0c6+qP}nwr$(CZQHhO+cv*#?|uJs?#s)2ldhF^X3}YsrcEWTBq;;{ z008tK{u5KV{|FTjfDHfu1OO0x7Zl}xwEtri1pbc(-v1*Mu)qJm{HJ^S4-z^2Z~wQl|H|l~{~Liq z6#sfH02lxe1_0pif84-j0Bgj+23bj`2?k;J+CEwFMO9zux}o03Zt(SXemx zf74$A0M!3fY(n{;L-PEo3@+a-tt6FYcp_+oCiNXvsws|XN+#((KTBcNz#r6uc_Gp$ zT^GG*;n%atNP`I31ET$(L_?|TU1DpOST4fn{fL)Y6gjk1*NS50OP_uLg@z{5@T*kt z6K~*0J^pl4$GQ#Fql!%|wYjkwS===V}CSU_4KDf7Kt^J*v5( z`FZ`G;1Or%zKEk3wo}+adlh0lU=W_eAyOqjyk_l+E=e@Y@z9TX!NbD~?y9b-T)jE( za2H1j0gxzDo#xPbD>WxA{g+lVuyY|qCKJ7?FBu6{0SRY9J_btR`66EeI*`1D=VYIn zEsP$1NtnrKW-8+pnPp%%jxc`Ziz`Tr}{~`YHYun+M zRsugaLEMy~fjy5qWUFgFC)H$FT#~}faV%}Oz$GcOX#lOFsQGvCIj5x3<2Vh<68oeV z*q1jDD5q66IjXPJxVwwaLX0`eehOfS$E4kaB0%{M5;0uG};`k#SOKNdpKEl zS+C@jN~dzQC&7{3aEm^I!-Q!RUPFvFK=nG{ZKrrbP|SOlD(u+^B4w8b z!n~KbO^17@vajk);@F#-IYfo>z--lB?KXQa&?fz15do4njyckhhqdom^j4z4RfNAuy1mHns++8U2?Y`weBkrjF0ey2Y=n{D;A^=(X`jDJ#!3$iiA)0Of@OeB62 zmhtzvt)YMW;cTXWNAa}DMf{^n(~za}J+*fiwXqIv?_KkK%dCb+`jCMvT8X1Co^Doy z{RhlH(2&kGf7NY(z~v*qj}N>Llp-==Ml|y&8I|*NtknbPB!U~*soqH%={1jpb4zp( zDiKnuoK{HUTzL=w!H<_&@bk4E`1zWJu?y1UL=k%yIm~S4T>(sDV-}I)a^{2&p6H2G zq@x=dMVo=BHTI-BTQzDOOTH74v6*J`s)D25khZw`&HY-l1j-*I2S4BastL~ze+Ng3 zgS5VHgx=Ej*D_CML=R@Wp9szEm}FlA;Jn}Ltc_)@qH1?5A?VHY0pTKln;kW6bd?HD z1NB^xWJtIj(*iil45-p?zNk8HRkS_UfmZ_>k8a`?Pn zqhLLjXxy;zuFuOUZ++*gn=r^$+wu(ovdw8d^zm{UiQVT|&v_;Z0_8Uj{j`z*NKqD2 z)KWx@_+H3DGC^D3faV;wFM_JvjH{JHR)D)CBT(PslRpxx@H#RV;ZJH}Eku3;-m^=( zo~|S;ldpK2&|h^A=Y|{({c0K&4KR9Qa7{Y0%|gPY4B5c;`>QOQd3wkt1f(J@<%Yt{ z#BFY;wkRY8!00@OXp37JrsK@-YTNEB2*lytARk{e`l(y9%8OZ=Kf+Vu2k#PdOt^Js z*wBF|@!D05s-XE*4aBeaX)U&qpc@vt+jS9`#Bw7*QZs? 
[... base85-encoded binary payload of polar_stereo.grib2 omitted (not human-readable) ...]
zhdPCPC(nOTj9(%EQdI%oYmeQ$omdG53tFNx{~w{4=5yxfmzlNZf26B`obi9eu;W)+ zN?Wiu9{j{B&m~om5)=Jc1xf<5zfK)gY_UEVOPxX)qxU;Up|c#WNz2$y3}{X4Zmw|6 zp^6|a*$u0xF3U2ynh#-#35#$#dv;6bLdGqI$q|LF$pbBz6MWfVhQ4F~oyx5nQ)29d zb<kd9D}p4SU5vu5S8&(u+&_t-^hX4q@Ew)N`w;vaxzMr37> zd-tt6BtH8gv34;@t{mTy{;f86B%uYkVm?r-gDOCN3q{{ZSs_h=tf|cJb@EYw}y%SZ2Q^X-C4^@(Z4ci<+Th(w~#r z_FMLpLOxs{zHz!BJ?tJbODa&o_kpuoxizFuiE$b^Qud`T?z=Rqc=M< zeY>~t!zf95ENg=`%b5q|Z7$K858(FW{qOgbQa6u~49#P81LH!^cjK z^*3)}ki+kl9jAy?UJ52gwP$;(RT&dT278aj;j?niY;SLzij362xJbakl_VQ!pJ+pz zx33`x_Ztn!s^ewf%0i%*XwsT7_OO6Av2v^9ziM+OVRH2|~V98*3xKOBAhx`bi8HwKhy!Ra)^hQX*p=b@HZAmclcJw)9Eu6%Z~8I8o=y562n%2f}of#>!#*flB4hq zwvjr%du~F~L;|v)4FrsMK$>9Cc~kKI?Rp0|U*N~4oDJ_~8IH|f0RogFvvllc7v|9S z-A)txSoq{p&!{nxq%$!xQ4)C;?-5jCJ|bdchlS;<5Yi?G9`pF0oeX0YwA_@bZQ8z_iL!zy1(FrB^W)xrVqOKpbl4 zF4oIU;W&2)zOcocU-hjI22L_-?Uj&c&~|Bgh;~SRJ>pR8rS1e_+!$QD8KJ_O{*59OQC2 zj?-DDh4+wS+N?W_#|Q1P(0Sop^*du#B7rq=1qFkCh&6GU?xIfy_45rnp9o=O)o+4O z>ouPm7Bx^h+|eBFtIodu1s~w}jAGKumFZ8=kK#|EMaHiYBnLSM_Xv0sGZ1tHmaPK& zYV|H+RPvIP*AL0Z&UdXRrXu7Wk~{j}*KDYuLTm~n=vP}Lu5j<{>0*df7Xj}iG~WDGo48;Nal zZKPX^I11PeSh6g%lKiHX^jT3mjD6K#kfGHbMfS3eGm^7CHy~C<_4&&>3%|RCWeEEm z#O^HU@d-Jtc%FSw@;Y+E`hINXEQA}{MhI|iT)63lVvw%JqKd0_FZ9NJ*t%+%T$U4o zvW%1tYUs1*Xy{`=ZjA{@>Ms)8|u$pZ`Ak$8KVsHQKr#7WsP*T4!_Hj7;mX2$N0Q;2{k1bF8F6)+qr~ zM%SrQhQ*>2OTWNVxl%Ktl@BVz)0bFqw7rkQ&VqGMmnCFGNWcL)X-B7+$h|=RZASHb ziKahZ6HWdZg7wylGb!>pbUfMKRD&qo-Mqwe`WjE4rbQ3q3GUzsprhAbrALQzZS50p zyll0>G?;${&Eh1|Kv4YWGn=8~+H2x5i)??MH+SV{8p<`B=S!C>N zvO8Jh67{)7)ea;+>k-G4%U4^EN`7PmUGp-7#)DRY9eB#Dkzrwbe*)($02Gr&{~a{9 z_3ze0L?ybyjfk0r>AX!k6uOZpc6_IZm7n9%I&E->2)m-_wRCa)SDsolepAd2Mu1)S ztbW(nJ*~Yv z$rQlj_Hcp*Ab)_3xJ)qGOZ&WeF~f)SC8c224`F<(i#(DK-Zd|=>lYuJR+m|bH%yL& zS5j~De61(V_;?vj+ClJJj)F3$m2iqy1=o%%1O6Gf&&*)FtA!bcEA5n*1juq{F@~O1 z#AWFv@T*}0IF7SFm+I1xa&m_%b!~ozwvj6Es_e#ht=7v!1^5MD=Mw_d?$0DJ@E9_` zU|*MM#`=XmVPw5YF2CRS$v^*eb`D@O#ez*3a=Jiw?q4Dn*2>K_kvX$G69iHAd{I=E zhk#35LfZZ!4~m34VrN3+jk4+>;Hj`1nFgc@TYcA!F)(ybZN zkRi6+w8FX!9s8=y%>!p?;04Yuu<$>W|2cMDREKK0r}e17G>IdFRGsVwo;rAJE9Gxt zmohx_cA(4Y_Gl6`!r++ZpBbF&tl^vsaWhwTQ&D!DEvw!6JMcwHRyG`_XIL7If;_oi bV<`A7G(w&j?v4)$LIh3>fA9a Date: Tue, 10 Dec 2024 14:14:31 +0000 Subject: [PATCH 06/36] [pre-commit.ci] pre-commit autoupdate (#2612) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8b7004b93d..05934f3059 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.1" + rev: "v0.8.2" hooks: - id: ruff args: [--fix] From 5353ab03fadbdc0afcb2bc5d9af6c02823ffa6a8 Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Tue, 10 Dec 2024 14:21:33 +0000 Subject: [PATCH 07/36] switch back to Python 3.12 for conda lock file creation due to mamba<2 pin (#2606) --- .github/workflows/create-condalock-file.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/create-condalock-file.yml b/.github/workflows/create-condalock-file.yml index 97501f657c..6a896a1d43 100644 --- a/.github/workflows/create-condalock-file.yml +++ b/.github/workflows/create-condalock-file.yml @@ -27,7 +27,7 @@ jobs: with: auto-update-conda: true activate-environment: esmvaltool-fromlock - python-version: "3.13" + python-version: "3.12" # switch to 3.13 when mamba>2 available miniforge-version: "latest" 
use-mamba: true - name: Update and show conda config From e09e39640331fb9a72d2f281861565dda5eea03c Mon Sep 17 00:00:00 2001 From: Karen Garcia Perdomo <85649962+Karen-A-Garcia@users.noreply.github.com> Date: Tue, 10 Dec 2024 06:24:02 -0800 Subject: [PATCH 08/36] Monotonicity fixes for Fgoals (#2603) Co-authored-by: Karen Garcia Perdomo --- esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py | 76 +++++++++++++++++++ .../cmor/_fixes/cmip6/test_cesm2.py | 2 +- .../cmor/_fixes/cmip6/test_fgoals_g3.py | 72 +++++++++++++++++- 3 files changed, 148 insertions(+), 2 deletions(-) diff --git a/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py b/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py index 591fa54b86..2d5206f8c8 100644 --- a/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py +++ b/esmvalcore/cmor/_fixes/cmip6/fgoals_g3.py @@ -2,6 +2,7 @@ import dask.array as da import iris +import numpy as np from ..common import OceanFixGrid from ..fix import Fix @@ -84,3 +85,78 @@ def fix_metadata(self, cubes): iris.util.promote_aux_coord_to_dim_coord(cube, "longitude") return super().fix_metadata(cubes) + + +class Tas(Fix): + """Fixes for tas.""" + + def fix_metadata(self, cubes): + """Fix time coordinates. + + Parameters + ---------- + cubes : iris.cube.CubeList + Cubes to fix + + Returns + ------- + iris.cube.CubeList + """ + new_list = iris.cube.CubeList() + for cube in cubes: + try: + old_time = cube.coord("time") + except iris.exceptions.CoordinateNotFoundError: + new_list.append(cube) + else: + if old_time.is_monotonic(): + new_list.append(cube) + else: + time_units = old_time.units + time_data = old_time.points + + # erase erroneously copy-pasted points + time_diff = np.diff(time_data) + idx_neg = np.where(time_diff <= 0.0)[0] + while len(idx_neg) > 0: + time_data = np.delete(time_data, idx_neg[0] + 1) + time_diff = np.diff(time_data) + idx_neg = np.where(time_diff <= 0.0)[0] + + # create the new time coord + new_time = iris.coords.DimCoord( + time_data, + standard_name="time", + var_name="time", + units=time_units, + ) + + # create a new cube with the right shape + dims = ( + time_data.shape[0], + cube.coord("latitude").shape[0], + cube.coord("longitude").shape[0], + ) + data = cube.data + new_data = np.ma.append( + data[: dims[0] - 1, :, :], data[-1, :, :] + ) + new_data = new_data.reshape(dims) + + tmp_cube = iris.cube.Cube( + new_data, + standard_name=cube.standard_name, + long_name=cube.long_name, + var_name=cube.var_name, + units=cube.units, + attributes=cube.attributes, + cell_methods=cube.cell_methods, + dim_coords_and_dims=[ + (new_time, 0), + (cube.coord("latitude"), 1), + (cube.coord("longitude"), 2), + ], + ) + + new_list.append(tmp_cube) + return new_list diff --git a/tests/integration/cmor/_fixes/cmip6/test_cesm2.py b/tests/integration/cmor/_fixes/cmip6/test_cesm2.py index 24df5db059..0bccf89186 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_cesm2.py +++ b/tests/integration/cmor/_fixes/cmip6/test_cesm2.py @@ -507,7 +507,7 @@ def test_pr_fix_metadata(pr_cubes): out_cubes = fix.fix_metadata(pr_cubes) for cube in out_cubes: - if cube.var_name == "tas": + if cube.var_name == "pr": assert cube.coord("time").is_monotonic() diff --git a/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py b/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py index eb16a4d2ba..0621c56221 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py +++ b/tests/integration/cmor/_fixes/cmip6/test_fgoals_g3.py @@ -4,8 +4,10 @@ import iris import numpy as np +import pandas as pd +import pytest -from 
esmvalcore.cmor._fixes.cmip6.fgoals_g3 import Mrsos, Siconc, Tos +from esmvalcore.cmor._fixes.cmip6.fgoals_g3 import Mrsos, Siconc, Tas, Tos from esmvalcore.cmor._fixes.common import OceanFixGrid from esmvalcore.cmor._fixes.fix import GenericFix from esmvalcore.cmor.fix import Fix @@ -163,3 +165,71 @@ def test_mrsos_fix_metadata_2(mock_base_fix_metadata): [[0.5, 1.5], [1.5, 2.5], [2.5, 3.5]], ) mock_base_fix_metadata.assert_called_once_with(fix, cubes) + + +@pytest.fixture +def tas_cubes(): + correct_time_coord = iris.coords.DimCoord( + points=[1.0, 2.0, 3.0, 4.0, 5.0], + var_name="time", + standard_name="time", + units="days since 1850-01-01", + ) + + lat_coord = iris.coords.DimCoord( + [0.0], var_name="lat", standard_name="latitude" + ) + + lon_coord = iris.coords.DimCoord( + [0.0], var_name="lon", standard_name="longitude" + ) + + correct_coord_specs = [ + (correct_time_coord, 0), + (lat_coord, 1), + (lon_coord, 2), + ] + + correct_tas_cube = iris.cube.Cube( + np.ones((5, 1, 1)), + var_name="tas", + units="K", + dim_coords_and_dims=correct_coord_specs, + ) + + scalar_cube = iris.cube.Cube(0.0, var_name="ps") + + return iris.cube.CubeList([correct_tas_cube, scalar_cube]) + + +def test_get_tas_fix(): + """Test tas fix.""" + fix = Fix.get_fixes("CMIP6", "FGOALS-g3", "day", "tas") + assert fix == [Tas(None), GenericFix(None)] + + +def test_tas_fix_metadata(tas_cubes): + """Test metadata fix.""" + vardef = get_var_info("CMIP6", "day", "tas") + fix = Tas(vardef) + + out_cubes = fix.fix_metadata(tas_cubes) + assert out_cubes[0].var_name == "tas" + coord = out_cubes[0].coord("time") + assert pd.Series(coord.points).is_monotonic_increasing + + # de-monotonize time points + for cube in tas_cubes: + if cube.var_name == "tas": + time = cube.coord("time") + points = np.array(time.points) + points[-1] = points[0] + dims = cube.coord_dims(time) + cube.remove_coord(time) + time = iris.coords.AuxCoord.from_coord(time) + cube.add_aux_coord(time.copy(points), dims) + + out_cubes = fix.fix_metadata(tas_cubes) + for cube in out_cubes: + if cube.var_name == "tas": + assert cube.coord("time").is_monotonic() From 6532a0a4f7fb1b74713fdb91aa400a17a0ff09ac Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Wed, 11 Dec 2024 14:18:14 +0000 Subject: [PATCH 09/36] [Condalock] Update Linux condalock file (#2614) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 377 +++++++++++++++++++++----------------------- 1 file changed, 176 insertions(+), 201 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 260725b5df..7dce7db4ec 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -1,6 +1,6 @@ # Generated by conda-lock. 
# platform: linux-64 -# input_hash: efb0d40da21331f3809f3aac8456fd160657e1a1f90bfc9642cbde579ae0920e +# input_hash: c08346d7b64ddb822a23f6a72356a10c21221039987cdf35737057f500071d7f @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 @@ -8,8 +8,7 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.5-ha770c72_0.conda#2889e6b9c666c3a564ab90cedc5832fd -https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda#d8d7293c5b37f39b2ac32940621c6592 +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6-ha770c72_0.conda#38ee82616a780cf22ec6355e386e2563 https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.13-5_cp313.conda#381bbd2a92c863f640a55b6ff3c35161 https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 @@ -20,29 +19,28 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.3-hb9d3cd8_0.conda#ff3653946d34a6a6ba10babb139d96ef -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-heb4867d_0.conda#09a6c610d002e54e18353c06ef61a253 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.5-hb9d3cd8_0.conda#d8288fbad9d809b9ca139b8beb6553ef +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-hb9d3cd8_1.conda#ee228789a85f961d14567252a03e725f https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda#b422943d5d772b7cc858b36ad2a92db5 https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d +https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 +https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.9.0-hb9d3cd8_1.conda#1e936bd23d737aac62a18e9a1e7f8b18 
https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e -https://conda.anaconda.org/conda-forge/linux-64/tzcode-2024b-hb9d3cd8_0.conda#db124840386e1f842f93372897d1b857 https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 -https://conda.anaconda.org/conda-forge/linux-64/xorg-xorgproto-2024.1-hb9d3cd8_1.conda#7c21106b851ec72c037b162c216d8f05 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.0-hecf86a2_2.conda#c54459d686ad9d0502823cacff7e8423 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-hf42f96a_2.conda#257f4ae92fe11bd8436315c86468c39b -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.1-hf42f96a_1.conda#bbdd20fb1994a9f0ba98078fcb6c12ab -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-hf42f96a_1.conda#d908d43d87429be24edfb20e96543c20 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h0f28dba_2.conda#94faebd978282d2a4a8514141daec756 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h9cc6398_4.conda#076717670d5406e90070120314ff9b4f +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.1-h9cc6398_3.conda#10bdb7fc3763760dcea1cd908ece6b2b +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h9cc6398_3.conda#d6dd8b87b95195d8d26893611d94ba3b https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 -https://conda.anaconda.org/conda-forge/linux-64/capnproto-1.0.2-h766bdaa_3.conda#7ea5f8afe8041beee8bad281dee62414 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b4ab956c90390e407bb177f8a58bab @@ -58,35 +56,35 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 +https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-h4bc722e_0.conda#aeb98fdeb2e8f25d43ef71fbacbeec80 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 -https://conda.anaconda.org/conda-forge/linux-64/libntlm-1.4-h7f98852_1002.tar.bz2#e728e874159b042d92b90238a3cb0dc2 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 
https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.0-hadc24fc_1.conda#b6f02b52a174e612e89548f4663ce56a -https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda#1f5a58e686b13bcfde88b93f547d23fe +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 -https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.8.0-h166bdaf_0.tar.bz2#ede4266dc02e875fe1ea77b25dd43747 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe -https://conda.anaconda.org/conda-forge/linux-64/nspr-4.36-h5888daf_0.conda#de9cd5bca9e4918527b9b72b6e2e1409 +https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.9-h0fd0ee4_0.conda#f472432f3753c5ca763d2497e2ea30bf +https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2#2161070d867d1b1204ea749c8eec4ef0 +https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 +https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.2-hdeadb07_2.conda#461a1eaa075fd391add91bcffc9de0c1 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-hfd54f12_3.conda#c0b9f79cd2f5797b913415511bfa2cd6 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 
-https://conda.anaconda.org/conda-forge/linux-64/fmt-11.0.2-h434a139_0.conda#995f7e13598497691c1dc476d889bc04 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c @@ -96,9 +94,8 @@ https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76 https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda#407fee7a5d7ab2dca12c9ca7f62310ad -https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.123-hb9d3cd8_0.conda#ee605e794bdc14e2b7f84c4faa0d8c2c +https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 -https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-14.2.0-h69a702a_1.conda#0a7f4cd238267c88e5d69f7826a407eb https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 @@ -107,20 +104,18 @@ https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.cond https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda#dcb95c0a98ba9ff737f7ae482aef7833 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 -https://conda.anaconda.org/conda-forge/linux-64/nss-3.106-hdf54f9c_0.conda#efe735c7dc47dddbb14b3433d11c6feb https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 -https://conda.anaconda.org/conda-forge/linux-64/pixman-0.43.2-h59595ed_0.conda#71004cbf7924e19c02746ccde9fd7123 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 -https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-ha2e4443_0.conda#6b7dcc7349efd123d493d2dbe85a045f https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_0.conda#0b666058a179b744a622d0a4a0c56353 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 
+https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h1ffe551_7.conda#7cce4dfab184f4bbdfc160789251b3c5 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.1-hab05fe4_2.conda#fb409f7053fa3dbbdf6eb41045a87795 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-hf811eff_10.conda#5046c78dd139a333b6acd7376a10e0a7 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hce7dc5d_3.conda#c0f54e8975ad42d2864f4b1918356b3b https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee @@ -130,63 +125,62 @@ https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openbla https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-he137b08_1.conda#63872517c98aa305da58a757c443698e -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-hb346dea_0.conda#c81a9f1118541aaa418ccb22190c817e +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hc4654cb_2.conda#be54fb40ea32e8fe9dbaa94d4528b57e +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-he039a57_0.conda#052499acd6d6b79952197a13b23e2600 -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.0-h9ebbce0_100_cp313.conda#08e9aef080f33daeb192b2ddc7e4721f +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_102_cp313.conda#6e7535f1d1faf524e9210d2689b3149b https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h77b4e00_1.conda#01093ff37c1b5e6bf9f17c0116747d11 -https://conda.anaconda.org/conda-forge/linux-64/spdlog-1.14.1-hed91bc2_1.conda#909188c8979846bac8e586908cf1ca6a -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.0-h9eae976_1.conda#53abf1ef70b9ae213b22caa5350f97a9 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_1.conda#a7a49a8b85122b49214798321e2e96b4 -https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_0.conda#7d78a232029458d0077ede6cda30ed0c -https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.5-py313h78bf25f_0.conda#5266713116fd050a2e4d3c2de84e9fd5 
+https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_2.conda#eef3132295d92678c17ffc8b114b8371 +https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a +https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.6-py313h78bf25f_0.conda#3347a6c8504883a216d914e476b46d4e +https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda#8f587de4bcf981e26228f268df374a9b https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_0.conda#6732fa52eb8e66e5afeb32db8701a791 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-hb88c0a9_10.conda#409b7ee6d3473cc62bda7280f6ac20d0 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h7bd072d_8.conda#0e9d67838114c0dbd267a9311268b331 +https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_1.conda#2018839db45c79654b57a924fcdd27d0 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-h8c8080f_14.conda#a9284141081982473ebf41b92566bbcb +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-ha3c2ba9_11.conda#93c5070d6f9b4cb2ed9de52ce247cebb https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py313h46c70d0_2.conda#f6bb3742e17a4af0dc3c8ca942683ef6 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.0-hebfffa5_3.conda#fceaedf1cdbcb02df9699a0d9b005292 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_0.conda#a374efa97290b8799046df7c5ca17164 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_0.conda#f3ad426304898027fc619827ff428eca +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 -https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_0.tar.bz2#3faab06a954c2a04039983f2c4a50d99 -https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_0.conda#5cd86562580f274031ede6aa6aa24441 -https://conda.anaconda.org/conda-forge/linux-64/cyrus-sasl-2.1.27-h54b06d7_7.conda#dce22f70b4e5a407ce88f2be046f4ceb +https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 +https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py313hc66aa0d_3.conda#1778443eb12b2da98428fa69152a2a2e -https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_0.tar.bz2#43afe5ab04e35e17ba28649471dd7364 +https://conda.anaconda.org/conda-forge/noarch/decorator-5.1.1-pyhd8ed1ab_1.conda#d622d8d7ee8868870f9cbe259f381181 https://conda.anaconda.org/conda-forge/noarch/defusedxml-0.7.1-pyhd8ed1ab_0.tar.bz2#961b3a227b437d82ad7054484cfa71b2 
-https://conda.anaconda.org/conda-forge/noarch/dill-0.3.9-pyhd8ed1ab_0.conda#27faec84454995f6774786c7e5833cd6 -https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_0.conda#fe521c1608280cc2803ebd26dc252212 -https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_0.conda#e8cd5d629f65bdf0f3bb312cde14659e -https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_0.tar.bz2#3cf04868fee0a029769bd41f4b2fbf2d -https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_0.conda#d02ae936e42063ca46af6cdad2dbd1e0 -https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_0.conda#15dda3cdbf330abfe9f555d22f66db46 -https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_0.conda#d0441db20c827c11721889a241df1220 -https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_0.conda#916f8ec5dd4128cd5f207a3c4c07b2c6 +https://conda.anaconda.org/conda-forge/noarch/dill-0.3.9-pyhd8ed1ab_1.conda#5e11310fca410e9f31381157079dee55 +https://conda.anaconda.org/conda-forge/noarch/distlib-0.3.9-pyhd8ed1ab_1.conda#8d88f4a2242e6b96f9ecff9a6a05b2f1 +https://conda.anaconda.org/conda-forge/noarch/docutils-0.21.2-pyhd8ed1ab_1.conda#24c1ca34138ee57de72a943237cde4cc +https://conda.anaconda.org/conda-forge/noarch/entrypoints-0.4-pyhd8ed1ab_1.conda#3366592d3c219f2731721f11bc93755c +https://conda.anaconda.org/conda-forge/noarch/exceptiongroup-1.2.2-pyhd8ed1ab_1.conda#a16662747cdeb9abbac74d0057cc976e +https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a71efeae2c160f6789900ba2631a2c90 +https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda#ef8b5fca76806159fc25b4f48d8737eb +https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhff2d567_0.conda#816dbc4679a64e4417cd1385d661bb31 +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe -https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyh9f0ad1d_0.tar.bz2#914d6646c4dbb1fd3ff539830a12fd71 +https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd81877a_7.conda#74fbff91ca7c1b9a36b15903f2242f86 -https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_0.tar.bz2#9f765cbfab6870c8435b9eefecd7a1f4 -https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_0.conda#7ba2ede0e7c795ff95088daf0dc59753 +https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a +https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 
-https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_0.conda#f800d2da156d08e289b14e87e43c1ae5 -https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_0.conda#d68d25aca67d1a06bf6f5b43aea9430d -https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_0.conda#ff7ca04134ee8dde1d7cf491a78ef7c7 +https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda#6837f3eff7dcea42ecd714ce1ac2b108 +https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_1.conda#14c42a6334f38c412449f5a5e4043a5a +https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda#7ac5f795c15f288984e32add616cdc59 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py313h33d0bda_0.conda#9862d13a5e466273d5a4738cffcb8d6c https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac -https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.4-hfca40fe_0.conda#32ddb97f897740641d8d46a829ce1704 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-hadbb8c3_0.conda#4a099677417658748239616b6ca96bb6 https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda#6e801c50a40301f6978c53976917b277 https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 @@ -197,225 +191,206 @@ https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openb https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py313h010b13d_1.conda#08a6b03e282748f599c55bbbdbd722fa -https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_0.conda#ab825f8b676368beb91350c6a2da6e11 -https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_0.tar.bz2#34fc335fc50eef0b5ea708f2b5f54e0c -https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_0.conda#5cbee699846772cc939bef23a0d524ed +https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_1.conda#21b62c55924f01b6eef6827167b46acb +https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda#827064ddfe0de2917fb29f1da4f8f533 +https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 +https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_1.conda#c46df05cae629e55426773ac1f85d68f https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py313h33d0bda_0.conda#7f907b1065247efa419bb70d3a3341b5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda#fd40bf7f7f4bc4b647dc8512053d9873 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.2-h488ebb8_0.conda#7f2e286780f072ed750df46dc2631138 -https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhff2d567_1.conda#8508b703977f4c4ada34d657d051972c 
+https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 +https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 -https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_0.conda#81534b420deb77da8833f2289b8d47ac -https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-py_1003.tar.bz2#415f0ebb6198cc2801c73438a9fb5761 +https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda#5c092057b6badd30f75b06244ecd01c9 +https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda#11a9d1d09a3615fc07c3faf79bc0b943 https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_0.conda#ca3afe2d7b893a8c8cdf489d30a2b1a3 -https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_1.conda#405678b942f2481cecdb3e010f4925d9 -https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_0.conda#fd8f2b18b65bbf62e8f653100690c8d2 -https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_0.conda#d3483c8fc2dc2cc3f5cf43e26d60cabf +https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda#5a5870a74432aa332f7d32180633ad05 +https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe +https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py313h536fd9c_0.conda#b50a00ebd2fda55306b8a095363ce27f -https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd3deb0d_0.tar.bz2#359eeb6536da0e687af562ed265ec263 -https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_0.conda#0f051f09d992e0d08941706ad519ee0e -https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyhd8ed1ab_0.conda#844d9eb3b43095b031874477f7d70088 -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_0.conda#b7f5c092b8f9800150d998a71b76d5a1 -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_1.conda#035c17fbf099f50ff60bf2eb303b0a83 -https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_0.tar.bz2#92a889dc236a5197612bc85bee6d7174 -https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2#2a7de29fb590ca14b5243c4c812c8025 -https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.20.0-pyhd8ed1ab_0.conda#b98d2018c01ce9980c03ee2850690fab -https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_0.conda#986287f89929b2d629bd6ef6497dc307 +https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda#7d9daffbb8d8e0af0f769dbbcd173a54 +https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda#3bfdfb8dbcdc4af1ae3f9a8eb3948f04 +https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef +https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 
+https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac +https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda#38e34d2d1d9dca4fb2b9a0a04f604e2c +https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1.conda#c0def296b2f6d2dd7b030c2a7f66bb1f https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py313h536fd9c_1.conda#5c44ffac1f568dc8b4afb09a3e825d49 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py313h536fd9c_1.conda#3789f360de131c345e96fbfc955ca80b -https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.21.0-py313h920b4c0_0.conda#4877cdeada83444c17df70a77a243da9 -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.5.0-pyhff2d567_0.conda#ade63405adb52eeff89d506cd55908c0 -https://conda.anaconda.org/conda-forge/noarch/six-1.16.0-pyh6c4a22f_0.tar.bz2#e5f25f8dbc060e9a8d912e432202afc2 +https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.22.3-py313h920b4c0_0.conda#f21c21a167b2e25292e436dcb8e7cf3e +https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc +https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3f144b2c34f8cb5a9abd9ed23a39c561 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_0.conda#da1d979339e2714c30a8e806a33ec087 -https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_0.conda#04eedddeb68ad39871c8127dd1c21f4f -https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_0.conda#29a5d22565b850099cd9959862d1b154 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb +https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c +https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_1.conda#1ce02d60767af357e864ce61895268d2 https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 -https://conda.anaconda.org/conda-forge/noarch/tomli-2.1.0-pyhff2d567_0.conda#3fa1089b4722df3a900135925f4519d9 -https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_0.conda#0062a5f3347733f67b0f33ca48cc21dd -https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_0.conda#34feccdd4177f2d3d53c73fc44fd9a37 -https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.1-py313h536fd9c_1.conda#70b5b6dfd7d1760cd59849e2271d937b -https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_0.conda#3df84416a021220d8b5700c613af2dc5 -https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_0.conda#ebe6952715e1d5eb567eeebf25250fa7 +https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac944244f1fed2eb49bae07193ae8215 +https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_1.conda#1d9ab4fc875c52db83f9c9b40af4e2c8 
+https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d +https://conda.anaconda.org/conda-forge/linux-64/tornado-6.4.2-py313h536fd9c_0.conda#5f5cbdd527d2e74e270d8b6255ba714f +https://conda.anaconda.org/conda-forge/noarch/traitlets-5.14.3-pyhd8ed1ab_1.conda#019a7385be9af33791c989871317e1ed +https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c72_1.conda#d17f13df8b65464ca316cbc000a3cb64 https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py313h46c70d0_1.conda#7f4872b663aafde0f532543488656f5d -https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_0.conda#68f0738df502a14213624b288c60c9ad -https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_2.conda#daf5160ff9cde3a468556965329085b9 +https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda#b68980f2495d096e71c7fd9d7ccf63e6 +https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda#2841eb5bfc75ce15e9a0054b98dcd64d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda#7da9007c0582712c4bad4131f89c8372 -https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_0.conda#156c91e778c1d4d57b709f8c5333fd06 +https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_1.conda#c79cea50b258f652010cb6c8d81591b5 https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda#3947a35e916fcc6b9825449affbf4214 -https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_0.conda#cf30c2c15b82aacb07f9c09e28ff2275 -https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_0.conda#fee389bf8a4843bd7a2248ce11b7f188 +https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e +https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a -https://conda.anaconda.org/conda-forge/noarch/asttokens-2.4.1-pyhd8ed1ab_0.conda#5f25798dcefd8252ce5f9dc494d5f571 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.1-h3a84f74_3.conda#e7a54821aaa774cfd64efcd45114a4d7 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.5-h55e9418_4.conda#faec629f0eb306cfe17ed1615249e188 https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda#0a8838771cc2e985cd295e01ae83baf1 -https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_0.conda#6d4e9ecca8d88977147e109fc7053184 -https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_0.conda#332493000404d8411859539a5a630865 -https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_0.conda#461bcfab8e65c166e297222ae919a2d4 +https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 +https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_1.conda#707af59db75b066217403a8f00c1d826 
https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py313hfab6e84_0.conda#ce6386a5892ef686d6d680c345c40ad1 -https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.4.1-ha728647_2.conda#dab65ce7f9da0b25f53f0ec0d37ee09c -https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-py_0.tar.bz2#4fd2c6b53934bd7d96d1f3fdaf99b79f -https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_1.tar.bz2#a29b7c141d6b2de4bb67788a5f107734 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.7-py313h8060acc_0.conda#e87423953e8fc4eaab4a80e3e82c256e +https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda#82bea35e4dac4678ba623cf10e95e375 +https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py313h8060acc_0.conda#dc7f212c995a2126d955225844888dcb https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py313h536fd9c_1.conda#f536889754b62dad2e509cb858f525ee https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.0-py313h8060acc_0.conda#0ff3a44b54d02157f6e99074432b7396 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.2-py313h8060acc_0.conda#bcefb389907b2882f2c90dee23f07231 https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b -https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_0.tar.bz2#b748fbf7060927a6e82df7cb5ee8f097 +https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_hdf9ad27_105.conda#7e1729554e209627636a0f6fabcdd115 -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_0.conda#54198435fce4d64d8a89af22573012a8 -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_0.conda#c808991d29b9838fb4d96ce8267ec9ec -https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_0.conda#1d25ed2b95b92b026aaa795eabec8d91 -https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhff2d567_0.conda#11ead81b00e0f7cc901fceb7ccfb92c1 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_0.conda#7b86ecb7d3557821c649b3c31e3eb9f2 +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_1.conda#15798fa69312d433af690c8c42b3fb36 +https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_1.conda#ef7dc847f19fe4859d5aaa33385bf509 +https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda#a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 
-https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_1.conda#afcd1b53bcac8844540358e33f33d28f +https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda#fd312693df06da3578383232528c468d https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.31.0-h804f50b_0.conda#35ab838423b60f233391eb86d324a830 https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py313h6eb7059_2.conda#48d1a2d9b1f12ff5180ffb4154050c48 -https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_0.conda#779345c95648be40d22aaa89de7d4254 +https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda#fee3164ac23dfca50cfcc8b85ddefb81 +https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda#af6ab708897df59bd6e7283ceab1b56b https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.1.3-py313h4bf6692_0.conda#17bcf851cceab793dad11ab8089d4bc4 -https://conda.anaconda.org/conda-forge/linux-64/openldap-2.6.8-hedd0468_0.conda#dcd0ed5147d8876b0848a552b416ce76 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py313hb30382a_0.conda#5aa2240f061c27ddabaa2a4924c1a066 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 -https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_0.conda#629f3203c99b32e0988910c93e77f3b6 +https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda#d0d408b1f18883a944376da5cf8101ea https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py313h2d7ed13_0.conda#0d95e1cda6bf9ce501e751c02561204e -https://conda.anaconda.org/conda-forge/linux-64/poppler-24.08.0-h47131b8_1.conda#0854b9ff0cc10a1f6f67b0f352b8e75a -https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.0-h12925eb_0.conda#8c29983ebe50cc7e0998c34bc7614222 -https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_0.conda#4c05134c48b6a74f33bbb9938e4a115e -https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.3-pyhd8ed1ab_0.conda#c03d61f31f38fdb9facf70c29958bf7a -https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_0.conda#b6dfd90a2141e573e4b6a81630b56df5 +https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f +https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1.conda#368d4aa48358439e07a97ae237491785 +https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb +https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py313h8e95178_3.conda#8ab50c9c9c3824ac0ffac9e9dcf5619e https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 -https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_0.conda#0fc8b52192a8898627c3efae1003e9f6 
+https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_1.conda#8c9083612c1bfe6878715ed5732605f8 +https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda#b1b505328da7a6b246787df4b5a49fbc https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 -https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_0.conda#52d648bd608f5737b123f510bb5514b5 +https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.27.1-pyhd8ed1ab_0.conda#dae21509d62aa7bf676279ced3edcb3f -https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_0.conda#ff98f23ad74d2a3256debcd9df65d37d -https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhca7485f_0.conda#c089f90a086b6214c5606368d0d3bad0 -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.4-h21d7256_1.conda#963a310ba64fd6a113eb4f7fcf89f935 +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda#293718ddac83a0fbc0f2193ff77d1e1c +https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhd8ed1ab_1.conda#8380155472575eec439a47eef8f62b80 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.7-hed26007_5.conda#7c64e4ac7a484fc525a4ce7b9baf709a https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda#73f73f60854f325a55f1d31459f2ab73 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda#13de36be8de3ae3f05ba127631599213 -https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_0.conda#ac582de2324988b79870b50c89c91c75 +https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_1.conda#53eca64665361194ca4bbaf87c0ded99 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py313ha014f3b_1.conda#b20667f9b1d016c1141051a433f76dfc https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py313h33d0bda_0.conda#6b6768e7c585d7029f79a04cbc4cbff0 -https://conda.anaconda.org/conda-forge/linux-64/cryptography-43.0.3-py313h6556f6e_0.conda#4df31328181600b08e18f709269d6f52 -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.11.2-pyhff2d567_1.conda#ae2be36dab764e655a22f240837cef75 +https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.0-py313h6556f6e_0.conda#a75161b68e899739b89057b15b1c63cd +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.12.0-pyhd8ed1ab_1.conda#c3bd6d4f36c0e1ef9a8cce53997460c2 https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda#4eb52aecb43e7c72f8e4fca0c386354e +https://conda.anaconda.org/conda-forge/noarch/ipython-8.30.0-pyh707e725_0.conda#5d6e5cb3a4b820f61b2073f0ad5431f1 https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 -https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_0.conda#720745920222587ef942acfbc578b584 -https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_0.conda#a14218cfb29662b4a19ceb04e93e298e -https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.3-hf8d3e68_2.conda#ffe68c611ae0ccfda4e7a605195e22b3 
+https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda#3b519bc21bc80e60b456f1e62962a766 +https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda#4ebae00eae9705b0c3d6d1018a81d047 https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.31.0-h0121fbd_0.conda#568d6a09a6ed76337a7b97c84ae7c0f8 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h135f659_114.conda#a908e463c710bd6b10a9eaa89fdf003c -https://conda.anaconda.org/conda-forge/linux-64/libpq-17.1-h04577a9_0.conda#c2560bae9f56de89b8c50355f7c84910 +https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda#43a7f3df7d100e8fc280e6636680a870 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py313ha87cce1_1.conda#c5d63dd501db554b84a30dea33824164 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h4c5309f_1.conda#7df02e445367703cd87a574046e3a6f0 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba -https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.1-pyhd8ed1ab_0.conda#2a3426f75e2172c932131f4e3d51bcf4 +https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.2-pyhd8ed1ab_1.conda#2d8d45003973eb746f9465ca6b02c050 https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py313hdb96ca5_0.conda#2a0d20f16832a170218b474bcec57acf -https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_0.conda#cb8a11b6d209e3d85e5094bdbd9ebd9c +https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 -https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_0.conda#4b9b5e086812283c052a9105ab1e254e -https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_0.conda#b39568655c127a9c4a44d178ac99b6d0 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py313h27c5614_1.conda#c5c52b95724a6d4adb72499912eea085 +https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 +https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd +https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda#7aed65d4ff222bfb7335997aa40b7da5 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py313h27c5614_2.conda#25c0eda0d2ed28962c5f3e8f7fbeace3 https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py313h3f71f02_2.conda#dd0b742e8e61b8f15e4b64efcc103ad6 -https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.2-pyhd8ed1ab_0.conda#e7df0fdd404616638df5ece6e69ba7af https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h33d0bda_5.conda#5bcffe10a500755da4a71cc0fb62a420 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py313h80202fe_1.conda#c178558ff516cd507763ffee230c20b2 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.449-h1a02111_2.conda#109ff9aa7347ca004a3f496a5160cdb9 
+https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-h571fd1c_3.conda#374cf1add8af327b15b1b1e4873f4955 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda#7eb66060455c7a47d9dcdbfa9f46579b -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.1-pyhd8ed1ab_0.conda#e88d74bb7b9b89d4c9764286ceb94cc9 +https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.2-pyhd8ed1ab_1.conda#976ff24762f1f991b08f7a7a41875086 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313ha014f3b_0.conda#aecffd7a21d698e374487644ce67d6eb -https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.38.3-h8bb6dbc_1.conda#73265d4acc551063cc5c5beab37f33c5 +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-hf413ef6_1.conda#63ea3e2f32daf4670182a3e6aad0b47b https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.2-pyhd8ed1ab_0.conda#636950f839e065401e2031624a414f0b -https://conda.anaconda.org/conda-forge/noarch/ipython-8.29.0-pyh707e725_0.conda#56db21d7d51410fcfbfeca3d1a6b4269 -https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_0.conda#da304c192ad59975202859b367d0f6a2 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-hef9eae6_1.conda#6271d1929f8c1964f5f1d56a7f996b19 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_0.conda#dd3acd023fc358afab730866a0e5e3f5 +https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda#a3cead9264b331b32fe8f0aabc967522 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-h7250d82_6.conda#4e14dd6eef7e961a54258cab6482a656 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.2-py313h129903b_2.conda#71d8f34a558d7e4d6656679c609b65d5 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_106.conda#5b911bfe75855326bae6857451268e59 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.1-nompi_py313h2a70696_102.conda#f4e34c42e744348631b5c6c37efe7cd4 -https://conda.anaconda.org/conda-forge/linux-64/postgresql-17.1-h1122569_0.conda#10dcb54ee745ee2a51d5370ba8e5657e -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.2.1-pyhd8ed1ab_2.conda#85fa2fdd26d5a38792eb57bc72463f07 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.3-py313h129903b_0.conda#e60c1296decc1bb82cc55e8a9da0ceb4 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h1dd084c_101.conda#7acb7a454880b024f7d67487a7495631 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda#269109707b3810adce78b6afb2a82c80 https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py313ha014f3b_3.conda#041b8326743c64bd02b8c0f34f05e1ef -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_0.conda#6b55867f385dd762ed99ea687af32a69 -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.10.0-pyhd8ed1ab_0.conda#53e365732dfa053c4d19fc6b927392c4 +https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce 
+https://conda.anaconda.org/conda-forge/noarch/xarray-2024.11.0-pyhd8ed1ab_0.conda#7358eeedbffd742549d372e0066999d3 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda#7c1980f89dd41b097549782121a73490 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py313ha87cce1_0.conda#44c2091019480603a885aa01e7b710e7 https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.11.2-pyhff2d567_1.conda#171408408370e59126dc3e39352c6218 -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.6.1-nompi_h4441c20_3.conda#1afc1e85414e228916732df2b8c5d93b -https://conda.anaconda.org/conda-forge/linux-64/gdal-3.10.0-py313h7cbee32_1.conda#f6c287930ef6b23a23cb1952e19d2aa9 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.12.0-pyhd8ed1ab_1.conda#1838762b4a8e33ee7d8281494b22ff80 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py313hab4ff3b_3.conda#69a5fbc032a6a01aa6cf7010dd2164a0 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-fits-3.10.0-he1674de_1.conda#415c6f3d27f39731b38f89db57f785f7 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-grib-3.10.0-ha360943_1.conda#c8ec329a2a0e09deae512b24bebba974 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf4-3.10.0-h380f24e_1.conda#06b598afa8b4d73818d6ae5fbf57cce1 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-hdf5-3.10.0-hefe6d7a_1.conda#ff882b327028dd49f9db1eb0c4ca4225 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-jp2openjpeg-3.10.0-h9fdfae1_1.conda#b5284debccc01a949a6d744e4e793b2d -https://conda.anaconda.org/conda-forge/linux-64/libgdal-pdf-3.10.0-h697c966_1.conda#0b8c19eaf166d18bb1f10d759038825f -https://conda.anaconda.org/conda-forge/linux-64/libgdal-pg-3.10.0-h5cc4e75_1.conda#4749862355fee05f7a5a7c41a2cfac7d -https://conda.anaconda.org/conda-forge/linux-64/libgdal-postgisraster-3.10.0-h5cc4e75_1.conda#8d3f0806eb386761975cff7345858c6c -https://conda.anaconda.org/conda-forge/linux-64/libgdal-xls-3.10.0-h1e14832_1.conda#5419c1134aabe809f67059808be80195 https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 -https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_0.conda#0b57b5368ab7fc7cdc9e3511fa867214 +https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda#bbe1963f1e47f594070ffe87cdf612ea https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py313ha014f3b_0.conda#b28717a6d595cdc42737d6669d422b1d -https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_0.conda#5ede4753180c7a550a443c430dc8ab52 -https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.26.2-h84bbdfb_10.conda#c9ad5ee546eba614b7fe7b420f6b7763 +https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 
https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.6.1-pyhc1e730c_0.conda#25a9661177fd68bfdb4314fd658e5c3b +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 https://conda.anaconda.org/conda-forge/noarch/iris-3.11.0-pyha770c72_0.conda#a5e36260789ce92074c3736533ecdd61 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.0.0-h3b997a5_7_cpu.conda#32897a50e7f68187c4a524c439c0943c -https://conda.anaconda.org/conda-forge/linux-64/libgdal-kea-3.10.0-h38e673a_1.conda#1b4358b735ef045fbd87d2ec3341a6a2 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-netcdf-3.10.0-hba670d9_1.conda#85699d0969e7c92ba75c7bb0e7cbed19 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-tiledb-3.10.0-hec57c18_1.conda#eb39051813bc34137bff2e4ad8dfe64e -https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.0-pyhd8ed1ab_0.conda#15b51397e0fe8ea7d7da60d83eb76ebc -https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_0.conda#8dab97d8a9616e07d779782995710aed +https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.1.0-h3b07799_4_cpu.conda#27675c7172667268440306533e4928de +https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.1-pyhd8ed1ab_0.conda#3ee79082e59a28e1db11e2a9c3bcd85a +https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.2-py313h78bf25f_0.conda#45f3a293c1709b761bd450917cecd8c6 https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.20.0-pyhd8ed1ab_1.conda#d8dced41fc56982c81190ba0eb10c3de -https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.0.0-h5888daf_7_cpu.conda#786a275d019708cd1c963b12a8fb0c72 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.10.0-ha770c72_1.conda#f32b9e97d0394dcc2f6f5758dc18afa1 -https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.0.0-h6bd9018_7_cpu.conda#687870f7d9cba5262fdd7e730e9e9ba8 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_1.conda#e2d2abb421c13456a9a9f80272fdf543 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.1.0-h8bbc2ab_4_cpu.conda#82bcbfe424868ce66b5ab986999f534d +https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.1.0-hf4f6db6_4_cpu.conda#f18b10bf19bb384183f2aa546e9f6f0a +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhff2d567_2.conda#0457fdf55c88e52e0e7b63691eafcc48 https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.9.0-pyhd8ed1ab_0.conda#177a9651dc31c11a81eddc2a5e2e524e -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-18.0.0-py313he5f92c8_1_cpu.conda#34918674d521ab777f11ab3c1f2ab797 -https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py313hab20ce0_2.conda#c0cf01c18f0c4f38c84cd906409ec5e4 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.0.0-h5888daf_7_cpu.conda#a742b9a0452b55020ccf662721c1ce44 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_1.conda#37cec2cf68f4c09563d8bc833791096b 
-https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.0.0-h5c8f2c3_7_cpu.conda#be76013fa3fdaec2c0c504e6fdfd282d -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_1.conda#ab83e3b9ca2b111d8f332e9dc8b2170f -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-18.0.0-py313h78bf25f_1.conda#7ce246ff42b7797a9c270964c94faf05 -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.19-pyhd8ed1ab_0.conda#09ea33eb6525cc703ce1d39c88378320 -https://conda.anaconda.org/conda-forge/noarch/dask-2024.11.2-pyhff2d567_1.conda#4ea56955c9922ac99c35d0784cffeb96 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-18.1.0-py313he5f92c8_0_cpu.conda#5380e12f4468e891911dbbd4248b521a +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.1.0-h8bbc2ab_4_cpu.conda#fa31464c75b20c2f3ac8fc758e034887 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_2.conda#28701f71ce0b88b86783df822dd9d7b9 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.1.0-had74209_4_cpu.conda#bf261e5fa25ce4acc11a80bdc73b88b2 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_2.conda#9337002f0dd2fcb8e1064f8023c8e0c0 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-18.1.0-py313h78bf25f_0.conda#a11d880ceedc33993c6f5c14a80ea9d3 +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.20-pyhd8ed1ab_1.conda#46f5089b7828d82517a98366820c5e85 +https://conda.anaconda.org/conda-forge/noarch/dask-2024.12.0-pyhd8ed1ab_1.conda#466d56f3108523402be464e4192f584e https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_0.conda#9075bd8c033f0257122300db914e49c9 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_0.conda#b3bcc38c471ebb738854f52a36059b48 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_0.conda#e25640d692c02e8acfff0372f547e940 -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_0.conda#d6e5ea5fe00164ac6c2dcc5d76a42192 -https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_0.conda#05706dd5a145a9c91861495cd435409a -https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_0.conda#e507335cb4ca9cff4c3d0fa9cdab255e +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 +https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac +https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 # 
pip scitools-iris @ https://files.pythonhosted.org/packages/20/89/109d116f778fd148782598eb1796db00d47de8ca0d68503d248b55154581/scitools_iris-3.11.0-py3-none-any.whl#sha256=97bb7d7e349808684a5326a1ec06a459702a2b4f435c9a1502378d41e24a32f3 # pip esmvaltool-sample-data @ https://files.pythonhosted.org/packages/58/fa/4ecc84665e0ed04c8c4c797405c19c12900bdba6438ab2f5541bf8aa1d42/ESMValTool_sample_data-0.0.3-py3-none-any.whl#sha256=81f0f02182eacb3b639cb207abae5ac469c6dd83fb6dfe6d2430c69723d85461 From 92746ccd4fd49e93c10029176ded1ee316f21e6f Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Mon, 16 Dec 2024 09:24:05 +0100 Subject: [PATCH 10/36] Added more variables to EMAC extra facets (#2617) --- esmvalcore/config/extra_facets/emac-mappings.yml | 13 +++++++++++++ 1 file changed, 13 insertions(+) diff --git a/esmvalcore/config/extra_facets/emac-mappings.yml b/esmvalcore/config/extra_facets/emac-mappings.yml index 4a2379605f..5e0accde06 100644 --- a/esmvalcore/config/extra_facets/emac-mappings.yml +++ b/esmvalcore/config/extra_facets/emac-mappings.yml @@ -35,6 +35,8 @@ EMAC: # 1D/2D dynamical/meteorological variables '*': + asr: # ESMValCore-derivation + channel: Amon awhea: # non-CMOR variable raw_name: [awhea_cav, awhea_ave, awhea] channel: Omon @@ -65,6 +67,13 @@ EMAC: raw_name: [rh_2m_cav, rh_2m_ave, rh_2m] raw_units: '1' channel: Amon + lwcre: # ESMValCore-derivation + channel: Amon + lwp: + raw_name: [xlvi_cav, xlvi_ave, xlvi] + channel: Amon + netcre: # ESMValCore-derivation + channel: Amon od550aer: raw_name: [aot_opt_TOT_550_total_cav, aot_opt_TOT_550_total_ave, aot_opt_TOT_550_total] raw_units: '1' @@ -123,6 +132,8 @@ EMAC: channel: Amon rtmt: # derived from flxttop_*, flxstop_* channel: Amon + rtnt: # ESMValCore-derivation + channel: Amon sfcWind: raw_name: [wind10_cav, wind10_ave, wind10] channel: Amon @@ -137,6 +148,8 @@ EMAC: sithick: raw_name: [siced_cav, siced_ave, siced] channel: Amon + swcre: # derived from CMIP variables rsut and rsutcs + channel: Amon tas: raw_name: [temp2_cav, temp2_ave, temp2] channel: Amon From b535cfd855879eab92becb7ed3536876eff69e47 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Tue, 17 Dec 2024 15:00:45 +0100 Subject: [PATCH 11/36] Expand Amon fix of FIO-ESM-2-0 (CMIP6) (#2619) --- esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py | 13 +++++++++ .../cmor/_fixes/cmip6/test_fio_esm_2_0.py | 29 +++++++++++++++++++ 2 files changed, 42 insertions(+) diff --git a/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py b/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py index 28b56082a4..f5ba037ca0 100644 --- a/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py +++ b/esmvalcore/cmor/_fixes/cmip6/fio_esm_2_0.py @@ -3,6 +3,7 @@ import logging import numpy as np +from iris.util import promote_aux_coord_to_dim_coord from ..common import OceanFixGrid from ..fix import Fix @@ -64,12 +65,24 @@ def fix_metadata(self, cubes): if np.any(latitude.bounds[1:, 0] != latitude.bounds[:-1, 1]): latitude.bounds = None latitude.guess_bounds() + if np.any(latitude.bounds[:, 0] == latitude.bounds[:, 1]): + latitude.bounds = None + latitude.guess_bounds() longitude = cube.coord("longitude") if longitude.has_bounds(): if np.any(longitude.bounds[1:, 0] != longitude.bounds[:-1, 1]): longitude.bounds = None longitude.guess_bounds() + if np.any(longitude.bounds[:, 0] == longitude.bounds[:, 1]): + longitude.bounds = None + longitude.guess_bounds() + + if not cube.coords("latitude", dim_coords=True): + 
promote_aux_coord_to_dim_coord(cube, latitude) + if not cube.coords("longitude", dim_coords=True): + promote_aux_coord_to_dim_coord(cube, longitude) + return cubes diff --git a/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py b/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py index 85e77aed88..1583871ca7 100644 --- a/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py +++ b/tests/integration/cmor/_fixes/cmip6/test_fio_esm_2_0.py @@ -4,6 +4,7 @@ import numpy as np import pytest from cf_units import Unit +from iris.coords import AuxCoord from esmvalcore.cmor._fixes.cmip6.fio_esm_2_0 import Amon, Omon, Tos from esmvalcore.cmor._fixes.common import OceanFixGrid @@ -170,3 +171,31 @@ def test_amon_fix_metadata(tas_cubes): assert all(time.bounds[1:, 0] == time.bounds[:-1, 1]) assert all(lat.bounds[1:, 0] == lat.bounds[:-1, 1]) assert all(lon.bounds[1:, 0] == lon.bounds[:-1, 1]) + + +@pytest.mark.parametrize("coord_name", ["latitude", "longitude"]) +def test_amon_fix_metadata_wrong_lat_end(tas_cubes, coord_name): + vardef = get_var_info("CMIP6", "Amon", "tas") + fix = Amon(vardef) + + cube = tas_cubes[0] + cube.remove_coord(coord_name) + new_coord = AuxCoord( + [1.0, 3.0], + bounds=[[0.0, 3.0], [3.0, 3.0]], + standard_name=coord_name, + units="degrees", + ) + if coord_name == "latitude": + cube.add_aux_coord(new_coord, 1) + else: + cube.add_aux_coord(new_coord, 2) + + out_cubes = fix.fix_metadata([cube]) + + assert len(out_cubes) == 1 + out_cube = out_cubes[0] + assert out_cube.coords(coord_name, dim_coords=True) + np.testing.assert_allclose( + out_cube.coord(coord_name).bounds, [[0.0, 2.0], [2.0, 4.0]] + ) From 58934e545af8945c637d4b6471b07cdf479db44f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 17 Dec 2024 14:02:12 +0000 Subject: [PATCH 12/36] [pre-commit.ci] pre-commit autoupdate (#2618) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 05934f3059..d286dc552a 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.2" + rev: "v0.8.3" hooks: - id: ruff args: [--fix] From 90d11a23fd85783dd6549a6670662bfe887bd7b9 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Wed, 18 Dec 2024 12:24:11 +0100 Subject: [PATCH 13/36] Added fixes for some 3D atmospheric variables of E3SM-1-1 (CMIP6) (#2620) --- esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py | 43 ++++++++++++ .../cmor/_fixes/cmip6/test_e3sm_1_1.py | 65 +++++++++++++++++++ 2 files changed, 108 insertions(+) create mode 100644 esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py create mode 100644 tests/integration/cmor/_fixes/cmip6/test_e3sm_1_1.py diff --git a/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py new file mode 100644 index 0000000000..6918743978 --- /dev/null +++ b/esmvalcore/cmor/_fixes/cmip6/e3sm_1_1.py @@ -0,0 +1,43 @@ +"""Fixes for E3SM-1-1 model.""" + +from iris.cube import Cube + +from esmvalcore.cmor.fix import Fix +from esmvalcore.preprocessor._shared import get_array_module + + +def _mask_greater(cube: Cube, value: float) -> Cube: + """Mask all data of cube which is greater than ``value``.""" + npx = 
get_array_module(cube.core_data()) + cube.data = npx.ma.masked_greater(cube.core_data(), value) + return cube + + +class Hus(Fix): + """Fixes for ``hus``.""" + + def fix_data(self, cube: Cube) -> Cube: + """Fix data. + + Fix values that are not properly masked. + + Parameters + ---------- + cube: iris.cube.Cube + Input cube. + + Returns + ------- + iris.cube.Cube + + """ + return _mask_greater(cube, 1000.0) + + +Ta = Hus + + +Ua = Hus + + +Va = Hus diff --git a/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_1.py b/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_1.py new file mode 100644 index 0000000000..1b3e947d3a --- /dev/null +++ b/tests/integration/cmor/_fixes/cmip6/test_e3sm_1_1.py @@ -0,0 +1,65 @@ +"""Tests for the fixes of E3SM-1-1.""" + +import numpy as np +import pytest +from iris.cube import Cube + +from esmvalcore.cmor._fixes.cmip6.e3sm_1_1 import Hus, Ta, Ua, Va +from esmvalcore.cmor._fixes.fix import GenericFix +from esmvalcore.cmor.fix import Fix +from tests import assert_array_equal + + +def test_get_hus_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("CMIP6", "E3SM-1-1", "Amon", "hus") + assert fix == [Hus(None), GenericFix(None)] + + +def test_get_ta_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("CMIP6", "E3SM-1-1", "Amon", "ta") + assert fix == [Ta(None), GenericFix(None)] + + +def test_get_ua_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("CMIP6", "E3SM-1-1", "Amon", "ua") + assert fix == [Ua(None), GenericFix(None)] + + +def test_get_va_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("CMIP6", "E3SM-1-1", "Amon", "va") + assert fix == [Va(None), GenericFix(None)] + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_hus_fix(lazy): + """Test fix for ``hus``.""" + cube = Cube([1.0, 1e35]) + if lazy: + cube.data = cube.lazy_data() + + fix = Hus(None) + + fixed_cube = fix.fix_data(cube) + + assert fixed_cube is cube + assert fixed_cube.has_lazy_data() is lazy + assert_array_equal(fixed_cube.data, np.ma.masked_invalid([1.0, np.nan])) + + +def test_ta_fix(): + """Test fix for ``ta``.""" + assert Ta == Hus + + +def test_ua_fix(): + """Test fix for ``ua``.""" + assert Ua == Hus + + +def test_va_fix(): + """Test fix for ``va``.""" + assert Va == Hus From 23400b17a1a469d72a10fab29d54e077a6ca2e45 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Fri, 20 Dec 2024 15:11:03 +0100 Subject: [PATCH 14/36] Make Dask configurable in our configuration (#2616) Co-authored-by: Bouwe Andela --- doc/conf.py | 2 +- doc/contributing.rst | 2 +- doc/quickstart/configure.rst | 426 +++++++++++------- doc/quickstart/output.rst | 2 +- esmvalcore/_main.py | 8 +- esmvalcore/_recipe/recipe.py | 3 + esmvalcore/config/_config_object.py | 26 +- esmvalcore/config/_config_validators.py | 1 + esmvalcore/config/_dask.py | 234 ++++++++-- .../config/configurations/defaults/dask.yml | 10 + ...options.yml => more_top_level_options.yml} | 1 - esmvalcore/experimental/recipe.py | 3 +- esmvalcore/local.py | 2 +- esmvalcore/preprocessor/_dask_progress.py | 4 +- esmvalcore/preprocessor/_volume.py | 2 +- tests/conftest.py | 11 + tests/integration/cmor/test_fix.py | 2 +- tests/parse_pymon.py | 2 +- tests/unit/config/test_config.py | 16 + tests/unit/config/test_config_object.py | 16 + tests/unit/config/test_dask.py | 360 ++++++++++++++- tests/unit/main/test_esmvaltool.py | 12 +- 22 files changed, 890 insertions(+), 255 deletions(-) create mode 100644 esmvalcore/config/configurations/defaults/dask.yml rename 
esmvalcore/config/configurations/defaults/{more_options.yml => more_top_level_options.yml} (76%) diff --git a/doc/conf.py b/doc/conf.py index 7e0b4b988d..3bf62cee4e 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- # # ESMValTool documentation build configuration file, created by -# sphinx-quickstart on Tue Jun 2 11:34:13 2015. +# sphinx-quickstart on Tue Jun 2 11:34:13 2015. # # This file is execfile()d with the current directory set to its # containing dir. diff --git a/doc/contributing.rst b/doc/contributing.rst index de7d319cc3..1381e45158 100644 --- a/doc/contributing.rst +++ b/doc/contributing.rst @@ -158,7 +158,7 @@ These include in particular: branch. If a strong objection is raised the backward-incompatible change should not be merged until the objection is resolved. - 🛠 Information required for the “*backward-incompatible changes*” - section in the PR that introduces the *backward-incompatible change* + section in the PR that introduces the *backward-incompatible change* available. .. _scientific_relevance: diff --git a/doc/quickstart/configure.rst b/doc/quickstart/configure.rst index 78ce5dcea2..5fee34db5b 100644 --- a/doc/quickstart/configure.rst +++ b/doc/quickstart/configure.rst @@ -140,79 +140,82 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's | Option | Description | Type | Default value | +===============================+========================================+=============================+========================================+ | ``auxiliary_data_dir`` | Directory where auxiliary data is | :obj:`str` | ``~/auxiliary_data`` | -| | stored [#f1]_ | | | +| | stored. [#f1]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``check_level`` | Sensitivity of the CMOR check | :obj:`str` | ``default`` | | | (``debug``, ``strict``, ``default`` | | | | | ``relaxed``, ``ignore``), see | | | -| | :ref:`cmor_check_strictness` | | | +| | :ref:`cmor_check_strictness`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ -| ``compress_netcdf`` | Use netCDF compression | :obj:`bool` | ``False`` | +| ``compress_netcdf`` | Use netCDF compression. | :obj:`bool` | ``False`` | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``config_developer_file`` | Path to custom | :obj:`str` | ``None`` (default file) | -| | :ref:`config-developer` | | | +| | :ref:`config-developer`. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| ``dask`` | :ref:`config-dask`. | :obj:`dict` | See :ref:`config-dask-defaults` | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``diagnostics`` | Only run the selected diagnostics from | :obj:`list` or :obj:`str` | ``None`` (all diagnostics) | -| | the recipe, see :ref:`running` | | | +| | the recipe, see :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``download_dir`` | Directory where downloaded data will | :obj:`str` | ``~/climate_data`` | -| | be stored [#f4]_ | | | +| | be stored. 
[#f4]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ -| ``drs`` | Directory structure for input data | :obj:`dict` | ``{CMIP3: ESGF, CMIP5: ESGF, CMIP6: | +| ``drs`` | Directory structure for input data. | :obj:`dict` | ``{CMIP3: ESGF, CMIP5: ESGF, CMIP6: | | | [#f2]_ | | ESGF, CORDEX: ESGF, obs4MIPs: ESGF}`` | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``exit_on_warning`` | Exit on warning (only used in NCL | :obj:`bool` | ``False`` | -| | diagnostic scripts) | | | +| | diagnostic scripts). | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``extra_facets_dir`` | Additional custom directory for | :obj:`list` of :obj:`str` | ``[]`` | -| | :ref:`extra_facets` | | | +| | :ref:`extra_facets`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``log_level`` | Log level of the console (``debug``, | :obj:`str` | ``info`` | -| | ``info``, ``warning``, ``error``) | | | +| | ``info``, ``warning``, ``error``). | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ -| ``logging`` | :ref:`config-logging` | :obj:`dict` | | +| ``logging`` | :ref:`config-logging`. | :obj:`dict` | See :ref:`config-logging` | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_datasets`` | Maximum number of datasets to use, see | :obj:`int` | ``None`` (all datasets from recipe) | -| | :ref:`running` | | | +| | :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_parallel_tasks`` | Maximum number of parallel processes, | :obj:`int` | ``None`` (number of available CPUs) | -| | see also :ref:`task_priority` | | | +| | see also :ref:`task_priority`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_years`` | Maximum number of years to use, see | :obj:`int` | ``None`` (all years from recipe) | -| | :ref:`running` | | | +| | :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``output_dir`` | Directory where all output will be | :obj:`str` | ``~/esmvaltool_output`` | -| | written, see :ref:`outputdata` | | | +| | written, see :ref:`outputdata`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ -| ``output_file_type`` | Plot file type | :obj:`str` | ``png`` | +| ``output_file_type`` | Plot file type. | :obj:`str` | ``png`` | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``profile_diagnostic`` | Use a profiling tool for the | :obj:`bool` | ``False`` | -| | diagnostic run [#f3]_ | | | +| | diagnostic run. 
[#f3]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``remove_preproc_dir`` | Remove the ``preproc`` directory if | :obj:`bool` | ``True`` | | | the run was successful, see also | | | -| | :ref:`preprocessed_datasets` | | | +| | :ref:`preprocessed_datasets`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``resume_from`` | Resume previous run(s) by using | :obj:`list` of :obj:`str` | ``[]`` | | | preprocessor output files from these | | | -| | output directories, see :ref:`running` | | | +| | output directories, see | | | +| | ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``rootpath`` | Rootpaths to the data from different | :obj:`dict` | ``{default: ~/climate_data}`` | -| | projects [#f2]_ | | | +| | projects. [#f2]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``run_diagnostic`` | Run diagnostic scripts, see | :obj:`bool` | ``True`` | -| | :ref:`running` | | | +| | :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``save_intermediary_cubes`` | Save intermediary cubes from the | :obj:`bool` | ``False`` | | | preprocessor, see also | | | -| | :ref:`preprocessed_datasets` | | | +| | :ref:`preprocessed_datasets`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``search_esgf`` | Automatic data download from ESGF | :obj:`str` | ``never`` | | | (``never``, ``when_missing``, | | | -| | ``always``) [#f4]_ | | | +| | ``always``). [#f4]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``skip_nonexistent`` | Skip non-existent datasets, see | :obj:`bool` | ``False`` | -| | :ref:`running` | | | +| | :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ .. [#f1] The ``auxiliary_data_dir`` setting is the path to place any required @@ -271,157 +274,144 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's will be downloaded; otherwise, local data will be used. -.. _config-logging: - -Logging configuration -===================== - -Configure what information is logged and how it is presented in the ``logging`` -section. - -.. note:: - - Not all logging configuration is available here yet, see :issue:`2596`. - -Configuration file example: - -.. code:: yaml - - logging: - log_progress_interval: 10s - -will log progress of Dask computations every 10 seconds instead of showing a -progress bar. - -Command line example: - -.. code:: bash - - esmvaltool run --logging='{"log_progress_interval": "1m"}' recipe_example.yml - - -will log progress of Dask computations every minute instead of showing a -progress bar. 
- -Available options: - -+-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ -| Option | Description | Type | Default value | -+===============================+========================================+=============================+========================================+ -| ``log_progress_interval`` | When running computations with Dask, | :obj:`str` or :obj:`float` | 0 | -| | log progress every | | | -| | ``log_progress_interval`` instead of | | | -| | showing a progress bar. The value can | | | -| | be specified in the format accepted by | | | -| | :func:`dask.utils.parse_timedelta`. A | | | -| | negative value disables any progress | | | -| | reporting. A progress bar is only | | | -| | shown if ``max_parallel_tasks: 1``. | | | -+-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ - .. _config-dask: Dask configuration ================== +Configure Dask in the ``dask`` section. + The :ref:`preprocessor functions ` and many of the :ref:`Python diagnostics in ESMValTool ` make use of the :ref:`Iris ` library to work with the data. In Iris, data can be either :ref:`real or lazy `. -Lazy data is represented by `dask arrays `_. +Lazy data is represented by `dask arrays `__. Dask arrays consist of many small -`numpy arrays `_ +`numpy arrays `__ (called chunks) and if possible, computations are run on those small arrays in parallel. In order to figure out what needs to be computed when, Dask makes use of a -'`scheduler `_'. -The default scheduler in Dask is rather basic, so it can only run on a single -computer and it may not always find the optimal task scheduling solution, -resulting in excessive memory use when using e.g. the +'`scheduler `__'. +The default (thread-based) scheduler in Dask is rather basic, so it can only +run on a single computer and it may not always find the optimal task scheduling +solution, resulting in excessive memory use when using e.g. the :func:`esmvalcore.preprocessor.multi_model_statistics` preprocessor function. Therefore it is recommended that you take a moment to configure the -`Dask distributed `_ scheduler. +`Dask distributed `__ scheduler. A Dask scheduler and the 'workers' running the actual computations, are collectively called a 'Dask cluster'. -Dask distributed configuration ------------------------------- +Dask profiles +------------- -In ESMValCore, the Dask Distributed cluster can configured by creating a file called -``~/.esmvaltool/dask.yml``, where ``~`` is short for your home directory. -In this file, under the ``client`` keyword, the arguments to -:obj:`distributed.Client` can be provided. -Under the ``cluster`` keyword, the type of cluster (e.g. -:obj:`distributed.LocalCluster`), as well as any arguments required to start -the cluster can be provided. -Extensive documentation on setting up Dask Clusters is available -`here `__. +Because some recipes require more computational resources than others, +ESMValCore provides the option to define "Dask profiles". +These profiles can be used to update the `Dask user configuration +`__ per recipe run. +The Dask profile can be selected in a YAML configuration file via -.. warning:: +.. code:: yaml + + dask: + use: + +or alternatively in the command line via + +.. code:: bash - The format of the ``~/.esmvaltool/dask.yml`` configuration file is not yet - fixed and may change in the next release of ESMValCore. 
+ esmvaltool run --dask='{"use": ""}' recipe_example.yml + +Available predefined Dask profiles: + +- ``local_threaded`` (selected by default): use `threaded scheduler + `__ without + any further options. +- ``local_distributed``: use `local distributed scheduler + `__ + without any further options. +- ``debug``: use `synchronous Dask scheduler + `__ for + debugging purposes. + Best used with ``max_parallel_tasks: 1``. + +Dask distributed scheduler configuration +---------------------------------------- + +Here, some examples are provided on how to use a custom Dask distributed +scheduler. +Extensive documentation on setting up Dask Clusters is available `here +`__. .. note:: If not all preprocessor functions support lazy data, computational - performance may be best with the :ref:`default scheduler `. + performance may be best with the :ref:`threaded scheduler + `. See :issue:`674` for progress on making all preprocessor functions lazy. -**Example configurations** - *Personal computer* -Create a Dask distributed cluster on the computer running ESMValCore using -all available resources: +Create a :class:`distributed.LocalCluster` on the computer running ESMValCore +using all available resources: .. code:: yaml - cluster: - type: distributed.LocalCluster + dask: + use: local_cluster # use "local_cluster" defined below + profiles: + local_cluster: + cluster: + type: distributed.LocalCluster -this should work well for most personal computers. +This should work well for most personal computers. .. note:: - Note that, if running this configuration on a shared node of an HPC cluster, - Dask will try and use as many resources it can find available, and this may - lead to overcrowding the node by a single user (you)! + If running this configuration on a shared node of an HPC cluster, Dask will + try and use as many resources it can find available, and this may lead to + overcrowding the node by a single user (you)! *Shared computer* -Create a Dask distributed cluster on the computer running ESMValCore, with -2 workers with 4 threads/4 GiB of memory each (8 GiB in total): +Create a :class:`distributed.LocalCluster` on the computer running ESMValCore, +with 2 workers with 2 threads/4 GiB of memory each (8 GiB in total): .. code:: yaml - cluster: - type: distributed.LocalCluster - n_workers: 2 - threads_per_worker: 4 - memory_limit: 4 GiB + dask: + use: local_cluster # use "local_cluster" defined below + profiles: + local_cluster: + cluster: + type: distributed.LocalCluster + n_workers: 2 + threads_per_worker: 2 + memory_limit: 4GiB this should work well for shared computers. *Computer cluster* -Create a Dask distributed cluster on the -`Levante `_ -supercomputer using the `Dask-Jobqueue `_ -package: +Create a Dask distributed cluster on the `Levante +`__ supercomputer +using the `Dask-Jobqueue `__ package: .. 
code:: yaml - cluster: - type: dask_jobqueue.SLURMCluster - queue: shared - account: bk1088 - cores: 8 - memory: 7680MiB - processes: 2 - interface: ib0 - local_directory: "/scratch/b/b381141/dask-tmp" - n_workers: 24 + dask: + use: slurm_cluster # use "slurm_cluster" defined below + profiles: + slurm_cluster: + cluster: + type: dask_jobqueue.SLURMCluster + queue: shared + account: + cores: 8 + memory: 7680MiB + processes: 2 + interface: ib0 + local_directory: "/scratch/b//dask-tmp" + n_workers: 24 This will start 24 workers with ``cores / processes = 4`` threads each, resulting in ``n_workers / processes = 12`` Slurm jobs, where each Slurm job @@ -429,34 +419,38 @@ will request 8 CPU cores and 7680 MiB of memory and start ``processes = 2`` workers. This example will use the fast infiniband network connection (called ``ib0`` on Levante) for communication between workers running on different nodes. -It is -`important to set the right location for temporary storage `__, -in this case the ``/scratch`` space is used. +It is `important to set the right location for temporary storage +`__, in this +case the ``/scratch`` space is used. It is also possible to use environmental variables to configure the temporary storage location, if you cluster provides these. A configuration like this should work well for larger computations where it is advantageous to use multiple nodes in a compute cluster. -See -`Deploying Dask Clusters on High Performance Computers `_ -for more information. +See `Deploying Dask Clusters on High Performance Computers +`__ for more information. *Externally managed Dask cluster* -Use an externally managed cluster, e.g. a cluster that you started using the -`Dask Jupyterlab extension `_: +To use an externally managed cluster, specify an ``scheduler_address`` for the +selected profile. +Such a cluster can e.g. be started using the `Dask Jupyterlab extension +`__: .. code:: yaml - client: - address: '127.0.0.1:8786' + dask: + use: external # Use the `external` profile defined below + profiles: + external: + scheduler_address: "tcp://127.0.0.1:43605" -See `here `_ +See `here `__ for an example of how to configure this on a remote system. For debugging purposes, it can be useful to start the cluster outside of ESMValCore because then -`Dask dashboard `_ remains +`Dask dashboard `__ remains available after ESMValCore has finished running. **Advice on choosing performant configurations** @@ -477,60 +471,148 @@ Therefore, it may be beneficial to use fewer threads per worker if the computation is very simple and the runtime is determined by the speed with which the data can be read from and/or written to disk. -.. _config-dask-default-scheduler: +.. _config-dask-threaded-scheduler: -Dask default scheduler configuration ------------------------------------- +Custom Dask threaded scheduler configuration +-------------------------------------------- -The Dask default scheduler can be a good choice for recipes using a small +The Dask threaded scheduler can be a good choice for recipes using a small amount of data or when running a recipe where not all preprocessor functions -are lazy yet (see :issue:`674` for the current status). To use the the Dask -default scheduler, comment out or remove all content of ``~/.esmvaltool/dask.yml``. +are lazy yet (see :issue:`674` for the current status). To avoid running out of memory, it is important to set the number of workers -(threads) used by Dask to run its computations to a reasonable number. 
By -default the number of CPU cores in the machine will be used, but this may be -too many on shared machines or laptops with a large number of CPU cores +(threads) used by Dask to run its computations to a reasonable number. +By default, the number of CPU cores in the machine will be used, but this may +be too many on shared machines or laptops with a large number of CPU cores compared to the amount of memory they have available. -Typically, Dask requires about 2GB of RAM per worker, but this may be more +Typically, Dask requires about 2 GiB of RAM per worker, but this may be more depending on the computation. -To set the number of workers used by the Dask default scheduler, create a file -called ``~/.config/dask/dask.yml`` and add the following -content: +To set the number of workers used by the Dask threaded scheduler, use the +following configuration: .. code:: yaml - scheduler: threads - num_workers: 4 # this example sets the number of workers to 4 + dask: + use: local_threaded # This can be omitted + profiles: + local_threaded: + num_workers: 4 + +.. _config-dask-defaults: +Default options +--------------- -Note that the file name is arbitrary, only the directory it is in matters, as -explained in more detail -`here `__. -See the `Dask documentation `__ -for more information. +By default, the following Dask configuration is used: -Configuring Dask for debugging ------------------------------- +.. code:: yaml -For debugging purposes, it can be useful to disable all parallelism, as this -will often result in more clear error messages. This can be achieved by -setting ``max_parallel_tasks: 1`` in the configuration, -commenting out or removing all content of ``~/.esmvaltool/dask.yml``, and -creating a file called ``~/.config/dask/dask.yml`` with the following -content: + dask: + use: local_threaded # use the `local_threaded` profile defined below + profiles: + local_threaded: + scheduler: threads + local_distributed: + cluster: + type: distributed.LocalCluster + debug: + scheduler: synchronous + +All available options +--------------------- + ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| Option | Description | Type | Default value | ++===============================+========================================+=============================+========================================+ +| ``profiles`` | Different Dask profiles that can be | :obj:`dict` | See :ref:`config-dask-defaults` | +| | selected via the ``use`` option. Each | | | +| | profile has a name (:obj:`dict` keys) | | | +| | and corresponding options (:obj:`dict` | | | +| | values). See | | | +| | :ref:`config-dask-profiles` for | | | +| | details. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| ``use`` | Dask profile that is used; must be | :obj:`str` | ``local_threaded`` | +| | defined in the option ``profiles``. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ + +.. 
_config-dask-profiles: + +Options for Dask profiles +------------------------- + ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| Option | Description | Type | Default value | ++===============================+========================================+=============================+========================================+ +| ``cluster`` | Keyword arguments to initialize a Dask | :obj:`dict` | If omitted, use externally managed | +| | distributed cluster. Needs the option | | cluster if ``scheduler_address`` is | +| | ``type``, which specifies the class of | | given or a :ref:`Dask threaded | +| | the cluster. The remaining options are | | scheduler | +| | passed as keyword arguments to | | ` | +| | initialize that class. Cannot be used | | otherwise. | +| | in combination with | | | +| | ``scheduler_address``. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| ``scheduler_address`` | Scheduler address of an externally | :obj:`str` | If omitted, use a Dask distributed | +| | managed cluster. Will be passed to | | cluster if ``cluster`` is given or a | +| | :class:`distributed.Client`. Cannot be | | :ref:`Dask threaded scheduler | +| | used in combination with ``cluster``. | | ` | +| | | | otherwise. | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| All other options | Passed as keyword arguments to | Any | No defaults. | +| | :func:`dask.config.set`. | | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ + + +.. _config-logging: + +Logging configuration +===================== + +Configure what information is logged and how it is presented in the ``logging`` +section. + +.. note:: + + Not all logging configuration is available here yet, see :issue:`2596`. + +Configuration file example: .. code:: yaml - scheduler: synchronous + logging: + log_progress_interval: 10s + +will log progress of Dask computations every 10 seconds instead of showing a +progress bar. + +Command line example: + +.. code:: bash + + esmvaltool run --logging='{"log_progress_interval": "1m"}' recipe_example.yml + + +will log progress of Dask computations every minute instead of showing a +progress bar. -Note that the file name is arbitrary, only the directory it is in matters, as -explained in more detail -`here `__. -See the `Dask documentation `__ -for more information. +Available options: + ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ +| Option | Description | Type | Default value | ++===============================+========================================+=============================+========================================+ +| ``log_progress_interval`` | When running computations with Dask, | :obj:`str` or :obj:`float` | 0 | +| | log progress every | | | +| | ``log_progress_interval`` instead of | | | +| | showing a progress bar. The value can | | | +| | be specified in the format accepted by | | | +| | :func:`dask.utils.parse_timedelta`. A | | | +| | negative value disables any progress | | | +| | reporting. A progress bar is only | | | +| | shown if ``max_parallel_tasks: 1``. 
| | | ++-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ .. _config-esgf: @@ -713,7 +795,7 @@ The resulting directory path would look something like this: CMIP/MOHC/HadGEM3-GC31-LL/historical/r1i1p1f3/Omon/tos/gn/latest -Please, bear in mind that ``input_dirs`` can also be a list for those cases in +Please, bear in mind that ``input_dirs`` can also be a list for those cases in which may be needed: .. code-block:: yaml diff --git a/doc/quickstart/output.rst b/doc/quickstart/output.rst index 2698456c6b..71d35a00e5 100644 --- a/doc/quickstart/output.rst +++ b/doc/quickstart/output.rst @@ -150,6 +150,6 @@ Here is an example metadata.yml file: As you can see, this is effectively a dictionary with several items including data paths, metadata and other information. -There are several tools available in python which are built to read and parse +There are several tools available in python which are built to read and parse these files. The tools are available in the shared directory in the diagnostics directory. diff --git a/esmvalcore/_main.py b/esmvalcore/_main.py index 42fbc16092..422908e464 100755 --- a/esmvalcore/_main.py +++ b/esmvalcore/_main.py @@ -84,7 +84,6 @@ def process_recipe(recipe_file: Path, session): import shutil from esmvalcore._recipe.recipe import read_recipe_file - from esmvalcore.config._dask import check_distributed_config if not recipe_file.is_file(): import errno @@ -121,8 +120,6 @@ def process_recipe(recipe_file: Path, session): "'max_parallel_tasks' in your configuration." ) - check_distributed_config() - if session["compress_netcdf"]: logger.warning( "You have enabled NetCDF compression. Accessing .nc files can be " @@ -399,6 +396,7 @@ def run(self, recipe, **kwargs): """ from .config import CFG + from .config._dask import warn_if_old_dask_config_exists from .exceptions import InvalidConfigParameter cli_config_dir = kwargs.pop("config_dir", None) @@ -439,7 +437,7 @@ def run(self, recipe, **kwargs): recipe = self._get_recipe(recipe) - CFG.update(kwargs) + CFG.nested_update(kwargs) CFG["resume_from"] = parse_resume(CFG["resume_from"], recipe) session = CFG.start_session(recipe.stem) @@ -455,6 +453,8 @@ def run(self, recipe, **kwargs): if cli_config_dir is not None: CFG.update_from_dirs([cli_config_dir]) + warn_if_old_dask_config_exists() + @staticmethod def _create_session_dir(session): """Create `session.session_dir` or an alternative if it exists.""" diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index 9c5aa74553..f42463f5a7 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -18,6 +18,7 @@ from esmvalcore._provenance import get_recipe_provenance from esmvalcore._task import DiagnosticTask, ResumeTask, TaskSet from esmvalcore.config._config import TASKSEP +from esmvalcore.config._dask import validate_dask_config from esmvalcore.config._diagnostics import TAGS from esmvalcore.dataset import Dataset from esmvalcore.exceptions import InputFilesNotFound, RecipeError @@ -786,6 +787,8 @@ class Recipe: def __init__(self, raw_recipe, session, recipe_file: Path): """Parse a recipe file into an object.""" + validate_dask_config(session["dask"]) + # Clear the global variable containing the set of files to download DOWNLOAD_FILES.clear() USED_DATASETS.clear() diff --git a/esmvalcore/config/_config_object.py b/esmvalcore/config/_config_object.py index 489e2301b2..29aed379ce 100644 --- a/esmvalcore/config/_config_object.py +++ 
b/esmvalcore/config/_config_object.py @@ -5,7 +5,7 @@ import os import sys import warnings -from collections.abc import Iterable +from collections.abc import Iterable, Mapping from datetime import datetime from pathlib import Path from typing import Optional @@ -464,9 +464,29 @@ def update_from_dirs(self, dirs: Iterable[str | Path]) -> None: """ new_config_dict = self._get_config_dict_from_dirs(dirs) - merged_config_dict = dask.config.merge(self, new_config_dict) - self.update(merged_config_dict) + self.nested_update(new_config_dict) + + def nested_update(self, new_options: Mapping) -> None: + """Nested update of configuration object with another mapping. + Merge the existing configuration object with a new mapping using + :func:`dask.config.merge` (new values are preferred over old values). + Nested objects are properly considered; see :func:`dask.config.update` + for details. + + Parameters + ---------- + new_options: + New configuration options. + + Raises + ------ + esmvalcore.exceptions.InvalidConfigParameter + Invalid configuration option given. + + """ + merged_config_dict = dask.config.merge(self, new_options) + self.update(merged_config_dict) self.check_missing() diff --git a/esmvalcore/config/_config_validators.py b/esmvalcore/config/_config_validators.py index b12ed08204..1f7edbc390 100644 --- a/esmvalcore/config/_config_validators.py +++ b/esmvalcore/config/_config_validators.py @@ -326,6 +326,7 @@ def validate_extra_facets_dir(value): "check_level": validate_check_level, "compress_netcdf": validate_bool, "config_developer_file": validate_config_developer, + "dask": validate_dict, "diagnostics": validate_diagnostics, "download_dir": validate_path, "drs": validate_drs, diff --git a/esmvalcore/config/_dask.py b/esmvalcore/config/_dask.py index 4de51e4aef..f9562d161b 100644 --- a/esmvalcore/config/_dask.py +++ b/esmvalcore/config/_dask.py @@ -3,76 +3,212 @@ import contextlib import importlib import logging +import os +import warnings +from collections.abc import Generator, Mapping +from copy import deepcopy from pathlib import Path +from typing import Any +import dask.config import yaml from distributed import Client +from distributed.deploy import Cluster + +from esmvalcore.config import CFG +from esmvalcore.exceptions import ( + ESMValCoreDeprecationWarning, + InvalidConfigParameter, +) logger = logging.getLogger(__name__) +# TODO: Remove in v2.14.0 CONFIG_FILE = Path.home() / ".esmvaltool" / "dask.yml" -def check_distributed_config(): - """Check the Dask distributed configuration.""" - if not CONFIG_FILE.exists(): - logger.warning( - "Using the Dask basic scheduler. This may lead to slow " - "computations and out-of-memory errors. " - "Note that the basic scheduler may still be the best choice for " - "preprocessor functions that are not lazy. " - "In that case, you can safely ignore this warning. " - "See https://docs.esmvaltool.org/projects/ESMValCore/en/latest/" - "quickstart/configure.html#dask-distributed-configuration for " - "more information. " +# TODO: Remove in v2.14.0 +def warn_if_old_dask_config_exists() -> None: + """Warn user if deprecated dask configuration file exists.""" + if CONFIG_FILE.exists() and not os.environ.get( + "ESMVALTOOL_USE_NEW_DASK_CONFIG" + ): + deprecation_msg = ( + "Usage of Dask configuration file ~/.esmvaltool/dask.yml " + "has been deprecated in ESMValCore version 2.12.0 and is " + "scheduled for removal in version 2.14.0. 
Please use the " + "configuration option `dask` instead (see " + "https://docs.esmvaltool.org/projects/ESMValCore/en/latest/" + "quickstart/configure.html#dask-configuration for details). " + "Ignoring all existing `dask` configuration options for this run. " + "To enable the new `dask` configuration options, delete or move " + "the file ~/.esmvaltool/dask.yml or set the environment variable " + "ESMVALTOOL_USE_NEW_DASK_CONFIG=1." + ) + warnings.warn( + deprecation_msg, ESMValCoreDeprecationWarning, stacklevel=2 + ) + + +def validate_dask_config(dask_config: Mapping) -> None: + """Validate dask configuration options.""" + for option in ("profiles", "use"): + if option not in dask_config: + raise InvalidConfigParameter( + f"Key '{option}' needs to be defined for 'dask' configuration" + ) + profiles = dask_config["profiles"] + use = dask_config["use"] + if not isinstance(profiles, Mapping): + raise InvalidConfigParameter( + f"Key 'dask.profiles' needs to be a mapping, got " + f"{type(profiles)}" + ) + for profile, profile_cfg in profiles.items(): + has_scheduler_address = any( + [ + "scheduler_address" in profile_cfg, + "scheduler-address" in profile_cfg, + ] ) + if "cluster" in profile_cfg and has_scheduler_address: + raise InvalidConfigParameter( + f"Key 'dask.profiles.{profile}' uses 'cluster' and " + f"'scheduler_address', can only have one of those" + ) + if "cluster" in profile_cfg: + cluster = profile_cfg["cluster"] + if not isinstance(cluster, Mapping): + raise InvalidConfigParameter( + f"Key 'dask.profiles.{profile}.cluster' needs to be a " + f"mapping, got {type(cluster)}" + ) + if "type" not in cluster: + raise InvalidConfigParameter( + f"Key 'dask.profiles.{profile}.cluster' does not have a " + f"'type'" + ) + if use not in profiles: + raise InvalidConfigParameter( + f"Key 'dask.use' needs to point to an element of 'dask.profiles'; " + f"got '{use}', expected one of {list(profiles.keys())}" + ) + + +# TODO: Remove in v2.14.0 +def _get_old_dask_config() -> dict: + """Get dask configuration dict from old dask configuration file.""" + dask_config: dict[str, Any] = { + "use": "local_threaded", + "profiles": {"local_threaded": {"scheduler": "threads"}}, + } + config = yaml.safe_load(CONFIG_FILE.read_text(encoding="utf-8")) + + # Use settings from file if this is not empty + if config is not None: + client_kwargs = config.get("client", {}) + cluster_kwargs = config.get("cluster", {}) + + # Externally managed cluster + if "address" in client_kwargs: + if cluster_kwargs: + logger.warning( + "Not using Dask 'cluster' settings from %s because a " + "cluster 'address' is already provided in 'client'.", + CONFIG_FILE, + ) + dask_config = { + "use": "external", + "profiles": { + "external": { + "scheduler_address": client_kwargs.pop("address"), + }, + }, + } + + # Dask distributed cluster + elif cluster_kwargs: + cluster_kwargs.setdefault("type", "distributed.LocalCluster") + dask_config = { + "use": "cluster_from_file", + "profiles": { + "cluster_from_file": { + "cluster": cluster_kwargs, + }, + }, + } + + dask_config["client"] = client_kwargs + + return dask_config + + +# TODO: Remove in v2.14.0; used deepcopy(CFG["dask"]) instead +def _get_dask_config() -> dict: + """Get Dask configuration dictionary.""" + if CONFIG_FILE.exists() and not os.environ.get( + "ESMVALTOOL_USE_NEW_DASK_CONFIG" + ): + dask_config = _get_old_dask_config() + else: + dask_config = deepcopy(CFG["dask"]) + return dask_config @contextlib.contextmanager -def get_distributed_client(): +def get_distributed_client() -> 
Generator[None | Client]: """Get a Dask distributed client.""" - dask_args = {} - if CONFIG_FILE.exists(): - config = yaml.safe_load(CONFIG_FILE.read_text(encoding="utf-8")) - if config is not None: - dask_args = config - - client_args = dask_args.get("client") or {} - cluster_args = dask_args.get("cluster") or {} - - # Start a cluster, if requested - if "address" in client_args: - # Use an externally managed cluster. + warn_if_old_dask_config_exists() + dask_config = _get_dask_config() + validate_dask_config(dask_config) + + # TODO: Remove in v2.14.0 + client_kwargs = dask_config.get("client", {}) + + # Set up cluster and client according to the selected profile + # Note: we already ensured earlier that the selected profile (via `use`) + # actually exists in `profiles`, so we don't have to check that again here + logger.debug("Using Dask profile '%s'", dask_config["use"]) + profile = dask_config["profiles"][dask_config["use"]] + cluster_kwargs = profile.pop("cluster", None) + + logger.debug("Using additional Dask settings %s", profile) + dask.config.set(profile) + + cluster: None | Cluster + client: None | Client + + # Threaded scheduler + if cluster_kwargs is None: cluster = None - if cluster_args: - logger.warning( - "Not using Dask 'cluster' settings from %s because a cluster " - "'address' is already provided in 'client'.", - CONFIG_FILE, - ) - elif cluster_args: - # Start cluster. - cluster_type = cluster_args.pop( - "type", - "distributed.LocalCluster", - ) + + # Distributed scheduler + else: + cluster_type = cluster_kwargs.pop("type") cluster_module_name, cluster_cls_name = cluster_type.rsplit(".", 1) cluster_module = importlib.import_module(cluster_module_name) cluster_cls = getattr(cluster_module, cluster_cls_name) - cluster = cluster_cls(**cluster_args) - client_args["address"] = cluster.scheduler_address - else: - # No cluster configured, use Dask basic scheduler, or a LocalCluster - # managed through Client. - cluster = None + cluster = cluster_cls(**cluster_kwargs) + dask.config.set({"scheduler_address": cluster.scheduler_address}) + logger.debug("Using Dask cluster %s", cluster) - # Start a client, if requested - if dask_args: - client = Client(**client_args) - logger.info("Dask dashboard: %s", client.dashboard_link) - else: - logger.info("Using the Dask basic scheduler.") + if dask.config.get("scheduler_address", None) is None: client = None + logger.info( + "Using Dask threaded scheduler. The distributed scheduler is " + "recommended, please read https://docs.esmvaltool.org/projects/" + "ESMValCore/en/latest/quickstart/" + "configure.html#dask-configuration how to use a distributed " + "scheduler." 
+ ) + else: + client = Client(**client_kwargs) + logger.info( + "Using Dask distributed scheduler (address: %s, dashboard link: " + "%s)", + dask.config.get("scheduler_address"), + client.dashboard_link, + ) try: yield client diff --git a/esmvalcore/config/configurations/defaults/dask.yml b/esmvalcore/config/configurations/defaults/dask.yml new file mode 100644 index 0000000000..33f5579532 --- /dev/null +++ b/esmvalcore/config/configurations/defaults/dask.yml @@ -0,0 +1,10 @@ +dask: + use: local_threaded # use the `local_threaded` profile defined below + profiles: + local_threaded: + scheduler: threads + local_distributed: + cluster: + type: distributed.LocalCluster + debug: + scheduler: synchronous diff --git a/esmvalcore/config/configurations/defaults/more_options.yml b/esmvalcore/config/configurations/defaults/more_top_level_options.yml similarity index 76% rename from esmvalcore/config/configurations/defaults/more_options.yml rename to esmvalcore/config/configurations/defaults/more_top_level_options.yml index c61a70a493..2e0dc8a49c 100644 --- a/esmvalcore/config/configurations/defaults/more_options.yml +++ b/esmvalcore/config/configurations/defaults/more_top_level_options.yml @@ -1,4 +1,3 @@ -# Other options not included in config-user.yml check_level: default diagnostics: null extra_facets_dir: [] diff --git a/esmvalcore/experimental/recipe.py b/esmvalcore/experimental/recipe.py index f199ef719f..ce54b7c792 100644 --- a/esmvalcore/experimental/recipe.py +++ b/esmvalcore/experimental/recipe.py @@ -10,7 +10,7 @@ import yaml from esmvalcore._recipe.recipe import Recipe as RecipeEngine -from esmvalcore.config import CFG, Session, _dask +from esmvalcore.config import CFG, Session from ._logging import log_to_dir from .recipe_info import RecipeInfo @@ -133,7 +133,6 @@ def run( session["diagnostics"] = task with log_to_dir(session.run_dir): - _dask.check_distributed_config() self._engine = self._load(session=session) self._engine.run() diff --git a/esmvalcore/local.py b/esmvalcore/local.py index 6483ee7a8f..41cf424476 100644 --- a/esmvalcore/local.py +++ b/esmvalcore/local.py @@ -183,7 +183,7 @@ def _dates_to_timerange(start_date, end_date): Note ---- This function ensures that dates in years format follow the pattern YYYY - (i.e., that they have at least 4 digits). Other formats, such as wildcards + (i.e., that they have at least 4 digits). Other formats, such as wildcards (``'*'``) and relative time ranges (e.g., ``'P6Y'``) are used unchanged. Parameters diff --git a/esmvalcore/preprocessor/_dask_progress.py b/esmvalcore/preprocessor/_dask_progress.py index bcfc3380d5..55594c40da 100644 --- a/esmvalcore/preprocessor/_dask_progress.py +++ b/esmvalcore/preprocessor/_dask_progress.py @@ -20,7 +20,7 @@ class RichProgressBar(dask.diagnostics.Callback): - """Progress bar using `rich` for the Dask default scheduler.""" + """Progress bar using `rich` for the Dask threaded scheduler.""" # Disable warnings about design choices that have been made in the base class. # pylint: disable=method-hidden,super-init-not-called,too-few-public-methods,unused-argument,useless-suppression @@ -109,7 +109,7 @@ def _draw_stop(self, **kwargs): class ProgressLogger(dask.diagnostics.ProgressBar): - """Progress logger for the Dask default scheduler.""" + """Progress logger for the Dask threaded scheduler.""" # Disable warnings about design choices that have been made in the base class. 
# pylint: disable=too-few-public-methods,unused-argument,useless-suppression diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index 8d56d7b51a..4c2f7574d0 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -457,7 +457,7 @@ def extract_transect( transect along 28 West. Also, `'extract_transect(cube, longitude=-28, latitude=[-50, 50])'` will - produce a transect along 28 West between 50 south and 50 North. + produce a transect along 28 West between 50 south and 50 North. This function is not yet implemented for irregular arrays - instead try the extract_trajectory function, but note that it is currently diff --git a/tests/conftest.py b/tests/conftest.py index d973b76695..d16442d302 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,6 +3,7 @@ import pytest +import esmvalcore.config._dask from esmvalcore.config import CFG, Config @@ -32,3 +33,13 @@ def ignore_old_config_user(tmp_path, monkeypatch): monkeypatch.setattr( Config, "_DEFAULT_USER_CONFIG_DIR", nonexistent_config_dir ) + + +# TODO: remove in v2.14.0 +@pytest.fixture(autouse=True) +def ignore_old_dask_config_file(tmp_path, monkeypatch): + """Ignore potentially existing old dask.yml file in all tests.""" + nonexistent_file = tmp_path / "nonexistent_file.yml" + monkeypatch.setattr( + esmvalcore.config._dask, "CONFIG_FILE", nonexistent_file + ) diff --git a/tests/integration/cmor/test_fix.py b/tests/integration/cmor/test_fix.py index 43b9419f64..4ce1b01e60 100644 --- a/tests/integration/cmor/test_fix.py +++ b/tests/integration/cmor/test_fix.py @@ -526,7 +526,7 @@ def test_fix_metadata_cfmon_ta_alternative(self): assert self.mock_warning.call_count == 9 def test_fix_metadata_cfmon_ta_no_alternative(self, mocker): - """Test ``fix_metadata`` with no alternative coordinate.""" + """Test ``fix_metadata`` with no alternative coordinate.""" short_name = "ta" project = "CMIP6" dataset = "__MODEL_WITH_NO_EXPLICIT_FIX__" diff --git a/tests/parse_pymon.py b/tests/parse_pymon.py index 3f05929703..b3b87be6e6 100644 --- a/tests/parse_pymon.py +++ b/tests/parse_pymon.py @@ -72,7 +72,7 @@ def _parse_pymon_database(): # Be sure to close the connection con.close() - # Throw a sys exit so test fails if we have >4GB tests + # Throw a sys exit so test fails if we have >4GB tests if big_mem_tests: print("Some tests exceed 4GB of RES memory, look into them!") print(big_mem_tests) diff --git a/tests/unit/config/test_config.py b/tests/unit/config/test_config.py index 44b8d6ee3e..04d10e2459 100644 --- a/tests/unit/config/test_config.py +++ b/tests/unit/config/test_config.py @@ -179,6 +179,22 @@ def test_load_default_config(cfg_default, monkeypatch): "check_level": CheckLevels.DEFAULT, "compress_netcdf": False, "config_developer_file": default_dev_file, + "dask": { + "profiles": { + "local_threaded": { + "scheduler": "threads", + }, + "local_distributed": { + "cluster": { + "type": "distributed.LocalCluster", + }, + }, + "debug": { + "scheduler": "synchronous", + }, + }, + "use": "local_threaded", + }, "diagnostics": None, "download_dir": Path.home() / "climate_data", "drs": { diff --git a/tests/unit/config/test_config_object.py b/tests/unit/config/test_config_object.py index 09fc93ed3a..8c9f4e1913 100644 --- a/tests/unit/config/test_config_object.py +++ b/tests/unit/config/test_config_object.py @@ -588,3 +588,19 @@ def test_update_from_dirs(dirs, output_file_type, rootpath, tmp_path): assert cfg["output_file_type"] == output_file_type assert cfg["rootpath"] == rootpath assert 
cfg["search_esgf"] == "when_missing" + + +def test_nested_update(): + """Test `Config.update_from_dirs`.""" + cfg = Config() + assert not cfg + + cfg["drs"] = {"X": "x", "Z": "z"} + cfg["search_esgf"] = "when_missing" + + cfg.nested_update({"drs": {"Y": "y", "X": "xx"}, "max_years": 1}) + + assert len(cfg) == 3 + assert cfg["drs"] == {"Y": "y", "X": "xx", "Z": "z"} + assert cfg["search_esgf"] == "when_missing" + assert cfg["max_years"] == 1 diff --git a/tests/unit/config/test_dask.py b/tests/unit/config/test_dask.py index 8efc305023..3baf9d5c2f 100644 --- a/tests/unit/config/test_dask.py +++ b/tests/unit/config/test_dask.py @@ -1,21 +1,165 @@ import pytest import yaml -from esmvalcore.config import _dask +from esmvalcore.config import CFG, _dask +from esmvalcore.exceptions import ( + ESMValCoreDeprecationWarning, + InvalidConfigParameter, +) -def test_get_no_distributed_client(mocker, tmp_path): - mocker.patch.object(_dask, "CONFIG_FILE", tmp_path / "nonexistent.yml") +@pytest.fixture +def mock_dask_config_set(mocker): + dask_config_dict = {} + mock_dask_set = mocker.patch("dask.config.set", autospec=True) + mock_dask_set.side_effect = dask_config_dict.update + mock_dask_get = mocker.patch("dask.config.get", autospec=True) + mock_dask_get.side_effect = dask_config_dict.get + return mock_dask_set + + +def test_get_no_distributed_client(): with _dask.get_distributed_client() as client: assert client is None +# TODO: Remove in v2.14.0 +def test_get_distributed_client_empty_dask_file(mocker, tmp_path): + # Create mock client configuration. + cfg_file = tmp_path / "dask.yml" + with cfg_file.open("w", encoding="utf-8") as file: + file.write("") + mocker.patch.object(_dask, "CONFIG_FILE", cfg_file) + + # Create mock distributed.Client + with pytest.warns(ESMValCoreDeprecationWarning): + with _dask.get_distributed_client() as client: + assert client is None + + +# TODO: Remove in v2.14.0 +@pytest.mark.parametrize("use_new_dask_config", ["", "1"]) +def test_force_new_dask_config( + monkeypatch, mocker, tmp_path, mock_dask_config_set, use_new_dask_config +): + # Old config -> threaded scheduler + cfg_file = tmp_path / "dask.yml" + with cfg_file.open("w", encoding="utf-8") as file: + file.write("") + mocker.patch.object(_dask, "CONFIG_FILE", cfg_file) + + # New config -> distributed scheduler + slurm_cluster = { + "type": "dask_jobqueue.SLURMCluster", + "queue": "interactive", + "cores": "8", + "memory": "16GiB", + } + monkeypatch.setitem( + CFG, + "dask", + { + "use": "slurm_cluster", + "profiles": {"slurm_cluster": {"cluster": slurm_cluster}}, + }, + ) + + # Create mock distributed.Client + mock_client = mocker.Mock() + mocker.patch.object( + _dask, "Client", create_autospec=True, return_value=mock_client + ) + + mock_module = mocker.Mock() + mock_cluster_cls = mocker.Mock() + mock_module.SLURMCluster = mock_cluster_cls + mocker.patch.object( + _dask.importlib, + "import_module", + create_autospec=True, + return_value=mock_module, + ) + + monkeypatch.setenv("ESMVALTOOL_USE_NEW_DASK_CONFIG", use_new_dask_config) + + with _dask.get_distributed_client() as client: + if use_new_dask_config: + assert client is not None + else: + assert client is None + + +# TODO: Remove in v2.14.0 +def test_get_old_dask_config(mocker, tmp_path): + # Create mock client configuration. 
+ cfg = {"cluster": {"n_workers": 2}} + cfg_file = tmp_path / "dask.yml" + with cfg_file.open("w", encoding="utf-8") as file: + yaml.safe_dump(cfg, file) + mocker.patch.object(_dask, "CONFIG_FILE", cfg_file) + + dask_cfg = _dask._get_old_dask_config() + + expected_cfg = { + "use": "cluster_from_file", + "profiles": { + "cluster_from_file": { + "cluster": { + "n_workers": 2, + "type": "distributed.LocalCluster", + }, + }, + }, + "client": {}, + } + assert dask_cfg == expected_cfg + + +def test_get_distributed_client_external( + monkeypatch, mocker, mock_dask_config_set +): + monkeypatch.setitem( + CFG, + "dask", + { + "use": "external", + "profiles": { + "external": { + "scheduler_address": "tcp://127.0.0.1:42021", + }, + }, + }, + ) + + # Create mock distributed.Client + mock_client = mocker.Mock() + mocker.patch.object( + _dask, "Client", create_autospec=True, return_value=mock_client + ) + + with _dask.get_distributed_client() as client: + assert client is mock_client + _dask.Client.assert_called_once_with() + mock_client.close.assert_called_once_with() + assert ( + mocker.call({"scheduler_address": "tcp://127.0.0.1:42021"}) + in mock_dask_config_set.mock_calls + ) + + +# TODO: Remove in v2.14.0 @pytest.mark.parametrize("warn_unused_args", [False, True]) -def test_get_distributed_client_external(mocker, tmp_path, warn_unused_args): +def test_get_distributed_client_external_old( + mocker, + tmp_path, + mock_dask_config_set, + warn_unused_args, +): # Create mock client configuration. cfg = { "client": { "address": "tcp://127.0.0.1:42021", + "other_client_options": 1, }, } if warn_unused_args: @@ -31,14 +175,78 @@ def test_get_distributed_client_external(mocker, tmp_path, warn_unused_args): _dask, "Client", create_autospec=True, return_value=mock_client ) + with pytest.warns(ESMValCoreDeprecationWarning): + with _dask.get_distributed_client() as client: + assert client is mock_client + _dask.Client.assert_called_once_with(other_client_options=1) + mock_client.close.assert_called_once_with() + assert ( + mocker.call({"scheduler_address": "tcp://127.0.0.1:42021"}) + in mock_dask_config_set.mock_calls + ) + + +@pytest.mark.parametrize("shutdown_timeout", [False, True]) +def test_get_distributed_client_slurm( + monkeypatch, mocker, mock_dask_config_set, shutdown_timeout +): + slurm_cluster = { + "type": "dask_jobqueue.SLURMCluster", + "queue": "interactive", + "cores": "8", + "memory": "16GiB", + } + monkeypatch.setitem( + CFG, + "dask", + { + "use": "slurm_cluster", + "profiles": { + "slurm_cluster": { + "cluster": slurm_cluster, + "num_workers": 42, + }, + }, + }, + ) + + # Create mock distributed.Client + mock_client = mocker.Mock() + mocker.patch.object( + _dask, "Client", create_autospec=True, return_value=mock_client + ) + + mock_module = mocker.Mock() + mock_cluster_cls = mocker.Mock() + mock_module.SLURMCluster = mock_cluster_cls + mocker.patch.object( + _dask.importlib, + "import_module", + create_autospec=True, + return_value=mock_module, + ) + mock_cluster = mock_cluster_cls.return_value + if shutdown_timeout: + mock_cluster.close.side_effect = TimeoutError with _dask.get_distributed_client() as client: assert client is mock_client - _dask.Client.assert_called_with(**cfg["client"]) - mock_client.close.assert_called() + mock_client.close.assert_called_once_with() + _dask.Client.assert_called_once_with() + args = {k: v for k, v in slurm_cluster.items() if k != "type"} + mock_cluster_cls.assert_called_once_with(**args) + mock_cluster.close.assert_called() + assert 
mocker.call({"num_workers": 42}) in mock_dask_config_set.mock_calls + assert ( + mocker.call({"scheduler_address": mock_cluster.scheduler_address}) + in mock_dask_config_set.mock_calls + ) +# TODO: Remove in v2.14.0 @pytest.mark.parametrize("shutdown_timeout", [False, True]) -def test_get_distributed_client_slurm(mocker, tmp_path, shutdown_timeout): +def test_get_distributed_client_slurm_old( + mocker, tmp_path, mock_dask_config_set, shutdown_timeout +): cfg = { "cluster": { "type": "dask_jobqueue.SLURMCluster", @@ -70,10 +278,138 @@ def test_get_distributed_client_slurm(mocker, tmp_path, shutdown_timeout): mock_cluster = mock_cluster_cls.return_value if shutdown_timeout: mock_cluster.close.side_effect = TimeoutError - with _dask.get_distributed_client() as client: - assert client is mock_client - mock_client.close.assert_called() - _dask.Client.assert_called_with(address=mock_cluster.scheduler_address) + with pytest.warns(ESMValCoreDeprecationWarning): + with _dask.get_distributed_client() as client: + assert client is mock_client + mock_client.close.assert_called_once_with() + _dask.Client.assert_called_once_with() args = {k: v for k, v in cfg["cluster"].items() if k != "type"} - mock_cluster_cls.assert_called_with(**args) + mock_cluster_cls.assert_called_once_with(**args) mock_cluster.close.assert_called() + assert ( + mocker.call({"scheduler_address": mock_cluster.scheduler_address}) + in mock_dask_config_set.mock_calls + ) + + +def test_custom_default_scheduler(monkeypatch, mock_dask_config_set): + default_scheduler = {"num_workers": 42, "scheduler": "processes"} + monkeypatch.setitem( + CFG, + "dask", + { + "use": "process_scheduler", + "profiles": {"process_scheduler": default_scheduler}, + }, + ) + + with _dask.get_distributed_client() as client: + assert client is None + + mock_dask_config_set.assert_called_once_with( + {"num_workers": 42, "scheduler": "processes"} + ) + + +def test_invalid_dask_config_no_profiles(monkeypatch): + monkeypatch.setitem(CFG, "dask", {}) + + msg = "Key 'profiles' needs to be defined for 'dask' configuration" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +def test_invalid_dask_config_no_use(monkeypatch): + monkeypatch.setitem(CFG, "dask", {"profiles": {}}) + + msg = "Key 'use' needs to be defined for 'dask' configuration" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +def test_invalid_dask_config_invalid_profiles(monkeypatch): + monkeypatch.setitem(CFG, "dask", {"use": "test", "profiles": 1}) + + msg = "Key 'dask.profiles' needs to be a mapping, got" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +@pytest.mark.parametrize( + "address_name", ["scheduler_address", "scheduler-address"] +) +def test_invalid_dask_config_profile_with_cluster_and_address( + monkeypatch, address_name +): + monkeypatch.setitem( + CFG, + "dask", + { + "use": "test", + "profiles": { + "test": {"cluster": {}, address_name: "8786"}, + }, + }, + ) + + msg = "Key 'dask.profiles.test' uses 'cluster' and 'scheduler_address'" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +def test_invalid_dask_config_profile_invalid_cluster(monkeypatch): + monkeypatch.setitem( + CFG, + "dask", + { + "use": "test", + "profiles": { + "test": {"cluster": 1}, + }, + }, + ) + + msg = "Key 'dask.profiles.test.cluster' needs to be a mapping" + with 
pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +def test_invalid_dask_config_cluster_no_type(monkeypatch): + monkeypatch.setitem( + CFG, + "dask", + { + "use": "test", + "profiles": { + "test": {"cluster": {}}, + }, + }, + ) + + msg = "Key 'dask.profiles.test.cluster' does not have a 'type'" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass + + +def test_invalid_dask_config_invalid_use(monkeypatch): + monkeypatch.setitem( + CFG, + "dask", + { + "use": "not_in_profiles", + "profiles": { + "test": {}, + }, + }, + ) + + msg = "Key 'dask.use' needs to point to an element of 'dask.profiles'" + with pytest.raises(InvalidConfigParameter, match=msg): + with _dask.get_distributed_client(): + pass diff --git a/tests/unit/main/test_esmvaltool.py b/tests/unit/main/test_esmvaltool.py index 1e03bbe5b1..03985363d7 100644 --- a/tests/unit/main/test_esmvaltool.py +++ b/tests/unit/main/test_esmvaltool.py @@ -21,17 +21,23 @@ @pytest.fixture def cfg(mocker, tmp_path): """Mock `esmvalcore.config.CFG`.""" - cfg_dict = {"resume_from": []} + cfg_dict = { + "dask": { + "profiles": {"local_threaded": {"scheduler": "threads"}}, + "use": "local_threaded", + }, + "resume_from": [], + } cfg = mocker.MagicMock() cfg.__getitem__.side_effect = cfg_dict.__getitem__ cfg.__setitem__.side_effect = cfg_dict.__setitem__ - cfg.update.side_effect = cfg_dict.update + cfg.nested_update.side_effect = cfg_dict.update session = mocker.MagicMock() session.__getitem__.side_effect = cfg.__getitem__ session.__setitem__.side_effect = cfg.__setitem__ - session.update.side_effect = cfg.update + session.nested_update.side_effect = cfg.nested_update output_dir = tmp_path / "esmvaltool_output" session.session_dir = output_dir / "recipe_test" From 90e12d470897e762a7ad4f945528f2a7f25376ea Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 20 Dec 2024 15:28:35 +0100 Subject: [PATCH 15/36] Set iris.FUTURE flags in one place (#2622) --- esmvalcore/config/_config.py | 12 ++++++++++++ esmvalcore/preprocessor/_io.py | 2 -- 2 files changed, 12 insertions(+), 2 deletions(-) diff --git a/esmvalcore/config/_config.py b/esmvalcore/config/_config.py index 6df9e9bf52..ee4d8ac8e8 100644 --- a/esmvalcore/config/_config.py +++ b/esmvalcore/config/_config.py @@ -10,6 +10,7 @@ from importlib.resources import files as importlib_files from pathlib import Path +import iris import yaml from esmvalcore.cmor.table import CMOR_TABLES, read_cmor_tables @@ -23,6 +24,17 @@ CFG = {} +# Set iris.FUTURE flags +for attr, value in { + "save_split_attrs": True, + "date_microseconds": True, +}.items(): + try: + setattr(iris.FUTURE, attr, value) + except AttributeError: + pass + + def _deep_update(dictionary, update): for key, value in update.items(): if isinstance(value, collections.abc.Mapping): diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index 0851e1d37e..0c554c3d9a 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -41,8 +41,6 @@ } GRIB_FORMATS = (".grib2", ".grib", ".grb2", ".grb", ".gb2", ".gb") -iris.FUTURE.save_split_attrs = True - def _get_attr_from_field_coord(ncfield, coord_name, attr): if coord_name is not None: From 85526a895c6b067f36d5486f4a359eee5c67adc7 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Fri, 20 Dec 2024 18:21:30 +0100 Subject: [PATCH 16/36] Always ignore user's configuration when running Dask tests (#2624) --- tests/conftest.py | 
12 ++++++++++-- tests/unit/config/test_dask.py | 25 +++++++++++++++++++------ 2 files changed, 29 insertions(+), 8 deletions(-) diff --git a/tests/conftest.py b/tests/conftest.py index d16442d302..05a33e703e 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -5,16 +5,24 @@ import esmvalcore.config._dask from esmvalcore.config import CFG, Config +from esmvalcore.config._config_object import _get_global_config @pytest.fixture -def cfg_default(mocker): +def cfg_default(): """Create a configuration object with default values.""" - cfg = deepcopy(CFG) + cfg = _get_global_config() cfg.load_from_dirs([]) return cfg +@pytest.fixture() +def ignore_existing_user_config(monkeypatch, cfg_default): + """Ignore user's configuration when running tests.""" + for key, value in cfg_default.items(): + monkeypatch.setitem(CFG, key, deepcopy(value)) + + @pytest.fixture def session(tmp_path: Path, cfg_default, monkeypatch): """Session object with default settings.""" diff --git a/tests/unit/config/test_dask.py b/tests/unit/config/test_dask.py index 3baf9d5c2f..41cefbfbad 100644 --- a/tests/unit/config/test_dask.py +++ b/tests/unit/config/test_dask.py @@ -18,7 +18,7 @@ def mock_dask_config_set(mocker): return mock_dask_set -def test_get_no_distributed_client(): +def test_get_no_distributed_client(ignore_existing_user_config): with _dask.get_distributed_client() as client: assert client is None @@ -40,7 +40,12 @@ def test_get_distributed_client_empty_dask_file(mocker, tmp_path): # TODO: Remove in v2.14.0 @pytest.mark.parametrize("use_new_dask_config", ["", "1"]) def test_force_new_dask_config( - monkeypatch, mocker, tmp_path, mock_dask_config_set, use_new_dask_config + monkeypatch, + mocker, + tmp_path, + mock_dask_config_set, + ignore_existing_user_config, + use_new_dask_config, ): # Old config -> threaded scheduler cfg_file = tmp_path / "dask.yml" @@ -116,7 +121,7 @@ def test_get_old_dask_config(mocker, tmp_path): def test_get_distributed_client_external( - monkeypatch, mocker, mock_dask_config_set + monkeypatch, mocker, mock_dask_config_set, ignore_existing_user_config ): monkeypatch.setitem( CFG, @@ -188,7 +193,11 @@ def test_get_distributed_client_external_old( @pytest.mark.parametrize("shutdown_timeout", [False, True]) def test_get_distributed_client_slurm( - monkeypatch, mocker, mock_dask_config_set, shutdown_timeout + monkeypatch, + mocker, + mock_dask_config_set, + ignore_existing_user_config, + shutdown_timeout, ): slurm_cluster = { "type": "dask_jobqueue.SLURMCluster", @@ -292,7 +301,11 @@ def test_get_distributed_client_slurm_old( ) -def test_custom_default_scheduler(monkeypatch, mock_dask_config_set): +def test_custom_default_scheduler( + monkeypatch, + mock_dask_config_set, + ignore_existing_user_config, +): default_scheduler = {"num_workers": 42, "scheduler": "processes"} monkeypatch.setitem( CFG, @@ -306,7 +319,7 @@ def test_custom_default_scheduler(monkeypatch, mock_dask_config_set): with _dask.get_distributed_client() as client: assert client is None - mock_dask_config_set.assert_called_once_with( + mock_dask_config_set.assert_called_with( {"num_workers": 42, "scheduler": "processes"} ) From 73bae2c2ff2a2862f9b68d5845ddf7eed96f4aa7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 12:14:47 +0000 Subject: [PATCH 17/36] [pre-commit.ci] pre-commit autoupdate (#2629) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 
insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d286dc552a..b20c3be813 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,13 +33,13 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.3" + rev: "v0.8.4" hooks: - id: ruff args: [--fix] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.13.0' + rev: 'v1.14.0' hooks: - id: mypy additional_dependencies: From 0271935c8bfddff1f8f86639ddee486d16e641fe Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Mon, 6 Jan 2025 12:15:39 +0000 Subject: [PATCH 18/36] [Condalock] Update Linux condalock file (#2627) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 219 ++++++++++++++++++++++---------------------- 1 file changed, 108 insertions(+), 111 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 7dce7db4ec..1d97290b15 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -3,12 +3,12 @@ # input_hash: c08346d7b64ddb822a23f6a72356a10c21221039987cdf35737057f500071d7f @EXPLICIT https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2#d7c89558ba9fa0495403155b64376d81 -https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.8.30-hbcca054_0.conda#c27d1c142233b5bc9ca570c6e2e0c244 +https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2024.12.14-hbcca054_0.conda#720523eb0d6a9b0f6120c16b2aa4e7de https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2#0c96522c6bdaed4b1566d11387caaf45 https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 -https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6-ha770c72_0.conda#38ee82616a780cf22ec6355e386e2563 +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6.1-ha770c72_0.conda#e94dd7479ba12963364d855fb23cce4f https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.13-5_cp313.conda#381bbd2a92c863f640a55b6ff3c35161 https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 @@ -19,27 +19,28 @@ https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2# https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2#fee5683a3f04bd15cbd8318b096a27ab https://conda.anaconda.org/conda-forge/linux-64/libegl-1.7.0-ha4b6fd6_2.conda#c151d5eb730e9b7480e6d48c0fc44048 https://conda.anaconda.org/conda-forge/linux-64/libgcc-14.2.0-h77fa898_1.conda#3cb76c3f10d3bc7f1105b2fc9db984df -https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.5-hb9d3cd8_0.conda#d8288fbad9d809b9ca139b8beb6553ef -https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.3-hb9d3cd8_1.conda#ee228789a85f961d14567252a03e725f +https://conda.anaconda.org/conda-forge/linux-64/aws-c-common-0.10.6-hb9d3cd8_0.conda#d7d4680337a14001b0e043e96529409b +https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.34.4-hb9d3cd8_0.conda#e2775acf57efd5af15b8e3d1d74d72d3 
https://conda.anaconda.org/conda-forge/linux-64/json-c-0.18-h6688a6e_0.conda#38f5dbc9ac808e31c00650f7be1db93f https://conda.anaconda.org/conda-forge/linux-64/libbrotlicommon-1.1.0-hb9d3cd8_2.conda#41b599ed2b02abcfdd84302bff174b23 -https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.22-hb9d3cd8_0.conda#b422943d5d772b7cc858b36ad2a92db5 +https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.23-h4ddbbb0_0.conda#8dfae1d2e74767e9ce36d5fa0d8605db https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.6.4-h5888daf_0.conda#db833e03127376d461e1e13e76f09b6c https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.conda#e39480b9ca41323497b05492a63bc35b https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.9.0-hb9d3cd8_1.conda#1e936bd23d737aac62a18e9a1e7f8b18 +https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e -https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hb9d3cd8_1.conda#19608a9656912805b2b9a2f6bd257b04 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hb9d3cd8_1.conda#77cbc488235ebbaab2b6e912d3934bae +https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h0f28dba_2.conda#94faebd978282d2a4a8514141daec756 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h9cc6398_4.conda#076717670d5406e90070120314ff9b4f -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.1-h9cc6398_3.conda#10bdb7fc3763760dcea1cd908ece6b2b -https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h9cc6398_3.conda#d6dd8b87b95195d8d26893611d94ba3b +https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h1a47875_3.conda#55a8561fdbbbd34f50f57d9be12ed084 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h4e1184b_5.conda#3f4c1197462a6df2be6dc8241828fe93 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.1-h4e1184b_4.conda#a5126a90e74ac739b00564a4c7ddcc36 +https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h4e1184b_4.conda#74e8c3e4df4ceae34aa2959df4b28101 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 https://conda.anaconda.org/conda-forge/linux-64/fribidi-1.0.10-h36c2ea0_0.tar.bz2#ac7bc6a654f8f41b352b38f4051135f8 @@ -47,7 +48,7 @@ 
https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda#d411fc29e338efb48c5fd4576d71d881 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_h5888daf_1.conda#e1f604644fe8d78e22660e2fec6756bc +https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -56,7 +57,6 @@ https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2# https://conda.anaconda.org/conda-forge/linux-64/libgfortran-14.2.0-h69a702a_1.conda#f1fd30127802683586f768875127a987 https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-hd590300_2.conda#d66573916ffcf376178462f1b61c941e https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.conda#ea25936bb4080d843790b586850f82b8 -https://conda.anaconda.org/conda-forge/linux-64/liblzma-devel-5.6.3-hb9d3cd8_1.conda#cc4687e1814ed459f3bd6d8e05251ab2 https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-h4bc722e_0.conda#aeb98fdeb2e8f25d43ef71fbacbeec80 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 @@ -67,23 +67,21 @@ https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda#40b61aab5c7ba9ff276c41cfffe6b80b -https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.4.0-hd590300_0.conda#b26e8aa824079e1be0294e7152ca4559 https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 +https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.9-h0fd0ee4_0.conda#f472432f3753c5ca763d2497e2ea30bf +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.10-hb5b8611_0.conda#999f3673f2a011f59287f2969e3749e4 https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf 
https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f08fb5c89edfc4aadee1c81d4cfb1fa1 -https://conda.anaconda.org/conda-forge/linux-64/xz-gpl-tools-5.6.3-hbcc6ac9_1.conda#f529917bab7862aaad6867bf2ea47a99 -https://conda.anaconda.org/conda-forge/linux-64/xz-tools-5.6.3-hb9d3cd8_1.conda#de3f31a6eed01bc2b8c7dcad07ad9034 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-hfd54f12_3.conda#c0b9f79cd2f5797b913415511bfa2cd6 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-h831e299_5.conda#80dd9f0ddf935290d1dc00ec75ff3023 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca @@ -98,58 +96,56 @@ https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 -https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.2-h5b01275_0.conda#ab0bff36363bec94720275a681af8b83 -https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_1.conda#2124de47357b7a516c0a3efd8f88c143 +https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda#d8703f1ffe5a06356f06467f1d0b9464 +https://conda.anaconda.org/conda-forge/linux-64/libre2-11-2024.07.02-hbbce691_2.conda#b2fede24428726dd867611664fb372e8 https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.conda#e16e9b1333385c502bf915195f421934 https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda#dcb95c0a98ba9ff737f7ae482aef7833 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba -https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda#318b08df404f9c9be5712aaa5a6f0bb0 https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b 
-https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-he73a12e_1.conda#05a8ea5f446de33006171a7afe6ae857 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 -https://conda.anaconda.org/conda-forge/linux-64/xz-5.6.3-hbcc6ac9_1.conda#62aae173382a8aae284726353c6a6a24 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-hf811eff_10.conda#5046c78dd139a333b6acd7376a10e0a7 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hce7dc5d_3.conda#c0f54e8975ad42d2864f4b1918356b3b -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-hef167b5_0.conda#54fe76ab3d0189acaef95156874db7f9 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h7959bf6_11.conda#9b3fb60fe57925a92f399bc3fc42eccf +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hefd7a92_4.conda#5ce4df662d32d3123ea8da15571b6f51 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-25_linux64_openblas.conda#8ea26d42ca88ec5258802715fe1ee10b +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hc4654cb_2.conda#be54fb40ea32e8fe9dbaa94d4528b57e +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h401b404_0.conda#4474532a312b2245c5c77f1176989b46 -https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-he039a57_0.conda#052499acd6d6b79952197a13b23e2600 +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda#eec77634ccdb2ba6c231290c399b1dae +https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda#4f6f9f3f80354ad185e276c120eac3f0 https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_102_cp313.conda#6e7535f1d1faf524e9210d2689b3149b -https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h77b4e00_1.conda#01093ff37c1b5e6bf9f17c0116747d11 +https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h9925aae_2.conda#e84ddf12bde691e8ec894b00ea829ddf https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 
https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hb9d3cd8_2.conda#eef3132295d92678c17ffc8b114b8371 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a -https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.6-py313h78bf25f_0.conda#3347a6c8504883a216d914e476b46d4e +https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.8-py313h78bf25f_0.conda#cd3ab05349bc9be61760883382598624 https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda#8f587de4bcf981e26228f268df374a9b https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 -https://conda.anaconda.org/conda-forge/noarch/attrs-24.2.0-pyh71513ae_1.conda#2018839db45c79654b57a924fcdd27d0 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-h8c8080f_14.conda#a9284141081982473ebf41b92566bbcb -https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-ha3c2ba9_11.conda#93c5070d6f9b4cb2ed9de52ce247cebb +https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-hb921021_15.conda#c79d50f64cffa5ad51ecc1a81057962f +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h11f4f37_12.conda#96c3e0221fa2da97619ee82faa341a73 https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py313h46c70d0_2.conda#f6bb3742e17a4af0dc3c8ca942683ef6 https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d -https://conda.anaconda.org/conda-forge/noarch/certifi-2024.8.30-pyhd8ed1ab_0.conda#12f7d00853807b0531775e9be891cb11 -https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_0.tar.bz2#ebb5f5f7dc4f1a3780ef7ea7738db08c +https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 +https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 -https://conda.anaconda.org/conda-forge/noarch/click-8.1.7-unix_pyh707e725_1.conda#cb8e52f28f5e592598190c562e7b5bf1 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_1.conda#c88ca2bb7099167912e3b26463fff079 +https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py313hc66aa0d_3.conda#1778443eb12b2da98428fa69152a2a2e @@ -164,13 +160,13 @@ 
https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda#ef8b5fca76806159fc25b4f48d8737eb https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db -https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h743c826_0.conda#12e6988845706b2cfbc3bc35c9a61a95 -https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.10.0-pyhd8ed1ab_1.conda#906fe13095e734cb413b57a49116cdc8 +https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f +https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f -https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_0.tar.bz2#6b1f32359fc5d2ab7b491d0029bfffeb +https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_1.conda#8b9328ab4aafb8fde493ab32c5eba731 https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 -https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyhd81877a_7.conda#74fbff91ca7c1b9a36b15903f2242f86 +https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyh707e725_8.conda#7fe569c10905402ed47024fc481bb371 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a https://conda.anaconda.org/conda-forge/noarch/idna-3.10-pyhd8ed1ab_1.conda#39a4f67be3286c86d696df570b1201b7 https://conda.anaconda.org/conda-forge/noarch/imagesize-1.4.1-pyhd8ed1ab_0.tar.bz2#7de5386c8fea29e76b303f37dde4c352 @@ -180,17 +176,17 @@ https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.co https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py313h33d0bda_0.conda#9862d13a5e466273d5a4738cffcb8d6c https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac -https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-hadbb8c3_0.conda#4a099677417658748239616b6ca96bb6 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-25_linux64_openblas.conda#5dbd1b0fc0d01ec5e0e1fbe667281a11 -https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.10.1-hbbe4b11_0.conda#6e801c50a40301f6978c53976917b277 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda#a28808eae584c7f519943719b2a2b386 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a -https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-hc2c308b_0.conda#4606a4647bfe857e3cfe21ca12ac3afb 
-https://conda.anaconda.org/conda-forge/linux-64/libheif-1.18.2-gpl_hffcb242_100.conda#76ac2c07b62d45c192940f010eea11fa -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-25_linux64_openblas.conda#4dc03a53fc69371a6158d0ed37214cd3 +https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-h25350d4_1.conda#0c6497a760b99a926c7c12b74951a39c +https://conda.anaconda.org/conda-forge/linux-64/libheif-1.19.5-gpl_hc21c24c_100.conda#3b57852666eaacc13414ac811dde3f8a +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 -https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py313h010b13d_1.conda#08a6b03e282748f599c55bbbdbd722fa +https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py313h8756d67_2.conda#135da13cb96aba211acd7feeca301154 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_1.conda#21b62c55924f01b6eef6827167b46acb https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda#827064ddfe0de2917fb29f1da4f8f533 https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 @@ -203,16 +199,16 @@ https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda# https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda#5c092057b6badd30f75b06244ecd01c9 https://conda.anaconda.org/conda-forge/noarch/pickleshare-0.7.5-pyhd8ed1ab_1004.conda#11a9d1d09a3615fc07c3faf79bc0b943 -https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_0.conda#ca3afe2d7b893a8c8cdf489d30a2b1a3 +https://conda.anaconda.org/conda-forge/noarch/pip-24.3.1-pyh145f28c_2.conda#76601b0ccfe1fe13a21a5f8813cb38de https://conda.anaconda.org/conda-forge/noarch/pkgutil-resolve-name-1.3.10-pyhd8ed1ab_2.conda#5a5870a74432aa332f7d32180633ad05 https://conda.anaconda.org/conda-forge/noarch/platformdirs-4.3.6-pyhd8ed1ab_1.conda#577852c7e53901ddccc7e6a9959ddebe https://conda.anaconda.org/conda-forge/noarch/pluggy-1.5.0-pyhd8ed1ab_1.conda#e9dcbce5f45f9ee500e728ae58b605b6 -https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.0-py313h536fd9c_0.conda#b50a00ebd2fda55306b8a095363ce27f +https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py313h536fd9c_0.conda#79969031e331ecd8036a7c1992b64f9b https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda#7d9daffbb8d8e0af0f769dbbcd173a54 https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda#3bfdfb8dbcdc4af1ae3f9a8eb3948f04 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a -https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.0-pyhd8ed1ab_2.conda#4c05a2bcf87bb495512374143b57cf28 +https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 
https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac https://conda.anaconda.org/conda-forge/noarch/python-fastjsonschema-2.21.1-pyhd8ed1ab_0.conda#38e34d2d1d9dca4fb2b9a0a04f604e2c @@ -229,7 +225,7 @@ https://conda.anaconda.org/conda-forge/noarch/soupsieve-2.5-pyhd8ed1ab_1.conda#3 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-jsmath-1.0.1-pyhd8ed1ab_1.conda#fa839b5ff59e192f411ccc7dae6588bb https://conda.anaconda.org/conda-forge/noarch/tblib-3.0.0-pyhd8ed1ab_1.conda#60ce69f73f3e75b21f1c27b1b471320c https://conda.anaconda.org/conda-forge/noarch/termcolor-2.5.0-pyhd8ed1ab_1.conda#1ce02d60767af357e864ce61895268d2 -https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_0.tar.bz2#f832c45a477c78bebd107098db465095 +https://conda.anaconda.org/conda-forge/noarch/toml-0.10.2-pyhd8ed1ab_1.conda#b0dd904de08b7db706167240bf37b164 https://conda.anaconda.org/conda-forge/noarch/tomli-2.2.1-pyhd8ed1ab_1.conda#ac944244f1fed2eb49bae07193ae8215 https://conda.anaconda.org/conda-forge/noarch/tomlkit-0.13.2-pyha770c72_1.conda#1d9ab4fc875c52db83f9c9b40af4e2c8 https://conda.anaconda.org/conda-forge/noarch/toolz-1.0.0-pyhd8ed1ab_1.conda#40d0ed782a8aaa16ef248e68c06c168d @@ -241,13 +237,13 @@ https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda# https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda#2841eb5bfc75ce15e9a0054b98dcd64d https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.5-hb9d3cd8_4.conda#7da9007c0582712c4bad4131f89c8372 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_1.conda#c79cea50b258f652010cb6c8d81591b5 https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda#3947a35e916fcc6b9825449affbf4214 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 -https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_0.conda#1bb1ef9806a9a20872434f58b3e7fc1a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.5-h55e9418_4.conda#faec629f0eb306cfe17ed1615249e188 +https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.7-hf454442_0.conda#947c82025693bebd557f782bb5d6b469 https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda#0a8838771cc2e985cd295e01ae83baf1 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 @@ -255,31 +251,31 @@ https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_1.conda#70 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py313hfab6e84_0.conda#ce6386a5892ef686d6d680c345c40ad1 
https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda#82bea35e4dac4678ba623cf10e95e375 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 -https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.9-py313h8060acc_0.conda#dc7f212c995a2126d955225844888dcb -https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.0-py313h536fd9c_1.conda#f536889754b62dad2e509cb858f525ee +https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py313h8060acc_0.conda#b76045c1b72b2db6e936bc1226a42c99 +https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py313h536fd9c_0.conda#e886bb6a3c24f8b9dd4fcd1d617a1f64 https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 -https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.2-py313h8060acc_0.conda#bcefb389907b2882f2c90dee23f07231 +https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py313h8060acc_1.conda#f89b4b415c5be34d24f74f30954792b5 https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 -https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_1.conda#358c17429c97883b2cb9ab5f64bc161b +https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_2.conda#40182a8d62a61d147ec7d3e4c5c36ac2 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-9.0.0-hda332d3_1.conda#76b32dcf243444aea9c6b804bcfa40b8 +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_1.conda#15798fa69312d433af690c8c42b3fb36 https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_1.conda#ef7dc847f19fe4859d5aaa33385bf509 https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda#a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 -https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.4-pyhd8ed1ab_1.conda#08cce3151bde4ecad7885bd9fb647532 +https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda#fd312693df06da3578383232528c468d https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.31.0-h804f50b_0.conda#35ab838423b60f233391eb86d324a830 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.33.0-h2b5623c_1.conda#61829a8dd5f4e2327e707572065bae41 https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py313h6eb7059_2.conda#48d1a2d9b1f12ff5180ffb4154050c48 https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda#fee3164ac23dfca50cfcc8b85ddefb81 
https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda#af6ab708897df59bd6e7283ceab1b56b -https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_1.tar.bz2#2f59daeb14581d41b1e2dda0895933b2 -https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_0.conda#dfe0528d0f1c16c1f7c528ea5536ab30 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.0-py313hb30382a_0.conda#5aa2240f061c27ddabaa2a4924c1a066 +https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_2.conda#cc2da171723d50bc1a7f8a53a8d0319f +https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py313hb30382a_0.conda#bacc73d89e22828efedf31fdc4b54b4e https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda#d0d408b1f18883a944376da5cf8101ea https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py313h2d7ed13_0.conda#0d95e1cda6bf9ce501e751c02561204e @@ -288,39 +284,39 @@ https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py313h8e95178_3.conda#8ab50c9c9c3824ac0ffac9e9dcf5619e -https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyh0610db2_0.conda#325219de79481bcf5b6446d327e3d492 +https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyhd8ed1ab_1.conda#2405a5561bffdef682167ca6db14683c https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_1.conda#8c9083612c1bfe6878715ed5732605f8 https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda#b1b505328da7a6b246787df4b5a49fbc https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 -https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_0.tar.bz2#7c4076e494f0efe76705154ac9302ba6 +https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_1.conda#c7b1961b139c21381764de4704b6bbfb https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda#293718ddac83a0fbc0f2193ff77d1e1c https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhd8ed1ab_1.conda#8380155472575eec439a47eef8f62b80 -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.7-hed26007_5.conda#7c64e4ac7a484fc525a4ce7b9baf709a +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.7-hd92328a_7.conda#02b95564257d5c3db9c06beccf711f95 https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda#73f73f60854f325a55f1d31459f2ab73 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda#13de36be8de3ae3f05ba127631599213 https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_1.conda#53eca64665361194ca4bbaf87c0ded99 
https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py313ha014f3b_1.conda#b20667f9b1d016c1141051a433f76dfc https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py313h33d0bda_0.conda#6b6768e7c585d7029f79a04cbc4cbff0 https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.0-py313h6556f6e_0.conda#a75161b68e899739b89057b15b1c63cd -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.12.0-pyhd8ed1ab_1.conda#c3bd6d4f36c0e1ef9a8cce53997460c2 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.12.1-pyhd8ed1ab_0.conda#48060c395f1e87a80330c0adaad332f7 https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda#4eb52aecb43e7c72f8e4fca0c386354e -https://conda.anaconda.org/conda-forge/noarch/ipython-8.30.0-pyh707e725_0.conda#5d6e5cb3a4b820f61b2073f0ad5431f1 +https://conda.anaconda.org/conda-forge/noarch/ipython-8.31.0-pyh707e725_0.conda#1d7fcd803dfa936a6c3bd051b293241c https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda#3b519bc21bc80e60b456f1e62962a766 https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda#4ebae00eae9705b0c3d6d1018a81d047 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.31.0-h0121fbd_0.conda#568d6a09a6ed76337a7b97c84ae7c0f8 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.33.0-h0121fbd_1.conda#b0cfb5044685a7a9fa43ae669124f0a0 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 -https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_11.conda#43a7f3df7d100e8fc280e6636680a870 +https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_12.conda#641f91ac6f984a91a78ba2411fe4f106 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py313ha87cce1_1.conda#c5d63dd501db554b84a30dea33824164 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h3a902e7_3.conda#8c12547e7b143fb70873fb732a4056b9 -https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_2.tar.bz2#2099b86a7399c44c0c61cdb6de6915ba -https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.2-pyhd8ed1ab_1.conda#2d8d45003973eb746f9465ca6b02c050 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h861ebed_4.conda#e501a460d7574686d514f87d420135dd +https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda#556a52a96313364aa79990ed1337b9a5 +https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.3-pyhd8ed1ab_0.conda#5842a1fa3b9b4f9fe7069b9ca5ed068d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py313hdb96ca5_0.conda#2a0d20f16832a170218b474bcec57acf https://conda.anaconda.org/conda-forge/noarch/pytest-cov-6.0.0-pyhd8ed1ab_1.conda#79963c319d1be62c8fd3e34555816e01 -https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_0.conda#ecd5e850bcd3eca02143e7df030ee50f -https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_0.conda#52b91ecba854d55b28ad916a8b10da24 +https://conda.anaconda.org/conda-forge/noarch/pytest-env-1.1.5-pyhd8ed1ab_1.conda#b92ee0cf3b26087455244248b1a04e0a +https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_1.conda#c4b7295798eff80144dc4ca4551efa80 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 
https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda#7aed65d4ff222bfb7335997aa40b7da5 @@ -328,69 +324,70 @@ https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py313h27c5614_2.con https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py313h3f71f02_2.conda#dd0b742e8e61b8f15e4b64efcc103ad6 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h33d0bda_5.conda#5bcffe10a500755da4a71cc0fb62a420 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py313h80202fe_1.conda#c178558ff516cd507763ffee230c20b2 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-h571fd1c_3.conda#374cf1add8af327b15b1b1e4873f4955 +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-hc430e4a_4.conda#aeefac461bea1f126653c1285cf5af08 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda#7eb66060455c7a47d9dcdbfa9f46579b https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.2-pyhd8ed1ab_1.conda#976ff24762f1f991b08f7a7a41875086 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313ha014f3b_0.conda#aecffd7a21d698e374487644ce67d6eb https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-hf413ef6_1.conda#63ea3e2f32daf4670182a3e6aad0b47b -https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h6470451_5.conda#1483ba046164be27df7f6eddbcec3a12 -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.3-pyhd8ed1ab_0.conda#dd3acd023fc358afab730866a0e5e3f5 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda#a3cead9264b331b32fe8f0aabc967522 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-h7250d82_6.conda#4e14dd6eef7e961a54258cab6482a656 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-hc0ffecb_0.conda#83f045969988f5c7a65f3950b95a8b35 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.9.3-py313h129903b_0.conda#e60c1296decc1bb82cc55e8a9da0ceb4 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-h3359108_13.conda#e9c2fb75425038991370f72231eca6e8 +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py313h129903b_0.conda#ab5b84154e1d9e41d4f11aea76d74096 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h1dd084c_101.conda#7acb7a454880b024f7d67487a7495631 https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda#269109707b3810adce78b6afb2a82c80 -https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_0.conda#4d2040212307d18392a2687772b3a96d +https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_1.conda#010e50e74c467db278f1398a74106a04 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py313ha014f3b_3.conda#041b8326743c64bd02b8c0f34f05e1ef -https://conda.anaconda.org/conda-forge/noarch/urllib3-2.2.3-pyhd8ed1ab_1.conda#4a2d8ef7c37b8808c5b9b750501fffce 
+https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 https://conda.anaconda.org/conda-forge/noarch/xarray-2024.11.0-pyhd8ed1ab_0.conda#7358eeedbffd742549d372e0066999d3 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda#7c1980f89dd41b097549782121a73490 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py313ha87cce1_0.conda#44c2091019480603a885aa01e7b710e7 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_0.conda#9437cfe346eab83b011b4def99f0e879 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.12.0-pyhd8ed1ab_1.conda#1838762b4a8e33ee7d8281494b22ff80 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda#ed15dcf944706ae6ea54968dfa4a06a5 +https://conda.anaconda.org/conda-forge/noarch/distributed-2024.12.1-pyhd8ed1ab_0.conda#58df114d7649ddb3f68c9b9adc6fbabe https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py313hab4ff3b_3.conda#69a5fbc032a6a01aa6cf7010dd2164a0 https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 -https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_0.conda#bcdbeb2b693eba886583a907840c6421 +https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_1.conda#1efb1227abaf20324ceb7ac9c06bb86d https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda#bbe1963f1e47f594070ffe87cdf612ea -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_0.tar.bz2#281b58948bf60a2582de9e548bcc5369 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_0.conda#5971cc64048943605f352f7f8612de6c +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py313ha014f3b_0.conda#b28717a6d595cdc42737d6669d422b1d https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_0.conda#80851ac5ec3916496d7f353351c48846 +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a https://conda.anaconda.org/conda-forge/noarch/iris-3.11.0-pyha770c72_0.conda#a5e36260789ce92074c3736533ecdd61 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.1.0-h3b07799_4_cpu.conda#27675c7172667268440306533e4928de -https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.1-pyhd8ed1ab_0.conda#3ee79082e59a28e1db11e2a9c3bcd85a +https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.1.0-hd595efa_7_cpu.conda#08d4aff5ee6dee9a1b9ab13fca927697 +https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda#6bb0d77277061742744176ab555b723c https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac -https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.2-py313h78bf25f_0.conda#45f3a293c1709b761bd450917cecd8c6 
-https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_0.conda#c6089540fed51a9a829aa19590fa925b +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.3-py313h78bf25f_0.conda#7c460c46b2f701a9733bf931223fe4b8 +https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_1.conda#584e6aab3a5cffde537c575ad6a673ff https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.20.0-pyhd8ed1ab_1.conda#d8dced41fc56982c81190ba0eb10c3de -https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.1.0-h8bbc2ab_4_cpu.conda#82bcbfe424868ce66b5ab986999f534d -https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.1.0-hf4f6db6_4_cpu.conda#f18b10bf19bb384183f2aa546e9f6f0a -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhff2d567_2.conda#0457fdf55c88e52e0e7b63691eafcc48 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.1.0-hcb10f89_7_cpu.conda#12d84228204c56fec6ed113288014d11 +https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.1.0-h081d1f1_7_cpu.conda#b97013ef4e1dd2cf11594f06d5b5e83a +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_3.conda#fbc9010c36dd6fe433c045394df78efa https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 -https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.9.0-pyhd8ed1ab_0.conda#177a9651dc31c11a81eddc2a5e2e524e +https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.9.0-pyhd8ed1ab_1.conda#53912b9ade4f2ea4dd1d5d6d3de7df70 https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-18.1.0-py313he5f92c8_0_cpu.conda#5380e12f4468e891911dbbd4248b521a -https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.1.0-h8bbc2ab_4_cpu.conda#fa31464c75b20c2f3ac8fc758e034887 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_2.conda#28701f71ce0b88b86783df822dd9d7b9 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.1.0-had74209_4_cpu.conda#bf261e5fa25ce4acc11a80bdc73b88b2 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_2.conda#9337002f0dd2fcb8e1064f8023c8e0c0 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.1.0-hcb10f89_7_cpu.conda#0a81eb63d7cd150f598c752e86388d57 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_3.conda#68b369ee7e24bd895c92722d22d0560f +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.1.0-h08228c5_7_cpu.conda#e128def53c133e8a23ac00cd4a479335 +https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_3.conda#59b074b5c8185ec0f66b0b0229592c83 https://conda.anaconda.org/conda-forge/linux-64/pyarrow-18.1.0-py313h78bf25f_0.conda#a11d880ceedc33993c6f5c14a80ea9d3 -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.20-pyhd8ed1ab_1.conda#46f5089b7828d82517a98366820c5e85 -https://conda.anaconda.org/conda-forge/noarch/dask-2024.12.0-pyhd8ed1ab_1.conda#466d56f3108523402be464e4192f584e +https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.21-pyhd8ed1ab_0.conda#e72a014dbbd35545dcfba4de9c92fb1d +https://conda.anaconda.org/conda-forge/noarch/dask-2024.12.1-pyhd8ed1ab_0.conda#f3134df9565c4d4415ff0e61f3aa28d0 https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c 
https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a -https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.5-pyhd8ed1ab_0.conda#b808b8a0494c5cca76200c73e260a060 -https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.0-pyhd8ed1ab_0.conda#344261b0e77f5d2faaffb4eac225eeb7 +https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.6-pyhd8ed1ab_0.conda#2e4c30e09d50d025836279d80140d0a4 +https://conda.anaconda.org/conda-forge/noarch/pydata-sphinx-theme-0.16.1-pyhd8ed1ab_0.conda#837aaf71ddf3b27acae0e7e9015eebc6 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-applehelp-2.0.0-pyhd8ed1ab_1.conda#16e3f039c0aa6446513e94ab18a8784b https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-devhelp-2.0.0-pyhd8ed1ab_1.conda#910f28a05c178feba832f842155cbfff https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-htmlhelp-2.1.0-pyhd8ed1ab_1.conda#e9fb3fe8a5b758b4aff187d434f94f03 https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-qthelp-2.0.0-pyhd8ed1ab_1.conda#00534ebcc0375929b45c3039b5ba7636 https://conda.anaconda.org/conda-forge/noarch/sphinx-8.1.3-pyhd8ed1ab_1.conda#1a3281a0dc355c02b5506d87db2d78ac https://conda.anaconda.org/conda-forge/noarch/sphinxcontrib-serializinghtml-1.1.10-pyhd8ed1ab_1.conda#3bc61f7161d28137797e038263c04c54 -# pip scitools-iris @ https://files.pythonhosted.org/packages/20/89/109d116f778fd148782598eb1796db00d47de8ca0d68503d248b55154581/scitools_iris-3.11.0-py3-none-any.whl#sha256=97bb7d7e349808684a5326a1ec06a459702a2b4f435c9a1502378d41e24a32f3 +# pip dask @ https://files.pythonhosted.org/packages/ad/8f/65830f3db64bd8a9a67e6e19c209e236689ea46c5871a536cb187fe53ab1/dask-2024.8.2-py3-none-any.whl#sha256=484c317ee870140dbeccc759a91fd98821bba98d71a43eda7856fc9e859b03a1 +# pip scitools-iris @ https://files.pythonhosted.org/packages/d5/57/e874d6ba8fef5eb7aa609a571bbbbb4f00a7339bc77ffbdcc69c03b12842/scitools_iris-3.11.1-py3-none-any.whl#sha256=8aee22c95fc47901a7686f8ffe714937fae24b388460e283df62ed436c2891d1 # pip esmvaltool-sample-data @ https://files.pythonhosted.org/packages/58/fa/4ecc84665e0ed04c8c4c797405c19c12900bdba6438ab2f5541bf8aa1d42/ESMValTool_sample_data-0.0.3-py3-none-any.whl#sha256=81f0f02182eacb3b639cb207abae5ac469c6dd83fb6dfe6d2430c69723d85461 From bd617f5ea5ee81c4f7b7cacec16e80d6573c3b6c Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 7 Jan 2025 14:04:26 +0100 Subject: [PATCH 19/36] [pre-commit.ci] pre-commit autoupdate (#2630) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index b20c3be813..347ed69be0 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,13 +33,13 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.4" + rev: "v0.8.6" hooks: - id: ruff args: [--fix] - id: ruff-format - repo: https://github.com/pre-commit/mirrors-mypy - rev: 'v1.14.0' + rev: 'v1.14.1' hooks: - id: mypy additional_dependencies: From b0b2799d3ec3cb3e57728fd0636aed01655d9875 Mon Sep 17 00:00:00 2001 From: FranziskaWinterstein <119339136+FranziskaWinterstein@users.noreply.github.com> Date: Thu, 9 Jan 2025 14:47:27 +0100 Subject: [PATCH 20/36] Correct incorrect time bounds in EMAC data (#2621) Co-authored-by: 
Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- doc/quickstart/find_data.rst | 31 ++++++++++--------- esmvalcore/cmor/_fixes/emac/emac.py | 4 +++ .../integration/cmor/_fixes/emac/test_emac.py | 19 ++++++++++++ 3 files changed, 40 insertions(+), 14 deletions(-) diff --git a/doc/quickstart/find_data.rst b/doc/quickstart/find_data.rst index d93f114f21..41e0cc06ee 100644 --- a/doc/quickstart/find_data.rst +++ b/doc/quickstart/find_data.rst @@ -380,20 +380,23 @@ For some variables, extra facets are necessary; otherwise ESMValCore cannot read them properly. Supported keys for extra facets are: -==================== ====================================== ================================= -Key Description Default value if not specified -==================== ====================================== ================================= -``channel`` Channel in which the desired variable No default (needs to be specified - is stored in extra facets or recipe if - default DRS is used) -``postproc_flag`` Postprocessing flag of the data ``''`` (empty string) -``raw_name`` Variable name of the variable in the CMOR variable name of the - raw input file corresponding variable -``raw_units`` Units of the variable in the raw If specified, the value given by - input file the ``units`` attribute in the - raw input file; otherwise - ``unknown`` -==================== ====================================== ================================= +===================== ====================================== ================================= +Key Description Default value if not specified +===================== ====================================== ================================= +``channel`` Channel in which the desired variable No default (needs to be specified + is stored in extra facets or recipe if + default DRS is used) +``postproc_flag`` Postprocessing flag of the data ``''`` (empty string) +``raw_name`` Variable name of the variable in the CMOR variable name of the + raw input file corresponding variable +``raw_units`` Units of the variable in the raw If specified, the value given by + input file the ``units`` attribute in the + raw input file; otherwise + ``unknown`` +``reset_time_bounds`` Boolean if time bounds are deleted, ``False`` + and automatically recalculated by + iris +===================== ====================================== ================================= .. 
note:: diff --git a/esmvalcore/cmor/_fixes/emac/emac.py b/esmvalcore/cmor/_fixes/emac/emac.py index e5150f16f3..b60302932c 100644 --- a/esmvalcore/cmor/_fixes/emac/emac.py +++ b/esmvalcore/cmor/_fixes/emac/emac.py @@ -67,6 +67,10 @@ def fix_metadata(self, cubes): cube = self.get_cube(cubes) # Fix time, latitude, and longitude coordinates + if cube.coords("time") and self.extra_facets.get( + "reset_time_bounds", False + ): + cube.coord("time").bounds = None self.fix_regular_time(cube) self.fix_regular_lat(cube) self.fix_regular_lon(cube) diff --git a/tests/integration/cmor/_fixes/emac/test_emac.py b/tests/integration/cmor/_fixes/emac/test_emac.py index 6c738e8ee0..28865eb165 100644 --- a/tests/integration/cmor/_fixes/emac/test_emac.py +++ b/tests/integration/cmor/_fixes/emac/test_emac.py @@ -2806,3 +2806,22 @@ def test_fix_invalid_units(): assert "positive" not in cube.attributes np.testing.assert_allclose(cube.data, 1.0) + + +# Test fix invalid time bounds + + +def test_fix_time_bounds(cubes_2d): + """Test fix.""" + cubes_2d[0].var_name = "tsurf" + cubes_2d[0].units = "K" + cubes_2d[0].coord("time").bounds = [0.0, 0.5] + + fix = get_allvars_fix("Amon", "ts") + fix.extra_facets["reset_time_bounds"] = True + + fixed_cubes = fix.fix_metadata(cubes_2d) + + cube = fixed_cubes[0] + + assert not cube.coord("time").has_bounds() From 3c6977ee7d2b6d377792ff3fb1244d2584feffa3 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 12:12:07 +0000 Subject: [PATCH 21/36] [Condalock] Update Linux condalock file (#2631) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 61 +++++++++++++++++++++++---------------------- 1 file changed, 31 insertions(+), 30 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 1d97290b15..0b8873c723 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -32,7 +32,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.cond https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.9.0-hb9d3cd8_1.conda#1e936bd23d737aac62a18e9a1e7f8b18 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 -https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-hb9d3cd8_0.conda#23cc74f77eb99315c0360ec3533147a9 +https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.conda#f6ebe2cb3f82ba6c057dde5d9debe4f7 @@ -48,7 +48,7 @@ https://conda.anaconda.org/conda-forge/linux-64/geos-3.13.0-h5888daf_0.conda#40b https://conda.anaconda.org/conda-forge/linux-64/gflags-2.2.2-h5888daf_1005.conda#d411fc29e338efb48c5fd4576d71d881 https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.2-hd590300_0.conda#3bf7b9fd5a7136126e0234db4b87c8b6 https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2#30186d27e2c9fa62b45fb1476b7200e3 -https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_2.conda#48099a5f37e331f5570abbf22b229961 
+https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 @@ -60,7 +60,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-3.0.0-hd590300_1.c https://conda.anaconda.org/conda-forge/linux-64/libmpdec-4.0.0-h4bc722e_0.conda#aeb98fdeb2e8f25d43ef71fbacbeec80 https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30fd6e37fe21f86f4bd26d6ee73eeec7 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 -https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.44-hadc24fc_0.conda#f4cc49d7aa68316213e4b12be35308d1 +https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 @@ -93,7 +93,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda#407fee7a5d7ab2dca12c9ca7f62310ad https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2#4d331e44109e3f0e19b4cb8f9b82f3e1 +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda#d8703f1ffe5a06356f06467f1d0b9464 @@ -125,7 +125,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0 https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda#eec77634ccdb2ba6c231290c399b1dae https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda#4f6f9f3f80354ad185e276c120eac3f0 -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_102_cp313.conda#6e7535f1d1faf524e9210d2689b3149b +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_104_cp313.conda#f07c94533999146026ff5194a9e3906b https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h9925aae_2.conda#e84ddf12bde691e8ec894b00ea829ddf https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b @@ -143,7 
+143,7 @@ https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py313h46c70d https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e -https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.0-pyhd8ed1ab_1.conda#6581a17bba6b948bb60130026404a9d6 +https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 @@ -190,7 +190,6 @@ https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py313h8756d67_2.conda# https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_1.conda#21b62c55924f01b6eef6827167b46acb https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda#827064ddfe0de2917fb29f1da4f8f533 https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 -https://conda.anaconda.org/conda-forge/noarch/mistune-3.0.2-pyhd8ed1ab_1.conda#c46df05cae629e55426773ac1f85d68f https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py313h33d0bda_0.conda#7f907b1065247efa419bb70d3a3341b5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda#fd40bf7f7f4bc4b647dc8512053d9873 @@ -207,7 +206,7 @@ https://conda.anaconda.org/conda-forge/linux-64/psutil-6.1.1-py313h536fd9c_0.con https://conda.anaconda.org/conda-forge/noarch/ptyprocess-0.7.0-pyhd8ed1ab_1.conda#7d9daffbb8d8e0af0f769dbbcd173a54 https://conda.anaconda.org/conda-forge/noarch/pure_eval-0.2.3-pyhd8ed1ab_1.conda#3bfdfb8dbcdc4af1ae3f9a8eb3948f04 https://conda.anaconda.org/conda-forge/noarch/pycparser-2.22-pyh29332c3_1.conda#12c566707c80111f9799308d9e265aef -https://conda.anaconda.org/conda-forge/noarch/pygments-2.18.0-pyhd8ed1ab_1.conda#b38dc0206e2a530e5c2cf11dc086b31a +https://conda.anaconda.org/conda-forge/noarch/pygments-2.19.1-pyhd8ed1ab_0.conda#232fb4577b6687b2d503ef8e254270c9 https://conda.anaconda.org/conda-forge/noarch/pyparsing-3.2.1-pyhd8ed1ab_0.conda#285e237b8f351e85e7574a2c7bfa6d46 https://conda.anaconda.org/conda-forge/noarch/pyshp-2.3.1-pyhd8ed1ab_1.conda#856b387c270e9eaf6e41e978057a2b62 https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha55dd90_7.conda#461219d1a5bd61342293efa2c0c90eac @@ -217,7 +216,7 @@ https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py313h536fd9 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py313h536fd9c_1.conda#3789f360de131c345e96fbfc955ca80b https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.22.3-py313h920b4c0_0.conda#f21c21a167b2e25292e436dcb8e7cf3e -https://conda.anaconda.org/conda-forge/noarch/setuptools-75.6.0-pyhff2d567_1.conda#fc80f7995e396cbaeabd23cf46c413dc 
+https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 https://conda.anaconda.org/conda-forge/noarch/snowballstemmer-2.2.0-pyhd8ed1ab_0.tar.bz2#4d22a9315e78c6827f806065957d566e https://conda.anaconda.org/conda-forge/noarch/sortedcontainers-2.4.0-pyhd8ed1ab_0.tar.bz2#6d6552722448103793743dabfbda532d @@ -247,7 +246,7 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.7-hf454442_0.conda# https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda#0a8838771cc2e985cd295e01ae83baf1 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 -https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_1.conda#707af59db75b066217403a8f00c1d826 +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_3.conda#b33551d9bac06d754762e8ccb3c4df03 https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py313hfab6e84_0.conda#ce6386a5892ef686d6d680c345c40ad1 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda#82bea35e4dac4678ba623cf10e95e375 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 @@ -259,9 +258,9 @@ https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda# https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_2.conda#40182a8d62a61d147ec7d3e4c5c36ac2 https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 -https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.4-nompi_h2d575fe_105.conda#d76fff0092b6389a12134ddebc0929bd +https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h2d575fe_108.conda#b74598031529dafb2a66f9e90f26f2dc https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff -https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.4.5-pyhd8ed1ab_1.conda#15798fa69312d433af690c8c42b3fb36 +https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda#c85c76dc67d75619a92f51dfbce06992 https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_1.conda#ef7dc847f19fe4859d5aaa33385bf509 https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda#a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#2752a6ed44105bfb18c9bef1177d9dcd @@ -273,12 +272,13 @@ https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.33.0-h2b5623c_ https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py313h6eb7059_2.conda#48d1a2d9b1f12ff5180ffb4154050c48 https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda#fee3164ac23dfca50cfcc8b85ddefb81 https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda#af6ab708897df59bd6e7283ceab1b56b +https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.0-pyhd8ed1ab_0.conda#d10024c163a52eeecbb166fdeaef8b12 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_2.conda#cc2da171723d50bc1a7f8a53a8d0319f 
https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py313hb30382a_0.conda#bacc73d89e22828efedf31fdc4b54b4e https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda#d0d408b1f18883a944376da5cf8101ea -https://conda.anaconda.org/conda-forge/linux-64/pillow-11.0.0-py313h2d7ed13_0.conda#0d95e1cda6bf9ce501e751c02561204e +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py313h8db990d_0.conda#1e86810c6c3fb6d6aebdba26564eb2e8 https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1.conda#368d4aa48358439e07a97ae237491785 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb @@ -290,12 +290,13 @@ https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.cond https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_1.conda#c7b1961b139c21381764de4704b6bbfb -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.0-pyhd8ed1ab_0.conda#1d601bc1d28b5ce6d112b90f4b9b8ede +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda#293718ddac83a0fbc0f2193ff77d1e1c -https://conda.anaconda.org/conda-forge/noarch/yamale-5.2.1-pyhd8ed1ab_1.conda#8380155472575eec439a47eef8f62b80 +https://conda.anaconda.org/conda-forge/noarch/yamale-5.3.0-pyhd8ed1ab_0.conda#d4b5f3a50decd28cd747f4b5f7aea33f https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.7-hd92328a_7.conda#02b95564257d5c3db9c06beccf711f95 https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda#73f73f60854f325a55f1d31459f2ab73 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda#13de36be8de3ae3f05ba127631599213 +https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-hd8ed1ab_3.conda#e250a492fc70bf604737328dbe02846c https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_1.conda#53eca64665361194ca4bbaf87c0ded99 https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py313ha014f3b_1.conda#b20667f9b1d016c1141051a433f76dfc https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py313h33d0bda_0.conda#6b6768e7c585d7029f79a04cbc4cbff0 @@ -307,10 +308,10 @@ https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#95 https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda#3b519bc21bc80e60b456f1e62962a766 https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda#4ebae00eae9705b0c3d6d1018a81d047 https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.33.0-h0121fbd_1.conda#b0cfb5044685a7a9fa43ae669124f0a0 -https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h5ddbaa4_116.conda#f51573abc223afed7e5374f34135ce05 
+https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h00e09a9_116.conda#417864857bdb6c2be2e923e89bffd2e8 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_12.conda#641f91ac6f984a91a78ba2411fe4f106 https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py313ha87cce1_1.conda#c5d63dd501db554b84a30dea33824164 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.54.0-h861ebed_4.conda#e501a460d7574686d514f87d420135dd +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda#556a52a96313364aa79990ed1337b9a5 https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.3-pyhd8ed1ab_0.conda#5842a1fa3b9b4f9fe7069b9ca5ed068d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py313hdb96ca5_0.conda#2a0d20f16832a170218b474bcec57acf @@ -320,7 +321,7 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_1 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda#7aed65d4ff222bfb7335997aa40b7da5 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.14.1-py313h27c5614_2.conda#25c0eda0d2ed28962c5f3e8f7fbeace3 +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py313h750cbce_1.conda#5fa8ee00606ba9d5a928d989b949c45b https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py313h3f71f02_2.conda#dd0b742e8e61b8f15e4b64efcc103ad6 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h33d0bda_5.conda#5bcffe10a500755da4a71cc0fb62a420 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py313h80202fe_1.conda#c178558ff516cd507763ffee230c20b2 @@ -328,25 +329,25 @@ https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-hc430e4a_4. 
https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda#7eb66060455c7a47d9dcdbfa9f46579b https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.2-pyhd8ed1ab_1.conda#976ff24762f1f991b08f7a7a41875086 https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313ha014f3b_0.conda#aecffd7a21d698e374487644ce67d6eb -https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-hf413ef6_1.conda#63ea3e2f32daf4670182a3e6aad0b47b +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-h8bb6dbc_1.conda#87c13b15a9f3ec25cd3c9d8f2e33fe0b https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.4-pyhd8ed1ab_0.conda#5ec16e7ad9bab911ff0696940953f505 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda#a3cead9264b331b32fe8f0aabc967522 https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-h3359108_13.conda#e9c2fb75425038991370f72231eca6e8 https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py313h129903b_0.conda#ab5b84154e1d9e41d4f11aea76d74096 -https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_ha5d1325_108.conda#3b3c67ab7987ec0416037fc14386ed70 -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h1dd084c_101.conda#7acb7a454880b024f7d67487a7495631 +https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_108.conda#0967d692b1dd33e7d809cfa355090e4b +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h2a70696_101.conda#fe03a55f80aef5f47b65320cd10025b4 https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda#269109707b3810adce78b6afb2a82c80 https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_1.conda#010e50e74c467db278f1398a74106a04 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py313ha014f3b_3.conda#041b8326743c64bd02b8c0f34f05e1ef https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 -https://conda.anaconda.org/conda-forge/noarch/xarray-2024.11.0-pyhd8ed1ab_0.conda#7358eeedbffd742549d372e0066999d3 +https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.1-pyhd8ed1ab_0.conda#81db80ba986122da460800a67bf8ac7f https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda#7c1980f89dd41b097549782121a73490 https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py313ha87cce1_0.conda#44c2091019480603a885aa01e7b710e7 https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda#ed15dcf944706ae6ea54968dfa4a06a5 https://conda.anaconda.org/conda-forge/noarch/distributed-2024.12.1-pyhd8ed1ab_0.conda#58df114d7649ddb3f68c9b9adc6fbabe -https://conda.anaconda.org/conda-forge/linux-64/esmf-8.7.0-nompi_h6063b07_1.conda#15e28a0e5e651ba11495c87608652316 +https://conda.anaconda.org/conda-forge/linux-64/esmf-8.8.0-nompi_h4441c20_0.conda#34729c36214ff0b7834065bd5cacdc56 https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py313hab4ff3b_3.conda#69a5fbc032a6a01aa6cf7010dd2164a0 
https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_1.conda#1efb1227abaf20324ceb7ac9c06bb86d @@ -356,25 +357,25 @@ https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.cond https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py313ha014f3b_0.conda#b28717a6d595cdc42737d6669d422b1d https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 -https://conda.anaconda.org/conda-forge/noarch/esmpy-8.7.0-pyhecae5ae_1.conda#06552fcd493ed61a8a9a6e1ee014ca8a +https://conda.anaconda.org/conda-forge/noarch/esmpy-8.8.0-pyhecae5ae_0.conda#9d8320aa90c8e213002f9cdb5bb9f579 https://conda.anaconda.org/conda-forge/noarch/iris-3.11.0-pyha770c72_0.conda#a5e36260789ce92074c3736533ecdd61 https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.1.0-hd595efa_7_cpu.conda#08d4aff5ee6dee9a1b9ab13fca927697 https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda#6bb0d77277061742744176ab555b723c https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.3-py313h78bf25f_0.conda#7c460c46b2f701a9733bf931223fe4b8 https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_1.conda#584e6aab3a5cffde537c575ad6a673ff -https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_4.conda#f481c17430f801e68ee3b57cc30ecd2e -https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.20.0-pyhd8ed1ab_1.conda#d8dced41fc56982c81190ba0eb10c3de +https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_5.conda#6779887899e0b0b6fb316253eb0f5c64 +https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.21.0-pyhd8ed1ab_0.conda#d5a110459acc9669c58e5d516fc2e165 https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.1.0-hcb10f89_7_cpu.conda#12d84228204c56fec6ed113288014d11 https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.1.0-h081d1f1_7_cpu.conda#b97013ef4e1dd2cf11594f06d5b5e83a -https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.4-pyhd8ed1ab_3.conda#fbc9010c36dd6fe433c045394df78efa +https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.5-pyhd8ed1ab_1.conda#dd50a122c5b9782b1e9b2695473bfd95 https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.9.0-pyhd8ed1ab_1.conda#53912b9ade4f2ea4dd1d5d6d3de7df70 https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-18.1.0-py313he5f92c8_0_cpu.conda#5380e12f4468e891911dbbd4248b521a https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.1.0-hcb10f89_7_cpu.conda#0a81eb63d7cd150f598c752e86388d57 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.4-hd8ed1ab_3.conda#68b369ee7e24bd895c92722d22d0560f +https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.5-hd8ed1ab_1.conda#593a8fd80968f14f8a7b3a685ddc455e https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.1.0-h08228c5_7_cpu.conda#e128def53c133e8a23ac00cd4a479335 -https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.4-hd8ed1ab_3.conda#59b074b5c8185ec0f66b0b0229592c83 
+https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.5-hd8ed1ab_1.conda#82ffc2974cd09b45182f018b5af731c8 https://conda.anaconda.org/conda-forge/linux-64/pyarrow-18.1.0-py313h78bf25f_0.conda#a11d880ceedc33993c6f5c14a80ea9d3 https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.21-pyhd8ed1ab_0.conda#e72a014dbbd35545dcfba4de9c92fb1d https://conda.anaconda.org/conda-forge/noarch/dask-2024.12.1-pyhd8ed1ab_0.conda#f3134df9565c4d4415ff0e61f3aa28d0 From de514e40b92930138ad798d3094d7e7325ba6567 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Thu, 16 Jan 2025 13:34:39 +0100 Subject: [PATCH 22/36] Silence Iris warnings in ``area_statistics`` preprocessor function (#2625) Co-authored-by: Valeriu Predoi --- esmvalcore/preprocessor/_area.py | 12 +++++++++++- 1 file changed, 11 insertions(+), 1 deletion(-) diff --git a/esmvalcore/preprocessor/_area.py b/esmvalcore/preprocessor/_area.py index 7d8d867155..0ae54f9332 100644 --- a/esmvalcore/preprocessor/_area.py +++ b/esmvalcore/preprocessor/_area.py @@ -7,6 +7,7 @@ from __future__ import annotations import logging +import warnings from pathlib import Path from typing import TYPE_CHECKING, Iterable, Literal, Optional @@ -354,7 +355,16 @@ def area_statistics( agg, agg_kwargs, "cell_area", cube, try_adding_calculated_cell_area ) - result = cube.collapsed(["latitude", "longitude"], agg, **agg_kwargs) + with warnings.catch_warnings(): + # Silence various warnings about collapsing multi-dimensional and/or + # non contiguous coordinates as this should be fine when the cell areas + # are provided and will fail when they needed but not provided. + warnings.filterwarnings( + "ignore", + category=iris.warnings.IrisVagueMetadataWarning, + module="iris", + ) + result = cube.collapsed(["latitude", "longitude"], agg, **agg_kwargs) if normalize is not None: result = get_normalized_cube(cube, result, normalize) From 542a374a0d69843dd0c2ced91581e844977a6692 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Thu, 16 Jan 2025 13:41:00 +0100 Subject: [PATCH 23/36] Do not copy ICON horizontal grid every time it is used (#2633) --- esmvalcore/cmor/_fixes/icon/_base_fixes.py | 12 +++---- esmvalcore/cmor/_fixes/icon/icon.py | 2 +- .../integration/cmor/_fixes/icon/test_icon.py | 32 +++++++++++++++---- 3 files changed, 33 insertions(+), 13 deletions(-) diff --git a/esmvalcore/cmor/_fixes/icon/_base_fixes.py b/esmvalcore/cmor/_fixes/icon/_base_fixes.py index 9c551ef4a0..53402daada 100644 --- a/esmvalcore/cmor/_fixes/icon/_base_fixes.py +++ b/esmvalcore/cmor/_fixes/icon/_base_fixes.py @@ -67,7 +67,7 @@ def _create_mesh(self, cube: Cube) -> MeshXY: # we transpose the cube here) vertex_of_cell = horizontal_grid.extract_cube( NameConstraint(var_name="vertex_of_cell") - ) + ).copy() vertex_of_cell.transpose() # Extract start index used to name nodes from the the horizontal grid @@ -167,8 +167,8 @@ def _get_node_coords(self, horizontal_grid): dual_area_cube = horizontal_grid.extract_cube( NameConstraint(var_name="dual_area") ) - node_lat = dual_area_cube.coord(var_name="vlat") - node_lon = dual_area_cube.coord(var_name="vlon") + node_lat = dual_area_cube.coord(var_name="vlat").copy() + node_lon = dual_area_cube.coord(var_name="vlon").copy() # Fix metadata node_lat.bounds = None @@ -373,7 +373,7 @@ def _get_downloaded_grid(self, grid_url: str, grid_name: str) -> CubeList: return cubes def get_horizontal_grid(self, cube): - """Get copy of ICON horizontal grid. + """Get ICON horizontal grid. 
If given, retrieve grid from `horizontal_grid` facet specified by the user. Otherwise, try to download the file from the location given by @@ -396,7 +396,7 @@ def get_horizontal_grid(self, cube): Returns ------- iris.cube.CubeList - Copy of ICON horizontal grid. + ICON horizontal grid. Raises ------ @@ -414,7 +414,7 @@ def get_horizontal_grid(self, cube): else: grid = self._get_grid_from_cube_attr(cube) - return grid.copy() + return grid def get_mesh(self, cube): """Get mesh. diff --git a/esmvalcore/cmor/_fixes/icon/icon.py b/esmvalcore/cmor/_fixes/icon/icon.py index c5792019c8..bc80eaa17f 100644 --- a/esmvalcore/cmor/_fixes/icon/icon.py +++ b/esmvalcore/cmor/_fixes/icon/icon.py @@ -123,7 +123,7 @@ def _add_coord_from_grid_file(self, cube, coord_name): grid_cube = horizontal_grid.extract_cube( NameConstraint(var_name="cell_area") ) - coord = grid_cube.coord(coord_name_in_grid) + coord = grid_cube.coord(coord_name_in_grid).copy() # Find index of mesh dimension (= single unnamed dimension) n_unnamed_dimensions = cube.ndim - len(cube.dim_coords) diff --git a/tests/integration/cmor/_fixes/icon/test_icon.py b/tests/integration/cmor/_fixes/icon/test_icon.py index cfd3ea6726..bf302f6002 100644 --- a/tests/integration/cmor/_fixes/icon/test_icon.py +++ b/tests/integration/cmor/_fixes/icon/test_icon.py @@ -855,7 +855,7 @@ def test_tas_scalar_height2m_already_present(cubes_2d): check_heightxm(cube, 2.0) -def test_tas_dim_height2m_already_present(cubes_2d): +def test_tas_no_mesh(cubes_2d): """Test fix.""" fix = get_allvars_fix("Amon", "tas") fix.extra_facets["ugrid"] = False @@ -891,7 +891,7 @@ def test_tas_dim_height2m_already_present(cubes_2d): assert cube.coord_dims(lat) == cube.coord_dims(i_coord) -def test_tas_no_mesh(cubes_2d): +def test_tas_dim_height2m_already_present(cubes_2d): """Test fix.""" fix = get_allvars_fix("Amon", "tas") @@ -933,6 +933,28 @@ def test_tas_no_shift_time(cubes_2d): assert time.attributes == {} +def test_fix_does_not_change_cached_grid(cubes_2d): + """Test fix.""" + fix = get_allvars_fix("Amon", "tas") + assert not fix._horizontal_grids + assert not fix._meshes + + # Remove latitude and longitude from tas cube to trigger automatic addition + # of them + cube = cubes_2d.extract_cube(NameConstraint(var_name="tas")) + cube.remove_coord("latitude") + cube.remove_coord("longitude") + + # Make sure horizontal grid is cached + fix.get_horizontal_grid(cube) + assert "icon_grid.nc" in fix._horizontal_grids + original_grid = fix._horizontal_grids["icon_grid.nc"].copy() + + # Make sure that fix does not alter existing grid + fix.fix_metadata(cubes_2d) + assert fix._horizontal_grids["icon_grid.nc"] == original_grid + + # Test uas (for height10m coordinate) @@ -1288,8 +1310,7 @@ def test_get_horizontal_grid_from_attr_cached_in_dict( assert "cached_grid_url.nc" in fix._horizontal_grids assert "grid_from_facet.nc" in fix._horizontal_grids # has not been used assert fix._horizontal_grids["cached_grid_url.nc"] == grid - assert grid == grid_cube - assert grid is not grid_cube + assert grid is grid_cube assert mock_requests.mock_calls == [] mock_get_grid_from_facet.assert_not_called() @@ -1415,8 +1436,7 @@ def test_get_horizontal_grid_from_facet_cached_in_dict( assert "cached_grid_url.nc" in fix._horizontal_grids # has not been used assert "grid.nc" in fix._horizontal_grids assert fix._horizontal_grids["grid.nc"] == grid - assert grid == grid_cube - assert grid is not grid_cube + assert grid is grid_cube mock_get_grid_from_cube_attr.assert_not_called() From 
f86d15134dffe68055b463db2b4aa399873c76c0 Mon Sep 17 00:00:00 2001 From: Valeriu Predoi Date: Thu, 16 Jan 2025 15:29:02 +0000 Subject: [PATCH 24/36] Fix `oh` for model: EC-Earth3-AerChem mip: AERMonZ (#2634) Co-authored-by: Bouwe Andela Co-authored-by: Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- .../cmor/_fixes/cmip6/ec_earth3_aerchem.py | 35 ++++++++++++++++ .../_fixes/cmip6/test_ec_earth3_aerchem.py | 41 +++++++++++++++++++ 2 files changed, 76 insertions(+) create mode 100644 esmvalcore/cmor/_fixes/cmip6/ec_earth3_aerchem.py create mode 100644 tests/integration/cmor/_fixes/cmip6/test_ec_earth3_aerchem.py diff --git a/esmvalcore/cmor/_fixes/cmip6/ec_earth3_aerchem.py b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_aerchem.py new file mode 100644 index 0000000000..d480bfdf96 --- /dev/null +++ b/esmvalcore/cmor/_fixes/cmip6/ec_earth3_aerchem.py @@ -0,0 +1,35 @@ +"""Fixes for EC-Earth3-AerChem model.""" + +from ..fix import Fix + + +class Oh(Fix): + """Fixes for oh.""" + + def fix_metadata(self, cubes): + """Fix standard name for ps. + + Fix standard_name for Surface Air Pressure (ps). + See discussion in + https://github.com/ESMValGroup/ESMValCore/issues/2613 + Cube has two coordinates called air_pressure: an AuxCoord ps + and a DerivedCoord that is 4D and derived using formula terms, + we are setting the former's standard_name to "surface_air_pressure". + + Parameters + ---------- + cubes : iris.cube.CubeList + Input cubes. + + Returns + ------- + iris.cube.CubeList + """ + cube = self.get_cube_from_list(cubes) + + for cube in cubes: + for coord in cube.coords(): + if coord.var_name == "ps": + coord.standard_name = "surface_air_pressure" + + return cubes diff --git a/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_aerchem.py b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_aerchem.py new file mode 100644 index 0000000000..c29562ffa1 --- /dev/null +++ b/tests/integration/cmor/_fixes/cmip6/test_ec_earth3_aerchem.py @@ -0,0 +1,41 @@ +"""Tests for EC-Earth3-AerChem model.""" + +import iris +import pytest + +from esmvalcore.cmor._fixes.cmip6.ec_earth3_aerchem import Oh +from esmvalcore.cmor.fix import Fix +from esmvalcore.cmor.table import get_var_info + + +@pytest.fixture +def oh_cubes(): + air_pressure_coord = iris.coords.DimCoord( + [1000.09, 600.6, 200.0], + bounds=[[1200.00001, 800], [800, 400.8], [400.8, 1.9]], + var_name="ps", + standard_name="air_pressure", + units="pa", + ) + oh_cube = iris.cube.Cube( + [0.0, 1.0, 2.0], + var_name="oh", + dim_coords_and_dims=[(air_pressure_coord, 0)], + ) + return iris.cube.CubeList([oh_cube]) + + +def test_get_oh_fix(): + """Test getting of fix.""" + fix = Fix.get_fixes("CMIP6", "EC-Earth3-AerChem", "AERmonZ", "oh") + assert Oh(None) in fix + + +def test_oh_fix_metadata(oh_cubes): + """Test ``fix_metadata`` for ``oh``.""" + vardef = get_var_info("CMIP6", "AERmonZ", "oh") + fix = Oh(vardef) + fixed_cubes = fix.fix_metadata(oh_cubes) + for coord in fixed_cubes[0].coords(): + if coord.var_name == "ps": + assert coord.standard_name == "surface_air_pressure" From fac7e1e0835ff36a69f0202de00dd82ab56511ec Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 16 Jan 2025 16:36:00 +0100 Subject: [PATCH 25/36] [pre-commit.ci] pre-commit autoupdate (#2632) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- esmvalcore/_recipe/check.py | 3 +-- esmvalcore/_recipe/recipe.py | 6 ++---- 
esmvalcore/_recipe/to_datasets.py | 6 ++---- esmvalcore/config/_dask.py | 3 +-- esmvalcore/esgf/_download.py | 3 +-- esmvalcore/preprocessor/_io.py | 3 +-- esmvalcore/preprocessor/_mask.py | 3 +-- esmvalcore/preprocessor/_regrid.py | 3 +-- esmvalcore/preprocessor/_regrid_esmpy.py | 3 +-- esmvalcore/preprocessor/_supplementary_vars.py | 3 +-- esmvalcore/preprocessor/_volume.py | 3 +-- tests/integration/esgf/test_search_download.py | 6 ++---- tests/integration/recipe/test_recipe.py | 6 ++---- tests/unit/esgf/test_download.py | 6 ++---- tests/unit/esgf/test_search.py | 3 +-- tests/unit/main/test_esmvaltool.py | 4 +--- 17 files changed, 22 insertions(+), 44 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 347ed69be0..cc511c98b2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.8.6" + rev: "v0.9.1" hooks: - id: ruff args: [--fix] diff --git a/esmvalcore/_recipe/check.py b/esmvalcore/_recipe/check.py index 5001a5d371..ba249cae65 100644 --- a/esmvalcore/_recipe/check.py +++ b/esmvalcore/_recipe/check.py @@ -38,8 +38,7 @@ def ncl_version(): ncl = which("ncl") if not ncl: raise RecipeError( - "Recipe contains NCL scripts, but cannot find " - "an NCL installation." + "Recipe contains NCL scripts, but cannot find an NCL installation." ) try: cmd = [ncl, "-V"] diff --git a/esmvalcore/_recipe/recipe.py b/esmvalcore/_recipe/recipe.py index f42463f5a7..0e41f36f3c 100644 --- a/esmvalcore/_recipe/recipe.py +++ b/esmvalcore/_recipe/recipe.py @@ -1195,8 +1195,7 @@ def run(self): self.tasks.run(max_parallel_tasks=self.session["max_parallel_tasks"]) logger.info( - "Wrote recipe with version numbers and wildcards " - "to:\nfile://%s", + "Wrote recipe with version numbers and wildcards to:\nfile://%s", filled_recipe, ) self.write_html_summary() @@ -1233,8 +1232,7 @@ def write_filled_recipe(self): with filename.open("w", encoding="utf-8") as file: yaml.safe_dump(recipe, file, sort_keys=False) logger.info( - "Wrote recipe with version numbers and wildcards " - "to:\nfile://%s", + "Wrote recipe with version numbers and wildcards to:\nfile://%s", filename, ) return filename diff --git a/esmvalcore/_recipe/to_datasets.py b/esmvalcore/_recipe/to_datasets.py index 2e814e6d8c..ce2bee37e9 100644 --- a/esmvalcore/_recipe/to_datasets.py +++ b/esmvalcore/_recipe/to_datasets.py @@ -423,8 +423,7 @@ def _dataset_from_files(dataset: Dataset) -> list[Dataset]: if any(_isglob(f) for f in dataset.facets.values()): logger.debug( - "Expanding dataset globs for dataset %s, " - "this may take a while..", + "Expanding dataset globs for dataset %s, this may take a while..", dataset.summary(shorten=True), ) @@ -562,8 +561,7 @@ def _get_input_datasets(dataset: Dataset) -> list[Dataset]: _fix_cmip5_fx_ensemble(input_dataset) if input_facets.get("optional") and not input_dataset.files: logger.info( - "Skipping: no data found for %s which is marked as " - "'optional'", + "Skipping: no data found for %s which is marked as 'optional'", input_dataset, ) else: diff --git a/esmvalcore/config/_dask.py b/esmvalcore/config/_dask.py index f9562d161b..f4c1c9aff4 100644 --- a/esmvalcore/config/_dask.py +++ b/esmvalcore/config/_dask.py @@ -61,8 +61,7 @@ def validate_dask_config(dask_config: Mapping) -> None: use = dask_config["use"] if not isinstance(profiles, Mapping): raise InvalidConfigParameter( - f"Key 'dask.profiles' needs to be a 
mapping, got " - f"{type(profiles)}" + f"Key 'dask.profiles' needs to be a mapping, got {type(profiles)}" ) for profile, profile_cfg in profiles.items(): has_scheduler_address = any( diff --git a/esmvalcore/esgf/_download.py b/esmvalcore/esgf/_download.py index 3d3fcd327c..756b319105 100644 --- a/esmvalcore/esgf/_download.py +++ b/esmvalcore/esgf/_download.py @@ -558,8 +558,7 @@ def download(files, dest_folder, n_jobs=4): ] if not files: logger.debug( - "All required data is available locally," - " not downloading anything." + "All required data is available locally, not downloading anything." ) return diff --git a/esmvalcore/preprocessor/_io.py b/esmvalcore/preprocessor/_io.py index 0c554c3d9a..2e817fe958 100644 --- a/esmvalcore/preprocessor/_io.py +++ b/esmvalcore/preprocessor/_io.py @@ -332,8 +332,7 @@ def _sort_cubes_by_time(cubes): raise ValueError(msg) from exc except TypeError as error: msg = ( - "Cubes cannot be sorted " - f"due to differing time units: {str(error)}" + f"Cubes cannot be sorted due to differing time units: {str(error)}" ) raise TypeError(msg) from error return cubes diff --git a/esmvalcore/preprocessor/_mask.py b/esmvalcore/preprocessor/_mask.py index 1f1d0ddc00..6e987fe357 100644 --- a/esmvalcore/preprocessor/_mask.py +++ b/esmvalcore/preprocessor/_mask.py @@ -254,8 +254,7 @@ def mask_glaciated(cube, mask_out: str = "glaciated"): ], ) logger.debug( - "Applying glaciated areas mask from Natural Earth" - " shapefile: \n%s", + "Applying glaciated areas mask from Natural Earth shapefile: \n%s", shapefiles[mask_out], ) else: diff --git a/esmvalcore/preprocessor/_regrid.py b/esmvalcore/preprocessor/_regrid.py index 5bbed48dcf..eaa8529688 100644 --- a/esmvalcore/preprocessor/_regrid.py +++ b/esmvalcore/preprocessor/_regrid.py @@ -322,8 +322,7 @@ def _spec_to_latlonvals( if step_longitude == 0: raise ValueError( - "Longitude step cannot be 0, " - f"got step_longitude={step_longitude}." + f"Longitude step cannot be 0, got step_longitude={step_longitude}." ) if (start_latitude < _LAT_MIN) or (end_latitude > _LAT_MAX): diff --git a/esmvalcore/preprocessor/_regrid_esmpy.py b/esmvalcore/preprocessor/_regrid_esmpy.py index b5da1e368c..0c34f7a762 100755 --- a/esmvalcore/preprocessor/_regrid_esmpy.py +++ b/esmvalcore/preprocessor/_regrid_esmpy.py @@ -315,8 +315,7 @@ def is_lon_circular(lon): seam = lon.bounds[1:-1, -1, (1, 2)] - lon.bounds[1:-1, 0, (0, 3)] else: raise NotImplementedError( - "AuxCoord longitude is higher " - "dimensional than 2d. Giving up." + "AuxCoord longitude is higher dimensional than 2d. Giving up." 
) circular = np.all(abs(seam) % 360.0 < 1.0e-3) else: diff --git a/esmvalcore/preprocessor/_supplementary_vars.py b/esmvalcore/preprocessor/_supplementary_vars.py index 4096036674..0c305dad37 100644 --- a/esmvalcore/preprocessor/_supplementary_vars.py +++ b/esmvalcore/preprocessor/_supplementary_vars.py @@ -63,8 +63,7 @@ def add_cell_measure(cube, cell_measure_cube, measure): """ if measure not in ["area", "volume"]: raise ValueError( - f"measure name must be 'area' or 'volume', " - f"got {measure} instead" + f"measure name must be 'area' or 'volume', got {measure} instead" ) measure = iris.coords.CellMeasure( cell_measure_cube.core_data(), diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index 4c2f7574d0..52ae6adff3 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -360,8 +360,7 @@ def axis_statistics( coord_dims = cube.coord_dims(coord) if len(coord_dims) > 1: raise NotImplementedError( - "axis_statistics not implemented for multidimensional " - "coordinates." + "axis_statistics not implemented for multidimensional coordinates." ) # For weighted operations, create a dummy weights coordinate using the diff --git a/tests/integration/esgf/test_search_download.py b/tests/integration/esgf/test_search_download.py index 5029d75b42..431418ee15 100644 --- a/tests/integration/esgf/test_search_download.py +++ b/tests/integration/esgf/test_search_download.py @@ -227,10 +227,8 @@ def test_real_search_many(): "tas_Amon_EC-EARTH_historical_r1i1p1_199001-199912.nc", ], [ - "tas_Amon_AWI-ESM-1-1-LR_historical_" - "r1i1p1f1_gn_200001-200012.nc", - "tas_Amon_AWI-ESM-1-1-LR_historical_" - "r1i1p1f1_gn_200101-200112.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_200001-200012.nc", + "tas_Amon_AWI-ESM-1-1-LR_historical_r1i1p1f1_gn_200101-200112.nc", ], [ "tas_Amon_CESM2_historical_r4i1p1f1_gn_185001-201412.nc", diff --git a/tests/integration/recipe/test_recipe.py b/tests/integration/recipe/test_recipe.py index 3077901c33..afa5bf8052 100644 --- a/tests/integration/recipe/test_recipe.py +++ b/tests/integration/recipe/test_recipe.py @@ -785,8 +785,7 @@ def test_recipe_iso_timerange( pr_product = pr_task.products.pop() filename = ( - "CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_" - f"pr_gn_{output_time}.nc" + f"CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_pr_gn_{output_time}.nc" ) assert pr_product.filename.name == filename @@ -831,8 +830,7 @@ def test_recipe_iso_timerange_as_dataset( assert len(task.products) == 1 product = task.products.pop() filename = ( - "CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_" - f"pr_gn_{output_time}.nc" + f"CMIP6_HadGEM3-GC31-LL_3hr_historical_r2i1p1f1_pr_gn_{output_time}.nc" ) assert product.filename.name == filename diff --git a/tests/unit/esgf/test_download.py b/tests/unit/esgf/test_download.py index 8af6d3057e..830d65d245 100644 --- a/tests/unit/esgf/test_download.py +++ b/tests/unit/esgf/test_download.py @@ -359,12 +359,10 @@ def test_merge_datasets(): ) dataset0 = ( - "cmip5.output1.FIO.FIO-ESM.historical." - "mon.atmos.Amon.r1i1p1.v20121010" + "cmip5.output1.FIO.FIO-ESM.historical.mon.atmos.Amon.r1i1p1.v20121010" ) dataset1 = ( - "cmip5.output1.FIO.fio-esm.historical." 
- "mon.atmos.Amon.r1i1p1.v20121010" + "cmip5.output1.FIO.fio-esm.historical.mon.atmos.Amon.r1i1p1.v20121010" ) cmip5_template = ( diff --git a/tests/unit/esgf/test_search.py b/tests/unit/esgf/test_search.py index 66d0551a8e..039a68dce3 100644 --- a/tests/unit/esgf/test_search.py +++ b/tests/unit/esgf/test_search.py @@ -158,8 +158,7 @@ def get_mock_connection(mocker, search_results): def test_esgf_search_files(mocker): # Set up some fake FileResults dataset_id = ( - "cmip5.output1.INM.inmcm4.historical" - ".mon.atmos.Amon.r1i1p1.v20130207" + "cmip5.output1.INM.inmcm4.historical.mon.atmos.Amon.r1i1p1.v20130207" ) dataset_id_template = ( "cmip5.%(product)s.%(valid_institute)s.%(model)s." diff --git a/tests/unit/main/test_esmvaltool.py b/tests/unit/main/test_esmvaltool.py index 03985363d7..c33847029f 100644 --- a/tests/unit/main/test_esmvaltool.py +++ b/tests/unit/main/test_esmvaltool.py @@ -254,9 +254,7 @@ def test_header( "Reading configuration files from:\nconfig_dir (SOURCE)" ) assert caplog.messages[7] == ( - "Writing program log files to:\n" - "path_to_log_file1\n" - "path_to_log_file2" + "Writing program log files to:\npath_to_log_file1\npath_to_log_file2" ) From 5c585da978482ec6292fbf9ef6e82191f45f5ac0 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 17 Jan 2025 13:08:01 +0100 Subject: [PATCH 26/36] Update the `esmvaltool` command welcome message (#2635) --- esmvalcore/_main.py | 67 ++++++++++++++++++--------------------------- 1 file changed, 26 insertions(+), 41 deletions(-) diff --git a/esmvalcore/_main.py b/esmvalcore/_main.py index 422908e464..47eae8c436 100755 --- a/esmvalcore/_main.py +++ b/esmvalcore/_main.py @@ -1,31 +1,26 @@ -"""ESMValTool - Earth System Model Evaluation Tool. - -http://www.esmvaltool.org - -CORE DEVELOPMENT TEAM AND CONTACTS: - Birgit Hassler (Co-PI; DLR, Germany - birgit.hassler@dlr.de) - Alistair Sellar (Co-PI; Met Office, UK - alistair.sellar@metoffice.gov.uk) - Bouwe Andela (Netherlands eScience Center, The Netherlands - b.andela@esciencecenter.nl) - Lee de Mora (PML, UK - ledm@pml.ac.uk) - Niels Drost (Netherlands eScience Center, The Netherlands - n.drost@esciencecenter.nl) - Veronika Eyring (DLR, Germany - veronika.eyring@dlr.de) - Bettina Gier (UBremen, Germany - gier@uni-bremen.de) - Remi Kazeroni (DLR, Germany - remi.kazeroni@dlr.de) - Nikolay Koldunov (AWI, Germany - nikolay.koldunov@awi.de) - Axel Lauer (DLR, Germany - axel.lauer@dlr.de) - Saskia Loosveldt-Tomas (BSC, Spain - saskia.loosveldt@bsc.es) - Ruth Lorenz (ETH Zurich, Switzerland - ruth.lorenz@env.ethz.ch) - Benjamin Mueller (LMU, Germany - b.mueller@iggf.geo.uni-muenchen.de) - Valeriu Predoi (URead, UK - valeriu.predoi@ncas.ac.uk) - Mattia Righi (DLR, Germany - mattia.righi@dlr.de) - Manuel Schlund (DLR, Germany - manuel.schlund@dlr.de) - Breixo Solino Fernandez (DLR, Germany - breixo.solinofernandez@dlr.de) - Javier Vegas-Regidor (BSC, Spain - javier.vegas@bsc.es) - Klaus Zimmermann (SMHI, Sweden - klaus.zimmermann@smhi.se) - -For further help, please read the documentation at -http://docs.esmvaltool.org. Have fun! -""" +"""Earth System Model Evaluation Tool + +A community tool for the evaluation of Earth system models. + +https://esmvaltool.org + +The Earth System Model Evaluation Tool (ESMValTool) is a community +diagnostics and performance metrics tool for the evaluation of Earth +System Models (ESMs) that allows for routine comparison of single or +multiple models, either against predecessor versions or against +observations. 
+ +Tutorial: https://tutorial.esmvaltool.org +Documentation: https://docs.esmvaltool.org +Contact: esmvaltool-dev@listserv.dfn.de + +If you find this software useful for your research, please cite it using +https://doi.org/10.5281/zenodo.3387139 for ESMValCore or +https://doi.org/10.5281/zenodo.3401363 for ESMValTool or +any of the reference papers listed at https://esmvaltool.org/references/. + +Have fun! +""" # noqa: D400 # pylint: disable=import-outside-toplevel from __future__ import annotations @@ -335,19 +330,9 @@ def show(recipe): class ESMValTool: - """A community tool for routine evaluation of Earth system models. - - The Earth System Model Evaluation Tool (ESMValTool) is a community - diagnostics and performance metrics tool for the evaluation of Earth - System Models (ESMs) that allows for routine comparison of single or - multiple models, either against predecessor versions or against - observations. - - Documentation is available at https://docs.esmvaltool.org. - - To report issues or ask for improvements, please visit - https://github.com/ESMValGroup/ESMValTool. - """ + # This is the `esmvaltool` command. The line below shows the documentation + # at the top of this module when users run e.g. `esmvaltool -- --help`. + __doc__ = __doc__ def __init__(self): self.config = Config() From 04e0cbd85db9440c5911ffbf33e3222e0d8ad12b Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 17 Jan 2025 13:26:59 +0100 Subject: [PATCH 27/36] Only save data from one preprocessing task at a time with the Distributed scheduler (#2610) Co-authored-by: Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- esmvalcore/_task.py | 95 +++++++++++-------- esmvalcore/preprocessor/__init__.py | 18 +++- .../preprocessor/test_preprocessing_task.py | 13 ++- tests/integration/test_task.py | 14 ++- 4 files changed, 95 insertions(+), 45 deletions(-) diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index 27a6b83d14..cb9269b087 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -5,6 +5,7 @@ import datetime import importlib import logging +import multiprocessing import numbers import os import pprint @@ -14,7 +15,6 @@ import threading import time from copy import deepcopy -from multiprocessing import Pool from pathlib import Path, PosixPath from shutil import which from typing import Optional @@ -260,6 +260,7 @@ def __init__(self, ancestors=None, name="", products=None): self.name = name self.activity = None self.priority = 0 + self.scheduler_lock = None def initialize_provenance(self, recipe_entity): """Initialize task provenance activity.""" @@ -854,45 +855,60 @@ def done(task): """Assume a task is done if it not scheduled or running.""" return not (task in scheduled or task in running) - with Pool(processes=max_parallel_tasks) as pool: - while scheduled or running: - # Submit new tasks to pool - for task in sorted(scheduled, key=lambda t: t.priority): - if len(running) >= max_parallel_tasks: - break - if all(done(t) for t in task.ancestors): - future = pool.apply_async( - _run_task, [task, scheduler_address] + with multiprocessing.Manager() as manager: + # Use a lock to avoid overloading the Dask workers by making only + # one :class:`esmvalcore.preprocessor.PreprocessingTask` submit its + # data save task graph to the distributed scheduler at a time. + # + # See https://github.com/ESMValGroup/ESMValCore/issues/2609 for + # additional detail. 
+ scheduler_lock = ( + None if scheduler_address is None else manager.Lock() + ) + + with multiprocessing.Pool(processes=max_parallel_tasks) as pool: + while scheduled or running: + # Submit new tasks to pool + for task in sorted(scheduled, key=lambda t: t.priority): + if len(running) >= max_parallel_tasks: + break + if all(done(t) for t in task.ancestors): + future = pool.apply_async( + _run_task, + [task, scheduler_address, scheduler_lock], + ) + running[task] = future + scheduled.remove(task) + + # Handle completed tasks + ready = {t for t in running if running[t].ready()} + for task in ready: + _copy_results(task, running[task]) + running.pop(task) + + # Wait if there are still tasks running + if running: + time.sleep(0.1) + + # Log progress message + if ( + len(scheduled) != n_scheduled + or len(running) != n_running + ): + n_scheduled, n_running = len(scheduled), len(running) + n_done = n_tasks - n_scheduled - n_running + logger.info( + "Progress: %s tasks running, %s tasks waiting for " + "ancestors, %s/%s done", + n_running, + n_scheduled, + n_done, + n_tasks, ) - running[task] = future - scheduled.remove(task) - - # Handle completed tasks - ready = {t for t in running if running[t].ready()} - for task in ready: - _copy_results(task, running[task]) - running.pop(task) - - # Wait if there are still tasks running - if running: - time.sleep(0.1) - - # Log progress message - if len(scheduled) != n_scheduled or len(running) != n_running: - n_scheduled, n_running = len(scheduled), len(running) - n_done = n_tasks - n_scheduled - n_running - logger.info( - "Progress: %s tasks running, %s tasks waiting for " - "ancestors, %s/%s done", - n_running, - n_scheduled, - n_done, - n_tasks, - ) - logger.info("Successfully completed all tasks.") - pool.close() - pool.join() + logger.info("Successfully completed all tasks.") + pool.close() + pool.join() def _copy_results(task, future): @@ -900,7 +916,7 @@ def _copy_results(task, future): task.output_files, task.products = future.get() -def _run_task(task, scheduler_address): +def _run_task(task, scheduler_address, scheduler_lock): """Run task and return the result.""" if scheduler_address is None: client = contextlib.nullcontext() @@ -908,6 +924,7 @@ def _run_task(task, scheduler_address): client = Client(scheduler_address) with client: + task.scheduler_lock = scheduler_lock output_files = task.run() return output_files, task.products diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 2c956aa0ad..6ba0d7c946 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -736,9 +736,23 @@ def _run(self, _) -> list[str]: delayed = product.close() delayeds.append(delayed) - logger.info("Computing and saving data for task %s", self.name) delayeds = [d for d in delayeds if d is not None] - _compute_with_progress(delayeds, description=self.name) + + if self.scheduler_lock is not None: + logger.debug("Acquiring save lock for task %s", self.name) + self.scheduler_lock.acquire() + logger.debug("Acquired save lock for task %s", self.name) + try: + logger.info( + "Computing and saving data for preprocessing task %s", + self.name, + ) + _compute_with_progress(delayeds, description=self.name) + finally: + if self.scheduler_lock is not None: + self.scheduler_lock.release() + logger.debug("Released save lock for task %s", self.name) + metadata_files = write_metadata( self.products, self.write_ncl_interface ) diff --git a/tests/integration/preprocessor/test_preprocessing_task.py 
b/tests/integration/preprocessor/test_preprocessing_task.py index 5b74a94cda..43dc7af6a6 100644 --- a/tests/integration/preprocessor/test_preprocessing_task.py +++ b/tests/integration/preprocessor/test_preprocessing_task.py @@ -2,6 +2,7 @@ import iris import iris.cube +import pytest from prov.model import ProvDocument import esmvalcore.preprocessor @@ -9,7 +10,8 @@ from esmvalcore.preprocessor import PreprocessingTask, PreprocessorFile -def test_load_save_task(tmp_path): +@pytest.mark.parametrize("scheduler_lock", [False, True]) +def test_load_save_task(tmp_path, mocker, scheduler_lock): """Test that a task that just loads and saves a file.""" # Prepare a test dataset cube = iris.cube.Cube(data=[273.0], var_name="tas", units="K") @@ -36,6 +38,9 @@ def test_load_save_task(tmp_path): activity = provenance.activity("software:esmvalcore") task.initialize_provenance(activity) + if scheduler_lock: + task.scheduler_lock = mocker.Mock() + task.run() assert len(task.products) == 1 @@ -45,6 +50,12 @@ def test_load_save_task(tmp_path): result.attributes.clear() assert result == cube + if scheduler_lock: + task.scheduler_lock.acquire.assert_called_once_with() + task.scheduler_lock.release.assert_called_once_with() + else: + assert task.scheduler_lock is None + def test_load_save_and_other_task(tmp_path, monkeypatch): """Test that a task just copies one file and preprocesses another file.""" diff --git a/tests/integration/test_task.py b/tests/integration/test_task.py index 9570ec8e58..d8fec5a416 100644 --- a/tests/integration/test_task.py +++ b/tests/integration/test_task.py @@ -92,7 +92,9 @@ def test_run_tasks(monkeypatch, max_parallel_tasks, example_tasks, mpmethod): get_distributed_client_mock(None), ) monkeypatch.setattr( - esmvalcore._task, "Pool", multiprocessing.get_context(mpmethod).Pool + esmvalcore._task.multiprocessing, + "Pool", + multiprocessing.get_context(mpmethod).Pool, ) example_tasks.run(max_parallel_tasks=max_parallel_tasks) @@ -152,7 +154,7 @@ def _run(self, input_files): return [f"{self.name}_test.nc"] monkeypatch.setattr(MockBaseTask, "_run", _run) - monkeypatch.setattr(esmvalcore._task, "Pool", ThreadPool) + monkeypatch.setattr(esmvalcore._task.multiprocessing, "Pool", ThreadPool) runner(example_tasks) print(order) @@ -165,11 +167,17 @@ def test_run_task(mocker, address): # Set up mock Dask distributed client mocker.patch.object(esmvalcore._task, "Client") + # Set up a mock multiprocessing.Lock + scheduler_lock = mocker.sentinel + task = mocker.create_autospec(DiagnosticTask, instance=True) task.products = mocker.Mock() - output_files, products = _run_task(task, scheduler_address=address) + output_files, products = _run_task( + task, scheduler_address=address, scheduler_lock=scheduler_lock + ) assert output_files == task.run.return_value assert products == task.products + assert task.scheduler_lock == scheduler_lock if address is None: esmvalcore._task.Client.assert_not_called() else: From 0dce90cb34b4c0db9e1c23ce37a0f5485629056a Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Fri, 17 Jan 2025 16:22:21 +0100 Subject: [PATCH 28/36] Use better defaults when using ``max_parallel_tasks`` with an unconfigured threaded scheduler (#2626) Co-authored-by: Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- doc/quickstart/configure.rst | 12 +++++-- esmvalcore/_task.py | 54 +++++++++++++++++++++++++++++-- tests/unit/task/test_taskset.py | 57 +++++++++++++++++++++++++++++++++ 3 files changed, 119 insertions(+), 4 deletions(-) create mode 100644 tests/unit/task/test_taskset.py 
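The change below boils down to a simple heuristic: when the threaded Dask scheduler is in use and the user has not configured ``num_workers``, divide the available CPU cores over the preprocessing tasks that will run in parallel. A minimal standalone sketch of that heuristic is included here for reference only; the function name ``pick_num_workers`` and the example task counts are illustrative and not part of the patch, whose actual implementation is ``TaskSet._get_dask_config`` in the diff that follows.

import os

import dask


def pick_num_workers(max_parallel_tasks: int, n_preproc_tasks: int) -> int | None:
    """Sketch of the worker-count heuristic (assumed helper, not in the patch)."""
    if dask.config.get("scheduler", "threads") not in ("threads", "threading"):
        # Only the threaded scheduler is affected; distributed or synchronous
        # schedulers are left untouched.
        return None
    if dask.config.get("num_workers", None) is not None:
        # Respect an explicit user configuration.
        return None
    if n_preproc_tasks == 0:
        # Nothing will be computed with Dask, so no override is needed.
        return None

    # Prefer sched_getaffinity, which respects CPU pinning; it is unavailable
    # on macOS, where os.cpu_count() is used instead.
    if hasattr(os, "sched_getaffinity"):
        cores = len(os.sched_getaffinity(0))
    else:
        cores = os.cpu_count() or 1

    # One threaded scheduler runs per parallel preprocessing task, so split
    # the cores between them.
    n_schedulers = min(n_preproc_tasks, max_parallel_tasks)
    return max(1, round(cores / n_schedulers))


if __name__ == "__main__":
    n = pick_num_workers(max_parallel_tasks=4, n_preproc_tasks=10)
    if n is not None:
        with dask.config.set({"num_workers": n}):
            pass  # run the preprocessing tasks here

With ``max_parallel_tasks * num_workers`` kept close to the core count, the worker processes no longer oversubscribe the CPU, which is what the new footnote in configure.rst recommends.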
diff --git a/doc/quickstart/configure.rst b/doc/quickstart/configure.rst index 5fee34db5b..a85e85e535 100644 --- a/doc/quickstart/configure.rst +++ b/doc/quickstart/configure.rst @@ -178,7 +178,7 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's | | :ref:`running`. | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_parallel_tasks`` | Maximum number of parallel processes, | :obj:`int` | ``None`` (number of available CPUs) | -| | see also :ref:`task_priority`. | | | +| | see :ref:`task_priority`. [#f5]_ | | | +-------------------------------+----------------------------------------+-----------------------------+----------------------------------------+ | ``max_years`` | Maximum number of years to use, see | :obj:`int` | ``None`` (all years from recipe) | | | :ref:`running`. | | | @@ -272,7 +272,15 @@ For example, Python's ``None`` is YAML's ``null``, Python's ``True`` is YAML's found on ESGF is newer than the local data (if any) or the user specifies a version of the data that is available only from the ESGF, then that data will be downloaded; otherwise, local data will be used. - +.. [#f5] When using ``max_parallel_tasks`` with a value larger than 1 with the + Dask threaded scheduler, every task will start ``num_workers`` threads. + To avoid running out of memory or slowing down computations due to competition + for resources, it is recommended to set ``num_workers`` such that + ``max_parallel_tasks * num_workers`` approximately equals the number of CPU cores. + The number of available CPU cores can be found by running + ``python -c 'import os; print(len(os.sched_getaffinity(0)))'``. + See :ref:`config-dask-threaded-scheduler` for information on how to configure + ``num_workers``. .. _config-dask: diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index cb9269b087..146e1076cf 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -19,6 +19,7 @@ from shutil import which from typing import Optional +import dask import psutil import yaml from distributed import Client @@ -836,6 +837,50 @@ def _run_sequential(self) -> None: for task in sorted(tasks, key=lambda t: t.priority): task.run() + def _get_dask_config(self, max_parallel_tasks: int) -> dict: + """Configure the threaded Dask scheduler. + + Configure the threaded Dask scheduler to use a reasonable number + of threads when the user has not done so. We will run multiple + processes, each of which will start its own scheduler with + `num_workers` threads. To avoid too much parallelism, we would like to + create n_threads = n_cpu_cores / n_processes. + """ + # pylint: disable=import-outside-toplevel + from esmvalcore.preprocessor import PreprocessingTask + + if dask.config.get("scheduler", "threads") not in ( + "threads", + "threading", + ): + # No need to do anything when not using the threaded scheduler + # https://github.com/dask/dask/blob/3504bcc89f7a937b2d48306a17b8eeff57b1e5ae/dask/base.py#L1027-L1050 + return {} + if dask.config.get("num_workers", None) is not None: + # No need to do anything when the user has configured "num_workers". + return {} + + n_preproc_tasks = sum( + isinstance(t, PreprocessingTask) for t in self.flatten() + ) + if n_preproc_tasks == 0: + # No need to do anything when we are not running PreprocessingTasks. 
+ return {} + + n_available_cpu_cores = len(os.sched_getaffinity(0)) + n_threaded_dask_schedulers = min(n_preproc_tasks, max_parallel_tasks) + n_workers = max( + 1, round(n_available_cpu_cores / n_threaded_dask_schedulers) + ) + logger.info( + "Using the threaded Dask scheduler with %s worker threads per " + "preprocessing task. " + "See https://docs.esmvaltool.org/projects/ESMValCore/en/" + "latest/quickstart/configure.html#f5 for more information.", + n_workers, + ) + return {"num_workers": n_workers} + def _run_parallel(self, scheduler_address, max_parallel_tasks): """Run tasks in parallel.""" scheduled = self.flatten() @@ -845,12 +890,14 @@ def _run_parallel(self, scheduler_address, max_parallel_tasks): n_running = 0 if max_parallel_tasks is None: - max_parallel_tasks = os.cpu_count() + max_parallel_tasks = len(os.sched_getaffinity(0)) max_parallel_tasks = min(max_parallel_tasks, n_tasks) logger.info( "Running %s tasks using %s processes", n_tasks, max_parallel_tasks ) + dask_config = self._get_dask_config(max_parallel_tasks) + def done(task): """Assume a task is done if it not scheduled or running.""" return not (task in scheduled or task in running) @@ -866,7 +913,10 @@ def done(task): None if scheduler_address is None else manager.Lock() ) - with multiprocessing.Pool(processes=max_parallel_tasks) as pool: + with ( + dask.config.set(dask_config), + multiprocessing.Pool(processes=max_parallel_tasks) as pool, + ): while scheduled or running: # Submit new tasks to pool for task in sorted(scheduled, key=lambda t: t.priority): diff --git a/tests/unit/task/test_taskset.py b/tests/unit/task/test_taskset.py new file mode 100644 index 0000000000..a244b72430 --- /dev/null +++ b/tests/unit/task/test_taskset.py @@ -0,0 +1,57 @@ +import dask +import pytest + +from esmvalcore import _task +from esmvalcore.preprocessor import PreprocessingTask + + +@pytest.mark.parametrize( + "max_parallel_tasks,available_cpu_cores,n_preproc_tasks,scheduler,expected_workers", + [ + (8, 128, 100, "distributed", None), # not using threaded scheduler + (8, 128, 0, "threads", None), # not running preproc tasks + (8, 128, 100, "threads", 16), + (4, 20, 4, "threading", 5), # alternative name for threaded scheduler + (2, 4, 3, "threads", 2), + (2, 4, 3, "threads", 2), + (4, 4, 5, "threads", 1), + (4, 4, 2, "threads", 2), + ], +) +def test_taskset_get_dask_config( + mocker, + max_parallel_tasks: int, + available_cpu_cores: int, + n_preproc_tasks: int, + scheduler: str, + expected_workers: int | None, +) -> None: + mocker.patch.object( + _task.os, + "sched_getaffinity", + return_value=set(range(available_cpu_cores)), + ) + + tasks = _task.TaskSet( + { + PreprocessingTask([], name=f"test{i}") + for i in range(n_preproc_tasks) + } + ) + + with dask.config.set({"num_workers": None, "scheduler": scheduler}): + config = tasks._get_dask_config(max_parallel_tasks=max_parallel_tasks) + + if expected_workers is None: + assert config == {} + else: + assert config == {"num_workers": expected_workers} + + +def test_taskset_get_dask_config_noop(mocker) -> None: + tasks = _task.TaskSet() + + with dask.config.set({"num_workers": 4, "scheduler": "threads"}): + config = tasks._get_dask_config(max_parallel_tasks=2) + + assert config == {} From 865ea7dbc12b5b8802cd73100613c7b4ff6fed7a Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Mon, 20 Jan 2025 10:39:28 +0100 Subject: [PATCH 29/36] More reliable datasets to recipe conversion (#2472) Co-authored-by: Valeriu Predoi --- esmvalcore/_recipe/from_datasets.py | 15 ++++++----- 
tests/unit/recipe/test_from_datasets.py | 33 ++++++++++++++++--------- 2 files changed, 31 insertions(+), 17 deletions(-) diff --git a/esmvalcore/_recipe/from_datasets.py b/esmvalcore/_recipe/from_datasets.py index f68bd9e096..76cf368a40 100644 --- a/esmvalcore/_recipe/from_datasets.py +++ b/esmvalcore/_recipe/from_datasets.py @@ -5,9 +5,10 @@ import itertools import logging import re +from collections.abc import Iterable, Mapping, Sequence from functools import partial from pathlib import Path -from typing import TYPE_CHECKING, Any, Dict, Iterable, Mapping, Sequence +from typing import TYPE_CHECKING, Any, Dict from nested_lookup import nested_delete @@ -102,11 +103,13 @@ def _move_datasets_up(recipe: Recipe) -> Recipe: def _to_frozen(item): - """Return a frozen and sorted copy of nested dicts and lists.""" - if isinstance(item, list): - return tuple(sorted(_to_frozen(elem) for elem in item)) - if isinstance(item, dict): - return tuple(sorted((k, _to_frozen(v)) for k, v in item.items())) + """Return a frozen copy of nested dicts and lists.""" + if isinstance(item, str): + return item + if isinstance(item, Mapping): + return frozenset((k, _to_frozen(v)) for k, v in item.items()) + if isinstance(item, Iterable): + return frozenset(_to_frozen(elem) for elem in item) return item diff --git a/tests/unit/recipe/test_from_datasets.py b/tests/unit/recipe/test_from_datasets.py index 599119195a..bc4c714088 100644 --- a/tests/unit/recipe/test_from_datasets.py +++ b/tests/unit/recipe/test_from_datasets.py @@ -23,24 +23,35 @@ def test_to_frozen(): "d", "c", ], + "bb": "dc", }, } result = _to_frozen(data) - expected = ( - ( - "a", + expected = frozenset( + { ( - ( - "b", - ( - "c", - "d", - ), + "a", + frozenset( + { + ( + "b", + frozenset( + { + "c", + "d", + } + ), + ), + ( + "bb", + "dc", + ), + } ), ), - ), - ("abc", "x"), + ("abc", "x"), + } ) assert result == expected From c1133a8f5a3abdfaee431f1d47af63a2a5bf0835 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Mon, 20 Jan 2025 12:13:16 +0100 Subject: [PATCH 30/36] Avoid mutating the input cubes when building the combined cube in preprocessor function ``multi_model_statistics`` (#2564) Co-authored-by: Manuel Schlund <32543114+schlunma@users.noreply.github.com> --- esmvalcore/preprocessor/_multimodel.py | 1 + 1 file changed, 1 insertion(+) diff --git a/esmvalcore/preprocessor/_multimodel.py b/esmvalcore/preprocessor/_multimodel.py index 56cea1e936..f693f1ee30 100644 --- a/esmvalcore/preprocessor/_multimodel.py +++ b/esmvalcore/preprocessor/_multimodel.py @@ -405,6 +405,7 @@ def _combine(cubes): # Equalise some metadata that can cause merge to fail (in-place) # https://scitools-iris.readthedocs.io/en/stable/userguide/ # merge_and_concat.html#common-issues-with-merge-and-concatenate + cubes = [cube.copy() for cube in cubes] equalise_attributes(cubes) _equalise_var_metadata(cubes) _equalise_cell_methods(cubes) From 770ca03efc908d51864240580ec3340209e110d3 Mon Sep 17 00:00:00 2001 From: Bouwe Andela Date: Tue, 21 Jan 2025 12:50:51 +0100 Subject: [PATCH 31/36] Fix OSX compatibility (#2636) --- esmvalcore/_task.py | 14 +++++++++-- tests/unit/task/test_available_cpu_count.py | 28 +++++++++++++++++++++ tests/unit/task/test_taskset.py | 4 +-- 3 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 tests/unit/task/test_available_cpu_count.py diff --git a/esmvalcore/_task.py b/esmvalcore/_task.py index 146e1076cf..b25ea7481b 100644 --- a/esmvalcore/_task.py +++ b/esmvalcore/_task.py @@ -785,6 +785,16 @@ def __repr__(self): return string +def 
available_cpu_count() -> int: + """Return the number of available CPU cores.""" + if hasattr(os, "sched_getaffinity"): + # Not available on OSX. + return len(os.sched_getaffinity(0)) + if count := os.cpu_count(): + return count + return 1 + + class TaskSet(set): """Container for tasks.""" @@ -867,7 +877,7 @@ def _get_dask_config(self, max_parallel_tasks: int) -> dict: # No need to do anything when we are not running PreprocessingTasks. return {} - n_available_cpu_cores = len(os.sched_getaffinity(0)) + n_available_cpu_cores = available_cpu_count() n_threaded_dask_schedulers = min(n_preproc_tasks, max_parallel_tasks) n_workers = max( 1, round(n_available_cpu_cores / n_threaded_dask_schedulers) @@ -890,7 +900,7 @@ def _run_parallel(self, scheduler_address, max_parallel_tasks): n_running = 0 if max_parallel_tasks is None: - max_parallel_tasks = len(os.sched_getaffinity(0)) + max_parallel_tasks = available_cpu_count() max_parallel_tasks = min(max_parallel_tasks, n_tasks) logger.info( "Running %s tasks using %s processes", n_tasks, max_parallel_tasks diff --git a/tests/unit/task/test_available_cpu_count.py b/tests/unit/task/test_available_cpu_count.py new file mode 100644 index 0000000000..7edbf6823b --- /dev/null +++ b/tests/unit/task/test_available_cpu_count.py @@ -0,0 +1,28 @@ +import pytest + +from esmvalcore import _task + + +def test_available_cpu_count_linux(mocker): + mocker.patch.object(_task, "os") + _task.os.sched_getaffinity.return_value = {0, 1} + result = _task.available_cpu_count() + assert result == 2 + _task.os.sched_getaffinity.assert_called_once_with(0) + + +@pytest.mark.parametrize( + "cpu_count,expected", + [ + (None, 1), + (2, 2), + ], +) +def test_available_cpu_count_osx(mocker, cpu_count, expected): + mocker.patch.object(_task, "os") + if hasattr(_task.os, "sched_getaffinity"): + del _task.os.sched_getaffinity + _task.os.cpu_count.return_value = cpu_count + result = _task.available_cpu_count() + assert result == expected + _task.os.cpu_count.assert_called_once_with() diff --git a/tests/unit/task/test_taskset.py b/tests/unit/task/test_taskset.py index a244b72430..ccb93d05c6 100644 --- a/tests/unit/task/test_taskset.py +++ b/tests/unit/task/test_taskset.py @@ -27,9 +27,7 @@ def test_taskset_get_dask_config( expected_workers: int | None, ) -> None: mocker.patch.object( - _task.os, - "sched_getaffinity", - return_value=set(range(available_cpu_cores)), + _task, "available_cpu_count", return_value=available_cpu_cores ) tasks = _task.TaskSet( From 1035ebba36c0d68c00571230daadcd39faae1132 Mon Sep 17 00:00:00 2001 From: "github-actions[bot]" <41898282+github-actions[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 11:51:52 +0000 Subject: [PATCH 32/36] [Condalock] Update Linux condalock file (#2639) Co-authored-by: valeriupredoi --- conda-linux-64.lock | 219 ++++++++++++++++++++++---------------------- 1 file changed, 109 insertions(+), 110 deletions(-) diff --git a/conda-linux-64.lock b/conda-linux-64.lock index 0b8873c723..15ccba6a10 100644 --- a/conda-linux-64.lock +++ b/conda-linux-64.lock @@ -8,9 +8,9 @@ https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2#34893075a5c9e55cdafac56607368fc6 https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2#4d59c254e01d9cde7957100457e2d5fb https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-h77eed37_3.conda#49023d73832ef61042f6a237cb2687e7 
-https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6.1-ha770c72_0.conda#e94dd7479ba12963364d855fb23cce4f +https://conda.anaconda.org/conda-forge/linux-64/pandoc-3.6.2-ha770c72_0.conda#4ded4ab71d9fd3764d796a23ca3e722b https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.13-5_cp313.conda#381bbd2a92c863f640a55b6ff3c35161 -https://conda.anaconda.org/conda-forge/noarch/tzdata-2024b-hc8b5060_0.conda#8ac3367aafb1cc0a068483c580af8015 +https://conda.anaconda.org/conda-forge/noarch/tzdata-2025a-h78e105d_0.conda#dbcace4706afdfb7eb891f7b37d07c04 https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2#f766549260d6815b0c52253f1fb1bb29 https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.43-h712a8e2_2.conda#048b02e3962f066da18efe3a21b77672 https://conda.anaconda.org/conda-forge/linux-64/libglvnd-1.7.0-ha4b6fd6_2.conda#434ca7e50e40f4918ab701e3facd59a0 @@ -29,9 +29,10 @@ https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-14.2.0-h69a702a_1.cond https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-14.2.0-hd5240d6_1.conda#9822b874ea29af082e5d36098d25427d https://conda.anaconda.org/conda-forge/linux-64/liblzma-5.6.3-hb9d3cd8_1.conda#2ecf2f1c7e4e21fcfe6423a51a992d84 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-14.2.0-hc0a3c3a_1.conda#234a5554c53625688d51062645337328 -https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.9.0-hb9d3cd8_1.conda#1e936bd23d737aac62a18e9a1e7f8b18 +https://conda.anaconda.org/conda-forge/linux-64/libutf8proc-2.10.0-h4c51ac1_0.conda#aeccfff2806ae38430638ffbb4be9610 https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.5.0-h851e524_0.conda#63f790534398730f59e1b899c3644d4a https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.3.1-hb9d3cd8_2.conda#edb0dca6bc32e4f4789199455a1dbeb8 +https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-h2d0b736_2.conda#04b34b9a40cdc48cfdab261ab176ff74 https://conda.anaconda.org/conda-forge/linux-64/openssl-3.4.0-h7b32b05_1.conda#4ce6875f75469b2757a65e10a5d05e31 https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-hb9d3cd8_1002.conda#b3c17d95b5a10c6e64a21fa17573e70e https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.2-hb9d3cd8_0.conda#fb901ff28063514abb6046c9ec2c4a45 @@ -39,7 +40,7 @@ https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.12-hb9d3cd8_0.co https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.5-hb9d3cd8_0.conda#8035c64cb77ed555e3f150b7b3972480 https://conda.anaconda.org/conda-forge/linux-64/aws-c-cal-0.8.1-h1a47875_3.conda#55a8561fdbbbd34f50f57d9be12ed084 https://conda.anaconda.org/conda-forge/linux-64/aws-c-compression-0.3.0-h4e1184b_5.conda#3f4c1197462a6df2be6dc8241828fe93 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.1-h4e1184b_4.conda#a5126a90e74ac739b00564a4c7ddcc36 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-sdkutils-0.2.2-h4e1184b_0.conda#dcd498d493818b776a77fbc242fbf8e4 https://conda.anaconda.org/conda-forge/linux-64/aws-checksums-0.2.2-h4e1184b_4.conda#74e8c3e4df4ceae34aa2959df4b28101 https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h4bc722e_7.conda#62ee74e96c5ebb0af99386de58cf9553 https://conda.anaconda.org/conda-forge/linux-64/dav1d-1.2.1-hd590300_0.conda#418c6ca5929a611cbd69204907a83995 @@ -51,6 +52,7 @@ https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz https://conda.anaconda.org/conda-forge/linux-64/libabseil-20240722.0-cxx17_hbbce691_4.conda#488f260ccda0afaf08acb286db439c2f 
https://conda.anaconda.org/conda-forge/linux-64/libbrotlidec-1.1.0-hb9d3cd8_2.conda#9566f0bd264fbd463002e759b8a82401 https://conda.anaconda.org/conda-forge/linux-64/libbrotlienc-1.1.0-hb9d3cd8_2.conda#06f70867945ea6a84d35836af780f1de +https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-hd590300_2.conda#172bf1cd1ff8629f2b1179945ed45055 https://conda.anaconda.org/conda-forge/linux-64/libevent-2.1.12-hf998b51_1.conda#a1cfcc585f0c42bf8d5546bb1dfb668d https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2#d645c6d2ac96843a2bfaccd2d62b3ac3 @@ -62,7 +64,7 @@ https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.1-hd590300_0.conda#30 https://conda.anaconda.org/conda-forge/linux-64/libpciaccess-0.18-hd590300_0.conda#48f4330bfcd959c3cfb704d424903c82 https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.45-h943b412_0.conda#85cbdaacad93808395ac295b5667d25b https://conda.anaconda.org/conda-forge/linux-64/libsodium-1.0.20-h4ab18f5_0.conda#a587892d3c13b6621a6091be690dbca2 -https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.47.2-hee588c1_0.conda#b58da17db24b6e08bcbf8fed2fb8c915 +https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.48.0-hee588c1_0.conda#84bd1c9a82b455e7a2f390375fb38f90 https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.1-hf672d98_0.conda#be2de152d8073ef1c01b7728475f2fe7 https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-14.2.0-h4852527_1.conda#8371ac6457591af2cf6159439c1fd051 https://conda.anaconda.org/conda-forge/linux-64/libudunits2-2.2.28-h40f5838_3.conda#4bdace082e911a3e1f1f0b721bed5b56 @@ -70,10 +72,10 @@ https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda# https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.17.0-h8a09558_0.conda#92ed62436b625154323d40d5f2f11dd7 https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.10.0-h5888daf_1.conda#9de5350a85c4a20c685259b889aa6393 https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-hd590300_1001.conda#ec7398d21e2651e0dcb0044d03b9a339 -https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.5-he02047a_1.conda#70caf8bb6cf39a0b6b7efc885f51c0fe https://conda.anaconda.org/conda-forge/linux-64/pixman-0.44.2-h29eaf8c_0.conda#5e2a7acfa2c24188af39e7944e1b3604 https://conda.anaconda.org/conda-forge/linux-64/rav1e-0.6.6-he8a937b_2.conda#77d9955b4abddb811cb8ab1aa7d743e4 -https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.10-hb5b8611_0.conda#999f3673f2a011f59287f2969e3749e4 +https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/s2n-1.5.11-h072c03f_0.conda#5e8060d52f676a40edef0006a75c718f https://conda.anaconda.org/conda-forge/linux-64/snappy-1.2.1-h8bd8927_1.conda#3b3e64af585eadfb52bb90b553db5edf https://conda.anaconda.org/conda-forge/linux-64/svt-av1-2.3.0-h5888daf_0.conda#355898d24394b2af353eb96358db9fdd https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.13-noxft_h4845f30_101.conda#d453b98d9c83e71da0741bb0ff4d76bc @@ -81,19 +83,19 @@ https://conda.anaconda.org/conda-forge/linux-64/xxhash-0.8.2-hd590300_0.conda#f0 https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2#4cb3ad778ec2d5a7acbdf254eb1c42ae https://conda.anaconda.org/conda-forge/linux-64/zlib-1.3.1-hb9d3cd8_2.conda#c9f075ab2f33b3bbee9e62d4ad0a6cd8 
https://conda.anaconda.org/conda-forge/linux-64/aom-3.9.1-hac33072_0.conda#346722a0be40f6edc53f12640d301338 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-h831e299_5.conda#80dd9f0ddf935290d1dc00ec75ff3023 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-io-0.15.3-h173a860_6.conda#9a063178f1af0a898526cc24ba7be486 https://conda.anaconda.org/conda-forge/linux-64/brotli-bin-1.1.0-hb9d3cd8_2.conda#c63b5e52939e795ba8d26e35d767a843 https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-h267a509_2.conda#9ae35c3d96db2c94ce0cef86efdfa2cb https://conda.anaconda.org/conda-forge/linux-64/glog-0.7.1-hbabe93e_0.conda#ff862eebdfeb2fd048ae9dc92510baca https://conda.anaconda.org/conda-forge/linux-64/graphite2-1.3.13-h59595ed_1003.conda#f87c7b7c2cb45f323ffbce941c78ab7c https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h2a13503_7.conda#bd77f8da987968ec3927990495dc22e4 https://conda.anaconda.org/conda-forge/linux-64/icu-75.1-he02047a_0.conda#8b189310083baabfb622af68fd9d3ae3 +https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2#76bbff344f0134279f225174e9064c8f https://conda.anaconda.org/conda-forge/linux-64/libaec-1.1.3-h59595ed_0.conda#5e97e271911b8b2001a8b71860c32faa https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2#c965a5aa0d5c1c37ffc62dff36e28400 https://conda.anaconda.org/conda-forge/linux-64/libde265-1.0.15-h00ab1b0_0.conda#407fee7a5d7ab2dca12c9ca7f62310ad https://conda.anaconda.org/conda-forge/linux-64/libdrm-2.4.124-hb9d3cd8_0.conda#8bc89311041d7fcb510238cf0848ccae -https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20240808-pl5321h7949ede_0.conda#8247f80f3dc464d9322e85007e307fe8 https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.64.0-h161d5f1_0.conda#19e57602824042dfd0446292ef90488b https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.28-pthreads_h94d23a6_1.conda#62857b389e42b36b686331bec0922050 https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-5.28.3-h6128344_1.conda#d8703f1ffe5a06356f06467f1d0b9464 @@ -102,50 +104,29 @@ https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-h97f6797_17.cond https://conda.anaconda.org/conda-forge/linux-64/libthrift-0.21.0-h0e7cc3e_0.conda#dcb95c0a98ba9ff737f7ae482aef7833 https://conda.anaconda.org/conda-forge/linux-64/libzip-1.11.2-h6991a6a_0.conda#a7b27c075c9b7f459f1c022090697cba https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.44-hba22ea6_2.conda#df359c09c41cd186fffb93a2d87aa6f5 +https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_105_cp313.conda#34945787453ee52a8f8271c1d19af1e8 https://conda.anaconda.org/conda-forge/linux-64/qhull-2020.2-h434a139_5.conda#353823361b1d27eb3960efb076dfcaf6 -https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda#47d31b792659ce70f470b5c82fdfb7a4 +https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.48.0-h9eae976_0.conda#2b3a22991c20ed6ea2ed65d3407a91f4 https://conda.anaconda.org/conda-forge/linux-64/udunits2-2.2.28-h40f5838_3.conda#6bb8deb138f87c9d48320ac21b87e7a1 https://conda.anaconda.org/conda-forge/linux-64/uriparser-0.9.8-hac33072_0.conda#d71d3a66528853c0a1ac2c02d79a0284 https://conda.anaconda.org/conda-forge/linux-64/x265-3.5-h924138e_3.tar.bz2#e7f6ed84d4623d52ee581325c1587a6b https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.5-he73a12e_0.conda#4c3e9fab69804ec6077697922d70c6e2 
https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.10-h4f16b4b_1.conda#125f34a17d7b4bea418a83904ea82ea6 https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.6-ha6fb4c9_0.conda#4d056880988120e29d75bfff282e0f45 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h7959bf6_11.conda#9b3fb60fe57925a92f399bc3fc42eccf -https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hefd7a92_4.conda#5ce4df662d32d3123ea8da15571b6f51 -https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d -https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f -https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee -https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.3-h659f571_0.conda#3f43953b7d3fb3aaa1d0d0723d91e368 -https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 -https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a -https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_0.conda#13e8e54035ddd2b91875ba399f0f7c04 -https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c -https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb -https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de -https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 -https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda#eec77634ccdb2ba6c231290c399b1dae -https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda#4f6f9f3f80354ad185e276c120eac3f0 -https://conda.anaconda.org/conda-forge/linux-64/python-3.13.1-ha99a958_104_cp313.conda#f07c94533999146026ff5194a9e3906b -https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h9925aae_2.conda#e84ddf12bde691e8ec894b00ea829ddf -https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.47.2-h9eae976_0.conda#64a954de15d114281535a26fd4d1f294 -https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e https://conda.anaconda.org/conda-forge/noarch/alabaster-1.0.0-pyhd8ed1ab_1.conda#1fd9696649f65fd6611fcdb4ffec738a https://conda.anaconda.org/conda-forge/linux-64/astroid-3.3.8-py313h78bf25f_0.conda#cd3ab05349bc9be61760883382598624 https://conda.anaconda.org/conda-forge/noarch/asttokens-3.0.0-pyhd8ed1ab_1.conda#8f587de4bcf981e26228f268df374a9b -https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 https://conda.anaconda.org/conda-forge/noarch/attrs-24.3.0-pyh71513ae_0.conda#356927ace43302bf6f5926e2a58dae6a -https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-hb921021_15.conda#c79d50f64cffa5ad51ecc1a81057962f 
-https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h11f4f37_12.conda#96c3e0221fa2da97619ee82faa341a73 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-event-stream-0.5.0-h7959bf6_11.conda#9b3fb60fe57925a92f399bc3fc42eccf +https://conda.anaconda.org/conda-forge/linux-64/aws-c-http-0.9.2-hefd7a92_4.conda#5ce4df662d32d3123ea8da15571b6f51 +https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.6-he440d0b_1.conda#2c2fae981fd2afd00812c92ac47d023d +https://conda.anaconda.org/conda-forge/linux-64/brotli-1.1.0-hb9d3cd8_2.conda#98514fe74548d768907ce7a13f680e8f https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py313h46c70d0_2.conda#f6bb3742e17a4af0dc3c8ca942683ef6 -https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/noarch/certifi-2024.12.14-pyhd8ed1ab_0.conda#6feb87357ecd66733be3279f16a8c400 https://conda.anaconda.org/conda-forge/noarch/cfgv-3.3.1-pyhd8ed1ab_1.conda#57df494053e17dce2ac3a0b33e1b2a2e https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.4.1-pyhd8ed1ab_0.conda#e83a31202d1c0a000fce3e9cf3825875 https://conda.anaconda.org/conda-forge/noarch/click-8.1.8-pyh707e725_0.conda#f22f4d4970e09d68a10b922cbb0408d3 -https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.0-pyhd8ed1ab_2.conda#1f76b7e2b3ab88def5aa2f158322c7e6 +https://conda.anaconda.org/conda-forge/noarch/cloudpickle-3.1.1-pyhd8ed1ab_0.conda#364ba6c9fb03886ac979b482f39ebb92 https://conda.anaconda.org/conda-forge/noarch/colorama-0.4.6-pyhd8ed1ab_1.conda#962b9857ee8e7018c22f2776ffa0b2d7 https://conda.anaconda.org/conda-forge/noarch/cycler-0.12.1-pyhd8ed1ab_1.conda#44600c4667a319d67dbe0681fc0bc833 https://conda.anaconda.org/conda-forge/linux-64/cython-3.0.11-py313hc66aa0d_3.conda#1778443eb12b2da98428fa69152a2a2e @@ -160,11 +141,9 @@ https://conda.anaconda.org/conda-forge/noarch/execnet-2.1.1-pyhd8ed1ab_1.conda#a https://conda.anaconda.org/conda-forge/noarch/executing-2.1.0-pyhd8ed1ab_1.conda#ef8b5fca76806159fc25b4f48d8737eb https://conda.anaconda.org/conda-forge/noarch/filelock-3.16.1-pyhd8ed1ab_1.conda#d692e9ba6f92dc51484bf3477e36ce7c https://conda.anaconda.org/conda-forge/noarch/findlibs-0.0.5-pyhd8ed1ab_0.conda#8f325f63020af6f7acbe2c4cb4c920db -https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f +https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.15.0-h7e30c49_1.conda#8f5b0b297b59e1ac160ad4beec99dbee https://conda.anaconda.org/conda-forge/noarch/fsspec-2024.12.0-pyhd8ed1ab_0.conda#e041ad4c43ab5e10c74587f95378ebc7 -https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geographiclib-2.0-pyhd8ed1ab_1.conda#8b9328ab4aafb8fde493ab32c5eba731 -https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/hpack-4.0.0-pyhd8ed1ab_1.conda#2aa5ff7fa34a81b9196532c84c10d865 https://conda.anaconda.org/conda-forge/noarch/humanfriendly-10.0-pyh707e725_8.conda#7fe569c10905402ed47024fc481bb371 https://conda.anaconda.org/conda-forge/noarch/hyperframe-6.0.1-pyhd8ed1ab_1.conda#566e75c90c1d0c8c459eb0ad9833dc7a @@ -174,26 +153,25 @@ https://conda.anaconda.org/conda-forge/noarch/iniconfig-2.0.0-pyhd8ed1ab_1.conda https://conda.anaconda.org/conda-forge/noarch/isodate-0.7.2-pyhd8ed1ab_1.conda#14c42a6334f38c412449f5a5e4043a5a 
https://conda.anaconda.org/conda-forge/noarch/itsdangerous-2.2.0-pyhd8ed1ab_1.conda#7ac5f795c15f288984e32add616cdc59 https://conda.anaconda.org/conda-forge/linux-64/kiwisolver-1.4.7-py313h33d0bda_0.conda#9862d13a5e466273d5a4738cffcb8d6c -https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 https://conda.anaconda.org/conda-forge/noarch/legacy-cgi-2.6.1-pyh5b84bb0_3.conda#f258b7f54b5d9ddd02441f10c4dca2ac -https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda#a28808eae584c7f519943719b2a2b386 -https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/libavif16-1.1.1-h1909e37_2.conda#21e468ed3786ebcb2124b123aa2484b7 +https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-26_linux64_openblas.conda#ac52800af2e0c0e7dac770b435ce768a https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.11.1-h332b0f4_0.conda#2b3e0081006dc21e8bf53a91c83a055c -https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 -https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a -https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-h25350d4_1.conda#0c6497a760b99a926c7c12b74951a39c -https://conda.anaconda.org/conda-forge/linux-64/libheif-1.19.5-gpl_hc21c24c_100.conda#3b57852666eaacc13414ac811dde3f8a -https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 -https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 +https://conda.anaconda.org/conda-forge/linux-64/libglib-2.82.2-h2ff4ddf_1.conda#37d1af619d999ee8f1f73cf5a06f4e2f +https://conda.anaconda.org/conda-forge/linux-64/libglx-1.7.0-ha4b6fd6_2.conda#c8013e438185f33b13814c5c488acd5c +https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-hf539b9f_1021.conda#e8c7620cc49de0c6a2349b6dd6e39beb +https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.7.0-hd9ff511_3.conda#0ea6510969e1296cc19966fad481f6de +https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.13.5-h8d12d68_1.conda#1a21e49e190d1ffe58531a81b6e400e1 https://conda.anaconda.org/conda-forge/noarch/locket-1.0.0-pyhd8ed1ab_0.tar.bz2#91e27ef3d05cc772ce627e51cff111c4 https://conda.anaconda.org/conda-forge/linux-64/lz4-4.3.3-py313h8756d67_2.conda#135da13cb96aba211acd7feeca301154 https://conda.anaconda.org/conda-forge/linux-64/markupsafe-3.0.2-py313h8060acc_1.conda#21b62c55924f01b6eef6827167b46acb https://conda.anaconda.org/conda-forge/noarch/mccabe-0.7.0-pyhd8ed1ab_1.conda#827064ddfe0de2917fb29f1da4f8f533 https://conda.anaconda.org/conda-forge/noarch/mdurl-0.1.2-pyhd8ed1ab_1.conda#592132998493b3ff25fd7479396e8351 +https://conda.anaconda.org/conda-forge/linux-64/minizip-4.0.7-h05a5f5f_3.conda#eec77634ccdb2ba6c231290c399b1dae https://conda.anaconda.org/conda-forge/linux-64/msgpack-python-1.1.0-py313h33d0bda_0.conda#7f907b1065247efa419bb70d3a3341b5 https://conda.anaconda.org/conda-forge/noarch/munkres-1.1.4-pyh9f0ad1d_0.tar.bz2#2ba8498c1018c1e9c61eb99b973dfe19 https://conda.anaconda.org/conda-forge/noarch/networkx-3.4.2-pyh267e887_2.conda#fd40bf7f7f4bc4b647dc8512053d9873 -https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 
+https://conda.anaconda.org/conda-forge/linux-64/orc-2.0.3-h12ee42a_2.conda#4f6f9f3f80354ad185e276c120eac3f0 https://conda.anaconda.org/conda-forge/noarch/packaging-24.2-pyhd8ed1ab_2.conda#3bfed7e6228ebf2f7b9eaa47f1b4e2aa https://conda.anaconda.org/conda-forge/noarch/pandocfilters-1.5.0-pyhd8ed1ab_0.tar.bz2#457c2c8c08e54905d6954e79cb5b5db9 https://conda.anaconda.org/conda-forge/noarch/parso-0.8.4-pyhd8ed1ab_1.conda#5c092057b6badd30f75b06244ecd01c9 @@ -215,6 +193,7 @@ https://conda.anaconda.org/conda-forge/noarch/python-tzdata-2024.2-pyhd8ed1ab_1. https://conda.anaconda.org/conda-forge/linux-64/python-xxhash-3.5.0-py313h536fd9c_1.conda#5c44ffac1f568dc8b4afb09a3e825d49 https://conda.anaconda.org/conda-forge/noarch/pytz-2024.1-pyhd8ed1ab_0.conda#3eeeeb9e4827ace8c0c1419c85d590ad https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.2-py313h536fd9c_1.conda#3789f360de131c345e96fbfc955ca80b +https://conda.anaconda.org/conda-forge/linux-64/re2-2024.07.02-h9925aae_2.conda#e84ddf12bde691e8ec894b00ea829ddf https://conda.anaconda.org/conda-forge/linux-64/rpds-py-0.22.3-py313h920b4c0_0.conda#f21c21a167b2e25292e436dcb8e7cf3e https://conda.anaconda.org/conda-forge/noarch/setuptools-75.8.0-pyhff2d567_0.conda#8f28e299c11afdd79e0ec1e279dcdc52 https://conda.anaconda.org/conda-forge/noarch/six-1.17.0-pyhd8ed1ab_0.conda#a451d576819089b0d672f18768be0f65 @@ -234,19 +213,23 @@ https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.12.2-pyha770c7 https://conda.anaconda.org/conda-forge/linux-64/ujson-5.10.0-py313h46c70d0_1.conda#7f4872b663aafde0f532543488656f5d https://conda.anaconda.org/conda-forge/noarch/wcwidth-0.2.13-pyhd8ed1ab_1.conda#b68980f2495d096e71c7fd9d7ccf63e6 https://conda.anaconda.org/conda-forge/noarch/webencodings-0.5.1-pyhd8ed1ab_3.conda#2841eb5bfc75ce15e9a0054b98dcd64d -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 -https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa -https://conda.anaconda.org/conda-forge/noarch/xyzservices-2024.9.0-pyhd8ed1ab_1.conda#c79cea50b258f652010cb6c8d81591b5 +https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.5-h988505b_2.conda#9dda9667feba914e0e80b95b82f7402b +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.6-hb9d3cd8_0.conda#febbab7d15033c913d53c7a2c102309d +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxfixes-6.0.1-hb9d3cd8_0.conda#4bdb303603e9821baf5fe5fdff1dc8f8 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.12-hb9d3cd8_0.conda#96d57aba173e878a2089d5638016dc5e +https://conda.anaconda.org/conda-forge/noarch/xyzservices-2025.1.0-pyhd8ed1ab_0.conda#fdf07e281a9e5e10fc75b2dd444136e9 https://conda.anaconda.org/conda-forge/linux-64/zeromq-4.3.5-h3b0a872_7.conda#3947a35e916fcc6b9825449affbf4214 https://conda.anaconda.org/conda-forge/noarch/zict-3.0.0-pyhd8ed1ab_1.conda#e52c2ef711ccf31bb7f70ca87d144b9e https://conda.anaconda.org/conda-forge/noarch/zipp-3.21.0-pyhd8ed1ab_1.conda#0c3cc595284c5e8f0f9900a9b228a332 https://conda.anaconda.org/conda-forge/noarch/accessible-pygments-0.0.5-pyhd8ed1ab_1.conda#74ac5069774cdbc53910ec4d631a3999 -https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.7-hf454442_0.conda#947c82025693bebd557f782bb5d6b469 
+https://conda.anaconda.org/conda-forge/linux-64/atk-1.0-2.38.0-h04ea711_2.conda#f730d54ba9cd543666d7220c9f7ed563 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-auth-0.8.0-h205f482_16.conda#b0815d37ab812ade9c07239da7c3c369 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-mqtt-0.11.0-h11f4f37_12.conda#96c3e0221fa2da97619ee82faa341a73 https://conda.anaconda.org/conda-forge/linux-64/azure-core-cpp-1.14.0-h5cfcd09_0.conda#0a8838771cc2e985cd295e01ae83baf1 https://conda.anaconda.org/conda-forge/noarch/babel-2.16.0-pyhd8ed1ab_1.conda#3e23f7db93ec14c80525257d8affac28 https://conda.anaconda.org/conda-forge/noarch/beautifulsoup4-4.12.3-pyha770c72_1.conda#d48f7e9fdec44baf6d1da416fe402b04 -https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyhd8ed1ab_3.conda#b33551d9bac06d754762e8ccb3c4df03 +https://conda.anaconda.org/conda-forge/noarch/bleach-6.2.0-pyh29332c3_4.conda#f0b4c8e370446ef89797608d60a564b3 +https://conda.anaconda.org/conda-forge/linux-64/cairo-1.18.2-h3394656_1.conda#b34c2833a1f56db610aeb27f206d800d https://conda.anaconda.org/conda-forge/linux-64/cffi-1.17.1-py313hfab6e84_0.conda#ce6386a5892ef686d6d680c345c40ad1 https://conda.anaconda.org/conda-forge/noarch/click-plugins-1.1.1-pyhd8ed1ab_1.conda#82bea35e4dac4678ba623cf10e95e375 https://conda.anaconda.org/conda-forge/noarch/cligj-0.7.2-pyhd8ed1ab_2.conda#55c7804f428719241a90b152016085a1 @@ -254,12 +237,13 @@ https://conda.anaconda.org/conda-forge/linux-64/coverage-7.6.10-py313h8060acc_0. https://conda.anaconda.org/conda-forge/linux-64/cytoolz-1.0.1-py313h536fd9c_0.conda#e886bb6a3c24f8b9dd4fcd1d617a1f64 https://conda.anaconda.org/conda-forge/noarch/fire-0.7.0-pyhd8ed1ab_0.conda#c8eefdf1e822c56a6034602e67bc92a5 https://conda.anaconda.org/conda-forge/linux-64/fonttools-4.55.3-py313h8060acc_1.conda#f89b4b415c5be34d24f74f30954792b5 -https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 +https://conda.anaconda.org/conda-forge/linux-64/freexl-2.0.0-h9dce30a_2.conda#ecb5d11305b8ba1801543002e69d2f2f +https://conda.anaconda.org/conda-forge/linux-64/gdk-pixbuf-2.42.12-hb9ae30d_0.conda#201db6c2d9a3c5e46573ac4cb2e92f4f https://conda.anaconda.org/conda-forge/noarch/geopy-2.4.1-pyhd8ed1ab_2.conda#40182a8d62a61d147ec7d3e4c5c36ac2 +https://conda.anaconda.org/conda-forge/linux-64/gts-0.7.6-h977cf35_4.conda#4d8df0b0db060d33c9a702ada998a8fe https://conda.anaconda.org/conda-forge/noarch/h2-4.1.0-pyhd8ed1ab_1.conda#825927dc7b0f287ef8d4d0011bb113b1 -https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.1.0-h0b3b770_0.conda#ab1d7d56034814f4c3ed9f69f8c68806 https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.3-nompi_h2d575fe_108.conda#b74598031529dafb2a66f9e90f26f2dc -https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.5.0-pyha770c72_1.conda#315607a3030ad5d5227e76e0733798ff +https://conda.anaconda.org/conda-forge/noarch/importlib-metadata-8.6.1-pyha770c72_0.conda#f4b39bf00c69f56ac01e020ebfac066c https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.5.2-pyhd8ed1ab_0.conda#c85c76dc67d75619a92f51dfbce06992 https://conda.anaconda.org/conda-forge/noarch/isort-5.13.2-pyhd8ed1ab_1.conda#ef7dc847f19fe4859d5aaa33385bf509 https://conda.anaconda.org/conda-forge/noarch/jedi-0.19.2-pyhd8ed1ab_1.conda#a4f4c5dc9b80bc50e0d3dc4e6e8f1bd9 @@ -267,51 +251,60 @@ https://conda.anaconda.org/conda-forge/noarch/jinja2-3.1.5-pyhd8ed1ab_0.conda#27 https://conda.anaconda.org/conda-forge/noarch/jupyter_core-5.7.2-pyh31011fe_1.conda#0a2980dada0dd7fd0998f0342308b1b1 
https://conda.anaconda.org/conda-forge/noarch/jupyterlab_pygments-0.3.0-pyhd8ed1ab_2.conda#fd312693df06da3578383232528c468d https://conda.anaconda.org/conda-forge/noarch/latexcodec-2.0.1-pyh9f0ad1d_0.tar.bz2#8d67904973263afd2985ba56aa2d6bb4 -https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.33.0-h2b5623c_1.conda#61829a8dd5f4e2327e707572065bae41 -https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py313h6eb7059_2.conda#48d1a2d9b1f12ff5180ffb4154050c48 +https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.16-hb7c19ff_0.conda#51bb7010fc86f70eee639b4bb7a894f5 +https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.7.7-h4585015_3.conda#a28808eae584c7f519943719b2a2b386 +https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-26_linux64_openblas.conda#ebcc5f37a435aa3c19640533c82f8d76 +https://conda.anaconda.org/conda-forge/linux-64/libgd-2.3.3-hd3e95f3_10.conda#30ee3a29c84cf7b842a8c5828c4b7c13 +https://conda.anaconda.org/conda-forge/linux-64/libgl-1.7.0-ha4b6fd6_2.conda#928b8be80851f5d8ffb016f9c81dae7a +https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.67.1-h25350d4_1.conda#0c6497a760b99a926c7c12b74951a39c +https://conda.anaconda.org/conda-forge/linux-64/libheif-1.19.5-gpl_hc21c24c_100.conda#3b57852666eaacc13414ac811dde3f8a +https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-26_linux64_openblas.conda#3792604c43695d6a273bc5faaac47d48 +https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.39-h76b75d6_0.conda#e71f31f8cfb0a91439f2086fc8aa0461 https://conda.anaconda.org/conda-forge/noarch/markdown-it-py-3.0.0-pyhd8ed1ab_1.conda#fee3164ac23dfca50cfcc8b85ddefb81 https://conda.anaconda.org/conda-forge/noarch/matplotlib-inline-0.1.7-pyhd8ed1ab_1.conda#af6ab708897df59bd6e7283ceab1b56b https://conda.anaconda.org/conda-forge/noarch/mistune-3.1.0-pyhd8ed1ab_0.conda#d10024c163a52eeecbb166fdeaef8b12 https://conda.anaconda.org/conda-forge/noarch/nested-lookup-0.2.25-pyhd8ed1ab_2.conda#cc2da171723d50bc1a7f8a53a8d0319f https://conda.anaconda.org/conda-forge/noarch/nodeenv-1.9.1-pyhd8ed1ab_1.conda#7ba3f09fceae6a120d664217e58fe686 -https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.1-py313hb30382a_0.conda#bacc73d89e22828efedf31fdc4b54b4e +https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.3-h5fbd93e_0.conda#9e5816bc95d285c115a3ebc2f8563564 https://conda.anaconda.org/conda-forge/noarch/partd-1.4.2-pyhd8ed1ab_0.conda#0badf9c54e24cecfb0ad2f99d680c163 https://conda.anaconda.org/conda-forge/noarch/pexpect-4.9.0-pyhd8ed1ab_1.conda#d0d408b1f18883a944376da5cf8101ea -https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py313h8db990d_0.conda#1e86810c6c3fb6d6aebdba26564eb2e8 https://conda.anaconda.org/conda-forge/linux-64/proj-9.5.1-h0054346_0.conda#398cabfd9bd75e90d0901db95224f25f https://conda.anaconda.org/conda-forge/noarch/prompt-toolkit-3.0.48-pyha770c72_1.conda#368d4aa48358439e07a97ae237491785 https://conda.anaconda.org/conda-forge/noarch/pytest-8.3.4-pyhd8ed1ab_1.conda#799ed216dc6af62520f32aa39bc1c2bb https://conda.anaconda.org/conda-forge/noarch/python-dateutil-2.9.0.post0-pyhff2d567_1.conda#5ba79d7c71f03c678c8ead841f347d6e https://conda.anaconda.org/conda-forge/linux-64/pyzmq-26.2.0-py313h8e95178_3.conda#8ab50c9c9c3824ac0ffac9e9dcf5619e -https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.1-pyhd8ed1ab_1.conda#2405a5561bffdef682167ca6db14683c 
-https://conda.anaconda.org/conda-forge/noarch/referencing-0.35.1-pyhd8ed1ab_1.conda#8c9083612c1bfe6878715ed5732605f8 +https://conda.anaconda.org/conda-forge/noarch/rdflib-7.1.3-pyhd8ed1ab_0.conda#72ec20859c97d33c82edcc8bdb70d536 +https://conda.anaconda.org/conda-forge/noarch/referencing-0.36.1-pyhd8ed1ab_0.conda#dbb48421efd666ea133c6d5e67291766 https://conda.anaconda.org/conda-forge/noarch/stack_data-0.6.3-pyhd8ed1ab_1.conda#b1b505328da7a6b246787df4b5a49fbc https://conda.anaconda.org/conda-forge/noarch/tinycss2-1.4.0-pyhd8ed1ab_0.conda#f1acf5fdefa8300de697982bcb1761c9 https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.12.2-hd8ed1ab_1.conda#b6a408c64b78ec7b779a3e5c7a902433 https://conda.anaconda.org/conda-forge/noarch/url-normalize-1.4.3-pyhd8ed1ab_1.conda#c7b1961b139c21381764de4704b6bbfb -https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.28.1-pyhd8ed1ab_0.conda#680b1c287b10cefc8bda0530b217229f +https://conda.anaconda.org/conda-forge/noarch/virtualenv-20.29.1-pyhd8ed1ab_0.conda#de06336c9833cffd2a4bd6f27c4cf8ea https://conda.anaconda.org/conda-forge/noarch/webob-1.8.9-pyhd8ed1ab_1.conda#293718ddac83a0fbc0f2193ff77d1e1c +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdamage-1.1.6-hb9d3cd8_0.conda#b5fcc7172d22516e1f965490e65e33a4 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxi-1.8.2-hb9d3cd8_0.conda#17dcc85db3c7886650b8908b183d6876 +https://conda.anaconda.org/conda-forge/linux-64/xorg-libxxf86vm-1.1.6-hb9d3cd8_0.conda#5efa5fa6243a622445fdfd72aee15efa https://conda.anaconda.org/conda-forge/noarch/yamale-5.3.0-pyhd8ed1ab_0.conda#d4b5f3a50decd28cd747f4b5f7aea33f -https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.7-hd92328a_7.conda#02b95564257d5c3db9c06beccf711f95 +https://conda.anaconda.org/conda-forge/linux-64/aws-c-s3-0.7.9-hf454442_0.conda#8a36f996d5469b2e1f9e71cac3b9feb1 https://conda.anaconda.org/conda-forge/linux-64/azure-identity-cpp-1.10.0-h113e628_0.conda#73f73f60854f325a55f1d31459f2ab73 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-common-cpp-12.8.0-h736e048_1.conda#13de36be8de3ae3f05ba127631599213 -https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-hd8ed1ab_3.conda#e250a492fc70bf604737328dbe02846c +https://conda.anaconda.org/conda-forge/noarch/bleach-with-css-6.2.0-h82add2a_4.conda#a30e9406c873940383555af4c873220d https://conda.anaconda.org/conda-forge/noarch/cattrs-24.1.2-pyhd8ed1ab_1.conda#53eca64665361194ca4bbaf87c0ded99 -https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py313ha014f3b_1.conda#b20667f9b1d016c1141051a433f76dfc -https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py313h33d0bda_0.conda#6b6768e7c585d7029f79a04cbc4cbff0 https://conda.anaconda.org/conda-forge/linux-64/cryptography-44.0.0-py313h6556f6e_0.conda#a75161b68e899739b89057b15b1c63cd -https://conda.anaconda.org/conda-forge/noarch/dask-core-2024.12.1-pyhd8ed1ab_0.conda#48060c395f1e87a80330c0adaad332f7 +https://conda.anaconda.org/conda-forge/noarch/dask-core-2025.1.0-pyhd8ed1ab_0.conda#0abebcf57fa0d8f2f0d92f49c47d3f06 +https://conda.anaconda.org/conda-forge/linux-64/freeglut-3.2.2-ha6d2627_3.conda#84ec3f5b46f3076be49f2cf3f1cfbf02 https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.3-h77b800c_3.conda#4eb52aecb43e7c72f8e4fca0c386354e +https://conda.anaconda.org/conda-forge/linux-64/harfbuzz-10.2.0-h4bba637_0.conda#9e38e86167e8b1ea0094747d12944ce4 https://conda.anaconda.org/conda-forge/noarch/ipython-8.31.0-pyh707e725_0.conda#1d7fcd803dfa936a6c3bd051b293241c 
-https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 https://conda.anaconda.org/conda-forge/noarch/jsonschema-specifications-2024.10.1-pyhd8ed1ab_1.conda#3b519bc21bc80e60b456f1e62962a766 https://conda.anaconda.org/conda-forge/noarch/jupyter_client-8.6.3-pyhd8ed1ab_1.conda#4ebae00eae9705b0c3d6d1018a81d047 -https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.33.0-h0121fbd_1.conda#b0cfb5044685a7a9fa43ae669124f0a0 +https://conda.anaconda.org/conda-forge/linux-64/libglu-9.0.3-h03adeef_0.conda#b1df5affe904efe82ef890826b68881d +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.33.0-h2b5623c_1.conda#61829a8dd5f4e2327e707572065bae41 https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h00e09a9_116.conda#417864857bdb6c2be2e923e89bffd2e8 https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.1.0-h1b4f908_12.conda#641f91ac6f984a91a78ba2411fe4f106 -https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py313ha87cce1_1.conda#c5d63dd501db554b84a30dea33824164 -https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 +https://conda.anaconda.org/conda-forge/linux-64/lxml-5.3.0-py313h6eb7059_2.conda#48d1a2d9b1f12ff5180ffb4154050c48 +https://conda.anaconda.org/conda-forge/linux-64/numpy-2.2.2-py313h17eae1a_0.conda#b069b8491f6882134a55d2f980de3818 +https://conda.anaconda.org/conda-forge/linux-64/pillow-11.1.0-py313h8db990d_0.conda#1e86810c6c3fb6d6aebdba26564eb2e8 https://conda.anaconda.org/conda-forge/noarch/pybtex-0.24.0-pyhd8ed1ab_3.conda#556a52a96313364aa79990ed1337b9a5 https://conda.anaconda.org/conda-forge/noarch/pylint-3.3.3-pyhd8ed1ab_0.conda#5842a1fa3b9b4f9fe7069b9ca5ed068d https://conda.anaconda.org/conda-forge/linux-64/pyproj-3.7.0-py313hdb96ca5_0.conda#2a0d20f16832a170218b474bcec57acf @@ -321,64 +314,70 @@ https://conda.anaconda.org/conda-forge/noarch/pytest-metadata-3.1.1-pyhd8ed1ab_1 https://conda.anaconda.org/conda-forge/noarch/pytest-mock-3.14.0-pyhd8ed1ab_1.conda#1ca25f3fdf32ebd8a51ee9efa97c9a45 https://conda.anaconda.org/conda-forge/noarch/pytest-xdist-3.6.1-pyhd8ed1ab_1.conda#59aad4fb37cabc0bacc73cf344612ddd https://conda.anaconda.org/conda-forge/noarch/rich-13.9.4-pyhd8ed1ab_1.conda#7aed65d4ff222bfb7335997aa40b7da5 -https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.0-py313h750cbce_1.conda#5fa8ee00606ba9d5a928d989b949c45b -https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py313h3f71f02_2.conda#dd0b742e8e61b8f15e4b64efcc103ad6 https://conda.anaconda.org/conda-forge/linux-64/ukkonen-1.0.1-py313h33d0bda_5.conda#5bcffe10a500755da4a71cc0fb62a420 https://conda.anaconda.org/conda-forge/linux-64/zstandard-0.23.0-py313h80202fe_1.conda#c178558ff516cd507763ffee230c20b2 -https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-hc430e4a_4.conda#aeefac461bea1f126653c1285cf5af08 +https://conda.anaconda.org/conda-forge/linux-64/aws-crt-cpp-0.29.9-hbbd73d0_1.conda#f782f17802a0a4ccf3dd83e15c514708 https://conda.anaconda.org/conda-forge/linux-64/azure-storage-blobs-cpp-12.13.0-h3cf044e_1.conda#7eb66060455c7a47d9dcdbfa9f46579b -https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.2-pyhd8ed1ab_1.conda#976ff24762f1f991b08f7a7a41875086 -https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313ha014f3b_0.conda#aecffd7a21d698e374487644ce67d6eb -https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-h8bb6dbc_1.conda#87c13b15a9f3ec25cd3c9d8f2e33fe0b 
-https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e -https://conda.anaconda.org/conda-forge/noarch/identify-2.6.5-pyhd8ed1ab_0.conda#c1b0f663ff141265d1be1242259063f0 +https://conda.anaconda.org/conda-forge/linux-64/cftime-1.6.4-py313ha014f3b_1.conda#b20667f9b1d016c1141051a433f76dfc +https://conda.anaconda.org/conda-forge/linux-64/contourpy-1.3.1-py313h33d0bda_0.conda#6b6768e7c585d7029f79a04cbc4cbff0 +https://conda.anaconda.org/conda-forge/noarch/identify-2.6.6-pyhd8ed1ab_0.conda#d751c3b4a973ed15b57be90d68c716d1 +https://conda.anaconda.org/conda-forge/linux-64/jasper-4.2.4-h536e39c_0.conda#9518ab7016cf4564778aef08b6bd8792 https://conda.anaconda.org/conda-forge/noarch/jsonschema-4.23.0-pyhd8ed1ab_1.conda#a3cead9264b331b32fe8f0aabc967522 -https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.0-h3359108_13.conda#e9c2fb75425038991370f72231eca6e8 -https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 -https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py313h129903b_0.conda#ab5b84154e1d9e41d4f11aea76d74096 +https://conda.anaconda.org/conda-forge/linux-64/libgdal-core-3.10.1-h3359108_1.conda#5a00c65b3812cfb81a24133d36bf97c9 +https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-storage-2.33.0-h0121fbd_1.conda#b0cfb5044685a7a9fa43ae669124f0a0 https://conda.anaconda.org/conda-forge/linux-64/netcdf-fortran-4.6.1-nompi_h22f9119_108.conda#0967d692b1dd33e7d809cfa355090e4b -https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h2a70696_101.conda#fe03a55f80aef5f47b65320cd10025b4 -https://conda.anaconda.org/conda-forge/noarch/pyopenssl-24.3.0-pyhd8ed1ab_0.conda#269109707b3810adce78b6afb2a82c80 +https://conda.anaconda.org/conda-forge/linux-64/pandas-2.2.3-py313ha87cce1_1.conda#c5d63dd501db554b84a30dea33824164 +https://conda.anaconda.org/conda-forge/linux-64/pango-1.56.0-h861ebed_0.conda#8779ee58be1c8b35e7af464a73674957 +https://conda.anaconda.org/conda-forge/noarch/pyopenssl-25.0.0-pyhd8ed1ab_0.conda#195fbabc5cc805f2cc10cb881a19cf8b https://conda.anaconda.org/conda-forge/noarch/pytest-html-4.1.1-pyhd8ed1ab_1.conda#010e50e74c467db278f1398a74106a04 https://conda.anaconda.org/conda-forge/linux-64/python-stratify-0.3.0-py313ha014f3b_3.conda#041b8326743c64bd02b8c0f34f05e1ef +https://conda.anaconda.org/conda-forge/linux-64/scipy-1.15.1-py313h750cbce_0.conda#a1a082636391d36d019e3fdeb56f0a4c +https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.6-py313h3f71f02_2.conda#dd0b742e8e61b8f15e4b64efcc103ad6 https://conda.anaconda.org/conda-forge/noarch/urllib3-2.3.0-pyhd8ed1ab_0.conda#32674f8dbfb7b26410ed580dd3c10a29 -https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.1-pyhd8ed1ab_0.conda#81db80ba986122da460800a67bf8ac7f +https://conda.anaconda.org/conda-forge/linux-64/aws-sdk-cpp-1.11.458-h4d475cb_6.conda#6139e84bbb6fdb27ca49c2981613a5fa https://conda.anaconda.org/conda-forge/linux-64/azure-storage-files-datalake-cpp-12.12.0-ha633028_1.conda#7c1980f89dd41b097549782121a73490 -https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py313ha87cce1_0.conda#44c2091019480603a885aa01e7b710e7 -https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda#ed15dcf944706ae6ea54968dfa4a06a5 -https://conda.anaconda.org/conda-forge/noarch/distributed-2024.12.1-pyhd8ed1ab_0.conda#58df114d7649ddb3f68c9b9adc6fbabe 
+https://conda.anaconda.org/conda-forge/noarch/bokeh-3.6.2-pyhd8ed1ab_1.conda#976ff24762f1f991b08f7a7a41875086 +https://conda.anaconda.org/conda-forge/linux-64/cf-units-3.3.0-py313ha014f3b_0.conda#aecffd7a21d698e374487644ce67d6eb +https://conda.anaconda.org/conda-forge/noarch/distributed-2025.1.0-pyhd8ed1ab_0.conda#5ec97e707606eaa891eedb406eba507b +https://conda.anaconda.org/conda-forge/linux-64/eccodes-2.39.0-h8bb6dbc_1.conda#87c13b15a9f3ec25cd3c9d8f2e33fe0b https://conda.anaconda.org/conda-forge/linux-64/esmf-8.8.0-nompi_h4441c20_0.conda#34729c36214ff0b7834065bd5cacdc56 https://conda.anaconda.org/conda-forge/linux-64/fiona-1.10.1-py313hab4ff3b_3.conda#69a5fbc032a6a01aa6cf7010dd2164a0 -https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/gtk2-2.24.33-h8ee276e_7.conda#28a9681054948a7d7e96a7b8fe9b604e +https://conda.anaconda.org/conda-forge/linux-64/librsvg-2.58.4-h49af25d_2.conda#b9846db0abffb09847e2cb0fec4b4db6 +https://conda.anaconda.org/conda-forge/linux-64/matplotlib-base-3.10.0-py313h129903b_0.conda#ab5b84154e1d9e41d4f11aea76d74096 https://conda.anaconda.org/conda-forge/noarch/myproxyclient-2.1.1-pyhd8ed1ab_1.conda#1efb1227abaf20324ceb7ac9c06bb86d https://conda.anaconda.org/conda-forge/noarch/nbformat-5.10.4-pyhd8ed1ab_1.conda#bbe1963f1e47f594070ffe87cdf612ea -https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 -https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.0.1-pyha770c72_1.conda#d0ea6ed474bf7f6db88fc85e6dc809b1 -https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py313ha014f3b_0.conda#b28717a6d595cdc42737d6669d422b1d +https://conda.anaconda.org/conda-forge/linux-64/netcdf4-1.7.2-nompi_py313h2a70696_101.conda#fe03a55f80aef5f47b65320cd10025b4 +https://conda.anaconda.org/conda-forge/noarch/pre-commit-4.1.0-pyha770c72_0.conda#5353f5eb201a9415b12385e35ed1148d https://conda.anaconda.org/conda-forge/noarch/requests-2.32.3-pyhd8ed1ab_1.conda#a9b9368f3701a417eac9edbcae7cb737 +https://conda.anaconda.org/conda-forge/noarch/xarray-2025.1.1-pyhd8ed1ab_0.conda#81db80ba986122da460800a67bf8ac7f +https://conda.anaconda.org/conda-forge/linux-64/cartopy-0.24.0-py313ha87cce1_0.conda#44c2091019480603a885aa01e7b710e7 +https://conda.anaconda.org/conda-forge/noarch/cf_xarray-0.10.0-pyhd8ed1ab_2.conda#ed15dcf944706ae6ea54968dfa4a06a5 https://conda.anaconda.org/conda-forge/noarch/dask-jobqueue-0.9.0-pyhd8ed1ab_0.conda#a201de7d36907f2355426e019168d337 https://conda.anaconda.org/conda-forge/noarch/esmpy-8.8.0-pyhecae5ae_0.conda#9d8320aa90c8e213002f9cdb5bb9f579 -https://conda.anaconda.org/conda-forge/noarch/iris-3.11.0-pyha770c72_0.conda#a5e36260789ce92074c3736533ecdd61 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-18.1.0-hd595efa_7_cpu.conda#08d4aff5ee6dee9a1b9ab13fca927697 +https://conda.anaconda.org/conda-forge/linux-64/graphviz-12.0.0-hba01fac_0.conda#953e31ea00d46beb7e64a79fc291ec44 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-19.0.0-hce2e470_3_cpu.conda#a50ba9a0789061ea395a47a23d6a7734 https://conda.anaconda.org/conda-forge/noarch/nbclient-0.10.2-pyhd8ed1ab_0.conda#6bb0d77277061742744176ab555b723c +https://conda.anaconda.org/conda-forge/noarch/nc-time-axis-1.4.1-pyhd8ed1ab_1.conda#9a2be7d0089f5934b550933ca0d9fe85 https://conda.anaconda.org/conda-forge/noarch/pooch-1.8.2-pyhd8ed1ab_1.conda#b3e783e8e8ed7577cf0b6dee37d1fbac 
-https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.3-py313h78bf25f_0.conda#7c460c46b2f701a9733bf931223fe4b8 +https://conda.anaconda.org/conda-forge/linux-64/python-eccodes-2.37.0-py313ha014f3b_0.conda#b28717a6d595cdc42737d6669d422b1d https://conda.anaconda.org/conda-forge/noarch/requests-cache-1.2.1-pyhd8ed1ab_1.conda#584e6aab3a5cffde537c575ad6a673ff https://conda.anaconda.org/conda-forge/noarch/esgf-pyclient-0.3.1-pyhd8ed1ab_5.conda#6779887899e0b0b6fb316253eb0f5c64 -https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.21.0-pyhd8ed1ab_0.conda#d5a110459acc9669c58e5d516fc2e165 -https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-18.1.0-hcb10f89_7_cpu.conda#12d84228204c56fec6ed113288014d11 -https://conda.anaconda.org/conda-forge/linux-64/libparquet-18.1.0-h081d1f1_7_cpu.conda#b97013ef4e1dd2cf11594f06d5b5e83a +https://conda.anaconda.org/conda-forge/noarch/iris-3.11.0-pyha770c72_0.conda#a5e36260789ce92074c3736533ecdd61 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-acero-19.0.0-hcb10f89_3_cpu.conda#3e1e31382e9c6ecd0b24bd8f6ddb33ec +https://conda.anaconda.org/conda-forge/linux-64/libparquet-19.0.0-h081d1f1_3_cpu.conda#95f8b5758148e62a055f4c6538a31f0b https://conda.anaconda.org/conda-forge/noarch/nbconvert-core-7.16.5-pyhd8ed1ab_1.conda#dd50a122c5b9782b1e9b2695473bfd95 -https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 https://conda.anaconda.org/conda-forge/noarch/py-cordex-0.9.0-pyhd8ed1ab_1.conda#53912b9ade4f2ea4dd1d5d6d3de7df70 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-18.1.0-py313he5f92c8_0_cpu.conda#5380e12f4468e891911dbbd4248b521a -https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-18.1.0-hcb10f89_7_cpu.conda#0a81eb63d7cd150f598c752e86388d57 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-core-19.0.0-py313he5f92c8_0_cpu.conda#2c03c58414e73dcaf9556212a88c90ae +https://conda.anaconda.org/conda-forge/linux-64/pydot-3.0.4-py313h78bf25f_0.conda#f23fcb972dcceebc167c4e8b86fd6557 +https://conda.anaconda.org/conda-forge/noarch/iris-grib-0.21.0-pyhd8ed1ab_0.conda#d5a110459acc9669c58e5d516fc2e165 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-dataset-19.0.0-hcb10f89_3_cpu.conda#1face9ff13ec61bfb065063d35fda864 https://conda.anaconda.org/conda-forge/noarch/nbconvert-pandoc-7.16.5-hd8ed1ab_1.conda#593a8fd80968f14f8a7b3a685ddc455e -https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-18.1.0-h08228c5_7_cpu.conda#e128def53c133e8a23ac00cd4a479335 +https://conda.anaconda.org/conda-forge/noarch/prov-2.0.0-pyhd3deb0d_0.tar.bz2#aa9b3ad140f6c0668c646f32e20ccf82 +https://conda.anaconda.org/conda-forge/linux-64/libarrow-substrait-19.0.0-h08228c5_3_cpu.conda#8527e1e45c495991fd88f6531e176556 https://conda.anaconda.org/conda-forge/noarch/nbconvert-7.16.5-hd8ed1ab_1.conda#82ffc2974cd09b45182f018b5af731c8 -https://conda.anaconda.org/conda-forge/linux-64/pyarrow-18.1.0-py313h78bf25f_0.conda#a11d880ceedc33993c6f5c14a80ea9d3 -https://conda.anaconda.org/conda-forge/noarch/dask-expr-1.1.21-pyhd8ed1ab_0.conda#e72a014dbbd35545dcfba4de9c92fb1d -https://conda.anaconda.org/conda-forge/noarch/dask-2024.12.1-pyhd8ed1ab_0.conda#f3134df9565c4d4415ff0e61f3aa28d0 +https://conda.anaconda.org/conda-forge/linux-64/pyarrow-19.0.0-py313h78bf25f_0.conda#d8eb3270cfb824a02f1ccc02f559a129 +https://conda.anaconda.org/conda-forge/noarch/dask-2025.1.0-pyhd8ed1ab_0.conda#a5f91379331b61157c203ca69da6331b 
https://conda.anaconda.org/conda-forge/noarch/iris-esmf-regrid-0.11.0-pyhd8ed1ab_1.conda#86286b197e33e3b034416c18ba0f574c https://conda.anaconda.org/conda-forge/noarch/autodocsumm-0.2.14-pyhd8ed1ab_0.conda#351a11ac1215eb4f6c5b82e30070277a https://conda.anaconda.org/conda-forge/noarch/nbsphinx-0.9.6-pyhd8ed1ab_0.conda#2e4c30e09d50d025836279d80140d0a4 From fc45c727b2b86dd5cf4a0d895d50bdeb36135e0f Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 21 Jan 2025 11:52:35 +0000 Subject: [PATCH 33/36] [pre-commit.ci] pre-commit autoupdate (#2638) Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com> --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cc511c98b2..3679108af5 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -33,7 +33,7 @@ repos: - id: codespell additional_dependencies: [tomli] # required for Python 3.10 - repo: https://github.com/astral-sh/ruff-pre-commit - rev: "v0.9.1" + rev: "v0.9.2" hooks: - id: ruff args: [--fix] From 2e6804ad72499db2f0e5954ae1eef41afbda04c2 Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Tue, 21 Jan 2025 15:46:08 +0100 Subject: [PATCH 34/36] Make sure that supplementary variables and weights have same chunks as parent cube (#2637) Co-authored-by: Bouwe Andela --- esmvalcore/iris_helpers.py | 4 +- .../preprocessor/_supplementary_vars.py | 87 ++++++++++++------- esmvalcore/preprocessor/_volume.py | 21 +++-- .../test_add_supplementary_variables.py | 50 +++++++++-- .../unit/preprocessor/_volume/test_volume.py | 43 +++++++-- 5 files changed, 152 insertions(+), 53 deletions(-) diff --git a/esmvalcore/iris_helpers.py b/esmvalcore/iris_helpers.py index 8d1c676682..767c3ec260 100644 --- a/esmvalcore/iris_helpers.py +++ b/esmvalcore/iris_helpers.py @@ -247,14 +247,14 @@ def rechunk_cube( cube: Input cube. complete_coords: - (Names of) coordinates along which the output cubes should not be + (Names of) coordinates along which the output cube should not be chunked. remaining_dims: Chunksize of the remaining dimensions. Returns ------- - Cube + iris.cube.Cube Rechunked cube. This will always be a copy of the input cube. """ diff --git a/esmvalcore/preprocessor/_supplementary_vars.py b/esmvalcore/preprocessor/_supplementary_vars.py index 0c305dad37..96e3c4641c 100644 --- a/esmvalcore/preprocessor/_supplementary_vars.py +++ b/esmvalcore/preprocessor/_supplementary_vars.py @@ -1,22 +1,26 @@ """Preprocessor functions for ancillary variables and cell measures.""" import logging -from typing import Iterable +from collections.abc import Callable +from typing import Iterable, Literal import iris.coords -import iris.cube +from iris.cube import Cube logger = logging.getLogger(__name__) PREPROCESSOR_SUPPLEMENTARIES = {} -def register_supplementaries(variables, required): +def register_supplementaries( + variables: list[str], + required: Literal["require_at_least_one", "prefer_at_least_one"], +) -> Callable: """Register supplementary variables required for a preprocessor function. Parameters ---------- - variables: :obj:`list` of :obj`str` + variables: List of variable names. required: How strong the requirement is. 
Can be 'require_at_least_one' if at @@ -39,16 +43,25 @@ def wrapper(func): return wrapper -def add_cell_measure(cube, cell_measure_cube, measure): - """Add a cube as a cell_measure in the cube containing the data. +def add_cell_measure( + cube: Cube, + cell_measure_cube: Cube, + measure: Literal["area", "volume"], +) -> None: + """Add cell measure to cube (in-place). + + Note + ---- + This assumes that the cell measure spans the rightmost dimensions of the + cube. Parameters ---------- - cube: iris.cube.Cube + cube: Iris cube with input data. - cell_measure_cube: iris.cube.Cube + cell_measure_cube: Iris cube with cell measure data. - measure: str + measure: Name of the measure, can be 'area' or 'volume'. Returns @@ -65,16 +78,22 @@ def add_cell_measure(cube, cell_measure_cube, measure): raise ValueError( f"measure name must be 'area' or 'volume', got {measure} instead" ) - measure = iris.coords.CellMeasure( - cell_measure_cube.core_data(), + coord_dims = tuple( + range(cube.ndim - len(cell_measure_cube.shape), cube.ndim) + ) + cell_measure_data = cell_measure_cube.core_data() + if cell_measure_cube.has_lazy_data(): + cube_chunks = tuple(cube.lazy_data().chunks[d] for d in coord_dims) + cell_measure_data = cell_measure_data.rechunk(cube_chunks) + cell_measure = iris.coords.CellMeasure( + cell_measure_data, standard_name=cell_measure_cube.standard_name, units=cell_measure_cube.units, measure=measure, var_name=cell_measure_cube.var_name, attributes=cell_measure_cube.attributes, ) - start_dim = cube.ndim - len(measure.shape) - cube.add_cell_measure(measure, range(start_dim, cube.ndim)) + cube.add_cell_measure(cell_measure, coord_dims) logger.debug( "Added %s as cell measure in cube of %s.", cell_measure_cube.var_name, @@ -82,14 +101,19 @@ def add_cell_measure(cube, cell_measure_cube, measure): ) -def add_ancillary_variable(cube, ancillary_cube): - """Add cube as an ancillary variable in the cube containing the data. +def add_ancillary_variable(cube: Cube, ancillary_cube: Cube) -> None: + """Add ancillary variable to cube (in-place). + + Note + ---- + This assumes that the ancillary variable spans the rightmost dimensions of + the cube. Parameters ---------- - cube: iris.cube.Cube + cube: Iris cube with input data. - ancillary_cube: iris.cube.Cube + ancillary_cube: Iris cube with ancillary data. Returns @@ -97,15 +121,19 @@ def add_ancillary_variable(cube, ancillary_cube): iris.cube.Cube Cube with added ancillary variables """ + coord_dims = tuple(range(cube.ndim - len(ancillary_cube.shape), cube.ndim)) + ancillary_data = ancillary_cube.core_data() + if ancillary_cube.has_lazy_data(): + cube_chunks = tuple(cube.lazy_data().chunks[d] for d in coord_dims) + ancillary_data = ancillary_data.rechunk(cube_chunks) ancillary_var = iris.coords.AncillaryVariable( - ancillary_cube.core_data(), + ancillary_data, standard_name=ancillary_cube.standard_name, units=ancillary_cube.units, var_name=ancillary_cube.var_name, attributes=ancillary_cube.attributes, ) - start_dim = cube.ndim - len(ancillary_var.shape) - cube.add_ancillary_variable(ancillary_var, range(start_dim, cube.ndim)) + cube.add_ancillary_variable(ancillary_var, coord_dims) logger.debug( "Added %s as ancillary variable in cube of %s.", ancillary_cube.var_name, @@ -114,10 +142,10 @@ def add_ancillary_variable(cube, ancillary_cube): def add_supplementary_variables( - cube: iris.cube.Cube, - supplementary_cubes: Iterable[iris.cube.Cube], -) -> iris.cube.Cube: - """Add ancillary variables and/or cell measures. 
+ cube: Cube, + supplementary_cubes: Iterable[Cube], +) -> Cube: + """Add ancillary variables and/or cell measures to cube (in-place). Parameters ---------- @@ -131,7 +159,7 @@ def add_supplementary_variables( iris.cube.Cube Cube with added ancillary variables and/or cell measures. """ - measure_names = { + measure_names: dict[str, Literal["area", "volume"]] = { "areacella": "area", "areacello": "area", "volcello": "volume", @@ -145,15 +173,14 @@ def add_supplementary_variables( return cube -def remove_supplementary_variables(cube: iris.cube.Cube): - """Remove supplementary variables. +def remove_supplementary_variables(cube: Cube) -> Cube: + """Remove supplementary variables from cube (in-place). - Strip cell measures or ancillary variables from the cube containing the - data. + Strip cell measures or ancillary variables from the cube. Parameters ---------- - cube: iris.cube.Cube + cube: Iris cube with data and cell measures or ancillary variables. Returns diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index 52ae6adff3..83b3029143 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -104,7 +104,7 @@ def extract_volume( return cube.extract(z_constraint) -def calculate_volume(cube: Cube) -> da.core.Array: +def calculate_volume(cube: Cube) -> np.ndarray | da.Array: """Calculate volume from a cube. This function is used when the 'ocean_volume' cell measure can't be found. @@ -119,13 +119,13 @@ def calculate_volume(cube: Cube) -> da.core.Array: Parameters ---------- - cube: iris.cube.Cube + cube: input cube. Returns ------- - dask.array.core.Array - Grid volumes. + np.ndarray | dask.array.Array + Grid volume. """ # Load depth field and figure out which dim is which @@ -158,7 +158,11 @@ def calculate_volume(cube: Cube) -> da.core.Array: # Calculate Z-direction thickness thickness = depth.core_bounds()[..., 1] - depth.core_bounds()[..., 0] if cube.has_lazy_data(): - thickness = da.array(thickness) + z_chunks = tuple(cube.lazy_data().chunks[d] for d in z_dim) + if isinstance(thickness, da.Array): + thickness = thickness.rechunk(z_chunks) + else: + thickness = da.asarray(thickness, chunks=z_chunks) # Get or calculate the horizontal areas of the cube has_cell_measure = bool(cube.cell_measures("cell_area")) @@ -182,6 +186,8 @@ def calculate_volume(cube: Cube) -> da.core.Array: thickness, cube.shape, z_dim, chunks=chunks ) grid_volume = area_arr * thickness_arr + if cube.has_lazy_data(): + grid_volume = grid_volume.rechunk(chunks) return grid_volume @@ -403,7 +409,10 @@ def axis_statistics( def _add_axis_stats_weights_coord(cube, coord, coord_dims): """Add weights for axis_statistics to cube (in-place).""" weights = np.abs(coord.lazy_bounds()[:, 1] - coord.lazy_bounds()[:, 0]) - if not cube.has_lazy_data(): + if cube.has_lazy_data(): + coord_chunks = tuple(cube.lazy_data().chunks[d] for d in coord_dims) + weights = weights.rechunk(coord_chunks) + else: weights = weights.compute() weights_coord = AuxCoord( weights, diff --git a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py index 42dee35f62..777052bc8d 100644 --- a/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py +++ b/tests/integration/preprocessor/_supplementary_vars/test_add_supplementary_variables.py @@ -4,6 +4,7 @@ :func:`esmvalcore.preprocessor._supplementary_vars` module. 
""" +import dask.array as da import iris import iris.fileformats import numpy as np @@ -112,20 +113,37 @@ def setUp(self): ], ) + @pytest.mark.parametrize("lazy", [True, False]) @pytest.mark.parametrize("var_name", ["areacella", "areacello"]) - def test_add_cell_measure_area(self, var_name): + def test_add_cell_measure_area(self, var_name, lazy): """Test add area fx variables as cell measures.""" + if lazy: + self.fx_area.data = self.fx_area.lazy_data() + self.new_cube_data = da.array(self.new_cube_data).rechunk((1, 2)) self.fx_area.var_name = var_name self.fx_area.standard_name = "cell_area" self.fx_area.units = "m2" cube = iris.cube.Cube( self.new_cube_data, dim_coords_and_dims=self.coords_spec ) + cube = add_supplementary_variables(cube, [self.fx_area]) - assert cube.cell_measure(self.fx_area.standard_name) is not None - def test_add_cell_measure_volume(self): + assert cube.has_lazy_data() is lazy + assert cube.cell_measures(self.fx_area.standard_name) + cell_measure = cube.cell_measure(self.fx_area.standard_name) + assert cell_measure.has_lazy_data() is lazy + if lazy: + assert cell_measure.lazy_data().chunks == cube.lazy_data().chunks + + @pytest.mark.parametrize("lazy", [True, False]) + def test_add_cell_measure_volume(self, lazy): """Test add volume as cell measure.""" + if lazy: + self.fx_volume.data = self.fx_volume.lazy_data() + self.new_cube_3D_data = da.array(self.new_cube_3D_data).rechunk( + (1, 2, 3) + ) self.fx_volume.var_name = "volcello" self.fx_volume.standard_name = "ocean_volume" self.fx_volume.units = "m3" @@ -137,8 +155,15 @@ def test_add_cell_measure_volume(self): (self.lons, 2), ], ) + cube = add_supplementary_variables(cube, [self.fx_volume]) - assert cube.cell_measure(self.fx_volume.standard_name) is not None + + assert cube.has_lazy_data() is lazy + assert cube.cell_measures(self.fx_volume.standard_name) + cell_measure = cube.cell_measure(self.fx_volume.standard_name) + assert cell_measure.has_lazy_data() is lazy + if lazy: + assert cell_measure.lazy_data().chunks == cube.lazy_data().chunks def test_no_cell_measure(self): """Test no cell measure is added.""" @@ -153,16 +178,27 @@ def test_no_cell_measure(self): cube = add_supplementary_variables(cube, []) assert cube.cell_measures() == [] - def test_add_supplementary_vars(self): - """Test invalid variable is not added as cell measure.""" + @pytest.mark.parametrize("lazy", [True, False]) + def test_add_ancillary_vars(self, lazy): + """Test adding ancillary variables.""" + if lazy: + self.fx_area.data = self.fx_area.lazy_data() + self.new_cube_data = da.array(self.new_cube_data).rechunk((1, 2)) self.fx_area.var_name = "sftlf" self.fx_area.standard_name = "land_area_fraction" self.fx_area.units = "%" cube = iris.cube.Cube( self.new_cube_data, dim_coords_and_dims=self.coords_spec ) + cube = add_supplementary_variables(cube, [self.fx_area]) - assert cube.ancillary_variable(self.fx_area.standard_name) is not None + + assert cube.has_lazy_data() is lazy + assert cube.ancillary_variables(self.fx_area.standard_name) + anc_var = cube.ancillary_variable(self.fx_area.standard_name) + assert anc_var.has_lazy_data() is lazy + if lazy: + assert anc_var.lazy_data().chunks == cube.lazy_data().chunks def test_wrong_shape(self, monkeypatch): """Test variable is not added if it's not broadcastable to cube.""" diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py index 1dec034b4e..fa032e3dfa 100644 --- a/tests/unit/preprocessor/_volume/test_volume.py +++ 
b/tests/unit/preprocessor/_volume/test_volume.py @@ -205,13 +205,16 @@ def test_add_axis_stats_weights_coord(self): def test_add_axis_stats_weights_coord_lazy(self): """Test _add_axis_stats_weights_coord.""" - self.grid_4d.data = self.grid_4d.lazy_data() - assert not self.grid_4d.coords("_axis_statistics_weights_") - coord = self.grid_4d.coord("zcoord") - coord_dims = self.grid_4d.coord_dims("zcoord") - _add_axis_stats_weights_coord(self.grid_4d, coord, coord_dims) - weights_coord = self.grid_4d.coord("_axis_statistics_weights_") + assert not self.grid_4d_lazy.coords("_axis_statistics_weights_") + coord = self.grid_4d_lazy.coord("zcoord") + coord_dims = self.grid_4d_lazy.coord_dims("zcoord") + _add_axis_stats_weights_coord(self.grid_4d_lazy, coord, coord_dims) + weights_coord = self.grid_4d_lazy.coord("_axis_statistics_weights_") assert weights_coord.has_lazy_points() + assert ( + weights_coord.lazy_points().chunks[0] + == self.grid_4d_lazy.lazy_data().chunks[coord_dims[0]] + ) assert weights_coord.units == "m" np.testing.assert_allclose(weights_coord.points, [2.5, 22.5, 225.0]) @@ -369,7 +372,6 @@ def test_extract_volume(self): """Test to extract the top two layers of a 3 layer depth column.""" result = extract_volume(self.grid_3d, 0.0, 10.0) expected = np.ones((2, 2, 2)) - print(result.data, expected.data) self.assert_array_equal(result.data, expected) def test_extract_volume_intervals(self): @@ -491,7 +493,7 @@ def test_volume_nolevbounds(self): self.assertFalse(self.grid_4d.cell_measures("ocean_volume")) self.assertFalse(result.cell_measures("ocean_volume")) - def test_calculate_volume_lazy(self): + def test_calculate_volume_lazy_cube(self): """Test that calculate_volume returns a lazy volume. The volume chunks should match those of the input cube for @@ -503,6 +505,31 @@ def test_calculate_volume_lazy(self): assert isinstance(volume, da.Array) assert volume.chunks == chunks + def test_calculate_volume_all_lazy(self): + """Test that calculate_volume returns a lazy volume. + + The volume chunks should match those of the input cube for + computational efficiency. + """ + # Only aux coords can have lazy bounds + z_coord = self.grid_4d_lazy.coord("zcoord") + z_aux_coord = iris.coords.AuxCoord( + z_coord.lazy_points(), + bounds=z_coord.lazy_bounds(), + long_name="zcoord", + units="m", + attributes={"positive": "up"}, + ) + self.grid_4d_lazy.remove_coord("zcoord") + self.grid_4d_lazy.add_aux_coord(z_aux_coord, 1) + chunks = self.grid_4d_lazy.core_data().chunks + + volume = calculate_volume(self.grid_4d_lazy) + + assert self.grid_4d_lazy.has_lazy_data() + assert isinstance(volume, da.Array) + assert volume.chunks == chunks + def test_volume_statistics_cell_measure(self): """Test to take the volume weighted average of a (2,3,2,2) cube. 
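A minimal sketch (not part of the patch above) of the chunk alignment that #2637 enforces: when a lazy cell measure or ancillary variable is attached to a cube, its dask chunks should end up matching the parent cube's chunks along the shared (rightmost) dimensions. The ``tas``/``areacella`` cubes below are invented for illustration, the import mirrors the private-module import used in the tests, and the sketch assumes a version of ESMValCore that includes this change.

.. code-block:: python

    import dask.array as da
    import iris.cube

    from esmvalcore.preprocessor._supplementary_vars import (
        add_supplementary_variables,
    )

    # Lazy (time, lat, lon) cube, chunked along time only.
    cube = iris.cube.Cube(
        da.zeros((4, 3, 2), chunks=(1, 3, 2)), var_name="tas", units="K"
    )

    # Lazy (lat, lon) cell area whose chunks do not match the cube's.
    areacella = iris.cube.Cube(
        da.ones((3, 2), chunks=(1, 1)),
        var_name="areacella",
        standard_name="cell_area",
        units="m2",
    )

    cube = add_supplementary_variables(cube, [areacella])
    cell_measure = cube.cell_measure("cell_area")

    # The cell measure has been rechunked to the cube's (lat, lon) chunks.
    print(cell_measure.lazy_data().chunks)  # ((3,), (2,))
    print(cube.lazy_data().chunks[1:])      # ((3,), (2,))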
From 31fe0d49635e8b564cdc0be5864865d98d88ce40 Mon Sep 17 00:00:00 2001 From: Liza Malinina <66973360+malininae@users.noreply.github.com> Date: Wed, 22 Jan 2025 05:49:14 -0800 Subject: [PATCH 35/36] Adding hurs (realtive humidity) derivation script (#2397) Co-authored-by: Elizaveta Malinina --- esmvalcore/preprocessor/_derive/hurs.py | 54 +++++++++++++++ tests/unit/preprocessor/_derive/test_hurs.py | 70 ++++++++++++++++++++ 2 files changed, 124 insertions(+) create mode 100644 esmvalcore/preprocessor/_derive/hurs.py create mode 100644 tests/unit/preprocessor/_derive/test_hurs.py diff --git a/esmvalcore/preprocessor/_derive/hurs.py b/esmvalcore/preprocessor/_derive/hurs.py new file mode 100644 index 0000000000..01fdec9284 --- /dev/null +++ b/esmvalcore/preprocessor/_derive/hurs.py @@ -0,0 +1,54 @@ +"""Derivation of variable `hurs`.""" + +import cf_units +import dask.array as da +import iris +from iris import NameConstraint + +from ._baseclass import DerivedVariableBase + +# Constants +GAS_CONSTANT_WV = 461.5 # JK-1kg-1 +ENTALPY_OF_VAPORIZATION = 2.501e6 # Jkg-1 + + +class DerivedVariable(DerivedVariableBase): + """Derivation of variable `hurs`.""" + + @staticmethod + def required(project): + """Declare the variables needed for derivation.""" + required = [ + {"short_name": "tdps"}, + {"short_name": "tas"}, + ] + return required + + @staticmethod + def calculate(cubes): + """Compute relative humidity. + + Relative humidity computed from dewpoint temperature and + surface air temperature following Bohren and Albrecht 1998. + """ + tdps_cube = cubes.extract_cube(NameConstraint(var_name="tdps")) + tas_cube = cubes.extract_cube(NameConstraint(var_name="tas")) + + cubes_difference = tas_cube - tdps_cube + cubes_product = tas_cube * tdps_cube + + log_humidity_cube = ( + -ENTALPY_OF_VAPORIZATION + * cubes_difference + / (GAS_CONSTANT_WV * cubes_product) + ) + + hurs_cube = 100 * iris.analysis.maths.exp(log_humidity_cube) + + hurs_cube.units = cf_units.Unit("%") + + hurs_cube.data = da.ma.where( + hurs_cube.core_data() > 100.0, 100.0, hurs_cube.core_data() + ) + + return hurs_cube diff --git a/tests/unit/preprocessor/_derive/test_hurs.py b/tests/unit/preprocessor/_derive/test_hurs.py new file mode 100644 index 0000000000..fcfd117d6c --- /dev/null +++ b/tests/unit/preprocessor/_derive/test_hurs.py @@ -0,0 +1,70 @@ +"""Test derivation of ``hurs``.""" + +import iris +import numpy as np +import pytest +from iris.cube import Cube, CubeList + +from esmvalcore.preprocessor._derive import hurs + + +@pytest.fixture +def cubes(): + """Input cubes for derivation of ``hurs``.""" + time_coord = iris.coords.DimCoord( + [0.0, 1.0, 2.0, 3.0], + standard_name="time", + var_name="time", + units="days since 1950-01-01 00:00:00", + ) + lat_coord = iris.coords.DimCoord( + [45.0], standard_name="latitude", var_name="lat", units="degrees" + ) + lon_coord = iris.coords.DimCoord( + [10.0], standard_name="longitude", var_name="lon", units="degrees" + ) + + coord_specs = [(time_coord, 0), (lat_coord, 1), (lon_coord, 2)] + + tdps_cube = Cube( + [[[279.17]], [[282.73]], [[288.15]], [[288.25]]], + dim_coords_and_dims=coord_specs, + standard_name="dew_point_temperature", + var_name="tdps", + units="K", + ) + tas_cube = Cube( + [[[288.15]], [[288.15]], [[288.15]], [[288.15]]], + dim_coords_and_dims=coord_specs, + standard_name="air_temperature", + var_name="tas", + units="K", + ) + return CubeList([tdps_cube, tas_cube]) + + +def test_hurs_calculate(cubes): + """Test function ``calculate``.""" + derived_var = hurs.DerivedVariable() 
+ required_vars = derived_var.required("CMIP6") + expected_required_vars = [ + {"short_name": "tdps"}, + {"short_name": "tas"}, + ] + assert required_vars == expected_required_vars + out_cube = derived_var.calculate(cubes) + assert out_cube.shape == (4, 1, 1) + assert out_cube.units == "%" + assert out_cube.coords("time") + assert out_cube.coords("latitude") + assert out_cube.coords("longitude") + np.testing.assert_allclose( + out_cube.data, + [[[54.6093]], [[69.7301]], [[100.0]], [[100.0]]], + rtol=0.00005, + ) + np.testing.assert_allclose( + out_cube.coord("time").points, [0.0, 1.0, 2.0, 3.0] + ) + np.testing.assert_allclose(out_cube.coord("latitude").points, [45.0]) + np.testing.assert_allclose(out_cube.coord("longitude").points, [10.0]) From 1e342016c6539d61b835d70b268b27e8db09d80b Mon Sep 17 00:00:00 2001 From: Manuel Schlund <32543114+schlunma@users.noreply.github.com> Date: Fri, 24 Jan 2025 15:20:06 +0100 Subject: [PATCH 36/36] Added cumulative sum preprocessor (#2642) Co-authored-by: Bouwe Andela --- doc/recipe/preprocessor.rst | 44 ++- esmvalcore/iris_helpers.py | 2 +- esmvalcore/preprocessor/__init__.py | 5 +- esmvalcore/preprocessor/_other.py | 86 +++++- esmvalcore/preprocessor/_shared.py | 88 +++--- esmvalcore/preprocessor/_time.py | 4 +- esmvalcore/preprocessor/_volume.py | 18 +- .../test_compare_with_refs.py | 5 +- tests/unit/preprocessor/_other/test_other.py | 153 +++++++++- tests/unit/preprocessor/_time/test_time.py | 109 -------- .../unit/preprocessor/_volume/test_volume.py | 8 +- tests/unit/preprocessor/test_shared.py | 263 +++++++++++++++++- 12 files changed, 608 insertions(+), 177 deletions(-) diff --git a/doc/recipe/preprocessor.rst b/doc/recipe/preprocessor.rst index 37b1f8675b..0daf2ae0c7 100644 --- a/doc/recipe/preprocessor.rst +++ b/doc/recipe/preprocessor.rst @@ -2870,8 +2870,46 @@ Other Miscellaneous functions that do not belong to any of the other categories. -Clip ----- +.. _cumulative_sum: + +``cumulative_sum`` +------------------ + +This function calculates cumulative sums along a given coordinate. + +The ``cumulative_sum`` preprocessor supports the following arguments in the +recipe: + +* ``coord`` (:obj:`str`): Coordinate over which the cumulative sum is + calculated. + Must be 0D or 1D. +* ``weights`` (array-like, :obj:`bool`, or ``None``, default: ``None``): + Weights for the calculation of the cumulative sum. + Each element in the data is multiplied by the corresponding weight before + summing. + Can be an array of the same shape as the input data, ``False`` or ``None`` + (no weighting), or ``True`` (calculate the weights from the coordinate + bounds; only works if each coordinate point has exactly 2 bounds). +* ``method`` (:obj:`str`, default: ``"sequential"``): Method used to perform + the cumulative sum. + Only relevant if the cube has `lazy data + `__. + See :func:`dask.array.cumsum` for details. + +Example: + +.. code-block:: yaml + + preprocessors: + preproc_cumulative_sum: + cumulative_sum: + coord: time + weights: true + +See also :func:`esmvalcore.preprocessor.cumulative_sum`. + +``clip`` +-------- This function clips data values to a certain minimum, maximum or range. The function takes two arguments: @@ -2892,7 +2930,7 @@ The example below shows how to set all values below zero to zero. .. _histogram: ``histogram`` -------------------- +------------- This function calculates histograms. 
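As a minimal sketch of how the ``cumulative_sum`` preprocessor documented above might also be called directly from Python (not part of the patch itself; it assumes ESMValCore with this patch installed, and the tiny cube and its values are invented for illustration):

    from iris.coords import DimCoord
    from iris.cube import Cube

    from esmvalcore.preprocessor import cumulative_sum

    # A tiny cube with a bounded time coordinate (two 30-day intervals).
    time = DimCoord(
        [15.0, 45.0],
        bounds=[[0.0, 30.0], [30.0, 60.0]],
        standard_name="time",
        units="days since 2000-01-01",
    )
    cube = Cube(
        [1.0, 2.0],
        var_name="pr",
        units="kg m-2 s-1",
        dim_coords_and_dims=[(time, 0)],
    )

    # Plain cumulative sum along time: [1.0, 3.0].
    print(cumulative_sum(cube, "time").data)

    # With weights=True each value is first multiplied by the width of its
    # time bounds (30 days here), so the result is [30.0, 90.0] and the
    # units are multiplied by the time unit (days).
    print(cumulative_sum(cube, "time", weights=True).data)
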
diff --git a/esmvalcore/iris_helpers.py b/esmvalcore/iris_helpers.py index 767c3ec260..9805629c93 100644 --- a/esmvalcore/iris_helpers.py +++ b/esmvalcore/iris_helpers.py @@ -39,7 +39,7 @@ def add_leading_dim_to_cube(cube, dim_coord): Raises ------ - CoordinateMultiDimError + iris.exceptions.CoordinateMultiDimError ``dim_coord`` is not 1D. """ diff --git a/esmvalcore/preprocessor/__init__.py b/esmvalcore/preprocessor/__init__.py index 6ba0d7c946..9416a6fc73 100644 --- a/esmvalcore/preprocessor/__init__.py +++ b/esmvalcore/preprocessor/__init__.py @@ -49,7 +49,7 @@ mask_outside_range, ) from ._multimodel import ensemble_statistics, multi_model_statistics -from ._other import clip, histogram +from ._other import clip, cumulative_sum, histogram from ._regrid import ( extract_coordinate_points, extract_levels, @@ -146,7 +146,8 @@ # Other "clip", "rolling_window_statistics", - # Region selection + "cumulative_sum", + # Region operations "extract_region", "extract_shape", "extract_volume", diff --git a/esmvalcore/preprocessor/_other.py b/esmvalcore/preprocessor/_other.py index 995bfd30d1..e7bde23e8d 100644 --- a/esmvalcore/preprocessor/_other.py +++ b/esmvalcore/preprocessor/_other.py @@ -13,12 +13,14 @@ import numpy as np from iris.coords import Coord, DimCoord from iris.cube import Cube +from iris.exceptions import CoordinateMultiDimError from esmvalcore.iris_helpers import rechunk_cube from esmvalcore.preprocessor._shared import ( get_all_coord_dims, get_all_coords, get_array_module, + get_coord_weights, get_weights, preserve_float_dtype, ) @@ -58,6 +60,84 @@ def clip(cube, minimum=None, maximum=None): return cube +@preserve_float_dtype +def cumulative_sum( + cube: Cube, + coord: Coord | str, + weights: np.ndarray | da.Array | bool | None = None, + method: Literal["sequential", "blelloch"] = "sequential", +) -> Cube: + """Calculate cumulative sum of the elements along a given coordinate. + + Parameters + ---------- + cube: + Input cube. + coord: + Coordinate over which the cumulative sum is calculated. Must be 0D or + 1D. + weights: + Weights for the calculation of the cumulative sum. Each element in the + data is multiplied by the corresponding weight before summing. Can be + an array of the same shape as the input data, ``False`` or ``None`` (no + weighting), or ``True`` (calculate the weights from the coordinate + bounds; only works if each coordinate point has exactly 2 bounds). + method: + Method used to perform the cumulative sum. Only relevant if the cube + has `lazy data + `__. See :func:`dask.array.cumsum` for details. + + Returns + ------- + Cube + Cube of cumulative sum. Has same dimensions and coordinates of the + input cube. + + Raises + ------ + iris.exceptions.CoordinateMultiDimError + ``coord`` is not 0D or 1D. + iris.exceptions.CoordinateNotFoundError + ``coord`` is not found in ``cube``. 
+ + """ + cube = cube.copy() + + # Only 0D and 1D coordinates are supported + coord = cube.coord(coord) + if coord.ndim > 1: + raise CoordinateMultiDimError(coord) + + # Weighting, make sure to adapt cube standard name and units in this case + if weights is True: + weights = get_coord_weights(cube, coord, broadcast=True) + if isinstance(weights, (np.ndarray, da.Array)): + cube.data = cube.core_data() * weights + cube.standard_name = None + cube.units = cube.units * coord.units + + axes = get_all_coord_dims(cube, [coord]) + + # For 0D coordinates, cumulative_sum is a no-op (this aligns with + # numpy's/dask's behavior) + if axes: + if cube.has_lazy_data(): + cube.data = da.cumsum( + cube.core_data(), axis=axes[0], method=method + ) + else: + cube.data = np.cumsum(cube.core_data(), axis=axes[0]) + + # Adapt cube metadata + if cube.var_name is not None: + cube.var_name = f"cumulative_{cube.var_name}" + if cube.long_name is not None: + cube.long_name = f"Cumulative {cube.long_name}" + + return cube + + @preserve_float_dtype def histogram( cube: Cube, @@ -133,8 +213,10 @@ def histogram( Invalid `normalization` or `bin_range` given or `bin_range` is ``None`` and data is fully masked. iris.exceptions.CoordinateNotFoundError - `longitude` is not found in cube if `weights=True`, `latitude` is in - `coords`, and no `cell_area` is given as + A given coordinate of ``coords`` is not found in ``cube``. + iris.exceptions.CoordinateNotFoundError + `longitude` is not found in cube if ``weights=True``, `latitude` is in + ``coords``, and no `cell_area` is given as :ref:`supplementary_variables`. """ diff --git a/esmvalcore/preprocessor/_shared.py b/esmvalcore/preprocessor/_shared.py index adf45ca1c2..7ed6ae4375 100644 --- a/esmvalcore/preprocessor/_shared.py +++ b/esmvalcore/preprocessor/_shared.py @@ -301,15 +301,11 @@ def get_weights( """Calculate suitable weights for given coordinates.""" npx = get_array_module(cube.core_data()) weights = npx.ones_like(cube.core_data()) + coords = [c.name() if hasattr(c, "name") else c for c in coords] # Time weights: lengths of time interval if "time" in coords: - weights = weights * broadcast_to_shape( - npx.array(get_time_weights(cube)), - cube.shape, - cube.coord_dims("time"), - chunks=cube.lazy_data().chunks if cube.has_lazy_data() else None, - ) + weights = weights * get_coord_weights(cube, "time", broadcast=True) # Latitude weights: cell areas if "latitude" in coords: @@ -319,9 +315,8 @@ def get_weights( ): raise CoordinateNotFoundError( f"Cube {cube.summary(shorten=True)} needs a `longitude` " - f"coordinate to calculate cell area weights for weighted " - f"distance metric over coordinates {coords} (alternatively, " - f"a `cell_area` can be given to the cube as supplementary " + f"coordinate to calculate cell area weights (alternatively, a " + f"`cell_area` can be given to the cube as supplementary " f"variable)" ) try_adding_calculated_cell_area(cube) @@ -341,43 +336,74 @@ def get_weights( return weights -def get_time_weights(cube: Cube) -> np.ndarray | da.core.Array: - """Compute the weighting of the time axis. +def get_coord_weights( + cube: Cube, + coord: str | Coord, + broadcast: bool = False, +) -> np.ndarray | da.core.Array: + """Compute weighting for an arbitrary coordinate. + + Weights are calculated as the difference between the upper and lower + bounds. Parameters ---------- cube: Input cube. + coord: + Coordinate which is used to calculate the weights. Must have bounds + array with 2 bounds per point. 
+ broadcast: + If ``False``, weights have the shape of ``coord``. If ``True``, + broadcast weights to shape of cube. Returns ------- np.ndarray or da.Array - Array of time weights for averaging. Returns a - :class:`dask.array.Array` if the input cube has lazy data; a - :class:`numpy.ndarray` otherwise. + Array of axis weights. Returns a :class:`dask.array.Array` if the input + cube has lazy data; a :class:`numpy.ndarray` otherwise. """ - time = cube.coord("time") - coord_dims = cube.coord_dims("time") + coord = cube.coord(coord) + coord_dims = cube.coord_dims(coord) - # Multidimensional time coordinates are not supported: In this case, - # weights cannot be simply calculated as difference between the bounds - if len(coord_dims) > 1: + # Coordinate needs bounds of size 2 + if not coord.has_bounds(): + raise ValueError( + f"Cannot calculate weights for coordinate '{coord.name()}' " + f"without bounds" + ) + if coord.core_bounds().shape[-1] != 2: raise ValueError( - f"Weighted statistical operations are not supported for " - f"{len(coord_dims):d}D time coordinates, expected 0D or 1D" + f"Cannot calculate weights for coordinate '{coord.name()}' " + f"with {coord.core_bounds().shape[-1]} bounds per point, expected " + f"2 bounds per point" ) - # Extract 1D time weights (= lengths of time intervals) - time_weights = time.lazy_bounds()[:, 1] - time.lazy_bounds()[:, 0] - if cube.has_lazy_data(): - # Align the weight chunks with the data chunks to avoid excessively - # large chunks as a result of broadcasting. - time_chunks = cube.lazy_data().chunks[coord_dims[0]] - time_weights = time_weights.rechunk(time_chunks) - else: - time_weights = time_weights.compute() - return time_weights + # Calculate weights of same shape as coordinate and make sure to use + # identical chunks as parent cube for non-scalar lazy data + weights = np.abs(coord.lazy_bounds()[:, 1] - coord.lazy_bounds()[:, 0]) + if cube.has_lazy_data() and coord_dims: + coord_chunks = tuple(cube.lazy_data().chunks[d] for d in coord_dims) + weights = weights.rechunk(coord_chunks) + if not cube.has_lazy_data(): + weights = weights.compute() + + # Broadcast to cube shape if desired; scalar arrays needs special treatment + # since iris.broadcast_to_shape cannot handle this + if broadcast: + chunks = cube.lazy_data().chunks if cube.has_lazy_data() else None + if coord_dims: + weights = broadcast_to_shape( + weights, cube.shape, coord_dims, chunks=chunks + ) + else: + if cube.has_lazy_data(): + weights = da.broadcast_to(weights, cube.shape, chunks=chunks) + else: + weights = np.broadcast_to(weights, cube.shape) + + return weights def try_adding_calculated_cell_area(cube: Cube) -> None: diff --git a/esmvalcore/preprocessor/_time.py b/esmvalcore/preprocessor/_time.py index ac00f13d01..c2d516696b 100644 --- a/esmvalcore/preprocessor/_time.py +++ b/esmvalcore/preprocessor/_time.py @@ -34,8 +34,8 @@ from esmvalcore.cmor.fixes import get_next_month, get_time_bounds from esmvalcore.iris_helpers import date2num, rechunk_cube from esmvalcore.preprocessor._shared import ( + get_coord_weights, get_iris_aggregator, - get_time_weights, preserve_float_dtype, update_weights_kwargs, ) @@ -867,7 +867,7 @@ def climate_statistics( def _add_time_weights_coord(cube): """Add time weight coordinate to cube (in-place).""" time_weights_coord = AuxCoord( - get_time_weights(cube), + get_coord_weights(cube, "time"), long_name="_time_weights_", units=cube.coord("time").units, ) diff --git a/esmvalcore/preprocessor/_volume.py b/esmvalcore/preprocessor/_volume.py index 
83b3029143..7f79a29654 100644 --- a/esmvalcore/preprocessor/_volume.py +++ b/esmvalcore/preprocessor/_volume.py @@ -17,14 +17,17 @@ from iris.cube import Cube from iris.util import broadcast_to_shape -from ._shared import ( +from esmvalcore.preprocessor._shared import ( + get_coord_weights, get_iris_aggregator, get_normalized_cube, preserve_float_dtype, try_adding_calculated_cell_area, update_weights_kwargs, ) -from ._supplementary_vars import register_supplementaries +from esmvalcore.preprocessor._supplementary_vars import ( + register_supplementaries, +) logger = logging.getLogger(__name__) @@ -379,7 +382,6 @@ def axis_statistics( cube, _add_axis_stats_weights_coord, coord=coord, - coord_dims=coord_dims, ) with warnings.catch_warnings(): @@ -406,19 +408,15 @@ def axis_statistics( return result -def _add_axis_stats_weights_coord(cube, coord, coord_dims): +def _add_axis_stats_weights_coord(cube, coord): """Add weights for axis_statistics to cube (in-place).""" - weights = np.abs(coord.lazy_bounds()[:, 1] - coord.lazy_bounds()[:, 0]) - if cube.has_lazy_data(): - coord_chunks = tuple(cube.lazy_data().chunks[d] for d in coord_dims) - weights = weights.rechunk(coord_chunks) - else: - weights = weights.compute() + weights = get_coord_weights(cube, coord) weights_coord = AuxCoord( weights, long_name="_axis_statistics_weights_", units=coord.units, ) + coord_dims = cube.coord_dims(coord) cube.add_aux_coord(weights_coord, coord_dims) diff --git a/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py b/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py index 1140323e61..39675c9a67 100644 --- a/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py +++ b/tests/unit/preprocessor/_compare_with_refs/test_compare_with_refs.py @@ -875,9 +875,8 @@ def test_distance_metric_no_lon_for_area_weights(regular_cubes, metric, error): ref_cube = regular_cubes[0].copy() msg = ( r"Cube .* needs a `longitude` coordinate to calculate cell area " - r"weights for weighted distance metric over coordinates \['time', " - r"'latitude'\] \(alternatively, a `cell_area` can be given to the " - r"cube as supplementary variable\)" + r"weights \(alternatively, a `cell_area` can be given to the cube as " + r"supplementary variable\)" ) if error: context = pytest.raises(CoordinateNotFoundError, match=msg) diff --git a/tests/unit/preprocessor/_other/test_other.py b/tests/unit/preprocessor/_other/test_other.py index c50bed0a83..021fd480eb 100644 --- a/tests/unit/preprocessor/_other/test_other.py +++ b/tests/unit/preprocessor/_other/test_other.py @@ -17,9 +17,10 @@ DimCoord, ) from iris.cube import Cube +from iris.exceptions import CoordinateMultiDimError from numpy.testing import assert_array_equal -from esmvalcore.preprocessor._other import clip, histogram +from esmvalcore.preprocessor._other import clip, cumulative_sum, histogram from tests.unit.preprocessor._compare_with_refs.test_compare_with_refs import ( get_3d_cube, ) @@ -442,5 +443,155 @@ def test_histogram_invalid_normalization(cube): histogram(cube, normalization="invalid") +@pytest.mark.parametrize("lazy", [True, False]) +def test_cumulative_sum_time(cube, lazy): + """Test `cumulative_sum`.""" + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "time") + + assert result is not cube + assert result.standard_name == "air_temperature" + assert result.var_name == "cumulative_tas" + assert result.long_name is None + assert result.units == "K" + assert result.shape == cube.shape + assert result.dtype == 
cube.dtype + assert result.has_lazy_data() is lazy + expected_data = np.ma.masked_invalid( + [[[0.0, 4.0], [np.nan, 6.0]], [[1.0, 9.0], [np.nan, 13.0]]], + ) + np.testing.assert_allclose(result.data, expected_data) + np.testing.assert_allclose(result.data.mask, expected_data.mask) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_cumulative_sum_time_weighted(cube, lazy): + """Test `cumulative_sum`.""" + cube.var_name = None + cube.long_name = "Air Temperature" + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "time", weights=True) + + assert result is not cube + assert result.standard_name is None + assert result.var_name is None + assert result.long_name == "Cumulative Air Temperature" + assert result.units == "K.d" + assert result.shape == cube.shape + assert result.dtype == cube.dtype + assert result.has_lazy_data() is lazy + expected_data = np.ma.masked_invalid( + [[[0.0, 24.0], [np.nan, 36.0]], [[2.0, 34.0], [np.nan, 50.0]]], + ) + np.testing.assert_allclose(result.data, expected_data) + np.testing.assert_allclose(result.data.mask, expected_data.mask) + + +@pytest.mark.parametrize("weights", [False, None]) +@pytest.mark.parametrize("lazy", [True, False]) +def test_cumulative_sum_latitude(cube, lazy, weights): + """Test `cumulative_sum`.""" + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "latitude", weights=weights) + + assert result is not cube + assert result.standard_name == "air_temperature" + assert result.var_name == "cumulative_tas" + assert result.long_name is None + assert result.units == "K" + assert result.shape == cube.shape + assert result.dtype == cube.dtype + assert result.has_lazy_data() is lazy + expected_data = np.ma.masked_invalid( + [[[0.0, 4.0], [np.nan, 10.0]], [[1.0, 5.0], [np.nan, 12.0]]], + ) + np.testing.assert_allclose(result.data, expected_data) + np.testing.assert_allclose(result.data.mask, expected_data.mask) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_cumulative_sum_latitude_weighted(cube, lazy): + """Test `cumulative_sum`.""" + cube.var_name = None + cube.long_name = "Air Temperature" + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "latitude", weights=cube.core_data()) + + assert result is not cube + assert result.standard_name is None + assert result.var_name is None + assert result.long_name == "Cumulative Air Temperature" + assert result.units == "K.degrees_north" + assert result.shape == cube.shape + assert result.dtype == cube.dtype + assert result.has_lazy_data() is lazy + expected_data = np.ma.masked_invalid( + [[[0.0, 16.0], [np.nan, 52.0]], [[1.0, 25.0], [np.nan, 74.0]]], + ) + np.testing.assert_allclose(result.data, expected_data) + np.testing.assert_allclose(result.data.mask, expected_data.mask) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_cumulative_sum_scalar_longitude(cube, lazy): + """Test `cumulative_sum`.""" + cube = cube.collapsed("longitude", iris.analysis.SUM) + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "longitude") + + assert result is not cube + assert result.standard_name == "air_temperature" + assert result.var_name == "cumulative_tas" + assert result.long_name is None + assert result.units == "K" + assert result.shape == cube.shape + assert result.dtype == cube.dtype + assert result.has_lazy_data() is lazy + np.testing.assert_allclose(result.data, [[4.0, 6.0], [6.0, 7.0]]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def 
test_cumulative_sum_scalar_longitude_weighted(cube, lazy): + """Test `cumulative_sum`.""" + cube = cube.collapsed("longitude", iris.analysis.SUM) + if lazy: + cube.data = cube.lazy_data() + + result = cumulative_sum(cube, "longitude", weights=True) + + assert result is not cube + assert result.standard_name is None + assert result.var_name == "cumulative_tas" + assert result.long_name is None + assert result.units == "K.degrees_east" + assert result.shape == cube.shape + assert result.dtype == cube.dtype + assert result.has_lazy_data() is lazy + np.testing.assert_allclose(result.data, [[40.0, 60.0], [60.0, 70.0]]) + + +def test_cumulative_sum_invalid_coordinate(cube): + """Test `cumulative_sum`.""" + aux_coord = AuxCoord(np.ones((2, 2)), var_name="aux_2d") + cube.add_aux_coord(aux_coord, (0, 1)) + msg = r"Multi-dimensional coordinate not supported: 'aux_2d'" + + with pytest.raises(CoordinateMultiDimError, match=msg): + cumulative_sum(cube, coord="aux_2d") + + with pytest.raises(CoordinateMultiDimError, match=msg): + cumulative_sum(cube, coord=aux_coord) + + if __name__ == "__main__": unittest.main() diff --git a/tests/unit/preprocessor/_time/test_time.py b/tests/unit/preprocessor/_time/test_time.py index e6c7ca09e6..9934558f90 100644 --- a/tests/unit/preprocessor/_time/test_time.py +++ b/tests/unit/preprocessor/_time/test_time.py @@ -36,7 +36,6 @@ extract_month, extract_season, extract_time, - get_time_weights, hourly_statistics, monthly_statistics, regrid_time, @@ -1905,17 +1904,6 @@ def test_anomalies_hourly(period): assert result.coord("time") == cube.coord("time") -def get_0d_time(): - """Get 0D time coordinate.""" - time = iris.coords.AuxCoord( - 15.0, - bounds=[0.0, 30.0], - standard_name="time", - units="days since 1850-01-01 00:00:00", - ) - return time - - def get_1d_time(): """Get 1D time coordinate.""" time = iris.coords.DimCoord( @@ -1927,17 +1915,6 @@ def get_1d_time(): return time -def get_2d_time(): - """Get 2D time coordinate.""" - time = iris.coords.AuxCoord( - [[20.0, 45.0]], - standard_name="time", - bounds=[[[15.0, 30.0], [30.0, 60.0]]], - units=Unit("days since 1950-01-01", calendar="gregorian"), - ) - return time - - def get_lon_coord(): """Get longitude coordinate.""" lons = iris.coords.DimCoord( @@ -1982,92 +1959,6 @@ def _make_cube(): return cube1 -def test_get_time_weights(): - """Test ``get_time_weights`` for complex cube.""" - cube = _make_cube() - weights = get_time_weights(cube) - assert isinstance(weights, np.ndarray) - assert weights.shape == (2,) - np.testing.assert_allclose(weights, [15.0, 30.0]) - - -def test_get_time_weights_lazy(): - """Test ``get_time_weights`` for complex cube with lazy data.""" - cube = _make_cube() - cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) - weights = get_time_weights(cube) - assert isinstance(weights, da.Array) - assert weights.shape == (2,) - assert weights.chunks == ((1, 1),) - np.testing.assert_allclose(weights, [15.0, 30.0]) - - -def test_get_time_weights_0d_time(): - """Test ``get_time_weights`` for 0D time coordinate.""" - time = get_0d_time() - cube = iris.cube.Cube( - 0.0, var_name="x", units="K", aux_coords_and_dims=[(time, ())] - ) - weights = get_time_weights(cube) - assert weights.shape == (1,) - np.testing.assert_allclose(weights, [30.0]) - - -def test_get_time_weights_0d_time_1d_lon(): - """Test ``get_time_weights`` for 0D time and 1D longitude coordinate.""" - time = get_0d_time() - lons = get_lon_coord() - cube = iris.cube.Cube( - [0.0, 0.0, 0.0], - var_name="x", - units="K", - aux_coords_and_dims=[(time, 
())], - dim_coords_and_dims=[(lons, 0)], - ) - weights = get_time_weights(cube) - assert weights.shape == (1,) - np.testing.assert_allclose(weights, [30.0]) - - -def test_get_time_weights_1d_time(): - """Test ``get_time_weights`` for 1D time coordinate.""" - time = get_1d_time() - cube = iris.cube.Cube( - [0.0, 1.0], var_name="x", units="K", dim_coords_and_dims=[(time, 0)] - ) - weights = get_time_weights(cube) - assert weights.shape == (2,) - np.testing.assert_allclose(weights, [15.0, 30.0]) - - -def test_get_time_weights_1d_time_1d_lon(): - """Test ``get_time_weights`` for 1D time and 1D longitude coordinate.""" - time = get_1d_time() - lons = get_lon_coord() - cube = iris.cube.Cube( - [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], - var_name="x", - units="K", - dim_coords_and_dims=[(time, 0), (lons, 1)], - ) - weights = get_time_weights(cube) - assert weights.shape == (2,) - np.testing.assert_allclose(weights, [15.0, 30.0]) - - -def test_get_time_weights_2d_time(): - """Test ``get_time_weights`` for 1D time coordinate.""" - time = get_2d_time() - cube = iris.cube.Cube( - [[0.0, 1.0]], - var_name="x", - units="K", - aux_coords_and_dims=[(time, (0, 1))], - ) - with pytest.raises(ValueError): - get_time_weights(cube) - - def test_climate_statistics_0d_time_1d_lon(): """Test climate statistics.""" time = iris.coords.DimCoord( diff --git a/tests/unit/preprocessor/_volume/test_volume.py b/tests/unit/preprocessor/_volume/test_volume.py index fa032e3dfa..a4b05fc37b 100644 --- a/tests/unit/preprocessor/_volume/test_volume.py +++ b/tests/unit/preprocessor/_volume/test_volume.py @@ -196,8 +196,7 @@ def test_add_axis_stats_weights_coord(self): """Test _add_axis_stats_weights_coord.""" assert not self.grid_4d.coords("_axis_statistics_weights_") coord = self.grid_4d.coord("zcoord") - coord_dims = self.grid_4d.coord_dims("zcoord") - _add_axis_stats_weights_coord(self.grid_4d, coord, coord_dims) + _add_axis_stats_weights_coord(self.grid_4d, coord) weights_coord = self.grid_4d.coord("_axis_statistics_weights_") assert not weights_coord.has_lazy_points() assert weights_coord.units == "m" @@ -207,13 +206,12 @@ def test_add_axis_stats_weights_coord_lazy(self): """Test _add_axis_stats_weights_coord.""" assert not self.grid_4d_lazy.coords("_axis_statistics_weights_") coord = self.grid_4d_lazy.coord("zcoord") - coord_dims = self.grid_4d_lazy.coord_dims("zcoord") - _add_axis_stats_weights_coord(self.grid_4d_lazy, coord, coord_dims) + _add_axis_stats_weights_coord(self.grid_4d_lazy, coord) weights_coord = self.grid_4d_lazy.coord("_axis_statistics_weights_") assert weights_coord.has_lazy_points() assert ( weights_coord.lazy_points().chunks[0] - == self.grid_4d_lazy.lazy_data().chunks[coord_dims[0]] + == self.grid_4d_lazy.lazy_data().chunks[1] ) assert weights_coord.units == "m" np.testing.assert_allclose(weights_coord.points, [2.5, 22.5, 225.0]) diff --git a/tests/unit/preprocessor/test_shared.py b/tests/unit/preprocessor/test_shared.py index b449e1998f..46a6283573 100644 --- a/tests/unit/preprocessor/test_shared.py +++ b/tests/unit/preprocessor/test_shared.py @@ -9,7 +9,7 @@ import pytest from cf_units import Unit from iris.aux_factory import HybridPressureFactory -from iris.coords import AuxCoord +from iris.coords import AuxCoord, DimCoord from iris.cube import Cube from esmvalcore.preprocessor import PreprocessorFile @@ -20,11 +20,17 @@ aggregator_accept_weights, apply_mask, get_array_module, + get_coord_weights, get_iris_aggregator, preserve_float_dtype, try_adding_calculated_cell_area, ) from tests import 
assert_array_equal +from tests.unit.preprocessor._time.test_time import ( + _make_cube, + get_1d_time, + get_lon_coord, +) @pytest.mark.parametrize("operator", ["gmean", "GmEaN", "GMEAN"]) @@ -250,7 +256,7 @@ def test_get_array_module_mixed(): def _create_sample_full_cube(): cube = Cube(np.zeros((4, 180, 360)), var_name="co2", units="J") cube.add_dim_coord( - iris.coords.DimCoord( + DimCoord( np.array([10.0, 40.0, 70.0, 110.0]), standard_name="time", units=Unit("days since 1950-01-01 00:00:00", calendar="gregorian"), @@ -258,7 +264,7 @@ def _create_sample_full_cube(): 0, ) cube.add_dim_coord( - iris.coords.DimCoord( + DimCoord( np.arange(-90.0, 90.0, 1.0), standard_name="latitude", units="degrees", @@ -266,7 +272,7 @@ def _create_sample_full_cube(): 1, ) cube.add_dim_coord( - iris.coords.DimCoord( + DimCoord( np.arange(0.0, 360.0, 1.0), standard_name="longitude", units="degrees", @@ -382,7 +388,7 @@ def test_apply_mask(mask, array, dim_map, expected): def test_rechunk_aux_factory_dependencies(): - delta = iris.coords.AuxCoord( + delta = AuxCoord( points=np.array([0.0, 1.0, 2.0], dtype=np.float64), bounds=np.array( [[-0.5, 0.5], [0.5, 1.5], [1.5, 2.5]], dtype=np.float64 @@ -390,12 +396,12 @@ def test_rechunk_aux_factory_dependencies(): long_name="level_pressure", units="Pa", ) - sigma = iris.coords.AuxCoord( + sigma = AuxCoord( np.array([1.0, 0.9, 0.8], dtype=np.float64), long_name="sigma", units="1", ) - surface_air_pressure = iris.coords.AuxCoord( + surface_air_pressure = AuxCoord( np.arange(4).astype(np.float64).reshape(2, 2), long_name="surface_air_pressure", units="Pa", @@ -406,7 +412,7 @@ def test_rechunk_aux_factory_dependencies(): surface_air_pressure=surface_air_pressure, ) - cube = iris.cube.Cube( + cube = Cube( da.asarray( np.arange(3 * 2 * 2).astype(np.float32).reshape(3, 2, 2), chunks=(1, 2, 2), @@ -432,3 +438,244 @@ def test_rechunk_aux_factory_dependencies(): (2,), (2,), ) == cube.coord("air_pressure").core_points().chunks + + +def get_0d_time(): + """Get 0D time coordinate.""" + time = AuxCoord( + 15.0, + bounds=[0.0, 30.0], + standard_name="time", + units="days since 1850-01-01 00:00:00", + ) + return time + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_time(lazy): + """Test ``get_coord_weights`` for complex cube.""" + cube = _make_cube() + if lazy: + cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) + weights = get_coord_weights(cube, "time") + assert weights.shape == (2,) + if lazy: + assert isinstance(weights, da.Array) + assert weights.chunks == ((1, 1),) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [15.0, 30.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_time_broadcast(lazy): + """Test ``get_coord_weights`` for complex cube.""" + cube = _make_cube() + if lazy: + cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) + weights = get_coord_weights(cube, "time", broadcast=True) + assert weights.shape == (2, 1, 1, 3) + if lazy: + assert isinstance(weights, da.Array) + assert weights.chunks == ((1, 1), (1,), (1,), (3,)) + else: + assert isinstance(weights, np.ndarray) + expected_data = [[[[15.0, 15.0, 15.0]]], [[[30.0, 30.0, 30.0]]]] + np.testing.assert_allclose(weights, expected_data) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_plev(lazy): + """Test ``get_coord_weights`` for complex cube.""" + cube = _make_cube() + if lazy: + cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) + weights = get_coord_weights(cube, "air_pressure") + 
assert weights.shape == (1,) + if lazy: + assert isinstance(weights, da.Array) + assert weights.chunks == ((1,),) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [2.5]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_lat(lazy): + """Test ``get_coord_weights`` for complex cube.""" + cube = _make_cube() + if lazy: + cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) + weights = get_coord_weights(cube, "latitude") + assert weights.shape == (1,) + if lazy: + assert isinstance(weights, da.Array) + assert weights.chunks == ((1,),) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [1.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_lon(lazy): + """Test ``get_coord_weights`` for complex cube.""" + cube = _make_cube() + if lazy: + cube.data = cube.lazy_data().rechunk((1, 1, 1, 3)) + weights = get_coord_weights(cube, "longitude") + assert weights.shape == (3,) + if lazy: + assert isinstance(weights, da.Array) + assert weights.chunks == ((3,),) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [1.0, 1.0, 1.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_0d_time(lazy): + """Test ``get_coord_weights`` for 0D time coordinate.""" + time = get_0d_time() + cube = Cube(0.0, var_name="x", units="K", aux_coords_and_dims=[(time, ())]) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time") + assert weights.shape == (1,) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [30.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_0d_time_1d_lon(lazy): + """Test ``get_coord_weights`` for 0D time and 1D longitude coordinate.""" + time = get_0d_time() + lons = get_lon_coord() + cube = Cube( + [0.0, 0.0, 0.0], + var_name="x", + units="K", + aux_coords_and_dims=[(time, ())], + dim_coords_and_dims=[(lons, 0)], + ) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time") + assert weights.shape == (1,) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [30.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_0d_time_1d_lon_broadcast(lazy): + """Test ``get_coord_weights`` for 0D time and 1D longitude coordinate.""" + time = get_0d_time() + lons = get_lon_coord() + cube = Cube( + [0.0, 0.0, 0.0], + var_name="x", + units="K", + aux_coords_and_dims=[(time, ())], + dim_coords_and_dims=[(lons, 0)], + ) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time", broadcast=True) + assert weights.shape == (3,) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [30.0, 30.0, 30.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_1d_time(lazy): + """Test ``get_coord_weights`` for 1D time coordinate.""" + time = get_1d_time() + cube = Cube( + [0.0, 1.0], var_name="x", units="K", dim_coords_and_dims=[(time, 0)] + ) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time") + assert weights.shape == (2,) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [15.0, 30.0]) + + 
+@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_1d_time_1d_lon(lazy): + """Test ``get_coord_weights`` for 1D time and 1D longitude coordinate.""" + time = get_1d_time() + lons = get_lon_coord() + cube = Cube( + [[0.0, 0.0, 0.0], [1.0, 1.0, 1.0]], + var_name="x", + units="K", + dim_coords_and_dims=[(time, 0), (lons, 1)], + ) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time") + assert weights.shape == (2,) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [15.0, 30.0]) + + +@pytest.mark.parametrize("lazy", [True, False]) +def test_get_coord_weights_2d_time(lazy): + """Test ``get_coord_weights`` for 2D time coordinate.""" + time = AuxCoord( + [[20.0, 45.0]], + standard_name="time", + bounds=[[[15.0, 30.0], [30.0, 60.0]]], + units=Unit("days since 1950-01-01", calendar="gregorian"), + ) + cube = Cube( + [[0.0, 1.0]], + var_name="x", + units="K", + aux_coords_and_dims=[(time, (0, 1))], + ) + if lazy: + cube.data = cube.lazy_data() + weights = get_coord_weights(cube, "time") + assert weights.shape == (1, 2) + if lazy: + assert isinstance(weights, da.Array) + else: + assert isinstance(weights, np.ndarray) + np.testing.assert_allclose(weights, [[15.0, 30.0]]) + + +def test_get_coord_weights_no_bounds_fail(): + """Test ``get_coord_weights``.""" + cube = _make_cube() + cube.coord("time").bounds = None + msg = r"Cannot calculate weights for coordinate 'time' without bounds" + with pytest.raises(ValueError, match=msg): + get_coord_weights(cube, "time") + + +def test_get_coord_weights_triangular_bound_fail(): + """Test ``get_coord_weights``.""" + cube = _make_cube() + cube.coord("latitude").bounds = [[1.0, 2.0, 3.0]] + msg = ( + r"Cannot calculate weights for coordinate 'latitude' with 3 bounds " + r"per point, expected 2 bounds per point" + ) + with pytest.raises(ValueError, match=msg): + get_coord_weights(cube, "latitude")