diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml new file mode 100644 index 00000000..fd267d5f --- /dev/null +++ b/.github/workflows/black.yml @@ -0,0 +1,37 @@ +name: black + +on: + pull_request: + paths: + - '**.py' + +defaults: + run: + shell: bash + +jobs: + black: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + ref: ${{ github.head_ref }} + - name: Setup Python + uses: actions/setup-python@v4 + with: + python-version: 3.x + - name: Install black + run: | + python -m pip install --upgrade pip + pip install black + - name: Version + run: | + python --version + black --version + - name: Run black + run: | + black xbout + black docs + - uses: stefanzweifel/git-auto-commit-action@v4 + with: + commit_message: "Apply black formatting" diff --git a/.github/workflows/master.yml b/.github/workflows/master.yml index e53792da..8eef5a3b 100644 --- a/.github/workflows/master.yml +++ b/.github/workflows/master.yml @@ -20,94 +20,51 @@ jobs: if: always() strategy: matrix: - python-version: [3.8, 3.9, '3.10', 3.x] - pip-packages: - - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0" + python-version: ["3.8", "3.9", "3.10", "3.11", "3.x"] fail-fast: false steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . + - name: Install package + run: | + pip install -e .[calc,tests] - name: Test with pytest run: | - pip install pytest - pytest -v --long --cov - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 + pytest -vv --long --cov - pytest-min-versions: + # test with oldest supported version of packages + pytest-oldest-xarray: runs-on: ubuntu-latest if: always() strategy: matrix: - python-version: [3.8] - pip-packages: - - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==6.1.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0. + python-version: ["3.8"] fail-fast: false steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . 
- - name: Test with pytest + pip install xarray~=0.18.0 pandas~=1.4.0 + - name: Install package run: | - pip install pytest - pytest -v --long --cov - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - - - # Need to tidy up the things that flake8 finds before we activate this - #flake8: - - # runs-on: ubuntu-latest - # if: always() - - # steps: - # - uses: actions/checkout@v2 - # - name: Set up Python - # uses: actions/setup-python@v1 - # - name: Install dependencies - # run: | - # python -m pip install --upgrade pip - # - name: Lint with flake8 - # run: | - # pip install flake8 - # flake8 - - - black: - - runs-on: ubuntu-latest - if: always() - - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v1 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - - name: Check formatting with black + pip install -e .[tests] + - name: Test with pytest run: | - pip install black - black --check . + pytest -vv --long --cov diff --git a/.github/workflows/pythonpackage.yml b/.github/workflows/pythonpackage.yml index 68ede8d9..e323fa86 100644 --- a/.github/workflows/pythonpackage.yml +++ b/.github/workflows/pythonpackage.yml @@ -20,94 +20,51 @@ jobs: if: always() strategy: matrix: - python-version: [3.8, 3.9, '3.10', 3.x] - pip-packages: - - "setuptools pip pytest pytest-cov coverage codecov boutdata xarray numpy>=1.16.0" + python-version: ["3.8", "3.9", "3.10", "3.x"] fail-fast: false steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . + - name: Install package + run: | + pip install -e .[calc,tests] - name: Test with pytest run: | - pip install pytest - pytest -v --long --cov - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 + pytest -vv --long --cov - pytest-min-versions: + # test with oldest supported version of packages + pytest-oldest-xarray: runs-on: ubuntu-latest if: always() strategy: matrix: - python-version: [3.8] - pip-packages: - - "setuptools pip pytest pytest-cov coverage codecov boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0. + python-version: ["3.8"] fail-fast: false steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . 
- - name: Test with pytest + pip install xarray~=0.18.0 pandas~=1.4.0 + - name: Install package run: | - pip install pytest - pytest -v --long --cov - - name: Upload coverage to Codecov - uses: codecov/codecov-action@v1 - - - # Need to tidy up the things that flake8 finds before we activate this - #flake8: - - # runs-on: ubuntu-latest - # if: always() - - # steps: - # - uses: actions/checkout@v2 - # - name: Set up Python - # uses: actions/setup-python@v1 - # - name: Install dependencies - # run: | - # python -m pip install --upgrade pip - # - name: Lint with flake8 - # run: | - # pip install flake8 - # flake8 - - - black: - - runs-on: ubuntu-latest - if: always() - - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v1 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - - name: Check formatting with black + pip install -e .[tests] + - name: Test with pytest run: | - pip install black - black --check . + pytest -vv --long --cov diff --git a/.github/workflows/pythonpublish.yml b/.github/workflows/pythonpublish.yml index 07ce4bdd..3572a23c 100644 --- a/.github/workflows/pythonpublish.yml +++ b/.github/workflows/pythonpublish.yml @@ -5,125 +5,26 @@ name: Upload Python Package on: release: - types: [created] + types: [published] jobs: - pytest: - - runs-on: ubuntu-latest - if: always() - strategy: - matrix: - python-version: [3.8, 3.9, '3.10', 3.x] - pip-packages: - - "setuptools pip pytest boutdata xarray numpy>=1.16.0" - fail-fast: true - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev - python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . - - name: Test with pytest - run: | - pip install pytest - pytest -v --long - - - pytest-min-versions: - - runs-on: ubuntu-latest - if: always() - strategy: - matrix: - python-version: [3.8] - pip-packages: - - "setuptools pip pytest boutdata==0.1.4 xarray==0.18.0 dask==2.10.0 numpy==1.18.0 natsort==5.5.0 matplotlib==3.1.1 animatplot==0.4.2 netcdf4==1.4.2 Pillow==7.2.0" # test with oldest supported version of packages. Note, using numpy==1.18.0 as a workaround because numpy==1.17.0 is not supported on Python-3.7, even though we should currently support numpy==1.17.0. - fail-fast: true - - steps: - - uses: actions/checkout@v2 - - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v1 - with: - python-version: ${{ matrix.python-version }} - - name: Install dependencies - run: | - sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev - python -m pip install --upgrade pip - pip install --upgrade ${{ matrix.pip-packages }} - pip install -e . 
- - name: Test with pytest - run: | - pip install pytest - pytest -v --long - - - # Need to tidy up the things that flake8 finds before we activate this - #flake8: - - # runs-on: ubuntu-latest - # if: always() - - # steps: - # - uses: actions/checkout@v2 - # - name: Set up Python - # uses: actions/setup-python@v1 - # - name: Install dependencies - # run: | - # python -m pip install --upgrade pip - # - name: Lint with flake8 - # run: | - # pip install flake8 - # flake8 - - - black: - - runs-on: ubuntu-latest - if: always() - - steps: - - uses: actions/checkout@v2 - - name: Set up Python - uses: actions/setup-python@v1 - - name: Install dependencies - run: | - python -m pip install --upgrade pip - - name: Check formatting with black - run: | - pip install black - black --check . - deploy: - + name: Upload release to PyPI runs-on: ubuntu-latest - needs: [pytest, pytest-min-versions, black] - + environment: release + permissions: + id-token: write steps: - - uses: actions/checkout@v2 + - uses: actions/checkout@v3 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v4 with: python-version: '3.x' - name: Install dependencies run: | sudo apt-get update && sudo apt-get install libhdf5-dev libnetcdf-dev - python -m pip install --upgrade pip - pip install --upgrade setuptools wheel twine - pip install -e . - - name: Build and publish - env: - TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} - TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} - run: | - git fetch --tags --unshallow - python setup.py sdist bdist_wheel - twine upload dist/* + python -m pip install --upgrade pip build + - name: Build package + run: python -m build --sdist --wheel + - name: Publish package distributions to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.github/workflows/ruff.yaml b/.github/workflows/ruff.yaml new file mode 100644 index 00000000..5c26f5dd --- /dev/null +++ b/.github/workflows/ruff.yaml @@ -0,0 +1,15 @@ +name: Linting +on: [pull_request] + +jobs: + dependency-review: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v4 + with: + python-version: '3.x' + - name: Install ruff + run: pip install ruff + - name: Run ruff + run: ruff xbout diff --git a/.gitignore b/.gitignore index 907b66d0..c79ebe7d 100644 --- a/.gitignore +++ b/.gitignore @@ -68,6 +68,7 @@ instance/ # Sphinx documentation docs/_build/ +docs/generated # PyBuilder target/ diff --git a/.readthedocs.yml b/.readthedocs.yml index f2f8fd70..a8743fec 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,8 +1,28 @@ +# Read the Docs configuration file for Sphinx projects +# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details + +# Required +version: 2 + +# Set the OS, Python version and other tools you might need build: - image: latest + os: ubuntu-22.04 + tools: + python: "3.11" + +# Build documentation in the "docs/" directory with Sphinx +sphinx: + configuration: docs/conf.py + +# Optionally build your docs in additional formats such as PDF and ePub +# formats: +# - pdf +# - epub +# See https://docs.readthedocs.io/en/stable/guides/reproducible-builds.html python: - version: 3.8 - pip_install: true - extra_requirements: - - docs + install: + - method: pip + path: . 
+ extra_requirements: + - docs diff --git a/README.md b/README.md index ec283403..a0ffa766 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,8 @@ # xBOUT [![Build Status](https://github.com/boutproject/xBOUT/workflows/master/badge.svg)](https://github.com/boutproject/xBOUT/actions) -[![codecov](https://codecov.io/gh/boutproject/xBOUT/branch/master/graph/badge.svg)](https://codecov.io/gh/boutproject/xBOUT) [![Documentation Status](https://readthedocs.org/projects/xbout/badge/?version=latest)](https://xbout.readthedocs.io/en/latest/?badge=latest) [![DOI](https://zenodo.org/badge/160846663.svg)](https://zenodo.org/badge/latestdoi/160846663) +[![Documentation Status](https://readthedocs.org/projects/xbout/badge/?version=latest)](https://xbout.readthedocs.io/en/latest/?badge=latest) +[![DOI](https://zenodo.org/badge/160846663.svg)](https://zenodo.org/badge/latestdoi/160846663) Documentation: https://xbout.readthedocs.io diff --git a/docs/_templates/custom-class-template.rst b/docs/_templates/custom-class-template.rst new file mode 100644 index 00000000..4a2889ec --- /dev/null +++ b/docs/_templates/custom-class-template.rst @@ -0,0 +1,32 @@ +{{ fullname | escape | underline}} + +.. currentmodule:: {{ module }} + +.. autoclass:: {{ objname }} + :members: + :undoc-members: + :show-inheritance: + + {% block methods %} + .. automethod:: __init__ + + {% if methods %} + .. rubric:: {{ _('Methods') }} + + .. autosummary:: + {% for item in methods %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Attributes') }} + + .. autosummary:: + {% for item in attributes %} + ~{{ name }}.{{ item }} + {%- endfor %} + {% endif %} + {% endblock %} diff --git a/docs/_templates/custom-module-template.rst b/docs/_templates/custom-module-template.rst new file mode 100644 index 00000000..f46f9f63 --- /dev/null +++ b/docs/_templates/custom-module-template.rst @@ -0,0 +1,66 @@ +{{ fullname | escape | underline}} + +.. automodule:: {{ fullname }} + + {% block attributes %} + {% if attributes %} + .. rubric:: {{ _('Module Attributes') }} + + .. autosummary:: + :toctree: + {% for item in attributes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block functions %} + {% if functions %} + .. rubric:: {{ _('Functions') }} + + .. autosummary:: + :toctree: + {% for item in functions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block classes %} + {% if classes %} + .. rubric:: {{ _('Classes') }} + + .. autosummary:: + :toctree: + :template: custom-class-template.rst + {% for item in classes %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + + {% block exceptions %} + {% if exceptions %} + .. rubric:: {{ _('Exceptions') }} + + .. autosummary:: + :toctree: + {% for item in exceptions %} + {{ item }} + {%- endfor %} + {% endif %} + {% endblock %} + +{% block modules %} +{% if modules %} +.. rubric:: Modules + +.. autosummary:: + :toctree: + :template: custom-module-template.rst + :recursive: +{% for item in modules %} + {{ item }} +{%- endfor %} +{% endif %} +{% endblock %} diff --git a/docs/accessor_methods.rst b/docs/accessor_methods.rst index 6191c4f8..6c9fea07 100644 --- a/docs/accessor_methods.rst +++ b/docs/accessor_methods.rst @@ -1,8 +1,8 @@ BoutDataset Accessor Methods ============================ -xBOUT defines a set of accessor_ methods on the loaded Datasets and -DataArrays, which are called by `ds.bout.method`. 
+xBOUT defines a set of accessor_ methods on the loaded `xarray.Dataset` and +`xarray.DataArray`, which are called by ``ds.bout.``. This is where BOUT-specific data manipulation, analysis and plotting functionality is stored, for example:: @@ -17,4 +17,7 @@ or:: ds.bout.create_restarts(savepath='.', nxpe=4, nype=4) -.. _accessor: https://xarray.pydata.org/en/stable/internals.html#extending-xarray +See `BoutDatasetAccessor` and `BoutDataArrayAccessor` for details on +the available methods. + +.. _accessor: https://docs.xarray.dev/en/stable/internals/extending-xarray.html diff --git a/docs/api.rst b/docs/api.rst new file mode 100644 index 00000000..ed1cca92 --- /dev/null +++ b/docs/api.rst @@ -0,0 +1,10 @@ +=============== + API Reference +=============== + +.. autosummary:: + :toctree: generated + :template: custom-module-template.rst + :recursive: + + xbout diff --git a/docs/conf.py b/docs/conf.py index 22fdd208..25548c6e 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -12,27 +12,19 @@ # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # -import os -# import sys -# sys.path.insert(0, os.path.abspath('.')) - -# Are we running on readthedocs? -on_rtd = os.environ.get("READTHEDOCS") == "True" +from importlib.metadata import version as get_version # -- Project information ----------------------------------------------------- -import xbout - project = "xBOUT" -copyright = "2018, Tom Nicholas" +copyright = "2018-2023, Tom Nicholas, BOUT++ team" author = "Tom Nicholas" -# The short X.Y version -version = xbout.__version__ # The full version, including alpha/beta/rc tags -release = xbout.__version__ - +release = get_version("xbout") +# The short X.Y version +version = ".".join(release.split(".")[:2]) # -- General configuration --------------------------------------------------- @@ -45,12 +37,26 @@ # ones. extensions = [ "sphinx.ext.autodoc", + "sphinx.ext.autosummary", "sphinx.ext.coverage", "sphinx.ext.mathjax", "sphinx.ext.viewcode", "sphinx.ext.napoleon", + "sphinx.ext.intersphinx", + "sphinx_autodoc_typehints", ] +autosummary_generate = True +autodoc_default_options = {"ignore-module-all": True} +autodoc_typehints = "description" +autodoc_class_signature = "mixed" + +# Numpy-doc config +napoleon_google_docstring = False +napoleon_numpy_docstring = True +napoleon_attr_annotations = True +napoleon_preprocess_types = True + # Add any paths that contain templates here, relative to this directory. templates_path = ["_templates"] @@ -68,7 +74,7 @@ # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = None +language = "en" # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. @@ -85,21 +91,26 @@ # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # -if on_rtd: - html_theme = "default" -else: - html_theme = "alabaster" +html_theme = "sphinx_book_theme" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. 
# -# html_theme_options = {} +html_theme_options = { + "repository_url": "https://github.com/boutproject/xBOUT", + "repository_branch": "master", + "path_to_docs": "docs", + "use_edit_page_button": True, + "use_repository_button": True, + "use_issues_button": True, + "home_page_in_toc": False, +} # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = ["images"] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -169,3 +180,11 @@ # -- Extension configuration ------------------------------------------------- + +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), + "numpy": ("https://numpy.org/doc/stable", None), + "scipy": ("https://docs.scipy.org/doc/scipy", None), + "xarray": ("https://docs.xarray.dev/en/latest", None), + "mayavi": ("https://docs.enthought.com/mayavi/mayavi/", None), +} diff --git a/docs/extending_xbout.rst b/docs/extending_xbout.rst index d158dbc1..81059129 100644 --- a/docs/extending_xbout.rst +++ b/docs/extending_xbout.rst @@ -3,12 +3,12 @@ Extending xBOUT for your BOUT module The accessor classes `BoutDatasetAccessor` and `BoutDataArrayAccessor` are intended to be subclassed for specific BOUT++ modules. The -subclass accessor will then inherit all the `.bout` accessor methods, +subclass accessor will then inherit all the ``.bout`` accessor methods, but you will also be able to override these and define your own methods within your new accessor. -For example to add an extra method specific to the `STORM` BOUT++ +For example to add an extra method specific to the ``STORM`` BOUT++ module:: from xarray import register_dataset_accessor @@ -26,7 +26,7 @@ module:: >>> ds.storm.special_method() Do something only STORM users would want to do -There is included an example of a `StormDataset` which contains all +There is included an example of a ``StormDataset`` which contains all the data from a STORM_ simulation, as well as extra calculated quantities which are specific to the STORM module. diff --git a/docs/index.rst b/docs/index.rst index f96b3e7d..cd4dbe68 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -22,24 +22,29 @@ suggestions. loading_data accessor_methods extending_xbout - xbout + +.. toctree:: + :maxdepth: 2 + :caption: Reference + + API Reference Installation ------------ -With `pip`: +With ``pip``: .. code-block:: bash pip install --user xbout -or `conda`: +or ``conda``: .. code-block:: bash conda install -c conda-forge xbout -You can run the tests by running `pytest --pyargs xbout`. +You can run the tests by running ``pytest --pyargs xbout``. 
xBOUT will install the required python packages when you run one of the above install commands if they are not already installed on your diff --git a/docs/loading_data.rst b/docs/loading_data.rst index 07e8f99b..f20eff3d 100644 --- a/docs/loading_data.rst +++ b/docs/loading_data.rst @@ -10,6 +10,6 @@ The data from a BOUT++ run can be loaded with just:: bd = open_boutdataset('./run_dir*/BOUT.dmp.*.nc', inputfilepath='./BOUT.inp') `open_boutdataset` returns an instance of an `xarray.Dataset` which -contains BOUT-specific information in the `attrs`, so represents a +contains BOUT-specific information in the ``attrs``, so represents a general structure for storing all of the output of a simulation, including data, input options and (soon) grid data. diff --git a/docs/xbout.rst b/docs/xbout.rst deleted file mode 100644 index 8bc4f536..00000000 --- a/docs/xbout.rst +++ /dev/null @@ -1,42 +0,0 @@ -API reference -============= - -xbout.boutdataarray module --------------------------- - -.. automodule:: xbout.boutdataarray - :members: - :undoc-members: - :show-inheritance: - -xbout.boutdataset module ------------------------- - -.. automodule:: xbout.boutdataset - :members: - :undoc-members: - :show-inheritance: - -xbout.load module ------------------ - -.. automodule:: xbout.load - :members: - :undoc-members: - :show-inheritance: - -xbout.plotting.animate module ------------------------------ - -.. automodule:: xbout.plotting.animate - :members: - :undoc-members: - :show-inheritance: - -xbout.plotting.utils module ---------------------------- - -.. automodule:: xbout.plotting.utils - :members: - :undoc-members: - :show-inheritance: diff --git a/examples/boutmodules/tests/test_stormdataset.py b/examples/boutmodules/tests/test_stormdataset.py index 622de7fb..8f51f15b 100644 --- a/examples/boutmodules/tests/test_stormdataset.py +++ b/examples/boutmodules/tests/test_stormdataset.py @@ -2,7 +2,6 @@ from xarray import concat from xbout import open_boutdataset -from xbout.tests.test_load import bout_xyt_example_files @pytest.mark.skip diff --git a/pyproject.toml b/pyproject.toml index 97869c5b..39e926e8 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,83 @@ [build-system] -requires = ["setuptools >= 42", "wheel", "setuptools_scm[toml]>=3.4", "setuptools_scm_git_archive"] +requires = [ + "setuptools >= 65", + "setuptools_scm[toml] >= 7", + "wheel >= 0.29.0", +] build-backend = "setuptools.build_meta" +[project] +name = "xbout" +description = "Collect data from BOUT++ runs in python using xarray" +readme = "README.md" +authors = [ + {name = "Thomas Nicholas", email = "thomas.nicholas@york.ac.uk"}, + {name = "John Omotani"}, +] +license = {file = "LICENSE"} +dynamic = ["version"] +keywords = ["gyrokinetics", "analysis", "plasma", "research"] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Science/Research", + "Intended Audience :: Education", + "Intended Audience :: Developers", + "License :: OSI Approved :: Apache Software License", + "Natural Language :: English", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Topic :: Scientific/Engineering :: Visualization", +] +requires-python = ">=3.8" +dependencies = [ + "xarray>=0.18.0,<2022.9.0", + "boutdata>=0.1.4", + "dask[array]>=2.10.0", + "gelidum>=0.5.3", + "natsort>=5.5.0", + "matplotlib>=3.1.1,!=3.3.0,!=3.3.1,!=3.3.2", + "animatplot-ng>=0.4.2", + 
"netcdf4>=1.4.0", + "Pillow>=6.1.0", +] + +[project.optional-dependencies] +calc = [ + "numpy >= 1.18.0", + "scipy >= 1.3.0", + "dask >= 2.2.0", + "statsmodels >= 0.10.1", + "xrft", + "xhistogram", +] +docs = [ + "sphinx >= 5.3", + "sphinx-book-theme >= 0.4.0rc1", + "sphinx_autodoc_typehints >= 1.19", +] +3d_plot = [ + "k3d >= 2.8.0", + "mayavi >= 4.7.2", + "wand", +] +tests = [ + "pytest >= 3.3.0", + "pytest-cov", +] + +[project.urls] +Source = "https://github.com/boutproject/xBOUT" +Tracker = "https://github.com/boutproject/xBOUT/issues" +Documentation = "https://xbout.readthedocs.io/en/latest/" + [tool.setuptools_scm] write_to = "xbout/_version.py" + +[tool.setuptools] +packages = ["xbout"] + +[tool.ruff] +ignore = ["E501"] diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 9464f8b8..00000000 --- a/setup.cfg +++ /dev/null @@ -1,62 +0,0 @@ -[metadata] -name = xbout -url = https://github.com/boutproject/xBOUT -author = Thomas Nicholas -author_email = thomas.nicholas@york.ac.uk -description = Collect data from BOUT++ runs in python using xarray -license = Apache -python_requires = >=3.8 -long_description = file: README.md -long_description_content_type = text/markdown -classifiers = - Development Status :: 3 - Alpha - Intended Audience :: Science/Research - Intended Audience :: Education - Intended Audience :: Developers - License :: OSI Approved :: Apache Software License - Natural Language :: English - Operating System :: POSIX :: Linux - Programming Language :: Python :: 3.8 - Programming Language :: Python :: 3.9 - Programming Language :: Python :: 3.10 - Programming Language :: Python :: 3.11 - Topic :: Scientific/Engineering :: Visualization - -[options] -setup_requires = - setuptools>=42 - setuptools_scm[toml]>=3.4 - setuptools_scm_git_archive -install_requires = - xarray>=0.18.0,<2022.9.0 - boutdata>=0.1.4 - dask[array]>=2.10.0 - gelidum>=0.5.3 - natsort>=5.5.0 - matplotlib>=3.1.1,!=3.3.0,!=3.3.1,!=3.3.2 - animatplot>=0.4.2 - netcdf4>=1.4.0 - Pillow>=6.1.0 - importlib-metadata; python_version < "3.8" -include_package_data = True -packages = find: - -[options.extras_require] -calc = - numpy >= 1.13.0 - scipy >= 1.3.0 - dask >= 2.2.0 - statsmodels >= 0.10.1 - xrft - xhistogram -docs = sphinx >= 1.4 -3d_plot = - k3d >= 2.8.0 - mayavi >= 4.7.2 - wand - -[build_sphinx] -project = $metadata.name -version = $metadata.version -release = $metadata.version -source-dir = docs diff --git a/setup.py b/setup.py deleted file mode 100644 index c7ba67b2..00000000 --- a/setup.py +++ /dev/null @@ -1,6 +0,0 @@ -#!/usr/bin/env python3 - -from setuptools import setup - -if __name__ == "__main__": - setup(use_scm_version=True) diff --git a/xbout/__init__.py b/xbout/__init__.py index 917706b8..9511b93d 100644 --- a/xbout/__init__.py +++ b/xbout/__init__.py @@ -11,13 +11,25 @@ from .fastoutput import open_fastoutput -try: - from importlib.metadata import version, PackageNotFoundError -except ModuleNotFoundError: - from importlib_metadata import version, PackageNotFoundError +from importlib.metadata import version, PackageNotFoundError + try: __version__ = version(__name__) except PackageNotFoundError: from setuptools_scm import get_version __version__ = get_version(root="..", relative_to=__file__) + +__all__ = [ + "open_boutdataset", + "collect", + "geometries", + "register_geometry", + "REGISTERED_GEOMETRIES", + "BoutDataArrayAccessor", + "BoutDatasetAccessor", + "animate_pcolormesh", + "animate_poloidal", + "plot_separatrix", + "open_fastoutput", +] diff --git a/xbout/boutdataarray.py 
b/xbout/boutdataarray.py index 7e57cf1e..b018cd91 100644 --- a/xbout/boutdataarray.py +++ b/xbout/boutdataarray.py @@ -29,7 +29,7 @@ class BoutDataArrayAccessor: selecting a variable from a BOUT++ dataset. These BOUT-specific methods and attributes are accessed via the bout - accessor, e.g. `da.bout.options` returns a `BoutOptionsFile` instance. + accessor, e.g. ``da.bout.options`` returns a `BoutOptionsFile` instance. """ def __init__(self, da): @@ -306,7 +306,6 @@ def interpolate_parallel( # Select a particular 'region' and interpolate to higher parallel resolution da = self.data region = da.bout._regions[region] - tcoord = da.metadata["bout_tdim"] xcoord = da.metadata["bout_xdim"] ycoord = da.metadata["bout_ydim"] zcoord = da.metadata["bout_zdim"] @@ -385,9 +384,6 @@ def interpolate_parallel( return da - def add_cartesian_coordinates(self): - return _add_cartesian_coordinates(self.data) - def add_cartesian_coordinates(self): """ Add Cartesian (X,Y,Z) coordinates. @@ -698,6 +694,7 @@ def animate2D( If set to false, do not create the animation, just return the block or blocks axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -705,6 +702,7 @@ def animate2D( coordinate (which must have the dimension given by 'key'), or a 1d numpy array, dask array or DataArray whose length matches the length of the dimension given by 'key'. + Only affects time coordinate for plots with poloidal_plot=True. fps : int, optional Frames per second of resulting gif @@ -815,6 +813,7 @@ def animate1D( Dimension over which to animate, defaults to the time dimension axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -834,7 +833,7 @@ def animate1D( A matplotlib axes instance to plot to. If None, create a new figure and axes, and plot to that aspect : str or None, optional - Argument to set_aspect(), defaults to "auto" + Argument to ``ax.set_aspect()``, defaults to "auto" kwargs : dict, optional Additional keyword arguments are passed on to the plotting function (animatplot.blocks.Line). @@ -842,7 +841,7 @@ def animate1D( Returns ------- animation or block - If animate==True, returns an animatplot.Animation object, otherwise + If ``animate==True``, returns an animatplot.Animation object, otherwise returns an animatplot.blocks.Line instance. """ @@ -947,7 +946,6 @@ def interpolate_from_unstructured( method = "linear" # extend input coordinates to cover all dims, so we can flatten them - input_coords = [] for coord in kwargs: data = da[coord] missing_dims = tuple(set(dims) - set(data.dims)) @@ -1025,7 +1023,7 @@ def interpolate_to_cartesian(self, *args, **kwargs): This method is intended to be used to produce data for visualisation, which normally does not require double-precision values, so by default the data is - converted to `np.float32`. Pass `use_float32=False` to retain the original + converted to `numpy.float32`. Pass ``use_float32=False`` to retain the original precision. Parameters @@ -1037,10 +1035,10 @@ def interpolate_to_cartesian(self, *args, **kwargs): nZ : int (default 100) Number of grid points in the Z direction use_float32 : bool (default True) - Downgrade precision to `np.float32`? 
+ Downgrade precision to `numpy.float32`? fill_value : float (default np.nan) Value to use for points outside the interpolation domain (passed to - `scipy.RegularGridInterpolator`) + `scipy.interpolate.RegularGridInterpolator`) See Also -------- diff --git a/xbout/boutdataset.py b/xbout/boutdataset.py index 6540fe78..cc308fcb 100644 --- a/xbout/boutdataset.py +++ b/xbout/boutdataset.py @@ -41,7 +41,7 @@ class BoutDatasetAccessor: `open_boutdataset()`. These BOUT-specific methods and attributes are accessed via the bout - accessor, e.g. `ds.bout.options` returns a `BoutOptionsFile` instance. + accessor, e.g. ``ds.bout.options`` returns a `BoutOptionsFile` instance. """ def __init__(self, ds): @@ -266,9 +266,6 @@ def find_with_dims(first_var, dims): return ds - def add_cartesian_coordinates(self): - return _add_cartesian_coordinates(self.data) - def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False): """ Integrate using the midpoint rule for spatial dimensions, and trapezium rule for @@ -277,18 +274,18 @@ def integrate_midpoints(self, variable, *, dims=None, cumulative_t=False): The quantity being integrated is assumed to be a scalar variable. When doing a 1d integral in the 'y' dimension, the integral is calculated as a - poloidal integral if the variable is on the standard grid ('direction_y' + poloidal integral if the variable is on the standard grid (``direction_y`` attribute is "Standard"), or as a parallel-to-B integral if the variable is on - the field-aligned grid ('direction_y' attribute is "Aligned"). + the field-aligned grid (``direction_y`` attribute is "Aligned"). When doing a 2d integral over 'x' and 'y' dimensions, the integral will be over - poloidal cross-sections if the variable is not field-aligned (direction_y == - "Standard") and over field-aligned surfaces if the variable is field-aligned - (direction_ == "Aligned"). The latter seems unlikely to be useful as the + poloidal cross-sections if the variable is not field-aligned (``direction_y == + "Standard"``) and over field-aligned surfaces if the variable is field-aligned + (``direction_ == "Aligned"``). The latter seems unlikely to be useful as the surfaces depend on the arbitrary origin used for zShift. Is a method of BoutDataset accessor rather than of BoutDataArray so we can use - other variables like `J`, `g11`, `g_22` for the integration. + other variables like :math:`J`, :math:`g^{11}`, :math:`g_{22}` for the integration. Note the xarray.DataArray.integrate() method uses the trapezium rule, which is not consistent with the way BOUT++ defines grid spacings as cell widths. Also, @@ -557,7 +554,7 @@ def interpolate_to_cartesian( This method is intended to be used to produce data for visualisation, which normally does not require double-precision values, so by default the data is - converted to `np.float32`. Pass `use_float32=False` to retain the original + converted to `numpy.float32`. Pass ``use_float32=False`` to retain the original precision. Parameters @@ -569,10 +566,10 @@ def interpolate_to_cartesian( nZ : int (default 100) Number of grid points in the Z direction use_float32 : bool (default True) - Downgrade precision to `np.float32`? + Downgrade precision to `numpy.float32`? 
fill_value : float (default np.nan) Value to use for points outside the interpolation domain (passed to - `scipy.RegularGridInterpolator`) + `scipy.interpolate.RegularGridInterpolator`) See Also -------- @@ -619,7 +616,6 @@ def interpolate_to_cartesian( from scipy.interpolate import ( RegularGridInterpolator, - griddata, ) # Create Cylindrical coordinates for intermediate grid @@ -681,9 +677,6 @@ def interp_single_time(da): # just use da.min().item() here (to get a scalar value instead of a # zero-size array) because .item() doesn't work for dask arrays (yet!). - datamin = float_type(da.min().values) - datamax = float_type(da.max().values) - if tdim in da.dims: data_cartesian = np.zeros((nt, nX, nY, nZ), dtype=float_type) for tind in range(nt): @@ -813,10 +806,10 @@ def save( Examples -------- - If `separate_vars=True`, then multiple files will be created. These can - all be opened and merged in one go using a call of the form: + If ``separate_vars=True``, then multiple files will be created. These can + all be opened and merged in one go using a call of the form:: - ds = xr.open_mfdataset('boutdata_*.nc', combine='nested', concat_dim=None) + ds = xr.open_mfdataset('boutdata_*.nc', combine='nested', concat_dim=None) """ if variables is None: @@ -976,10 +969,10 @@ def to_restart( tind : int, default -1 Time-index of the slice to write to the restart files. Note, when creating restart files from 'dump' files it is recommended to open the Dataset using - the full time range and use the `tind` argument here, rather than selecting - a time point manually, so that the calculation of `hist_hi` in the output - can be correct (which requires knowing the existing value of `hist_hi` - (output step count at the end of the simulation), `tind` and the total + the full time range and use the ``tind`` argument here, rather than selecting + a time point manually, so that the calculation of ``hist_hi`` in the output + can be correct (which requires knowing the existing value of ``hist_hi`` + (output step count at the end of the simulation), ``tind`` and the total number of time points in the current output data). prefix : str, default "BOUT.restart" Prefix to use for names of restart files @@ -1061,6 +1054,7 @@ def animate_list( grid coordinates, per variable if sequence is given axis_coords : None, str, dict or list of None, str or dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -1068,6 +1062,7 @@ def animate_list( coordinate (which must have the dimension given by 'key'), or a 1d numpy array, dask array or DataArray whose length matches the length of the dimension given by 'key'. + Only affects time coordinate for plots with poloidal_plot=True. If a list is passed, it must have the same length as 'variables' and gives the axis_coords setting for each plot individually. 
diff --git a/xbout/calc/__init__.py b/xbout/calc/__init__.py index 8333be7a..912a423e 100644 --- a/xbout/calc/__init__.py +++ b/xbout/calc/__init__.py @@ -1 +1,3 @@ from .turbulence import rms + +__all__ = ["rms"] diff --git a/xbout/geometries.py b/xbout/geometries.py index 1cc6d727..7a7f8ad7 100644 --- a/xbout/geometries.py +++ b/xbout/geometries.py @@ -4,7 +4,7 @@ import xarray as xr import numpy as np -from .region import Region, _create_regions_toroidal, _create_single_region +from .region import _create_regions_toroidal, _create_single_region from .utils import ( _add_attrs_to_var, _set_attrs_on_all_vars, @@ -156,7 +156,6 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): _add_attrs_to_var(updated_ds, xcoord) if ycoord not in updated_ds.coords: - ny = updated_ds.dims[ycoord] # dy should always be constant in x, so it is safe to convert to a 1d # coordinate. [The y-coordinate has to be a 1d coordinate that labels x-z # slices of the grid (similarly x-coordinate is 1d coordinate that labels y-z @@ -206,7 +205,7 @@ def apply_geometry(ds, geometry_name, *, coordinates=None, grid=None): if bout_v5: if not np.all(updated_ds["dz"].min() == updated_ds["dz"].max()): raise ValueError( - f"Spacing is not constant. Cannot create z coordinate" + "Spacing is not constant. Cannot create z coordinate" ) dz = updated_ds["dz"].min() diff --git a/xbout/load.py b/xbout/load.py index 56b53e22..bd36497e 100644 --- a/xbout/load.py +++ b/xbout/load.py @@ -91,9 +91,9 @@ def open_boutdataset( file. Can also load from a grid file or from restart files. Note that when reloading a Dataset that was saved by xBOUT, the state of the saved - Dataset is restored, and the values of `keep_xboundaries`, `keep_yboundaries`, and - `run_name` are ignored. `geometry` is treated specially, and can be passed when - reloading a Dataset (along with `gridfilepath` if needed). + Dataset is restored, and the values of ``keep_xboundaries``, ``keep_yboundaries``, and + ``run_name`` are ignored. ``geometry`` is treated specially, and can be passed when + reloading a Dataset (along with ``gridfilepath`` if needed). Troubleshooting --------------- @@ -101,14 +101,14 @@ def open_boutdataset( some variables may have conflicts (e.g. a source term was changed between some of the restarts, but the source term is saved as time-independent, without a t-dimension). In this case one workaround is to pass a list of variable names to the - keyword argument `drop_vars` to ignore the variables with conflicts, e.g. if `"S1"` - and `"S2"` have conflicts - ``` - ds = open_boutdataset("data*/boutdata.nc", drop_variables=["S1", "S2"]) - ``` - will open a Dataset which is missing `"S1"` and `"S2"`.\ - [`drop_variables` is an argument of `xarray.open_dataset()` that is passed down - through `kwargs`.] + keyword argument ``drop_vars`` to ignore the variables with conflicts, e.g. if ``"S1"`` + and ``"S2"`` have conflicts:: + + ds = open_boutdataset("data*/boutdata.nc", drop_variables=["S1", "S2"]) + + will open a Dataset which is missing ``"S1"`` and ``"S2"`` + (``drop_variables`` is an argument of `xarray.open_dataset` that is passed down + through ``kwargs``.) Parameters ---------- @@ -131,11 +131,12 @@ def open_boutdataset( If not specified then will attempt to read it from the file attrs. If still not found then a warning will be thrown, which can be - suppressed by passing `info`=False. + suppressed by passing ``info=False``. To define a new type of geometry you need to use the - `register_geometry` decorator. 
You are encouraged to do this for your - own BOUT++ physics module, to apply relevant normalisations. + `register_geometry` decorator. You are encouraged to do + this for your own BOUT++ physics module, to apply relevant + normalisations. gridfilepath : str, optional The path to a grid file, containing any variables needed to apply the geometry specified by the 'geometry' option, which are not contained in the dump files. @@ -163,8 +164,8 @@ def open_boutdataset( is_restart : bool, optional Restart files require some special handling (e.g. working around variables that are not present in restart files). By default, this special handling is enabled - if the files do not have a time dimension and `restart` is present in the file - name in `datapath`. This option can be set to True or False to explicitly enable + if the files do not have a time dimension and ``restart`` is present in the file + name in ``datapath``. This option can be set to True or False to explicitly enable or disable the restart file handling. kwargs : optional Keyword arguments are passed down to `xarray.open_mfdataset`, which in @@ -899,21 +900,79 @@ def _arrange_for_concatenation(filepaths, nxpe=1, nype=1): nprocs = nxpe * nype n_runs = int(len(filepaths) / nprocs) - if len(filepaths) % nprocs != 0: - raise ValueError( - "Each run directory does not contain an equal number " - "of output files. If the parallelization scheme of " - "your simulation changed partway-through, then please " - "load each directory separately and concatenate them " - "along the time dimension with xarray.concat()." - ) + runids = [] + + def getrunid(fp): + if _is_path(fp): + try: + with xr.open_dataset(fp) as tmp: + return tmp.get("run_id", None) + except FileNotFoundError: + return None + return fp.get("run_id", None) + + for fp in filepaths: + thisrunid = getrunid(fp) + if thisrunid is None: + runids = None + break + runids.append(thisrunid) + if not runids: + if len(filepaths) < nprocs: + if len(filepaths) == 1: + raise ValueError( + "A parallel simulation was loaded, but only a single " + "file was loaded. Please ensure to pass in all files " + "by specifying e.g. `BOUT.dmp.*.nc` rather than " + "`BOUT.dmp.0.nc`." + ) + raise ValueError( + f"A parallel simulation was loaded, but only {len(filepaths)} " + "files were loaded. Please ensure to pass in all files " + "by specifying e.g. `BOUT.dmp.*.nc`" + ) + if len(filepaths) % nprocs != 0: + raise ValueError( + "Each run directory does not contain an equal number " + "of output files. If the parallelization scheme of " + "your simulation changed partway-through, then please " + "load each directory separately and concatenate them " + "along the time dimension with xarray.concat()." + ) + # Create list of lists of filepaths, so that xarray knows how they should + # be concatenated by xarray.open_mfdataset() + paths = iter(filepaths) + paths_grid = [ + [[next(paths) for x in range(nxpe)] for y in range(nype)] + for t in range(n_runs) + ] + + else: + paths_sorted = [] + lastid = None + for path, gid in zip(filepaths, runids): + if lastid != gid: + lastid = gid + paths_sorted.append([]) + paths_sorted[-1].append(path) + paths_grid = [] + for paths in paths_sorted: + if len(paths) != nprocs: + with xr.open_dataset(paths[0]) as tmp: + if tmp["PE_XIND"] != 0 or tmp["PE_YIND"] != 0: + # The first file is missing.
+ warn( + f"Ignoring {len(paths)} files as the first seems to be missing: {paths}" + ) + continue + assert tmp["NXPE"] == nxpe + assert tmp["NYPE"] == nype + raise ValueError( + f"Something is wrong. We expected {nprocs} files but found {len(paths)} files." + ) + paths = iter(paths) - # Create list of lists of filepaths, so that xarray knows how they should - # be concatenated by xarray.open_mfdataset() - paths = iter(filepaths) - paths_grid = [ - [[next(paths) for x in range(nxpe)] for y in range(nype)] for t in range(n_runs) - ] + paths_grid.append([[next(paths) for x in range(nxpe)] for y in range(nype)]) # Dimensions along which no concatenation is needed are still present as # single-element lists, so need to concatenation along dim=None for those diff --git a/xbout/plotting/animate.py b/xbout/plotting/animate.py index 96a7289a..68536a97 100644 --- a/xbout/plotting/animate.py +++ b/xbout/plotting/animate.py @@ -138,6 +138,7 @@ def animate_poloidal( Colors to use for the plot axis_coords : None, str, dict Coordinates to use for axis labelling. Only affects time coordinate. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -342,6 +343,7 @@ def animate_pcolormesh( If set to false, do not create the animation, just return the block axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis @@ -543,6 +545,7 @@ def animate_line( If set to false, do not create the animation, just return the block axis_coords : None, str, dict Coordinates to use for axis labelling. + - None: Use the dimension coordinate for each axis, if it exists. - "index": Use the integer index values. - dict: keys are dimension names, values set axis_coords for each axis diff --git a/xbout/plotting/plotfuncs.py b/xbout/plotting/plotfuncs.py index 9cf371d2..e115d434 100644 --- a/xbout/plotting/plotfuncs.py +++ b/xbout/plotting/plotfuncs.py @@ -13,7 +13,6 @@ _decompose_regions, _is_core_only, _k3d_plot_isel, - _make_structured_triangulation, plot_separatrices, plot_targets, ) @@ -48,7 +47,7 @@ def plot_regions(da, ax=None, **kwargs): da_regions = _decompose_regions(da) colored_regions = [ - xr.full_like(da_region, fill_value=num / len(regions)) + xr.full_like(da_region, fill_value=num / len(da_regions)) for num, da_region in enumerate(da_regions.values()) ] @@ -102,7 +101,7 @@ def plot2d_wrapper( """ Make a 2D plot using an xarray method, taking into account branch cuts (X-points). - Wraps `xarray.plot` methods, so automatically adds labels. + Wraps `xarray.DataArray.plot` methods, so automatically adds labels. Parameters ---------- @@ -311,11 +310,11 @@ def plot2d_wrapper( for x, y in zip(x_regions, y_regions): if ( - not da.metadata["bout_xdim"] in x.dims - and not da.metadata["bout_ydim"] in x.dims + da.metadata["bout_xdim"] not in x.dims + and da.metadata["bout_ydim"] not in x.dims ) or ( - not da.metadata["bout_xdim"] in y.dims - and not da.metadata["bout_ydim"] in y.dims + da.metadata["bout_xdim"] not in y.dims + and da.metadata["bout_ydim"] not in y.dims ): # Small regions around X-point do not have segments in x- or y-directions, # so skip @@ -433,15 +432,16 @@ def plot3d( mayavi_figure : mayavi.core.scene.Scene, default None Existing Mayavi figure to add this plot to. 
mayavi_figure_args : dict, default None - Arguments to use when creating a new Mayavi figure. Ignored if `mayavi_figure` + Arguments to use when creating a new Mayavi figure. Ignored if ``mayavi_figure`` is passed. mayavi_view : (float, float, float), default None - If set, arguments are passed to mlab.view() to set the view when engine="mayavi" + If set, arguments are passed to `mayavi.mlab.view` to set the + view when ``engine="mayavi"`` vmin, vmax : float vmin and vmax are treated specially. If a float is passed, then it is used for vmin/vmax. If the arguments are not passed, then the minimum and maximum of the data are used. For an animation, to get minimum and/or maximum calculated - separately for each frame, pass `vmin=None` and/or `vmax=None` explicitly. + separately for each frame, pass ``vmin=None`` and/or ``vmax=None`` explicitly. **kwargs Extra keyword arguments are passed to the backend plotting function """ @@ -544,7 +544,6 @@ def plot3d( from scipy.interpolate import ( RegularGridInterpolator, griddata, - LinearNDInterpolator, ) print("start interpolating") @@ -800,7 +799,7 @@ def create_or_update_plot(plot_objects=None, tind=None, this_save_as=None): # First create png files in the temporary directory temp_path = Path(d) temp_save_as = str(temp_path.joinpath("temp.png")) - print(f"tind=0") + print("tind=0") plot_objects = create_or_update_plot( tind=0, this_save_as=temp_save_as ) diff --git a/xbout/plotting/utils.py b/xbout/plotting/utils.py index a99ef257..db70b74a 100644 --- a/xbout/plotting/utils.py +++ b/xbout/plotting/utils.py @@ -129,7 +129,6 @@ def plot_targets(da, ax, *, x="R", y="Z", hatching=True): da0 = list(da_regions.values())[0] - xcoord = da0.metadata["bout_xdim"] ycoord = da0.metadata["bout_ydim"] if da0.metadata["keep_yboundaries"]: @@ -253,5 +252,5 @@ def _k3d_plot_isel(da_region, isel, vmin, vmax, **kwargs): indices, attribute=data, color_range=[vmin, vmax], - **kwargs + **kwargs, ) diff --git a/xbout/region.py b/xbout/region.py index b6653824..d30d4047 100644 --- a/xbout/region.py +++ b/xbout/region.py @@ -1,4 +1,4 @@ -from copy import copy, deepcopy +from copy import deepcopy import numpy as np import xarray as xr from .utils import _set_attrs_on_all_vars @@ -145,9 +145,6 @@ def __repr__(self): result += f"\t{attr}\t{val}\n" return result - def __eq__(self, other): - return vars(self) == vars(other) - def get_slices(self, mxg=0, myg=0): """ Return x- and y-dimension slices that select this region from the global @@ -1229,11 +1226,6 @@ def _create_single_region(ds, periodic_y=True): nx = ds.metadata["nx"] ny = ds.metadata["ny"] - mxg = ds.metadata["MXG"] - myg = ds.metadata["MYG"] - # keep_yboundaries is 1 if there are y-boundaries and 0 if there are not - ybndry = ds.metadata["keep_yboundaries"] * myg - connection = "all" if periodic_y else None regions = { diff --git a/xbout/tests/conftest.py b/xbout/tests/conftest.py new file mode 100644 index 00000000..99b3b016 --- /dev/null +++ b/xbout/tests/conftest.py @@ -0,0 +1,189 @@ +import operator +import re +from copy import deepcopy +from functools import reduce +from pathlib import Path + +import numpy as np +import pytest +from xarray import DataArray + +from xbout.tests.utils_for_tests import ( + _get_kwargs, + create_bout_ds_list, + create_bout_grid_ds, +) + + +@pytest.fixture(scope="session") +def bout_xyt_example_files(tmp_path_factory): + return _bout_xyt_example_files + + +_bout_xyt_example_files_cache = {} + + +def _bout_xyt_example_files( + tmp_path_factory, + prefix="BOUT.dmp", + lengths=(6, 2, 
4, 7), + nxpe=4, + nype=2, + nt=1, + guards=None, + syn_data_type="random", + grid=None, + squashed=False, + topology="core", + write_to_disk=False, + bout_v5=False, + metric_3D=False, +): + """ + Mocks up a set of BOUT-like Datasets + + Either returns a list of Datasets (if write_to_disk=False) + or writes Datasets to netCDF files and returns the temporary test directory + containing them, deleting the temporary directory once that test is done (if + write_to_disk=True). + """ + call_args = _get_kwargs(ignore="tmp_path_factory") + + try: + # Has been called with the same signature before, just return the cached result + return deepcopy(_bout_xyt_example_files_cache[call_args]) + except KeyError: + pass + + if guards is None: + guards = {} + + if squashed: + # create a single data-file, but alter the 'nxpe' and 'nype' variables, as if the + # file had been created by combining a set of BOUT.dmp.*.nc files + this_lengths = ( + lengths[0], + lengths[1] * nxpe, + lengths[2] * nype, + lengths[3], + ) + ds_list, file_list = create_bout_ds_list( + prefix=prefix, + lengths=this_lengths, + nxpe=1, + nype=1, + nt=nt, + guards=guards, + topology=topology, + syn_data_type=syn_data_type, + squashed=True, + bout_v5=bout_v5, + metric_3D=metric_3D, + ) + ds_list[0]["nxpe"] = nxpe + ds_list[0]["nype"] = nype + else: + ds_list, file_list = create_bout_ds_list( + prefix=prefix, + lengths=lengths, + nxpe=nxpe, + nype=nype, + nt=nt, + guards=guards, + topology=topology, + syn_data_type=syn_data_type, + bout_v5=bout_v5, + metric_3D=metric_3D, + ) + + if grid is not None: + xsize = lengths[1] * nxpe + ysize = lengths[2] * nype + grid_ds = create_bout_grid_ds( + xsize=xsize, + ysize=ysize, + guards=guards, + topology=topology, + ny_inner=3 * lengths[2], + ) + + if not write_to_disk: + if grid is None: + _bout_xyt_example_files_cache[call_args] = ds_list + return deepcopy(ds_list) + else: + _bout_xyt_example_files_cache[call_args] = ds_list, grid_ds + return deepcopy((ds_list, grid_ds)) + raise ValueError("tmp_path_factory required when write_to_disk=True") + + save_dir = tmp_path_factory.mktemp("data") + + for ds, file_name in zip(ds_list, file_list): + ds.to_netcdf(save_dir.joinpath(file_name)) + + if grid is not None: + grid_ds.to_netcdf(save_dir.joinpath(grid + ".nc")) + + # Return a glob-like path to all files created, which has all file numbers replaced + # with a single asterisk + path = str(save_dir.joinpath(file_list[-1])) + + count = 1 + if nt > 1: + count += 1 + # We have to reverse the path before limiting the number of numbers replaced so that the + # tests don't get confused by pytest's persistent temporary directories (which are also designated + # by different numbers) + glob_pattern = Path((re.sub(r"\d+", "*", path[::-1], count=count))[::-1]) + _bout_xyt_example_files_cache[call_args] = glob_pattern + return glob_pattern + + +fci_shape = (2, 2, 3, 4) +fci_guards = (2, 2, 0) + + +@pytest.fixture +def create_example_grid_file_fci(tmp_path_factory): + """ + Mocks up an FCI-like netCDF file, and returns the temporary test + directory containing it. + + Deletes the temporary directory once that test is done.
+ """ + + # Create grid dataset + shape = (fci_shape[1] + 2 * fci_guards[0], *fci_shape[2:]) + arr = np.arange(reduce(operator.mul, shape, 1)).reshape(shape) + grid = DataArray(data=arr, name="R", dims=["x", "y", "z"]).to_dataset() + grid["Z"] = DataArray(np.random.random(shape), dims=["x", "y", "z"]) + grid["dy"] = DataArray(np.ones(shape), dims=["x", "y", "z"]) + grid = grid.set_coords(["dy"]) + + # Create temporary directory + save_dir = tmp_path_factory.mktemp("griddata") + + # Save + filepath = save_dir.joinpath("fci.nc") + grid.to_netcdf(filepath, engine="netcdf4") + + return filepath + + +@pytest.fixture +def create_example_files_fci(tmp_path_factory): + return _bout_xyt_example_files( + tmp_path_factory, + lengths=fci_shape, + nxpe=1, + nype=1, + # nt=1, + guards={a: b for a, b in zip("xyz", fci_guards)}, + syn_data_type="random", + grid=None, + squashed=False, + # topology="core", + write_to_disk=False, + bout_v5=True, + metric_3D=True, + ) diff --git a/xbout/tests/test_against_collect.py b/xbout/tests/test_against_collect.py index 81d8b33c..5f22cf97 100644 --- a/xbout/tests/test_against_collect.py +++ b/xbout/tests/test_against_collect.py @@ -3,7 +3,7 @@ from xbout import open_boutdataset, collect as new_collect -from .test_load import create_bout_ds, create_bout_ds_list, METADATA_VARS +from .utils_for_tests import create_bout_ds, create_bout_ds_list, METADATA_VARS boutdata = pytest.importorskip("boutdata", reason="boutdata is not available") old_collect = boutdata.collect diff --git a/xbout/tests/test_animate.py b/xbout/tests/test_animate.py index 13a37bdc..fd565c55 100644 --- a/xbout/tests/test_animate.py +++ b/xbout/tests/test_animate.py @@ -1,12 +1,10 @@ import pytest -import matplotlib from matplotlib import pyplot as plt import numpy as np import xarray as xr from xbout import open_boutdataset -from xbout.boutdataarray import BoutDataArrayAccessor -from .test_load import create_bout_ds_list +from .utils_for_tests import create_bout_ds_list from animatplot.blocks import Pcolormesh, Line @@ -270,7 +268,7 @@ def test_animate_list_not_enough_nrowsncols(self, create_test_file): save_dir, ds = create_test_file with pytest.raises(ValueError): - animation = ds.isel(z=3).bout.animate_list( + ds.isel(z=3).bout.animate_list( ["n", ds["T"].isel(x=2), ds["n"].isel(y=1, z=2)], nrows=2, ncols=1 ) diff --git a/xbout/tests/test_boutdataarray.py b/xbout/tests/test_boutdataarray.py index 280aa309..62c909b2 100644 --- a/xbout/tests/test_boutdataarray.py +++ b/xbout/tests/test_boutdataarray.py @@ -3,13 +3,11 @@ import dask.array import numpy as np import numpy.testing as npt -from pathlib import Path import xarray as xr import xarray.testing as xrt from xarray.core.utils import dict_equiv -from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout import open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _1d_coord_from_spacing @@ -1147,6 +1145,6 @@ def test_derivatives_doublenull(self, bout_xyt_example_files): keep_yboundaries=True, ) - test_ddx = ds["n"].bout.ddx() - test_ddy = ds["n"].bout.ddy() - test_ddz = ds["n"].bout.ddz() + ds["n"].bout.ddx() + ds["n"].bout.ddy() + ds["n"].bout.ddz() diff --git a/xbout/tests/test_boutdataset.py b/xbout/tests/test_boutdataset.py index 6ca5259a..50448c68 100644 --- a/xbout/tests/test_boutdataset.py +++ b/xbout/tests/test_boutdataset.py @@ -1,7 +1,7 @@ import pytest import numpy.testing as npt -from xarray import Dataset, DataArray, concat, open_dataset, open_mfdataset +from xarray import Dataset, 
DataArray, concat, open_dataset import xarray.testing as xrt import dask.array @@ -9,14 +9,13 @@ from pathlib import Path from scipy.integrate import quad_vec -from xbout.tests.test_load import bout_xyt_example_files, create_bout_ds from xbout.tests.test_region import ( params_guards, params_guards_values, params_boundaries, params_boundaries_values, ) -from xbout import BoutDatasetAccessor, open_boutdataset +from xbout import open_boutdataset from xbout.geometries import apply_geometry from xbout.utils import _set_attrs_on_all_vars, _1d_coord_from_spacing from xbout.tests.utils_for_tests import set_geometry_from_input_file @@ -683,7 +682,6 @@ def test_interpolate_parallel( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry for var in ["n", "T"]: v = ds[var] @@ -1172,7 +1170,6 @@ def test_interpolate_parallel_limiter( # Get high parallel resolution version of ds, and check that ds = ds.bout.interpolate_parallel(["n", "T"]) mxg = 2 - myg = 2 ixs1 = ds.metadata["ixseps1"] for var in ["n", "T"]: v = ds[var] @@ -1930,6 +1927,8 @@ def test_load_options(self): @pytest.mark.skip def test_load_options_in_dataset(self, bout_xyt_example_files): + from boutdata.data import BoutOptions + dataset_list = bout_xyt_example_files(None, nxpe=1, nype=1, nt=1) ds = open_boutdataset( datapath=dataset_list, inputfilepath=EXAMPLE_OPTIONS_FILE_PATH @@ -2427,9 +2426,6 @@ def test_to_restart_change_npe_doublenull_expect_fail( with pytest.warns(UserWarning): ds = open_boutdataset(datapath=path) - nx = ds.metadata["nx"] - ny = ds.metadata["ny"] - # Save it to a netCDF file savepath = tmp_path_factory.mktemp( "test_to_restart_change_npe_doublenull_expect_fail" diff --git a/xbout/tests/test_fastoutput.py b/xbout/tests/test_fastoutput.py index 398c0760..3e3bc208 100644 --- a/xbout/tests/test_fastoutput.py +++ b/xbout/tests/test_fastoutput.py @@ -1,10 +1,8 @@ import numpy as np import random -import pytest from xarray import DataArray, Dataset -import xarray.testing as xrt from xbout.fastoutput import open_fastoutput @@ -92,7 +90,7 @@ def make_fastoutput_set(path, n): def make_fastoutput(path, i, locations): """ Create a single file in the format produced by - [`FastOutput`](https://github.com/johnomotani/BoutFastOutput) + `FastOutput `_ Parameters ---------- diff --git a/xbout/tests/test_grid.py b/xbout/tests/test_grid.py index 72da6bc8..007733b4 100644 --- a/xbout/tests/test_grid.py +++ b/xbout/tests/test_grid.py @@ -1,4 +1,4 @@ -from xarray import Dataset, DataArray, open_dataset, merge +from xarray import DataArray, open_dataset, merge from xarray.testing import assert_equal import pytest import numpy as np diff --git a/xbout/tests/test_load.py b/xbout/tests/test_load.py index 6621695c..d8766236 100644 --- a/xbout/tests/test_load.py +++ b/xbout/tests/test_load.py @@ -1,16 +1,8 @@ -from collections import namedtuple -from copy import deepcopy -import inspect from pathlib import Path -import re -from functools import reduce -import operator import pytest -import numpy as np - -from xarray import DataArray, Dataset, concat +from xarray import concat from xarray.tests.test_dataset import create_test_data import xarray.testing as xrt @@ -28,7 +20,7 @@ _BOUT_TIME_DEPENDENT_META_VARS, ) from xbout.utils import _separate_metadata -from xbout.tests.utils_for_tests import _get_kwargs +from xbout.tests.utils_for_tests import create_bout_ds, METADATA_VARS def test_check_extensions(tmp_path): @@ -124,7 
+116,7 @@ def test_no_files(self, tmp_path): with pytest.raises(IOError): path = files_dir.joinpath("run*/example.*.nc") - actual_filepaths = _expand_filepaths(path) + _expand_filepaths(path) @pytest.fixture() @@ -264,554 +256,6 @@ def test_arrange_along_xyt(self, create_filepaths): assert actual_concat_dims == ["t", "y", "x"] -@pytest.fixture(scope="session") -def bout_xyt_example_files(tmp_path_factory): - return _bout_xyt_example_files - - -_bout_xyt_example_files_cache = {} - - -def _bout_xyt_example_files( - tmp_path_factory, - prefix="BOUT.dmp", - lengths=(6, 2, 4, 7), - nxpe=4, - nype=2, - nt=1, - guards=None, - syn_data_type="random", - grid=None, - squashed=False, - topology="core", - write_to_disk=False, - bout_v5=False, - metric_3D=False, -): - """ - Mocks up a set of BOUT-like Datasets - - Either returns list of Datasets (if write_to_disk=False) - or writes Datasets to netCDF files and returns the temporary test directory - containing them, deleting the temporary directory once that test is done (if - write_to_disk=True). - """ - call_args = _get_kwargs(ignore="tmp_path_factory") - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_bout_xyt_example_files_cache[call_args]) - except KeyError: - pass - - if guards is None: - guards = {} - - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - - if squashed: - # create a single data-file, but alter the 'nxpe' and 'nype' variables, as if the - # file had been created by combining a set of BOUT.dmp.*.nc files - this_lengths = ( - lengths[0], - lengths[1] * nxpe, - lengths[2] * nype, - lengths[3], - ) - ds_list, file_list = create_bout_ds_list( - prefix=prefix, - lengths=this_lengths, - nxpe=1, - nype=1, - nt=nt, - guards=guards, - topology=topology, - syn_data_type=syn_data_type, - squashed=True, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - ds_list[0]["nxpe"] = nxpe - ds_list[0]["nype"] = nype - else: - ds_list, file_list = create_bout_ds_list( - prefix=prefix, - lengths=lengths, - nxpe=nxpe, - nype=nype, - nt=nt, - guards=guards, - topology=topology, - syn_data_type=syn_data_type, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - - if grid is not None: - xsize = lengths[1] * nxpe - ysize = lengths[2] * nype - grid_ds = create_bout_grid_ds( - xsize=xsize, - ysize=ysize, - guards=guards, - topology=topology, - ny_inner=3 * lengths[2], - ) - - if not write_to_disk: - if grid is None: - _bout_xyt_example_files_cache[call_args] = ds_list - return deepcopy(ds_list) - else: - _bout_xyt_example_files_cache[call_args] = ds_list, grid_ds - return deepcopy((ds_list, grid_ds)) - raise ValueError("tmp_path_factory required when write_to_disk=True") - - save_dir = tmp_path_factory.mktemp("data") - - for ds, file_name in zip(ds_list, file_list): - ds.to_netcdf(save_dir.joinpath(file_name)) - - if grid is not None: - grid_ds.to_netcdf(save_dir.joinpath(grid + ".nc")) - - # Return a glob-like path to all files created, which has all file numbers replaced - # with a single asterix - path = str(save_dir.joinpath(file_list[-1])) - - count = 1 - if nt > 1: - count += 1 - # We have to reverse the path before limiting the number of numbers replaced so that the - # tests don't get confused by pytest's persistent temporary directories (which are also designated - # by different numbers) - glob_pattern = Path((re.sub(r"\d+", "*", path[::-1], count=count))[::-1]) - _bout_xyt_example_files_cache[call_args] = glob_pattern - return glob_pattern - - -def create_bout_ds_list( - prefix, - lengths=(6, 2, 4, 
7), - nxpe=4, - nype=2, - nt=1, - guards={}, - topology="core", - syn_data_type="random", - squashed=False, - bout_v5=False, - metric_3D=False, -): - """ - Mocks up a set of BOUT-like datasets. - - Structured as though they were produced by a x-y parallelised run with multiple restarts. - """ - - if nt != 1: - raise ValueError( - "nt > 1 means the time dimension is split over several " - + "directories. This is not implemented yet." - ) - - file_list = [] - ds_list = [] - for j in range(nype): - for i in range(nxpe): - num = i + nxpe * j - filename = prefix + "." + str(num) + ".nc" - file_list.append(filename) - - # Include guard cells - upper_bndry_cells = {dim: guards.get(dim) for dim in guards.keys()} - lower_bndry_cells = {dim: guards.get(dim) for dim in guards.keys()} - - ds = create_bout_ds( - syn_data_type=syn_data_type, - num=num, - lengths=lengths, - nxpe=nxpe, - nype=nype, - xproc=i, - yproc=j, - guards=guards, - topology=topology, - squashed=squashed, - bout_v5=bout_v5, - metric_3D=metric_3D, - ) - ds_list.append(ds) - - return ds_list, file_list - - -_create_bout_ds_cache = {} - - -def create_bout_ds( - syn_data_type="random", - lengths=(6, 2, 4, 7), - num=0, - nxpe=1, - nype=1, - xproc=0, - yproc=0, - guards=None, - topology="core", - squashed=False, - bout_v5=False, - metric_3D=False, -): - call_args = _get_kwargs() - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_create_bout_ds_cache[call_args]) - except KeyError: - pass - - if metric_3D and not bout_v5: - raise ValueError("3D metric requires BOUT++ v5") - - if guards is None: - guards = {} - - # Set the shape of the data in this dataset - t_length, x_length, y_length, z_length = lengths - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - x_length += 2 * mxg - y_length += 2 * myg - - # calculate global nx, ny and nz - nx = nxpe * lengths[1] + 2 * mxg - ny = nype * lengths[2] - nz = 1 * lengths[3] - - if squashed and "double-null" in topology: - ny = ny + 2 * myg - y_length = y_length + 2 * myg - shape = (t_length, x_length, y_length, z_length) - - # Fill with some kind of synthetic data - if syn_data_type == "random": - # Each dataset contains unique random noise - np.random.seed(seed=num) - data = np.random.randn(*shape) - elif syn_data_type == "linear": - # Variables increase linearly across entire domain - data = DataArray(-np.ones(shape), dims=("t", "x", "y", "z")) - - t_array = DataArray( - (nx - 2 * mxg) * ny * nz * np.arange(t_length, dtype=float), dims="t" - ) - x_array = DataArray( - ny * nz * (xproc * lengths[1] + np.arange(lengths[1], dtype=float)), - dims="x", - ) - y_array = DataArray( - nz * (yproc * lengths[2] + np.arange(lengths[2], dtype=float)), dims="y" - ) - z_array = DataArray(np.arange(z_length, dtype=float), dims="z") - - data[:, mxg : x_length - mxg, myg : lengths[2] + myg, :] = ( - t_array + x_array + y_array + z_array - ) - elif syn_data_type == "stepped": - # Each dataset contains a different number depending on the filename - data = np.ones(shape) * num - elif isinstance(syn_data_type, int): - data = np.ones(shape) * syn_data_type - else: - raise ValueError("Not a recognised choice of type of synthetic bout data.") - - T = DataArray(data, dims=["t", "x", "y", "z"]) - n = DataArray(data, dims=["t", "x", "y", "z"]) - S = DataArray(data[:, :, :, 0], dims=["t", "x", "y"]) - for v in [n, T]: - v.attrs["direction_y"] = "Standard" - v.attrs["cell_location"] = "CELL_CENTRE" - v.attrs["direction_z"] = "Standard" - for v in [S]: - 
v.attrs["direction_y"] = "Standard" - v.attrs["cell_location"] = "CELL_CENTRE" - v.attrs["direction_z"] = "Average" - ds = Dataset({"n": n, "T": T, "S": S}) - - # BOUT_VERSION needed to deal with backwards incompatible changes: - # - # - v3 and earlier: number of points in z is MZ-1 - # - v4 and later: number of points in z is MZ - # - v5 and later: metric components can be either 2D or 3D - # - v5 and later: dz changed to be a Field2D/3D - ds["BOUT_VERSION"] = 5.0 if bout_v5 else 4.3 - ds["use_metric_3d"] = int(metric_3D) - - # Include grid data - ds["NXPE"] = nxpe - ds["NYPE"] = nype - ds["NZPE"] = 1 - ds["PE_XIND"] = xproc - ds["PE_YIND"] = yproc - ds["MYPE"] = num - - ds["MXG"] = mxg - ds["MYG"] = myg - ds["MZG"] = 0 - ds["nx"] = nx - ds["ny"] = ny - ds["nz"] = nz - ds["MZ"] = 1 * lengths[3] - if squashed: - ds["MXSUB"] = lengths[1] // nxpe - ds["MYSUB"] = lengths[2] // nype - ds["MZSUB"] = lengths[3] - else: - ds["MXSUB"] = lengths[1] - ds["MYSUB"] = lengths[2] - ds["MZSUB"] = lengths[3] - - MYSUB = lengths[2] - - extra_boundary_points = 0 - - if topology == "core": - ds["ixseps1"] = nx - ds["ixseps2"] = nx - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "sol": - ds["ixseps1"] = 0 - ds["ixseps2"] = 0 - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "limiter": - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx - ds["jyseps1_1"] = -1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - 1 - ds["ny_inner"] = ny // 2 - elif topology == "xpoint": - if nype < 4 and not squashed: - raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 2 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = MYSUB - 1 - ds["jyseps1_2"] = ny - MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "single-null": - if nype < 3 and not squashed: - raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx - ds["jyseps1_1"] = MYSUB - 1 - ds["jyseps2_1"] = ny // 2 - 1 - ds["jyseps1_2"] = ny // 2 - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - ds["ny_inner"] = ny // 2 - elif topology == "connected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for connected-double-null topology: " - f"nype={nype}" - ) - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "lower-disconnected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for lower-disconnected-double-null " - f"topology: nype={nype}" - ) - ds["ixseps1"] = nx // 2 - ds["ixseps2"] = nx // 2 + 4 - if ds["ixseps2"] >= nx: - raise ValueError( - "Not enough points in the x-direction. 
ixseps2=" - f'{ds["ixseps2"]} > nx={nx}' - ) - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - elif topology == "upper-disconnected-double-null": - if nype < 6 and not squashed: - raise ValueError( - "Not enough processors for upper-disconnected-double-null " - f"topology: nype={nype}" - ) - ds["ixseps2"] = nx // 2 - ds["ixseps1"] = nx // 2 + 4 - if ds["ixseps2"] >= nx: - raise ValueError( - "Not enough points in the x-direction. ixseps2=" - f'{ds["ixseps2"]} > nx={nx}' - ) - ds["jyseps1_1"] = MYSUB - 1 - ny_inner = 3 * MYSUB - ds["ny_inner"] = ny_inner - ds["jyseps2_1"] = ny_inner - MYSUB - 1 - ds["jyseps1_2"] = ny_inner + MYSUB - 1 - ds["jyseps2_2"] = ny - MYSUB - 1 - else: - raise ValueError(f"Unrecognised topology={topology}") - - if metric_3D: - one = DataArray(np.ones((x_length, y_length, z_length)), dims=["x", "y", "z"]) - zero = DataArray(np.zeros((x_length, y_length, z_length)), dims=["x", "y", "z"]) - else: - one = DataArray(np.ones((x_length, y_length)), dims=["x", "y"]) - zero = DataArray(np.zeros((x_length, y_length)), dims=["x", "y"]) - - ds["zperiod"] = 1 - ds["ZMIN"] = 0.0 - ds["ZMAX"] = 1.0 - ds["g11"] = one - ds["g22"] = one - ds["g33"] = one - ds["g12"] = zero - ds["g13"] = zero - ds["g23"] = zero - ds["g_11"] = one - ds["g_22"] = one - ds["g_33"] = one - ds["g_12"] = zero - ds["g_13"] = zero - ds["g_23"] = zero - ds["G1"] = zero - ds["G2"] = zero - ds["G3"] = zero - ds["J"] = one - ds["Bxy"] = one - ds["zShift"] = zero - - ds["dx"] = 0.5 * one - ds["dy"] = 2.0 * one - if bout_v5: - ds["dz"] = 2.0 * one * np.pi / nz - else: - ds["dz"] = 2.0 * np.pi / nz - - ds["iteration"] = t_length - 1 - ds["hist_hi"] = t_length - 1 - ds["t_array"] = DataArray(np.arange(t_length, dtype=float) * 10.0, dims="t") - ds["tt"] = ds["t_array"][-1] - - # xarray adds this encoding when opening a file. Emulate here as it may be used to - # get the file number - ds.encoding["source"] = f"BOUT.dmp.{num}.nc" - - _create_bout_ds_cache[call_args] = ds - return deepcopy(ds) - - -_create_bout_grid_ds_cache = {} - - -def create_bout_grid_ds(xsize=2, ysize=4, guards={}, topology="core", ny_inner=0): - call_args = _get_kwargs() - - try: - # Has been called with the same signature before, just return the cached result - return deepcopy(_create_bout_grid_ds_cache[call_args]) - except KeyError: - pass - - # Set the shape of the data in this dataset - mxg = guards.get("x", 0) - myg = guards.get("y", 0) - xsize += 2 * mxg - ysize += 2 * myg - - # jyseps* from grid file only ever used to check topology when loading the grid file, - # so do not need to be consistent with the main dataset - jyseps2_1 = ysize // 2 - jyseps1_2 = jyseps2_1 - - if "double-null" in topology or "xpoint" in topology: - # Has upper target as well - ysize += 2 * myg - - # make different from jyseps2_1 so double-null toplogy is recognised - jyseps1_2 += 1 - - shape = (xsize, ysize) - - data = DataArray(np.ones(shape), dims=["x", "y"]) - - ds = Dataset( - { - "psixy": data, - "Rxy": data, - "Zxy": data, - "hthe": data, - "y_boundary_guards": myg, - "jyseps2_1": jyseps2_1, - "jyseps1_2": jyseps1_2, - "ny_inner": ny_inner, - "y_boundary_guards": myg, - } - ) - - _create_bout_grid_ds_cache[call_args] = ds - return deepcopy(ds) - - -# Note, MYPE, PE_XIND and PE_YIND not included, since they are different for each -# processor and so are dropped when loading datasets. 
-METADATA_VARS = [ - "BOUT_VERSION", - "NXPE", - "NYPE", - "NZPE", - "MXG", - "MYG", - "MZG", - "nx", - "ny", - "nz", - "MZ", - "MXSUB", - "MYSUB", - "MZSUB", - "hist_hi", - "iteration", - "ixseps1", - "ixseps2", - "jyseps1_1", - "jyseps1_2", - "jyseps2_1", - "jyseps2_2", - "ny_inner", - "tt", - "zperiod", - "ZMIN", - "ZMAX", - "use_metric_3d", -] - - class TestStripMetadata: def test_strip_metadata(self): original = create_bout_ds() @@ -1291,11 +735,13 @@ def test_infer_boundaries_2d_parallelization( """ Numbering scheme for nxpe=3, nype=4 - y 9 10 11 - ^ 6 7 8 - | 3 4 5 - | 0 1 2 - -----> x + .. code:: text + + y 9 10 11 + ^ 6 7 8 + | 3 4 5 + | 0 1 2 + -----> x """ ds = create_test_data(0) @@ -1320,11 +766,13 @@ def test_infer_boundaries_2d_parallelization_doublenull( """ Numbering scheme for nxpe=3, nype=4 - y 9 10 11 - ^ 6 7 8 - | 3 4 5 - | 0 1 2 - -----> x + .. code:: text + + y 9 10 11 + ^ 6 7 8 + | 3 4 5 + | 0 1 2 + -----> x """ ds = create_test_data(0) @@ -1351,11 +799,13 @@ def test_infer_boundaries_2d_parallelization_by_filenum( """ Numbering scheme for nxpe=3, nype=4 - y 9 10 11 - ^ 6 7 8 - | 3 4 5 - | 0 1 2 - -----> x + .. code:: text + + y 9 10 11 + ^ 6 7 8 + | 3 4 5 + | 0 1 2 + -----> x """ filenum = yproc * nxpe + xproc @@ -1381,11 +831,13 @@ def test_infer_boundaries_2d_parallelization_doublenull_by_filenum( """ Numbering scheme for nxpe=3, nype=4 - y 9 10 11 - ^ 6 7 8 - | 3 4 5 - | 0 1 2 - -----> x + .. code:: text + + y 9 10 11 + ^ 6 7 8 + | 3 4 5 + | 0 1 2 + -----> x """ filenum = yproc * nxpe + xproc @@ -1498,53 +950,3 @@ def test_trim_timing_info(self, is_restart): expected = create_test_data(0) xrt.assert_equal(ds, expected) - - -fci_shape = (2, 2, 3, 4) -fci_guards = (2, 2, 0) - - -@pytest.fixture -def create_example_grid_file_fci(tmp_path_factory): - """ - Mocks up a FCI-like netCDF file, and return the temporary test - directory containing them. - - Deletes the temporary directory once that test is done. 
- """ - - # Create grid dataset - shape = (fci_shape[1] + 2 * fci_guards[0], *fci_shape[2:]) - arr = np.arange(reduce(operator.mul, shape, 1)).reshape(shape) - grid = DataArray(data=arr, name="R", dims=["x", "y", "z"]).to_dataset() - grid["Z"] = DataArray(np.random.random(shape), dims=["x", "y", "z"]) - grid["dy"] = DataArray(np.ones(shape), dims=["x", "y", "z"]) - grid = grid.set_coords(["dy"]) - - # Create temporary directory - save_dir = tmp_path_factory.mktemp("griddata") - - # Save - filepath = save_dir.joinpath("fci.nc") - grid.to_netcdf(filepath, engine="netcdf4") - - return filepath - - -@pytest.fixture -def create_example_files_fci(tmp_path_factory): - return _bout_xyt_example_files( - tmp_path_factory, - lengths=fci_shape, - nxpe=1, - nype=1, - # nt=1, - guards={a: b for a, b in zip("xyz", fci_guards)}, - syn_data_type="random", - grid=None, - squashed=False, - # topology="core", - write_to_disk=False, - bout_v5=True, - metric_3D=True, - ) diff --git a/xbout/tests/test_plot.py b/xbout/tests/test_plot.py index 18685750..4042f43a 100644 --- a/xbout/tests/test_plot.py +++ b/xbout/tests/test_plot.py @@ -1,11 +1,9 @@ import pytest -from pathlib import Path from matplotlib import pyplot as plt import numpy as np from xbout import open_boutdataset -from xbout.tests.test_load import bout_xyt_example_files from xbout.tests.test_region import ( params_guards, params_guards_values, diff --git a/xbout/tests/test_region.py b/xbout/tests/test_region.py index 0e49e11b..8ca6dc58 100644 --- a/xbout/tests/test_region.py +++ b/xbout/tests/test_region.py @@ -1,11 +1,9 @@ import pytest -from pathlib import Path import numpy.testing as npt import xarray.testing as xrt -from xbout.tests.test_load import bout_xyt_example_files from xbout import open_boutdataset @@ -141,8 +139,6 @@ def test_region_limiter( keep_yboundaries=keep_yboundaries, ) - mxg = guards["x"] - if keep_xboundaries: ixs = ds.metadata["ixseps1"] else: @@ -239,7 +235,6 @@ def test_region_xpoint( jys1 = ds.metadata["jyseps1_1"] + ybndry ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys2 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -433,7 +428,6 @@ def test_region_singlenull( ybndry = 0 jys1 = ds.metadata["jyseps1_1"] + ybndry jys2 = ds.metadata["jyseps2_2"] + ybndry - ny = ds.metadata["ny"] + 2 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -606,7 +600,6 @@ def test_region_connecteddoublenull( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -928,7 +921,6 @@ def test_region_disconnecteddoublenull( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry n = ds["n"] n_noregions = n.copy(deep=True) @@ -1502,7 +1494,6 @@ def test_region_disconnecteddoublenull_get_one_guard( ny_inner = ds.metadata["ny_inner"] + 2 * ybndry jys12 = ds.metadata["jyseps1_2"] + 3 * ybndry jys22 = ds.metadata["jyseps2_2"] + 3 * ybndry - ny = ds.metadata["ny"] + 4 * ybndry if isinstance(with_guards, dict): xguards = with_guards.get("x", mxg) diff --git a/xbout/tests/utils_for_tests.py b/xbout/tests/utils_for_tests.py index bcdb1850..01f01159 100644 --- a/xbout/tests/utils_for_tests.py +++ b/xbout/tests/utils_for_tests.py @@ -1,8 +1,44 @@ +from copy import deepcopy from boutdata.data 
import BoutOptionsFile from gelidum import freeze import inspect import numpy as np from pathlib import Path +from xarray import DataArray, Dataset + + +# Note, MYPE, PE_XIND and PE_YIND not included, since they are different for each +# processor and so are dropped when loading datasets. +METADATA_VARS = [ + "BOUT_VERSION", + "NXPE", + "NYPE", + "NZPE", + "MXG", + "MYG", + "MZG", + "nx", + "ny", + "nz", + "MZ", + "MXSUB", + "MYSUB", + "MZSUB", + "hist_hi", + "iteration", + "ixseps1", + "ixseps2", + "jyseps1_1", + "jyseps1_2", + "jyseps2_1", + "jyseps2_2", + "ny_inner", + "tt", + "zperiod", + "ZMIN", + "ZMAX", + "use_metric_3d", +] def load_example_input(name, *, nx=None, ny=None, nz=None): @@ -139,3 +175,383 @@ def _get_kwargs(ignore=None): kwargs[key] = value return freeze(kwargs) + + +def create_bout_ds_list( + prefix, + lengths=(6, 2, 4, 7), + nxpe=4, + nype=2, + nt=1, + guards={}, + topology="core", + syn_data_type="random", + squashed=False, + bout_v5=False, + metric_3D=False, +): + """ + Mocks up a set of BOUT-like datasets. + + Structured as though they were produced by a x-y parallelised run with multiple restarts. + """ + + if nt != 1: + raise ValueError( + "nt > 1 means the time dimension is split over several " + + "directories. This is not implemented yet." + ) + + file_list = [] + ds_list = [] + for j in range(nype): + for i in range(nxpe): + num = i + nxpe * j + filename = prefix + "." + str(num) + ".nc" + file_list.append(filename) + + ds = create_bout_ds( + syn_data_type=syn_data_type, + num=num, + lengths=lengths, + nxpe=nxpe, + nype=nype, + xproc=i, + yproc=j, + guards=guards, + topology=topology, + squashed=squashed, + bout_v5=bout_v5, + metric_3D=metric_3D, + ) + ds_list.append(ds) + + return ds_list, file_list + + +_create_bout_ds_cache = {} + + +def create_bout_ds( + syn_data_type="random", + lengths=(6, 2, 4, 7), + num=0, + nxpe=1, + nype=1, + xproc=0, + yproc=0, + guards=None, + topology="core", + squashed=False, + bout_v5=False, + metric_3D=False, +): + call_args = _get_kwargs() + + try: + # Has been called with the same signature before, just return the cached result + return deepcopy(_create_bout_ds_cache[call_args]) + except KeyError: + pass + + if metric_3D and not bout_v5: + raise ValueError("3D metric requires BOUT++ v5") + + if guards is None: + guards = {} + + # Set the shape of the data in this dataset + t_length, x_length, y_length, z_length = lengths + mxg = guards.get("x", 0) + myg = guards.get("y", 0) + x_length += 2 * mxg + y_length += 2 * myg + + # calculate global nx, ny and nz + nx = nxpe * lengths[1] + 2 * mxg + ny = nype * lengths[2] + nz = 1 * lengths[3] + + if squashed and "double-null" in topology: + ny = ny + 2 * myg + y_length = y_length + 2 * myg + shape = (t_length, x_length, y_length, z_length) + + # Fill with some kind of synthetic data + if syn_data_type == "random": + # Each dataset contains unique random noise + np.random.seed(seed=num) + data = np.random.randn(*shape) + elif syn_data_type == "linear": + # Variables increase linearly across entire domain + data = DataArray(-np.ones(shape), dims=("t", "x", "y", "z")) + + t_array = DataArray( + (nx - 2 * mxg) * ny * nz * np.arange(t_length, dtype=float), dims="t" + ) + x_array = DataArray( + ny * nz * (xproc * lengths[1] + np.arange(lengths[1], dtype=float)), + dims="x", + ) + y_array = DataArray( + nz * (yproc * lengths[2] + np.arange(lengths[2], dtype=float)), dims="y" + ) + z_array = DataArray(np.arange(z_length, dtype=float), dims="z") + + data[:, mxg : x_length - mxg, myg : 
lengths[2] + myg, :] = ( + t_array + x_array + y_array + z_array + ) + elif syn_data_type == "stepped": + # Each dataset contains a different number depending on the filename + data = np.ones(shape) * num + elif isinstance(syn_data_type, int): + data = np.ones(shape) * syn_data_type + else: + raise ValueError("Not a recognised choice of type of synthetic bout data.") + + T = DataArray(data, dims=["t", "x", "y", "z"]) + n = DataArray(data, dims=["t", "x", "y", "z"]) + S = DataArray(data[:, :, :, 0], dims=["t", "x", "y"]) + for v in [n, T]: + v.attrs["direction_y"] = "Standard" + v.attrs["cell_location"] = "CELL_CENTRE" + v.attrs["direction_z"] = "Standard" + for v in [S]: + v.attrs["direction_y"] = "Standard" + v.attrs["cell_location"] = "CELL_CENTRE" + v.attrs["direction_z"] = "Average" + ds = Dataset({"n": n, "T": T, "S": S}) + + # BOUT_VERSION needed to deal with backwards incompatible changes: + # + # - v3 and earlier: number of points in z is MZ-1 + # - v4 and later: number of points in z is MZ + # - v5 and later: metric components can be either 2D or 3D + # - v5 and later: dz changed to be a Field2D/3D + ds["BOUT_VERSION"] = 5.0 if bout_v5 else 4.3 + ds["use_metric_3d"] = int(metric_3D) + + # Include grid data + ds["NXPE"] = nxpe + ds["NYPE"] = nype + ds["NZPE"] = 1 + ds["PE_XIND"] = xproc + ds["PE_YIND"] = yproc + ds["MYPE"] = num + + ds["MXG"] = mxg + ds["MYG"] = myg + ds["MZG"] = 0 + ds["nx"] = nx + ds["ny"] = ny + ds["nz"] = nz + ds["MZ"] = 1 * lengths[3] + if squashed: + ds["MXSUB"] = lengths[1] // nxpe + ds["MYSUB"] = lengths[2] // nype + ds["MZSUB"] = lengths[3] + else: + ds["MXSUB"] = lengths[1] + ds["MYSUB"] = lengths[2] + ds["MZSUB"] = lengths[3] + + MYSUB = lengths[2] + + if topology == "core": + ds["ixseps1"] = nx + ds["ixseps2"] = nx + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "sol": + ds["ixseps1"] = 0 + ds["ixseps2"] = 0 + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "limiter": + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx + ds["jyseps1_1"] = -1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - 1 + ds["ny_inner"] = ny // 2 + elif topology == "xpoint": + if nype < 4 and not squashed: + raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 2 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = MYSUB - 1 + ds["jyseps1_2"] = ny - MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "single-null": + if nype < 3 and not squashed: + raise ValueError(f"Not enough processors for xpoint topology: nype={nype}") + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx + ds["jyseps1_1"] = MYSUB - 1 + ds["jyseps2_1"] = ny // 2 - 1 + ds["jyseps1_2"] = ny // 2 - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + ds["ny_inner"] = ny // 2 + elif topology == "connected-double-null": + if nype < 6 and not squashed: + raise ValueError( + "Not enough processors for connected-double-null topology: " + f"nype={nype}" + ) + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "lower-disconnected-double-null": + if 
nype < 6 and not squashed: + raise ValueError( + "Not enough processors for lower-disconnected-double-null " + f"topology: nype={nype}" + ) + ds["ixseps1"] = nx // 2 + ds["ixseps2"] = nx // 2 + 4 + if ds["ixseps2"] >= nx: + raise ValueError( + "Not enough points in the x-direction. ixseps2=" + f'{ds["ixseps2"]} > nx={nx}' + ) + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + elif topology == "upper-disconnected-double-null": + if nype < 6 and not squashed: + raise ValueError( + "Not enough processors for upper-disconnected-double-null " + f"topology: nype={nype}" + ) + ds["ixseps2"] = nx // 2 + ds["ixseps1"] = nx // 2 + 4 + if ds["ixseps2"] >= nx: + raise ValueError( + "Not enough points in the x-direction. ixseps2=" + f'{ds["ixseps2"]} > nx={nx}' + ) + ds["jyseps1_1"] = MYSUB - 1 + ny_inner = 3 * MYSUB + ds["ny_inner"] = ny_inner + ds["jyseps2_1"] = ny_inner - MYSUB - 1 + ds["jyseps1_2"] = ny_inner + MYSUB - 1 + ds["jyseps2_2"] = ny - MYSUB - 1 + else: + raise ValueError(f"Unrecognised topology={topology}") + + if metric_3D: + one = DataArray(np.ones((x_length, y_length, z_length)), dims=["x", "y", "z"]) + zero = DataArray(np.zeros((x_length, y_length, z_length)), dims=["x", "y", "z"]) + else: + one = DataArray(np.ones((x_length, y_length)), dims=["x", "y"]) + zero = DataArray(np.zeros((x_length, y_length)), dims=["x", "y"]) + + ds["zperiod"] = 1 + ds["ZMIN"] = 0.0 + ds["ZMAX"] = 1.0 + ds["g11"] = one + ds["g22"] = one + ds["g33"] = one + ds["g12"] = zero + ds["g13"] = zero + ds["g23"] = zero + ds["g_11"] = one + ds["g_22"] = one + ds["g_33"] = one + ds["g_12"] = zero + ds["g_13"] = zero + ds["g_23"] = zero + ds["G1"] = zero + ds["G2"] = zero + ds["G3"] = zero + ds["J"] = one + ds["Bxy"] = one + ds["zShift"] = zero + + ds["dx"] = 0.5 * one + ds["dy"] = 2.0 * one + if bout_v5: + ds["dz"] = 2.0 * one * np.pi / nz + else: + ds["dz"] = 2.0 * np.pi / nz + + ds["iteration"] = t_length - 1 + ds["hist_hi"] = t_length - 1 + ds["t_array"] = DataArray(np.arange(t_length, dtype=float) * 10.0, dims="t") + ds["tt"] = ds["t_array"][-1] + + # xarray adds this encoding when opening a file. 
Emulate here as it may be used to + # get the file number + ds.encoding["source"] = f"BOUT.dmp.{num}.nc" + + _create_bout_ds_cache[call_args] = ds + return deepcopy(ds) + + +_create_bout_grid_ds_cache = {} + + +def create_bout_grid_ds(xsize=2, ysize=4, guards={}, topology="core", ny_inner=0): + call_args = _get_kwargs() + + try: + # Has been called with the same signature before, just return the cached result + return deepcopy(_create_bout_grid_ds_cache[call_args]) + except KeyError: + pass + + # Set the shape of the data in this dataset + mxg = guards.get("x", 0) + myg = guards.get("y", 0) + xsize += 2 * mxg + ysize += 2 * myg + + # jyseps* from grid file only ever used to check topology when loading the grid file, + # so do not need to be consistent with the main dataset + jyseps2_1 = ysize // 2 + jyseps1_2 = jyseps2_1 + + if "double-null" in topology or "xpoint" in topology: + # Has upper target as well + ysize += 2 * myg + + # make different from jyseps2_1 so double-null toplogy is recognised + jyseps1_2 += 1 + + shape = (xsize, ysize) + + data = DataArray(np.ones(shape), dims=["x", "y"]) + + ds = Dataset( + { + "psixy": data, + "Rxy": data, + "Zxy": data, + "hthe": data, + "y_boundary_guards": myg, + "jyseps2_1": jyseps2_1, + "jyseps1_2": jyseps1_2, + "ny_inner": ny_inner, + } + ) + + _create_bout_grid_ds_cache[call_args] = ds + return deepcopy(ds) diff --git a/xbout/utils.py b/xbout/utils.py index 9587fcb4..32be7edc 100644 --- a/xbout/utils.py +++ b/xbout/utils.py @@ -128,27 +128,6 @@ def update_ny(name): return da -def _add_cartesian_coordinates(ds): - # Add Cartesian X and Y coordinates if they do not exist already - # Works on either BoutDataset or BoutDataArray - - R = ds["R"] - Z = ds["Z"] - zeta = ds[ds.metadata["bout_zdim"]] - if "X_cartesian" not in ds.coords: - X = R * np.cos(zeta) - ds = ds.assign_coords(X_cartesian=X) - if "Y_cartesian" not in ds.coords: - Y = R * np.sin(zeta) - ds = ds.assign_coords(Y_cartesian=Y) - if "Z_cartesian" not in ds.coords: - zcoord = ds.metadata["bout_zdim"] - nz = len(ds[zcoord]) - ds = ds.assign_coords(Z_cartesian=Z.expand_dims({zcoord: nz}, axis=-1)) - - return ds - - def _1d_coord_from_spacing(spacing, dim, ds=None, *, origin_at=None): """ Create a 1d coordinate varying along the dimension 'dim' from the grid spacing @@ -252,7 +231,6 @@ def _check_new_nxpe(ds, nxpe): # Check nxpe is valid nx = ds.metadata["nx"] - 2 * ds.metadata["MXG"] - mxsub = nx // nxpe if nx % nxpe != 0: raise ValueError( @@ -514,21 +492,23 @@ def _split_into_restarts(ds, variables, savepath, nxpe, nype, tind, prefix, over if isinstance(value, str): # Write strings as byte-strings so BOUT++ can read them value = value.encode() + elif isinstance(value, int): + value = np.intc(value) restart_ds[v] = value # These variables need to be altered, because they depend on the number of # files and/or the rank of this file. 
- restart_ds["MXSUB"] = mxsub - restart_ds["MYSUB"] = mysub - restart_ds["NXPE"] = nxpe - restart_ds["NYPE"] = nype - restart_ds["PE_XIND"] = xproc - restart_ds["PE_YIND"] = yproc - restart_ds["hist_hi"] = hist_hi - restart_ds["PE_XIND"] = xproc - restart_ds["PE_YIND"] = yproc - restart_ds["MYPE"] = yproc * nxpe + xproc + restart_ds["MXSUB"] = np.intc(mxsub) + restart_ds["MYSUB"] = np.intc(mysub) + restart_ds["NXPE"] = np.intc(nxpe) + restart_ds["NYPE"] = np.intc(nype) + restart_ds["PE_XIND"] = np.intc(xproc) + restart_ds["PE_YIND"] = np.intc(yproc) + restart_ds["hist_hi"] = np.intc(hist_hi) + restart_ds["PE_XIND"] = np.intc(xproc) + restart_ds["PE_YIND"] = np.intc(yproc) + restart_ds["MYPE"] = np.intc(yproc * nxpe + xproc) # tt is the simulation time where the restart happens restart_ds["tt"] = tt @@ -686,7 +666,6 @@ def _follow_boundary(ds, start_region, start_direction, xbndry, ybndry, Rcoord, visited_regions.append(this_region) ds_region = ds.bout.from_region(this_region, with_guards=0) - region = ds.bout._regions[this_region] # Get all boundary points from this region, and decide which region to go to next this_region = None @@ -749,9 +728,6 @@ def _get_bounding_surfaces(ds, coords): else: ybndry = 0 - xcoord = ds.metadata["bout_xdim"] - ycoord = ds.metadata["bout_ydim"] - # First find the outer boundary start_region = None for name, region in ds.bout._regions.items():
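For reference, and not part of the patch itself: the final hunk in xbout/utils.py changes _split_into_restarts so that string metadata is written as byte strings and integer metadata is cast to np.intc (a C int), which is what BOUT++ expects when it reads restart files. A minimal standalone sketch of that normalisation is below; the helper name normalise_restart_value is hypothetical and only illustrates the casts shown in the diff.

import numpy as np


def normalise_restart_value(value):
    """Return value in a form a BOUT++ restart file can read.

    Mirrors the casts in the patch above: strings become byte strings and
    Python ints become C ints (np.intc); other values pass through unchanged.
    """
    if isinstance(value, str):
        # BOUT++ reads strings from netCDF as byte strings
        return value.encode()
    if isinstance(value, int) and not isinstance(value, bool):
        # plain Python ints would otherwise be written as 64-bit integers
        return np.intc(value)
    return value


if __name__ == "__main__":
    print(normalise_restart_value("BOUT.dmp"))  # b'BOUT.dmp'
    print(type(normalise_restart_value(4)))     # <class 'numpy.intc'>
    print(normalise_restart_value(0.25))        # 0.25, unchanged

Used this way, every scalar written into the restart Dataset (MXSUB, NXPE, PE_XIND, and so on) ends up with a dtype BOUT++ can consume, which is the motivation for the repeated np.intc(...) casts in the hunk above.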