diff --git a/.build_rtd_docs/conf.py b/.build_rtd_docs/conf.py
index 2de05fbe9be..bfc37a3a41f 100644
--- a/.build_rtd_docs/conf.py
+++ b/.build_rtd_docs/conf.py
@@ -81,6 +81,18 @@
     dst = os.path.join(dstdir, fpth)
     shutil.copy(src, dst)
 
+# -- build the deprecations table --------------------------------------------
+print("Build the deprecations markdown table")
+pth = os.path.join("..", "doc", "mf6io", "mf6ivar")
+args = (sys.executable, "deprecations.py")
+# run the command
+proc = Popen(args, stdout=PIPE, stderr=PIPE, cwd=pth)
+stdout, stderr = proc.communicate()
+if stdout:
+    print(stdout.decode("utf-8"))
+if stderr:
+    print("Errors:\n{}".format(stderr.decode("utf-8")))
+
 # -- copy deprecations markdown ---------------------------------------------
 print("Copy the deprecations table")
 dstdir = "_mf6run"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 8f1fb7b720e..65e103de583 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -459,6 +459,10 @@ jobs:
           name: deprecations
           path: modflow6/doc/mf6io/mf6ivar/md/deprecations.md
 
+      - name: Build MF6IO files from DFNs
+        working-directory: modflow6/doc/mf6io/mf6ivar
+        run: python mf6ivar.py
+
       - name: Build documentation
         env:
           # this step is lazy about building the mf6 examples PDF document, first
diff --git a/distribution/benchmark.py b/distribution/benchmark.py
index 599335df18e..a633703e771 100644
--- a/distribution/benchmark.py
+++ b/distribution/benchmark.py
@@ -428,11 +428,10 @@ def test_run_benchmarks(tmp_path):
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Benchmark MODFLOW 6 versions on example models",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
-            Benchmarks the current version of MODFLOW 6 against the latest official release.
+            Benchmarks the current version of MODFLOW 6 against the latest official release, with the example models stored in the MODFLOW-USGS/modflow6-examples repository.
             """
         ),
diff --git a/distribution/build_dist.py b/distribution/build_dist.py
index c457db6f65e..fb178f10b46 100644
--- a/distribution/build_dist.py
+++ b/distribution/build_dist.py
@@ -111,16 +111,21 @@ def test_copy_sources(tmp_path):
 def setup_examples(
     bin_path: PathLike,
     examples_path: PathLike,
-    overwrite: bool = False,
+    force: bool = False,
     models: Optional[List[str]] = None,
 ):
     examples_path = Path(examples_path).expanduser().absolute()
-    latest = get_release("MODFLOW-USGS/modflow6-examples", "latest")
+
+    # find and download example models distribution from latest examples release
+    latest = get_release(
+        "MODFLOW-USGS/modflow6-examples", tag="latest", verbose=True
+    )
     assets = latest["assets"]
+    print(f"Found {len(assets)} assets from the latest examples release:")
+    pprint([a["name"] for a in assets])
     asset = next(
-        iter([a for a in assets if a["name"] == "mf6examples.zip"]), None
+        iter([a for a in assets if a["name"].endswith("examples.zip")]), None
     )
-    # download example models zip asset
     download_and_unzip(
         asset["browser_download_url"], examples_path, verbose=True
     )
@@ -141,7 +146,7 @@ def setup_examples(
     model_paths = get_model_paths(examples_path)
     for mp in model_paths:
         script_path = mp / f"run{SCR_EXT}"
-        if not overwrite and script_path.is_file():
+        if not force and script_path.is_file():
             print(f"Script {script_path} already exists")
         else:
             print(f"Creating {script_path}")
@@ -165,7 +170,7 @@ def setup_examples(
 
     # add runall.sh/bat, which runs all examples
     script_path = examples_path / f"runall{SCR_EXT}"
-    if not overwrite and script_path.is_file():
+    if not force and script_path.is_file():
         print(f"Script {script_path} already exists")
     else:
         print(f"Creating {script_path}")
@@ -191,7 +196,7 @@ def setup_examples(
 
 
 def build_programs_meson(
-    build_path: PathLike, bin_path: PathLike, overwrite: bool = False
+    build_path: PathLike, bin_path: PathLike, force: bool = False
 ):
     build_path = Path(build_path).expanduser().absolute()
     bin_path = Path(bin_path).expanduser().absolute()
@@ -204,7 +209,7 @@ def build_programs_meson(
     lib_paths = [bin_path / f"libmf6{LIB_EXT}"]
 
     if (
-        not overwrite
+        not force
         and all(p.is_file() for p in exe_paths)
         and all(p.is_file() for p in lib_paths)
     ):
@@ -293,7 +298,7 @@ def build_distribution(
     build_path: PathLike,
     output_path: PathLike,
     full: bool = False,
-    overwrite: bool = False,
+    force: bool = False,
     models: Optional[List[str]] = None,
 ):
     print(f"Building {'full' if full else 'minimal'} distribution")
@@ -305,7 +310,7 @@ def build_distribution(
     build_programs_meson(
         build_path=build_path,
         bin_path=output_path / "bin",
-        overwrite=overwrite,
+        force=force,
     )
 
     # code.json metadata
@@ -319,7 +324,7 @@ def build_distribution(
     setup_examples(
         bin_path=output_path / "bin",
         examples_path=output_path / "examples",
-        overwrite=overwrite,
+        force=force,
         models=models,
     )
 
@@ -334,7 +339,7 @@ def build_distribution(
         bin_path=output_path / "bin",
         full=full,
         output_path=output_path / "doc",
-        overwrite=overwrite,
+        force=force,
     )
 
 
@@ -348,7 +353,7 @@ def test_build_distribution(tmp_path, full):
         build_path=tmp_path / "builddir",
         output_path=output_path,
         full=full,
-        overwrite=True,
+        force=True,
     )
 
     if full:
@@ -378,16 +383,20 @@ def test_build_distribution(tmp_path, full):
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Create a Modflow 6 distribution directory for release",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
-            Create a distribution folder. If no output path is provided
-            distribution files are written to the distribution/ folder.
+            Create a MODFLOW 6 distribution. If an output path is provided,
+            distribution files are written to it; otherwise they are written
+            to the distribution/ project subdirectory.
             By default a minimal distribution containing only binaries,
             mf6io documentation, release notes and metadata (code.json)
             is created. To create a full distribution including sources
-            and examples, use the --full flag.
+            and examples, use the --full flag. Models to be included in
+            the examples and documentation can be selected with --model
+            (or -m), which may be used multiple times. Use --force (-f)
+            to overwrite preexisting distribution artifacts; by default
+            the script is lazy and will only create what it can't find.
             """
         ),
     )
@@ -436,6 +445,6 @@ def test_build_distribution(tmp_path, full):
         build_path=build_path,
         output_path=out_path,
         full=args.full,
-        overwrite=args.force,
+        force=args.force,
         models=models,
     )
diff --git a/distribution/build_docs.py b/distribution/build_docs.py
index e34241b8791..37108cfee88 100644
--- a/distribution/build_docs.py
+++ b/distribution/build_docs.py
@@ -26,7 +26,13 @@
 from modflow_devtools.markers import no_parallel, requires_exe, requires_github
 from modflow_devtools.misc import run_cmd, run_py_script, set_dir
 
-from utils import convert_line_endings, get_project_root_path
+from utils import (
+    assert_match,
+    convert_line_endings,
+    get_project_root_path,
+    glob,
+    match,
+)
 
 # paths
 PROJ_ROOT_PATH = get_project_root_path()
@@ -36,8 +42,8 @@
 BENCHMARKS_PATH = PROJ_ROOT_PATH / "distribution" / ".benchmarks"
 DOCS_PATH = PROJ_ROOT_PATH / "doc"
 MF6IO_PATH = DOCS_PATH / "mf6io"
+MF6IVAR_PATH = MF6IO_PATH / "mf6ivar"
 RELEASE_NOTES_PATH = DOCS_PATH / "ReleaseNotes"
-DEPRECATIONS_SCRIPT_PATH = MF6IO_PATH / "mf6ivar" / "deprecations.py"
 TEX_PATHS = {
     "minimal": [
         MF6IO_PATH / "mf6io.tex",
@@ -51,6 +57,9 @@
         DOCS_PATH / "SuppTechInfo" / "mf6suptechinfo.tex",
     ],
 }
+
+# models to include in the docs by default,
+# filterable with the --model (-m) option
 DEFAULT_MODELS = ["gwf", "gwt", "gwe", "prt", "swf"]
 
 # OS-specific extensions
@@ -70,52 +79,13 @@
 ]
 
 
-def clean_tex_files():
-    print("Cleaning latex files")
-    exts = ["pdf", "aux", "bbl", "idx", "lof", "out", "toc"]
-    pth = PROJ_ROOT_PATH / "doc" / "mf6io"
-    files = [(pth / f"mf6io.{e}") for e in exts]
-    for file in files:
-        file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-    pth = PROJ_ROOT_PATH / "doc" / "ReleaseNotes"
-    files = [(pth / f"ReleaseNotes.{e}") for e in exts]
-    for file in files:
-        file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-    pth = PROJ_ROOT_PATH / "doc" / "zonebudget"
-    files = [(pth / f"zonebudget.{e}") for e in exts]
-    for file in files:
-        file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-    pth = PROJ_ROOT_PATH / "doc" / "ConverterGuide"
-    files = [(pth / f"converter_mf5to6.{e}") for e in exts]
-    for file in files:
-        file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-    pth = PROJ_ROOT_PATH.parent / "modflow6-docs.git" / "mf6suptechinfo"
-    files = [(pth / f"mf6suptechinfo.{e}") for e in exts]
-    if pth.is_dir():
-        for file in files:
-            file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-    pth = EXAMPLES_REPO_PATH / "doc"
-    files = [(pth / f"mf6examples.{e}") for e in exts]
-    for file in files:
-        file.unlink(missing_ok=True)
-    assert not os.path.isfile(str(pth) + ".pdf")
-
-
 def download_benchmarks(
     output_path: PathLike,
     verbose: bool = False,
     repo_owner: str = "MODFLOW-USGS",
 ) -> Optional[Path]:
+    """Try to download MF6 benchmarks from GitHub Actions."""
+
     output_path = Path(output_path).expanduser().absolute()
     name = "run-time-comparison"  # todo make configurable
     repo = (
@@ -169,9 +139,11 @@ def test_download_benchmarks(tmp_path, github_user):
 
 
 def build_benchmark_tex(
     output_path: PathLike,
-    overwrite: bool = False,
+    force: bool = False,
     repo_owner: str = "MODFLOW-USGS",
 ):
+    """Build LaTeX files for MF6 performance benchmarks to go into the release notes."""
+
     BENCHMARKS_PATH.mkdir(parents=True, exist_ok=True)
     benchmarks_path = BENCHMARKS_PATH / "run-time-comparison.md"
@@ -182,7 +154,7 @@ def build_benchmark_tex(
     )
 
     # run benchmarks again if no benchmarks found on GitHub or overwrite requested
-    if overwrite or not benchmarks_path.is_file():
+    if force or not benchmarks_path.is_file():
         run_benchmarks(
             build_path=PROJ_ROOT_PATH / "builddir",
             current_bin_path=PROJ_ROOT_PATH / "bin",
@@ -202,9 +174,7 @@ def build_benchmark_tex(
     assert tex_path.is_file()
 
     if (DISTRIBUTION_PATH / f"{benchmarks_path.stem}.md").is_file():
-        assert (
-            DOCS_PATH / "ReleaseNotes" / f"{benchmarks_path.stem}.tex"
-        ).is_file()
+        assert (RELEASE_NOTES_PATH / f"{benchmarks_path.stem}.tex").is_file()
 
 
 @flaky
@@ -221,109 +191,95 @@ def test_build_benchmark_tex(tmp_path):
     tex_path.unlink(missing_ok=True)
 
 
-def build_deprecations_tex():
-    mf6ivar_path = MF6IO_PATH / "mf6ivar"
-    md_path = mf6ivar_path / "md"
-    md_path.mkdir(exist_ok=True)
+def build_deprecations_tex(force: bool = False):
+    """Build LaTeX files for the deprecations table to go into the release notes."""
 
     # make deprecations markdown table
-    run_py_script(DEPRECATIONS_SCRIPT_PATH)
-    with set_dir(mf6ivar_path):
-        deprecations_path = md_path / "deprecations.md"
-        deprecations_path.unlink(missing_ok=True)
-        out, err, ret = run_cmd(
-            sys.executable, "deprecations.py", verbose=True
-        )
-        assert not ret, out + err
-        assert deprecations_path.is_file()
+    (MF6IVAR_PATH / "md").mkdir(exist_ok=True)
+    md_path = MF6IVAR_PATH / "md" / "deprecations.md"
+    if md_path.is_file() and not force:
+        print(f"{md_path} already exists.")
+    else:
+        md_path.unlink(missing_ok=True)
+        with set_dir(MF6IVAR_PATH):
+            out, err, ret = run_py_script("deprecations.py", verbose=True)
+            assert not ret, out + err
 
-    # convert markdown deprecations to LaTeX
-    with set_dir(RELEASE_NOTES_PATH):
-        tex_path = Path("deprecations.tex")
+    # convert markdown table to LaTeX
+    tex_path = RELEASE_NOTES_PATH / "deprecations.tex"
+    if tex_path.is_file() and not force:
+        print(f"{tex_path} already exists.")
+    else:
         tex_path.unlink(missing_ok=True)
-        out, err, ret = run_cmd(
-            sys.executable,
-            "mk_deprecations.py",
-            deprecations_path,
-            verbose=True,
-        )
-        assert not ret, out + err
-        assert tex_path.is_file()
+        with set_dir(RELEASE_NOTES_PATH):
+            out, err, ret = run_py_script(
+                "mk_deprecations.py", md_path, verbose=True
+            )
+            assert not ret, out + err
 
-    assert (
-        DOCS_PATH / "ReleaseNotes" / f"{deprecations_path.stem}.tex"
-    ).is_file()
+    # check deprecations files exist
+    assert md_path.is_file()
+    assert tex_path.is_file()
 
 
 @no_parallel
 def test_build_deprecations_tex():
-    build_deprecations_tex()
+    build_deprecations_tex(force=True)
 
 
-def build_mf6io_tex_from_dfn(
-    overwrite: bool = False, models: Optional[List[str]] = None
-):
-    if overwrite:
-        clean_tex_files()
-
-    def files_match(tex_path, dfn_path, ignored):
-        dfn_names = [
-            f.stem
-            for f in dfn_path.glob("*")
-            if f.is_file()
-            and "dfn" in f.suffix
-            and not any(pattern in f.name for pattern in ignored)
-        ]
-        tex_names = [
-            f.stem.replace("-desc", "")
-            for f in tex_path.glob("*")
-            if f.is_file()
-            and "tex" in f.suffix
-            and not any(pattern in f.name for pattern in ignored)
-        ]
-
-        return set(tex_names) == set(dfn_names)
-
-    with set_dir(PROJ_ROOT_PATH / "doc" / "mf6io" / "mf6ivar"):
-        ignored = ["appendix", "common"]
-        tex_pth = Path("tex")
-        dfn_pth = Path("dfn")
-        tex_files = [f for f in tex_pth.glob("*") if f.is_file()]
-        dfn_files = [f for f in dfn_pth.glob("*") if f.is_file()]
-
-        if (
-            not overwrite
-            and any(tex_files)
-            and any(dfn_files)
-            and files_match(tex_pth, dfn_pth, ignored)
-        ):
-            print("DFN files already exist:")
-            pprint(dfn_files)
+def build_mf6io_tex(models: Optional[List[str]] = None, force: bool = False):
+    """Build LaTeX files for the MF6IO guide from DFN files."""
+
+    if models is None:
+        models = DEFAULT_MODELS
+
+    included = models + ["sim", "utl", "exg", "sln"]
+    excluded = ["appendix", "common"] + list(set(DEFAULT_MODELS) - set(models))
+
+    with set_dir(MF6IVAR_PATH):
+        cwd = Path.cwd()
+
+        def _glob(pattern):
+            return list(glob(cwd, pattern, included, excluded))
+
+        def _stems(paths):
+            return [p.stem.replace("-desc", "") for p in paths]
+
+        tex_files, dfn_files = _glob("*.tex"), _glob("*.dfn")
+        tex_stems, dfn_stems = _stems(tex_files), _stems(dfn_files)
+        if match(tex_stems, dfn_stems) and not force:
+            print("DFN files already exist.")
         else:
-            for f in tex_files:
-                f.unlink()
+            # remove md and tex output dirs
+            shutil.rmtree("md", ignore_errors=True)
+            shutil.rmtree("tex", ignore_errors=True)
 
-            # run python script
+            # run mf6ivar script
             args = [sys.executable, "mf6ivar.py"]
-            if models is not None and any(models):
-                for model in models:
-                    args += ["--model", model]
-            out, err, ret = run_cmd(*args)
+            for model in models:
+                args += ["--model", model]
+            out, err, ret = run_cmd(*args, verbose=True)
             assert not ret, out + err
 
-            # check that dfn and tex files match
-            assert files_match(tex_pth, dfn_pth, ignored)
+            # check that a tex file was generated for each dfn
+            tex_files, dfn_files = _glob("*.tex"), _glob("*.dfn")
+            tex_stems, dfn_stems = _stems(tex_files), _stems(dfn_files)
+            assert_match(tex_stems, dfn_stems, "tex", "dfn")
 
 
 @no_parallel
-@pytest.mark.parametrize("overwrite", [True, False])
-def test_build_mf6io_tex_from_dfn(overwrite):
-    build_mf6io_tex_from_dfn(overwrite=overwrite)
+def test_build_mf6io_tex():
+    build_mf6io_tex(force=True)
 
 
-def build_mf6io_tex_example(
+def build_usage_example_tex(
     workspace_path: PathLike, bin_path: PathLike, example_model_path: PathLike
 ):
+    """
+    Build LaTeX files for the MF6 usage example in the MF6IO guide.
+    Runs MF6 to capture the output and inserts it into the document.
+    """
+
     workspace_path = Path(workspace_path) / "workspace"
     bin_path = Path(bin_path).expanduser().absolute()
     mf6_exe_path = bin_path / f"mf6{EXE_EXT}"
@@ -386,12 +342,14 @@ def build_mf6io_tex_example(
             f.write("}\n")
 
 
-def build_pdfs_from_tex(
+def build_pdfs(
     tex_paths: List[PathLike],
     output_path: PathLike,
     passes: int = 3,
-    overwrite: bool = False,
+    force: bool = False,
 ):
+    """Build PDF documents from LaTeX files."""
+
     print("Building PDFs from LaTex:")
     pprint(tex_paths)
 
@@ -402,7 +360,7 @@ def build_pdfs_from_tex(
         pdf_name = tex_path.stem + ".pdf"
         pdf_path = tex_path.parent / pdf_name
         tgt_path = output_path / pdf_name
-        if overwrite or not tgt_path.is_file():
+        if force or not tgt_path.is_file():
             print(f"Converting {tex_path} to PDF")
             with set_dir(tex_path.parent):
                 first = True
@@ -454,25 +412,27 @@ def test_build_pdfs_from_tex(tmp_path):
         DOCS_PATH / "ConverterGuide" / "converter_mf5to6.bbl",
     ]
 
-    build_pdfs_from_tex(tex_paths, tmp_path)
+    build_pdfs(tex_paths, tmp_path)
 
     for p in tex_paths[:-1] + bbl_paths:
         assert p.is_file()
 
 
 def build_documentation(
     bin_path: PathLike,
+    output_path: PathLike,
+    force: bool = False,
     full: bool = False,
-    output_path: Optional[PathLike] = None,
-    overwrite: bool = False,
-    repo_owner: str = "MODFLOW-USGS",
     models: Optional[List[str]] = None,
+    repo_owner: str = "MODFLOW-USGS",
 ):
+    """Build documentation for a MODFLOW 6 distribution."""
+
     print(f"Building {'full' if full else 'minimal'} documentation")
 
     bin_path = Path(bin_path).expanduser().absolute()
     output_path = Path(output_path).expanduser().absolute()
 
-    if (output_path / "mf6io.pdf").is_file() and not overwrite:
+    if (output_path / "mf6io.pdf").is_file() and not force:
         print(f"{output_path / 'mf6io.pdf'} already exists")
         return
@@ -480,28 +440,28 @@ def build_documentation(
     output_path.mkdir(parents=True, exist_ok=True)
 
     # build LaTex input/output docs from DFN files
-    build_mf6io_tex_from_dfn(overwrite=overwrite, models=models)
+    build_mf6io_tex(force=force, models=models)
 
     # build LaTeX input/output example model docs
     with TemporaryDirectory() as temp:
-        build_mf6io_tex_example(
-            workspace_path=Path(temp),
+        build_usage_example_tex(
             bin_path=bin_path,
+            workspace_path=Path(temp),
             example_model_path=PROJ_ROOT_PATH / ".mf6minsim",
         )
 
     # build deprecations table for insertion into LaTex release notes
-    build_deprecations_tex()
+    build_deprecations_tex(force=force)
 
     if full:
         # convert benchmarks to LaTex, running them first if necessary
         build_benchmark_tex(
-            output_path=output_path, overwrite=overwrite, repo_owner=repo_owner
+            output_path=output_path, force=force, repo_owner=repo_owner
         )
 
         # download example docs
         pdf_name = "mf6examples.pdf"
-        if overwrite or not (output_path / pdf_name).is_file():
+        if force or not (output_path / pdf_name).is_file():
             latest = get_release(f"{repo_owner}/modflow6-examples", "latest")
             assets = latest["assets"]
             asset = next(
@@ -524,17 +484,17 @@ def build_documentation(
             raise
 
         # convert LaTex to PDF
-        build_pdfs_from_tex(
+        build_pdfs(
             tex_paths=TEX_PATHS["full"],
             output_path=output_path,
-            overwrite=overwrite,
+            force=force,
         )
     else:
         # just convert LaTeX to PDF
-        build_pdfs_from_tex(
+        build_pdfs(
             tex_paths=TEX_PATHS["minimal"],
             output_path=output_path,
-            overwrite=overwrite,
+            force=force,
         )
 
     # enforce os line endings on all text files
@@ -558,12 +518,11 @@ def test_build_documentation(tmp_path):
     bin_path = tmp_path / "bin"
     dist_path = tmp_path / "dist"
     meson_build(PROJ_ROOT_PATH, tmp_path / "builddir", bin_path)
-    build_documentation(bin_path, dist_path, EXAMPLES_REPO_PATH)
+    build_documentation(bin_path, dist_path)
 
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Convert LaTeX docs to PDFs",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
@@ -625,9 +584,9 @@ def test_build_documentation(tmp_path):
     models = args.model if args.model else DEFAULT_MODELS
     build_documentation(
         bin_path=bin_path,
-        full=args.full,
         output_path=output_path,
-        overwrite=args.force,
-        repo_owner=args.repo_owner,
+        force=args.force,
+        full=args.full,
         models=models,
+        repo_owner=args.repo_owner,
     )
diff --git a/distribution/update_version.py b/distribution/update_version.py
index 3df05b104d4..2b781927896 100755
--- a/distribution/update_version.py
+++ b/distribution/update_version.py
@@ -467,7 +467,6 @@ def test_update_version(version, approved, developmode):
 
 if __name__ == "__main__":
     parser = argparse.ArgumentParser(
-        prog="Update Modflow 6 version",
         formatter_class=argparse.RawDescriptionHelpFormatter,
         epilog=textwrap.dedent(
             """\
diff --git a/distribution/utils.py b/distribution/utils.py
index 484abc21c96..cb281ec91d9 100644
--- a/distribution/utils.py
+++ b/distribution/utils.py
@@ -1,16 +1,10 @@
-import os
-import platform
 import re
-import shutil
 import subprocess
 import sys
 from datetime import datetime
-from os import PathLike, environ
 from pathlib import Path
-from warnings import warn
-
-import pytest
-from modflow_devtools.markers import requires_exe
+from pprint import pformat
+from typing import Iterator, List, Optional
 
 
 _project_root_path = Path(__file__).resolve().parent.parent
@@ -19,33 +13,6 @@ def get_project_root_path():
     return _project_root_path
 
 
-def get_branch():
-    branch = None
-    try:
-        # determine current branch
-        b = subprocess.Popen(
-            ("git", "status"), stdout=subprocess.PIPE, stderr=subprocess.STDOUT
-        ).communicate()[0]
-        if isinstance(b, bytes):
-            b = b.decode("utf-8")
-
-        # determine current branch
-        for line in b.splitlines():
-            if "On branch" in line:
-                branch = line.replace("On branch ", "").rstrip()
-        if branch is None:
-            raise
-    except:
-        branch = os.environ.get("GITHUB_REF_NAME", None)
-
-    if branch is None:
-        raise ValueError("Couldn't detect branch")
-    else:
-        print(f"Detected branch: {branch}")
-
-    return branch
-
-
 def get_modified_time(path: Path) -> float:
     return (
         path.stat().st_mtime
@@ -54,66 +21,25 @@ def get_modified_time(path: Path) -> float:
     )
 
 
-def get_ostag():
-    zipname = sys.platform.lower()
-    if zipname == "linux2":
-        zipname = "linux"
-    elif zipname == "darwin":
-        zipname = "mac"
-    elif zipname == "win32":
-        if platform.architecture()[0] == "64bit":
-            zipname = "win64"
-    return zipname
+def glob(
+    path: Path,
+    pattern: str,
+    included: Optional[List[str]],
+    excluded: Optional[List[str]],
+) -> Iterator[Path]:
+    def is_included(p):
+        if included is None:
+            return True
+        return any(i in p.name for i in included)
 
+    def is_excluded(p):
+        if excluded is None:
+            return True
+        return any(e in p.name for e in excluded)
 
-def get_repo_path() -> Path:
-    """
-    Returns the path to the folder containing example/test model repositories.
-    """
-    repo_path = environ.get("REPOS_PATH", None)
-    if not repo_path:
-        warn(
-            "REPOS_PATH environment variable missing, defaulting to parent of project root"
-        )
-    return Path(repo_path) if repo_path else _project_root_path
-
-
-def copytree(src: PathLike, dst: PathLike, symlinks=False, ignore=None):
-    """
-    Copy a folder from src to dst. If dst does not exist, then create it.
- """ - src = Path(src).expanduser().absolute() - dst = Path(dst).expanduser().absolute() - - for s in src.glob("*"): - d = dst / s.name - if s.is_dir(): - print(f" copying {s} ===> {d}") - shutil.copytree(s, d, symlinks, ignore) - else: - print(f" copying {s} ===> {d}") - shutil.copy2(s, d) - - -def run_command(argv, pth, timeout=None): - with subprocess.Popen( - argv, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=pth - ) as process: - try: - output, unused_err = process.communicate(timeout=timeout) - buff = output.decode("utf-8") - ierr = process.returncode - except subprocess.TimeoutExpired: - process.kill() - output, unused_err = process.communicate() - buff = output.decode("utf-8") - ierr = 100 - except: - output, unused_err = process.communicate() - buff = output.decode("utf-8") - ierr = 101 - - return buff, ierr + for p in path.glob(pattern): + if p.is_file() and is_included(p) and not is_excluded(p): + yield p def convert_line_endings(folder, windows=True): @@ -139,12 +65,26 @@ def convert_line_endings(folder, windows=True): print(p.communicate()) -@requires_exe("dos2unix", "unix2dos") -@pytest.mark.skip(reason="todo") -def test_convert_line_endings(): - pass - - def split_nonnumeric(s): match = re.compile("[^0-9]").search(s) return [s[: match.start()], s[match.start() :]] if match else s + + +def match(l, r): + l = set(l) + r = set(r) + diff = l ^ r + return not any(diff) + + +def assert_match(l, r, lname=None, rname=None): + l = set(l) + r = set(r) + diff = l ^ r + lname = lname or "l" + rname = rname or "r" + assert not any(diff), ( + f"=> symmetric difference:\n{pformat(diff)}\n" + f"=> {lname} - {rname}:\n{pformat(l - r)}\n" + f"=> {rname} - {lname}:\n{pformat(r - l)}\n" + ) diff --git a/doc/mf6io/mf6ivar/deprecations.py b/doc/mf6io/mf6ivar/deprecations.py index d6896607e83..9f2efe0f5f5 100644 --- a/doc/mf6io/mf6ivar/deprecations.py +++ b/doc/mf6io/mf6ivar/deprecations.py @@ -1,9 +1,11 @@ -import os from pathlib import Path from typing import List, Optional, Tuple from packaging.version import Version +PROJ_ROOT_PATH = Path(__file__).parents[3] +MF6IVAR_PATH = PROJ_ROOT_PATH / "doc" / "mf6io" / "mf6ivar" + def get_deprecations( dfndir, @@ -33,7 +35,7 @@ def get_deprecations( def create_deprecations_file(dfndir, mddir, verbose): deprecations = get_deprecations(dfndir) - deps_path = (Path(mddir) / "deprecations.md").absolute() + deps_path = (mddir / "deprecations.md").absolute() if verbose: print(f"Found {len(deprecations)} deprecations, writing {deps_path}") with open(deps_path, "w") as f: @@ -50,6 +52,7 @@ def create_deprecations_file(dfndir, mddir, verbose): if __name__ == "__main__": - dfndir = os.path.join(".", "dfn") - mddir = os.path.join(".", "md") + dfndir = MF6IVAR_PATH / "dfn" + mddir = MF6IVAR_PATH / "md" + mddir.mkdir(exist_ok=True) create_deprecations_file(dfndir, mddir, verbose=True)