diff --git a/.gitignore b/.gitignore index 0ad4b60b..59167492 100644 --- a/.gitignore +++ b/.gitignore @@ -1,7 +1,6 @@ -src/* -runs/* scripts/__pycache__/* __pycache__/* .DS_Store -plots/* -./benchmark_cable_qsub.sh +*.pyc +benchcab.egg-info +build/ \ No newline at end of file diff --git a/AUTHORS b/AUTHORS new file mode 100644 index 00000000..401cd4c7 --- /dev/null +++ b/AUTHORS @@ -0,0 +1,6 @@ +Aidan Heerdegen +C. Carouge +Claire +Claire Carouge +Martin De Kauwe +mdekauwe diff --git a/ChangeLog b/ChangeLog new file mode 100644 index 00000000..0a4004fc --- /dev/null +++ b/ChangeLog @@ -0,0 +1,257 @@ +CHANGES +======= + +* Update README +* Use site data published in ks32 +* Add running the configurations +* .gitignore files from install +* Fix: re-building source code +* Add building CABLE +* Fix setup with setup.cfg +* Moved all setup phases into a class +* Add setup for compilation +* Fix issue with configuration file format +* simplify the file structure +* Attempt to build for command line +* Define input config in yaml +* Create min. work directory tree +* Modify command line args +* Add simple command line +* Clean start +* Update .gitignore +* Add LICENSE +* separate inputs by type of input +* Update README.md +* Update README.md +* Forgotten loading the python module +* To run on multi-processors +* Some typos in the README file +* More docs updates to make things simpler for users +* Updated documentation +* Use hh5 environments in qsub script +* Load hh5 conda environment in qsub script + +v0.1 +---- + +* Serial runs only work for now +* Fix imports +* update all from optparse to argparse +* Black formatting +* A few missing things for mpi exe. name +* Add "-mpi" to exe name if compiled with mpi +* "Fix" error handling for subprocess. To update +* Import submodules properly +* Update to argparse. Remove append to path for imports +* To work with updated build script for CABLE +* Update how to find compilation options for Gadi +* Simplify check for NCI machine +* Update to Argparse from optparse +* Don't append to path for imports +* replace "is" with "==" +* fixes for jhan +* removed date +* fixed sys exit +* Added xarray +* fixed dates issue +* fixed for jhan +* fixed for share branch +* fixed option to pass user branch +* test more cores +* test more cores +* Changed call +* test +* test +* reduced core calls +* added unload openmpi +* changed logging options +* Added netcdf unload cmd +* fixed filename +* fixed seasonal plot wrapper +* Fixed logic for qsub +* fixed mpi +* fix for unused cores +* fix for unused cores +* changed default mpi +* removed clean +* fixed cmd line parser +* matched other run script +* matched other run script +* Added flag to build sep of get +* fixed literals +* removed qsub +* Changed tests +* Added NCI hint +* fixed literals issue +* fix for gadi modules +* fix for ifort +* fix for ifort +* fix for ifort +* fix for ifort +* fix for ifort +* fix for ifort +* fix for ifort +* added debug +* added debug +* fixed literal issue? 
+* test for gadi fi +* fixed storm server paths +* fixed storm server paths +* Added missing lib +* Added missing lib +* Added catch for anna +* fixed typo +* Changed storm paths +* clean up +* filename cleanup +* Added plumber path for storm +* changed trunk name +* blah +* added unplumbed spatial +* Added spatial file, needs editing +* fixed new name +* renamed file +* Added -s flag +* Added -s flag +* fixed met path +* fixed met path +* fixed met path +* fixed met path +* fixed to get it working again +* fixed paths +* fixed typos +* changed pbs options +* updated met files +* rolled back update utils +* updated utils +* updated text to include mention of integration +* fixed paths for gadi +* changed nc path +* changed paths for gadi +* changed paths for gadi +* changed paths for gadi +* Added note on integration branch +* Commented out old ver +* Added newline +* Changed subprocess call to one that automatically raises exception with error +* Tweaked multiprocessing code +* Tweaked multiprocessing code +* Fixed met flag +* Tweaked options +* Added sci config +* Added sci config +* Added further sci configs +* tweaked usage request +* Removed removal of src +* Removed file +* Fixed ignore +* Fixed ignore +* Changed path +* Fixed stupid logic +* Changed qsub options +* Added comment +* Turned on MPI +* Removed date from Trunk +* Made integration the default +* Made integration the default +* Added script to gen integration branch +* Added missing numpy +* Fixed home bug +* Added password SVN option +* Added missing dict check +* Added missing dict check +* Put back missings args +* Fixed options typo +* Added additional check +* Further edits +* Further edits +* Further edits +* Fixed readme +* removed wrapper script +* Split and reorganised files to work on PBS +* Renamed file +* Renamed file +* Removed correct command for non empty dir +* Removed debug stuff +* Added initial clean up of src dir +* Fixed netcdf load +* Fixed netcdf load +* Fixed netcdf load +* added debug statements +* added debug statements +* added debug statements +* added pbs wd +* added pbs wd +* added pbs wd +* added pbs wd +* various fixes to nci script +* changed raijin testing +* changed raijin testing +* Added qsub script +* Changed raijin met path +* Fixed raijin netcdf libs +* Changed paths on storm server to match old plumber files +* Changed paths for storm server +* Changed paths for storm server +* Changed paths for storm server +* Changed paths for storm server +* Added case for imac +* Added repo id and site id, for MPI stuff +* Fixed Matplotlib Deprecation Warning +* Moved paths bit to reduce immediate options +* Moved paths bit to reduce immediate options +* Added different NC paths +* Added different met paths +* Added different met paths +* Added functionality to not use the trunk +* Added options for rajin storm +* Added options for rajin storm +* Added options for rajin storm +* Added options for rajin storm +* updated ignore +* Removed stats stuff +* Tweaked comment layout +* Tweaked comment layout +* Removed change path bug +* Removed change path bug +* Fixed mis-step when it changes to run dir +* Fixed plot fname +* swapped png for pdf +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme +* Updated readme info +* Updated readme info +* Updated readme info +* Updated readme info +* Updated readme info +* Updated readme info +* plugged in plots +* Added plot script +* Moved logic to 
wrapper +* removed duplicate file +* removed duplicate file +* Removed debug statement +* passing repo id +* passing repo id +* moved sci configs +* Removed default clean option +* Remved old thing +* Added sci config changes +* Big reorganisation +* Added script to build executables +* Added script to build executables +* Added funcs to get different branch +* Added funcs to get repos +* Added bones +* updated readme +* updated readme +* updated readme +* updated readme +* first commit diff --git a/LICENSE b/LICENSE deleted file mode 100644 index 261eeb9e..00000000 --- a/LICENSE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. diff --git a/README.md b/README.md index 10dfb122..6c118aa5 100644 --- a/README.md +++ b/README.md @@ -1,48 +1,62 @@ # CABLE benchmarking -Repository to benchmark a user branch (with personal changes) against the head of the trunk. The results should be attached with all new [tickets](https://trac.nci.org.au/trac/cable/report/1). +Repository to benchmark CABLE. The benchmark will run the exact same configurations on two CABLE branches specified by the user, e.g. a user branch (with personal changes) against the head of the trunk. The results should be attached with all new [tickets](https://trac.nci.org.au/trac/cable/report/1). -The code will: (i) check out; (ii) build; and (iii) run both the head of the trunk and the user's personal branch across N standard science configurations. It is possible to produce some plots locally. But the outputs should be uploaded to [the modelevaluation website](https://modelevaluation.org/) for further benchmarking and evaluation. +The code will: (i) check out; (ii) build; and (iii) run branches across N standard science configurations. It is possible to produce some plots locally. But the outputs should be uploaded to [the modelevaluation website](https://modelevaluation.org/) for further benchmarking and evaluation. +For the moment, the benchmarking only works on NCI supercomputer. ## Permissions To run the benchmarking, you will need access to the following projects at NCI: -* w35 +* ks32 * wd9 * hh5 -You can request access via [my.nci.org.au](https://my.nci.org.au/mancini/login?next=/mancini/) -## Get the benchmarking code and run directory -Clone the directory to your preferred location: - - $ git clone https://github.com/ccarouge/CABLE_benchmarking.git - -## Setup the benchmarking -You will need to update the following entries in `user_options.py`: -* user information -* name of the user branch -### user information -The program needs to know your user login name and the project you want to use to run the benchmarking -### Name of the user branch -Give the name of your branch in the `repo2` variable. - +You can request access via [my.nci.org.au](https://my.nci.org.au/mancini/login?next=/mancini/). All of those projects accept all requests by default. + +## Create a work directory. +1. **Choose a work directory.** +You can run the benchmark from any directory you want. 
The code will create sub-directories as needed. Please ensure you have enough space to store the CABLE outputs in your directory, at least temporarily until you upload them to [modelevaluation.org](https://modelevaluation.org/). You will need about 70GB. + +1. **Create a config file in the work directory.** + The default name is `config.yaml` but any name can be specified at run time. This file follows the YAML format. You can find an example configuration file [here](https://github.com/CABLE-LSM/bench_example.git). + +1. **Create a science configuration file in the work directory.** + The default name is `site_configs.yaml` but any name can be specified at run time. This file follows the YAML format. You can find an example science configuration file [here](https://github.com/CABLE-LSM/bench_example.git). + ## Run the benchmarking -Once you have updated `user_options.py`, you need to load the modules for Python: +### Start the benchmarking process +Once you have a configuration file, you need to load the modules for Python: ``` module use /g/data/hh5/public/modules -module load conda +module load conda/analysis3-unstable +``` +Then you simply launch the benchmarking: +``` +benchcab ``` -Then you need to initialise the runs: +For help on the available options for the benchmarking: ``` -./initialise_site_runs.py +benchcab -h ``` -This will get the source for both the trunk and your branch. Then, it will compile both codes and prepare a script to submit to the PBS scheduler. -Once it is finished, you should have a new file `benchmark_cable_qsub.sh`. You then need to run: +Note: This line ``` -qsub benchmark_cable_qsub.sh +module use /g/data/hh5/public/modules ``` +can be safely added anywhere in your $HOME/.bashrc file. You then need to log out and back in for it to take effect. If you do so, you can simply load the Python module with `module load` and you do not have to type the `module use` line every time. However, it is not recommended to put any `module load` lines in your $HOME/.bashrc file. + +### Check the status +The benchmarking follows these steps: +1. Check out both branches. The code will be stored under the `src/` directory in your work directory. The directories are created automatically. +1. Compile the source code from both branches. +1. Set up and launch a PBS job to run the simulations in parallel. The simulation inputs and outputs are stored under `runs/site/`. The directories are created automatically. + +When the benchmarking launches the PBS job, it prints the job id to the screen. You can check the status of the job with `qstat`. +## Use modelevaluation.org +Once the benchmarking has finished running all the simulations, you need to upload the output files to modelevaluation.org. The output files can be found under `runs/site/outputs`. + +**Process still to be documented** + ## Contacts -Preferably enter your questions as issues or discussions on the Github repository. -* [Martin De Kauwe](http://mdekauwe.github.io/). -* [Gab Abramowitz](http://web.science.unsw.edu.au/~gabrielabramowitz/UNSW_homepage/Gab_Abramowitz_home_page.html). +Please enter your questions as issues or discussions on the Github repository.
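Neither of the two YAML files is reproduced in this diff, so the sketch below only illustrates the rough shape they could take. The top-level keys (`use_branches`, `project`, `user`, `modules`) are the ones checked by `bench_config.check_config` later in this diff, and the per-branch fields (`name`, `trunk`, `share_branch`) are the ones passed through to `get_cable.py`; every alias, value and namelist variable shown here is a placeholder, and the authoritative examples are the ones in the linked bench_example repository.

```
# Hypothetical config.yaml sketch -- keys follow bench_config.check_config, all values are placeholders
user: abc123              # NCI user name, also used to locate the SVN user branch
project: ab12             # NCI project charged for the PBS job
modules:                  # modules loaded on Gadi before compiling CABLE
  - intel-compiler
  - netcdf
  - openmpi
use_branches:             # the first two aliases listed are the branches compared
  - trunk
  - my_branch
trunk:                    # alias entry; fields are passed to GetCable.main()
  name: trunk
  trunk: true
  share_branch: false
my_branch:
  name: my_branch_name
  trunk: false
  share_branch: false
```

```
# Hypothetical site_configs.yaml sketch -- each entry names a science configuration and
# holds the CABLE namelist settings applied to that run (placeholder values shown)
sci1:
  cable_user%GS_SWITCH: "'medlyn'"
sci2:
  cable_user%GS_SWITCH: "'leuning'"
```

Both files are read with `yaml.safe_load`; only the first two aliases in `use_branches` are used, and the science configurations are run in the order they appear in the file.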
diff --git a/benchcab/__init__.py b/benchcab/__init__.py new file mode 100644 index 00000000..9fca47a4 --- /dev/null +++ b/benchcab/__init__.py @@ -0,0 +1 @@ +import benchcab.benchcab as benchcab \ No newline at end of file diff --git a/benchcab/bench_config.py b/benchcab/bench_config.py new file mode 100644 index 00000000..33e15c02 --- /dev/null +++ b/benchcab/bench_config.py @@ -0,0 +1,78 @@ +import yaml +from pathlib import Path +import os +from benchcab.set_default_paths import set_paths +from benchcab.benchtree import BenchTree + +class BenchSetup(object): + def __init__(self, myconfig:str): + """ + myconfig: str, Name of the config file to use for setup.""" + + self.myconfig = myconfig + + @staticmethod + def check_config(opt:dict): + """Run some checks on the config file to ensure the data is coherent. + check1: make sure the names in use_branches are keys in the dictionary. + check2: make sure all required entries are listed + """ + + assert all([x in opt for x in opt["use_branches"][0:2]]), "At least one of the first 2 aliases " \ + " listed in 'use_branches' is not an entry in the config file to define a CABLE branch." + + assert all([x in opt for x in [ + "use_branches", + "project", + "user", + "modules", + ]]), "The config file does not list all required entries. "\ + "Those are 'use_branches', 'project', 'user', 'modules'" + + assert len(opt["use_branches"]) >= 2, "You need to list 2 branches in 'use_branches'" + if len(opt["use_branches"]) > 2: + print("------------------------------------------------------") + print("Warning: more than 2 branches listed in 'use_branches'") + print("Only the first 2 branches will be used.") + print("------------------------------------------------------") + + + def read_config(self): + """Read config file for the CABLE benchmarking""" + + assert Path(self.myconfig).is_file(), f"{self.myconfig} was not found" + + with open(Path(self.myconfig), "r") as fin: + opt = yaml.safe_load(fin) + + self.check_config(opt) + return opt + + @staticmethod + def compilation_setup(ModToLoad:list): + """Load the modules and define the paths to libraries + depending on the machine name as needed for CABLE compilation. + + ModToLoad: list, list of modules to load for Gadi. 
Empty list for other cases""" + + (_, nodename, _, _, _) = os.uname() + + compilation_opt = set_paths(nodename, ModToLoad) + + return compilation_opt + + def setup_bench(self): + """Main function to setup a CABLE benchmarking run""" + + opt = self.read_config() + compilation_opt = self.compilation_setup(opt["modules"]) + + # Setup the minimal benchmarking directory tree + myworkdir=Path.cwd() + benchdirs=BenchTree(myworkdir) + benchdirs.create_minbenchtree() + + return (opt, compilation_opt, benchdirs) + + + diff --git a/benchcab/benchcab.py b/benchcab/benchcab.py new file mode 100644 index 00000000..116ef051 --- /dev/null +++ b/benchcab/benchcab.py @@ -0,0 +1,199 @@ +#!/usr/bin/env python + +import argparse +import sys +from pathlib import Path +import os +import shlex +import subprocess + +from benchcab.bench_config import BenchSetup +from benchcab.benchtree import BenchTree +from benchcab.build_cable import BuildCable +from benchcab.get_cable import GetCable + +# Site runs +from benchcab.run_cable_site import RunCableSite +from benchcab.setup.cases_fluxsites import * +from benchcab.setup.pbs_fluxsites import qsub_fname +import benchcab.benchsiterun as benchsiterun + + +def parse_args(arglist): + """ + Parse arguments given as list (arglist) + """ + parser = argparse.ArgumentParser(description="Run the benchmarking for CABLE") + parser.add_argument("-c","--config", help="Config filename", default="config.yaml") + parser.add_argument("-s", "--science_config", help="Config file to define the various configurations to run", default="site_configs.yaml") + parser.add_argument("-f","--fluxnet", help="Runs the tests for the Fluxnet sites only", action="store_true") + parser.add_argument("-w","--world", help="Runs the global tests only",action="store_true") + parser.add_argument("-b","--bitrepro", help="Check bit reproducibility, not implemented yet",action="store_true") + parser.add_argument("-r", "--rebuild", action="store_true", default=False, help="Rebuild src?") + + args = parser.parse_args(arglist) + + if args.bitrepro: + print("Bit reproducibility not implemented yet") + sys.exit() + + if args.fluxnet and args.world: + print("You cannot specify -f and -w together.") + print("To run all the tests, do not specify either of these two options.") + sys.exit() + + return args + +def retrieve_cable_code(benchdirs:BenchTree, opt:dict): + """Checkout the 2 branches of CABLE from the svn repository in the + source directory created at setup.
+ benchdirs: BenchTree, contains path information for paths in the working directory + opt: dict, contains the branch information we want to use""" + + # Aliases to branches to use: + branch_alias = opt["use_branches"] + branch1 = opt[branch_alias[0]] + branch2 = opt[branch_alias[1]] + + G = GetCable(src_dir=benchdirs.src_dir, user=opt["user"]) + G.main(**branch1) + G.main(**branch2) + +def build_cable_code(benchdirs:BenchTree, compilation_opt:dict, opt:dict, mpi:bool): + + # Aliases to branches to use: + branch_alias = opt["use_branches"] + branch1 = opt[branch_alias[0]] + branch2 = opt[branch_alias[1]] + + B = BuildCable( + src_dir=benchdirs.src_dir, + ModToLoad=opt["modules"], + NCDIR=compilation_opt["NCDIR"], + NCMOD=compilation_opt["NCMOD"], + FC=compilation_opt["FC"], + CFLAGS=compilation_opt["CFLAGS"], + LD=compilation_opt["LD"], + LDFLAGS=compilation_opt["LDFLAGS"], + mpi=mpi, + ) + B.main(repo_name=branch1["name"]) + B.main(repo_name=branch2["name"]) + +def setup_site_runs(opt, compilation_opt, benchdirs, sci_configs, mpi, multiprocess): + """Setup specific to site simulations""" + + # Aliases to branches to use: + branch_alias = opt["use_branches"] + run_branches=[opt[branch_alias[0]],] + run_branches.append(opt[branch_alias[1]]) + + start_dir=Path.cwd() + os.chdir(benchdirs.runroot_dir/"site") + for branchid, branch in enumerate(run_branches): + branch_name = branch["name"] + cable_src = benchdirs.src_dir/branch_name + + # Define the name for the executable: cable for serial, cable-mpi for mpi runs + cable_exe = f"cable{'-mpi'*mpi}" + + R = RunCableSite( + met_dir=compilation_opt["met_dir"], + log_dir=benchdirs.site_run["log_dir"], + output_dir=benchdirs.site_run["output_dir"], + restart_dir=benchdirs.site_run["restart_dir"], + aux_dir=benchdirs.aux_dir, + namelist_dir=benchdirs.site_run["namelist_dir"], + met_subset=[], + cable_src=cable_src, + num_cores=None, + cable_exe=cable_exe, + multiprocess=multiprocess, + ) + + for sci_id, sci_config in enumerate(sci_configs.values()): + R.main(sci_config, branchid, sci_id) + + os.chdir(start_dir) + + +def main(args): + + # Setup of the benchmark: + #------------------------ + # - read config file. + # - define compilation variables. + # - create minimal work directory tree. + # - checkout the CABLE source codes. + # - compile the CABLE source codes.
+ + mysetup = BenchSetup(args.config) + opt, compilation_opt, benchdirs = mysetup.setup_bench() + + # Get the source code for both branches + print("Retrieving the source code from both branches in the src/ directory") + retrieve_cable_code(benchdirs, opt) + + + # Run the benchmark: + # We run both at single sites and spatial runs unless otherwise specified + # by command line arguments + #------------------------------------------------------------------------ + # Identify cases to run + run_flux = not args.world + run_spatial = not args.fluxnet + + mess="" + if run_flux: + mess=mess+"Running the single sites tests " + print("Running the single sites tests ") + + mpi = False + multiprocess = True + + # Build the source codes + build_cable_code(benchdirs, compilation_opt, opt, mpi) + + # Create directory tree for site runs + benchdirs.create_sitebenchtree() + + # Create the qsub script for NCI if needed + (_, nodename, _, _, _) = os.uname() + if "nci" in nodename: + benchsiterun.main(qsub=True, config=args.config, science_config=args.science_config) + + cmd = shlex.split(f"qsub {qsub_fname}") + sb=subprocess.run(cmd, capture_output=True) + if (sb.returncode != 0): + print("Error when submitting job to NCI queue") + print(sb.stderr) + sys.exit(1) + else: + print(f"Benchmark submitted in PBS job: {sb.stdout}") + + else: + print("Only running on NCI is implemented at the moment.") + + if run_spatial: + mess=mess+"Running the spatial tests " + print("Running the spatial tests ") + + return mess + +def main_parse_args(arglist): + """ + Call main with list of arguments. Callable from tests + """ + # Must return so that check command return value is passed back to calling routine + # otherwise py.test will fail + return main(parse_args(arglist)) + +def main_argv(): + """ + Call main and pass command line arguments. 
This is required for setup.py entry_points + """ + mess = main_parse_args(sys.argv[1:]) + +if __name__ == "__main__": + + main_argv() \ No newline at end of file diff --git a/benchcab/benchsiterun.py b/benchcab/benchsiterun.py new file mode 100644 index 00000000..129c4b50 --- /dev/null +++ b/benchcab/benchsiterun.py @@ -0,0 +1,121 @@ +#!/usr/bin/env python + +# To run the CABLE benchmarking at single sites +import argparse +import sys +import os +from pathlib import Path +import yaml + +from benchcab.run_cable_site import RunCableSite +from benchcab.bench_config import BenchSetup + +# Define names of default config files globally +default_config = "config.yaml" +default_science = "site_configs.yaml" +def myparse(arglist): + """ + Parse arguments given as list (arglist) + """ + parser = argparse.ArgumentParser(description="Run CABLE simulations at single sites for benchmarking") + parser.add_argument("-q","--qsub", help="Creates a qsub job script if running at NCI", action="store_true") + parser.add_argument("-c","--config", help="Config filename", default=default_config) + parser.add_argument("-s", "--science_config", help="Config file to define the various configurations to run", default=default_science) + + args = parser.parse_args(arglist) + + (_, nodename, _, _, _) = os.uname() + if args.qsub and "nci" not in nodename: + print("Remote scripts are only implemented for the NCI machine") + print("You cannot invoke benchsiterun -q if not running at NCI") + sys.exit(1) + + if "nci" not in nodename and not getattr(args, "met_dir", None): + raise ValueError("You need to specify the path to the meteorological data if you are not running at NCI.") + + return args + +def read_sci_configs(sci_configfile): + """Read the science config file""" + + with open(sci_configfile, "r") as fin: + sci_configs=yaml.safe_load(fin) + + return sci_configs + + +def main(qsub=False, config=default_config, science_config=default_science,**kwargs): + """To run CABLE on single sites for the benchmarking.
+ Keyword arguments are the same as the command line arguments for the benchsiterun command + """ + # Always run site simulations without mpi and with multiprocess + mpi=False + multiprocess=True + + # Read setup and create directory structure for single site runs + mysetup = BenchSetup(config) + opt, compilation_opt, benchdirs = mysetup.setup_bench() + benchdirs.create_sitebenchtree() + + # Read science configurations + sci_configs= read_sci_configs(science_config) + + if qsub: + # Create a script to launch on NCI's compute nodes if requested + # Create a run object instance using default values since we won't use those values + R = RunCableSite( ) + R.create_qsub_script(opt["project"], opt["user"], config, science_config) + + else: + + # Aliases to branches to use: + branch_alias = opt["use_branches"] + run_branches=[opt[branch_alias[0]],] + run_branches.append(opt[branch_alias[1]]) + + start_dir=Path.cwd() + os.chdir(benchdirs.runroot_dir/"site") + for branchid, branch in enumerate(run_branches): + branch_name = branch["name"] + cable_src = benchdirs.src_dir/branch_name + + # Define the name for the executable: cable for serial, cable-mpi for mpi runs + cable_exe = f"cable{'-mpi'*mpi}" + + R = RunCableSite( + met_dir=compilation_opt["met_dir"], + log_dir=benchdirs.site_run["log_dir"], + output_dir=benchdirs.site_run["output_dir"], + restart_dir=benchdirs.site_run["restart_dir"], + aux_dir=benchdirs.aux_dir, + namelist_dir=benchdirs.site_run["namelist_dir"], + met_subset=[], + cable_src=cable_src, + num_cores=None, + cable_exe=cable_exe, + multiprocess=multiprocess, + ) + + for sci_id, sci_config in enumerate(sci_configs.values()): + R.main(sci_config, branchid, sci_id) + + os.chdir(start_dir) + + +def main_parse_args(arglist): + """ + Call main with list of arguments. Callable from tests + """ + # Must return so that check command return value is passed back to calling routine + # otherwise py.test will fail + return main(**vars(myparse(arglist))) + +def main_argv(): + """ + Call main and pass command line arguments. This is required for setup.py entry_points + """ + mess = main_parse_args(sys.argv[1:]) + +if __name__ == "__main__": + + main_argv() \ No newline at end of file diff --git a/benchcab/benchtree.py b/benchcab/benchtree.py new file mode 100644 index 00000000..3c3bef5d --- /dev/null +++ b/benchcab/benchtree.py @@ -0,0 +1,44 @@ +from pathlib import Path +import os +class BenchTree(object): + """Manage the directory tree to run the benchmarking for CABLE""" + + def __init__(self, curdir:Path): + + self.src_dir = curdir/"src" + self.aux_dir = curdir/"src/CABLE-AUX" + self.plot_dir = curdir/"plots" + # Run directory and its sub-directories + self.runroot_dir = curdir/"runs" + self.site_run = { + "log_dir": self.runroot_dir/"site/logs", + "output_dir": self.runroot_dir/"site/outputs", + "restart_dir": self.runroot_dir/"site/restart_files", + "namelist_dir": self.runroot_dir/"site/namelists", + } + + def create_minbenchtree(self): + """Create the minimum directory tree needed to run the CABLE benchmarking. 
+ At least, we need: + - a source directory to checkout and compile the repository branches + - a run directory to run the testcases.""" + + dir_to_create= [ + self.src_dir, + self.runroot_dir, + ] + for mydir in dir_to_create: + if not Path.is_dir(mydir): + os.makedirs(mydir) + + def create_sitebenchtree(self): + """Create directory tree for site benchmark""" + + # Make sure the default directories are created + self.create_minbenchtree() + + # Create the sub-directories in the run directory + for mydir in self.site_run.values(): + if not mydir.is_dir(): + os.makedirs(mydir) + diff --git a/scripts/build_cable.py b/benchcab/build_cable.py similarity index 81% rename from scripts/build_cable.py rename to benchcab/build_cable.py index 42fd344f..e7d8ebe6 100755 --- a/scripts/build_cable.py +++ b/benchcab/build_cable.py @@ -15,13 +15,12 @@ import subprocess import datetime from pathlib import Path - -from scripts.config import default_envfiles - +from typing import Iterable class BuildCable(object): def __init__( self, + ModToLoad:Iterable, src_dir=None, NCDIR=None, NCMOD=None, @@ -42,7 +41,9 @@ def __init__( self.LDFLAGS = LDFLAGS self.debug = debug self.mpi = mpi + self.ModToLoad = ModToLoad + @staticmethod def find_purge_line(filelines, filename=""): """Find the line with module purge in the list of file lines. Check there is only 1 such line. Return the index of the line. @@ -61,7 +62,7 @@ def find_purge_line(filelines, filename=""): return purge_line - def add_module_load(lines, nindent): + def add_module_load(self,lines, nindent): """Read in the environment file using config data. Add lines to load each module listed in environment file at the end of the list of strings, lines @@ -71,15 +72,8 @@ def add_module_load(lines, nindent): loclines = lines.copy() - # Read environment file - cwd_path = Path(os.getcwd()) - config_path = cwd_path.parents[2] # Need to go back 3 up. - config_path = config_path / default_envfiles["gadi"] - with config_path.open() as rfile: - ModToLoad = rfile.readlines() - # Append new lines to the list of lines for each module - for mod in ModToLoad: + for mod in self.ModToLoad: # Add newline if not in "mod" if "\n" not in mod: mod = mod + "\n" @@ -88,7 +82,7 @@ def add_module_load(lines, nindent): return loclines - def change_build_lines(filelines, filename=""): + def change_build_lines(self,filelines, filename=""): """Get the lines from the build script and modify them: - remove all the module load and module add lines - read in the environment file for Gadi @@ -104,7 +98,7 @@ def change_build_lines(filelines, filename=""): ] # Find the line with "module purge" - purge_line = BuildCable.find_purge_line(nomodulelines, filename=filename) + purge_line = self.find_purge_line(nomodulelines, filename=filename) # Get the indentation right: copy the indentation from the module purge line nindent = nomodulelines[purge_line].find("module purge") @@ -112,7 +106,7 @@ def change_build_lines(filelines, filename=""): outlines = nomodulelines[: purge_line + 1] # Take all lines until module purge # append lines to load the correct modules - outlines = BuildCable.add_module_load(outlines, nindent) + outlines = self.add_module_load(outlines, nindent) # add the end of the file as in the original file outlines.extend(nomodulelines[purge_line + 1 :]) @@ -143,13 +137,11 @@ def adjust_build_script(self): ofname = "my_build.ksh" of = open(ofname, "w") - # check_host = "host_%s()" % (host) - # We find all the "module load" lines and remove them from # the list of lines. 
# Then after the line "module purge", we add a line for # each module listed in gadi_env.sh - outlines = BuildCable.change_build_lines(lines, filename=fname) + outlines = self.change_build_lines(lines, filename=fname) of.writelines(outlines) of.close() @@ -173,12 +165,33 @@ def build_cable(self, ofname): os.remove(ofname) - def main(self, repo_name=None, trunk=False): + def clean_if_needed(self): + """Clean a previous compilation if latest executable doesn't have the name we want.""" + + wanted_exe = f"cable{'-mpi'*self.mpi}" + + exe_list=[Path("cable-mpi"), Path("cable") ] + exe_found = [ this_exe for this_exe in exe_list if this_exe.is_file() ] + + clean_compil = False + if len(exe_found) > 0: + newest_exe = max( exe_found, key=lambda x: x.stat().st_mtime ) + clean_compil = newest_exe != wanted_exe + + # Clean compilation if needed + if clean_compil: + cmd = f"rm -fr .tmp" + error = subprocess.call(cmd, shell=True) + if error == 1: + raise ("Error cleaning previous compilation") + + def main(self, repo_name=None): build_dir = "%s/%s" % (repo_name, "offline") cwd = os.getcwd() os.chdir(os.path.join(self.src_dir, build_dir)) + self.clean_if_needed() ofname = self.adjust_build_script() self.build_cable(ofname) diff --git a/scripts/cable_utils.py b/benchcab/cable_utils.py similarity index 100% rename from scripts/cable_utils.py rename to benchcab/cable_utils.py diff --git a/scripts/get_cable.py b/benchcab/get_cable.py similarity index 90% rename from scripts/get_cable.py rename to benchcab/get_cable.py index 2b8dcc59..1bb80953 100755 --- a/scripts/get_cable.py +++ b/benchcab/get_cable.py @@ -11,7 +11,6 @@ __email__ = "mdekauwe@gmail.com" import os -import sys import subprocess import datetime import getpass @@ -30,18 +29,18 @@ def __init__( self.aux_dir = "CABLE-AUX" self.home_dir = os.environ["HOME"] - def main(self, repo_name=None, trunk=False, user_branch=False, share_branch=False): + def main(self, name=None, trunk=False, share_branch=False): self.initialise_stuff() - self.get_repo(repo_name, trunk, user_branch, share_branch) + self.get_repo(name, trunk, share_branch) def initialise_stuff(self): if not os.path.exists(self.src_dir): os.makedirs(self.src_dir) - def get_repo(self, repo_name, trunk, user_branch, share_branch): + def get_repo(self, repo_name, trunk, share_branch): need_pass = False cwd = os.getcwd() @@ -153,22 +152,17 @@ def get_repo(self, repo_name, trunk, user_branch, share_branch): if need_pass: - if user_branch: - cmd = "svn checkout %s/branches/Users/%s/%s --password %s" % ( - self.root, - self.user, - repo_name, - pswd, - ) - elif share_branch: + if share_branch: cmd = "svn checkout %s/branches/Share/%s --password %s" % ( self.root, repo_name, pswd, ) else: - cmd = "svn checkout %s/branches/Share/integration --password %s" % ( + cmd = "svn checkout %s/branches/Users/%s/%s --password %s" % ( self.root, + self.user, + repo_name, pswd, ) @@ -181,16 +175,14 @@ def get_repo(self, repo_name, trunk, user_branch, share_branch): raise ("Error downloading repo") f.close() else: - if user_branch: + if share_branch: + cmd = "svn checkout %s/branches/Share/%s" % (self.root, repo_name) + else: cmd = "svn checkout %s/branches/Users/%s/%s" % ( self.root, self.user, repo_name, ) - elif share_branch: - cmd = "svn checkout %s/branches/Share/%s" % (self.root, repo_name) - else: - cmd = "svn checkout %s/branches/Share/integration" % (self.root) error = subprocess.call(cmd, shell=True) if error == 1: raise ("Error downloading repo") @@ -240,5 +232,5 @@ def get_repo(self, repo_name, trunk, 
user_branch, share_branch): # ------------------------------------------- # G = GetCable(src_dir=src_dir, user=user) - G.main(repo_name=repo1, trunk=True) - G.main(repo_name=repo2, trunk=False, user_branch=False, share_branch=True) + G.main(name=repo1, trunk=True) + G.main(name=repo2, trunk=False, user_branch=False, share_branch=True) diff --git a/scripts/run_cable_site.py b/benchcab/run_cable_site.py similarity index 80% rename from scripts/run_cable_site.py rename to benchcab/run_cable_site.py index f38e80b6..3af3a556 100644 --- a/scripts/run_cable_site.py +++ b/benchcab/run_cable_site.py @@ -20,22 +20,24 @@ import subprocess import multiprocessing as mp import numpy as np +from pathlib import Path -from scripts.cable_utils import adjust_nml_file -from scripts.cable_utils import get_svn_info -from scripts.cable_utils import change_LAI -from scripts.cable_utils import add_attributes_to_output_file +from benchcab.cable_utils import adjust_nml_file +from benchcab.cable_utils import get_svn_info +from benchcab.cable_utils import change_LAI +from benchcab.cable_utils import add_attributes_to_output_file +from benchcab.setup.pbs_fluxsites import * -class RunCable(object): +class RunCableSite(object): def __init__( self, - met_dir=None, - log_dir=None, - output_dir=None, - restart_dir=None, - aux_dir=None, - namelist_dir=None, + met_dir="", + log_dir="", + output_dir="", + restart_dir="", + aux_dir="", + namelist_dir="", nml_fname="cable.nml", veg_fname="def_veg_params_zr_clitt_albedo_fix.txt", soil_fname="def_soil_params.txt", @@ -43,17 +45,16 @@ def __init__( phen_fname="modis_phenology_csiro.txt", cnpbiome_fname="pftlookup_csiro_v16_17tiles.csv", elev_fname="GSWP3_gwmodel_parameters.nc", - lai_dir=None, + lai_dir="", fixed_lai=None, co2_conc=400.0, met_subset=[], - cable_src=None, + cable_src="", cable_exe="cable", - mpi=True, num_cores=None, - verbose=True, multiprocess=False, - ): + verbose=False, + ): self.met_dir = met_dir self.log_dir = log_dir @@ -77,10 +78,9 @@ def __init__( self.cable_exe = os.path.join(cable_src, "offline/%s" % (cable_exe)) self.setup_exe() self.verbose = verbose - self.mpi = mpi - self.num_cores = num_cores self.lai_dir = lai_dir self.fixed_lai = fixed_lai + self.num_cores = num_cores self.multiprocess = multiprocess def main(self, sci_config, repo_id, sci_id): @@ -147,7 +147,7 @@ def worker(self, met_files, url, rev, sci_config, repo_id, sci_id): (out_fname, out_log_fname) = self.clean_up_old_files(site, repo_id, sci_id) # Add LAI to met file? 
- if self.fixed_lai is not None or self.lai_dir is not None: + if self.fixed_lai is not None or self.lai_dir is not "": fname = change_LAI( fname, site, fixed=self.fixed_lai, lai_dir=self.lai_dir ) @@ -177,7 +177,7 @@ def worker(self, met_files, url, rev, sci_config, repo_id, sci_id): add_attributes_to_output_file(nml_fname, out_fname, sci_config, url, rev) shutil.move(nml_fname, os.path.join(self.namelist_dir, nml_fname)) - if self.fixed_lai is not None or self.lai_dir is not None: + if self.fixed_lai is not None or self.lai_dir is not "": os.remove("%s_tmp.nc" % (site)) def setup_exe(self): @@ -186,7 +186,8 @@ def setup_exe(self): local_exe = "cable" if os.path.isfile(local_exe): os.remove(local_exe) - shutil.copy(self.cable_exe, local_exe) + if os.path.isfile(self.cable_exe): + shutil.copy(self.cable_exe, local_exe) self.cable_exe = local_exe def initialise_stuff(self): @@ -247,6 +248,51 @@ def run_me(self, nml_fname): except subprocess.CalledProcessError as e: print("Job failed to submit: ", e.cmd) + @staticmethod + def create_qsub_script(project, user, config, science_config): + + email_address = f"{user}@nci.org.au" + + # Add the local directory to the storage flag for PBS + curdir=Path.cwd().parts + if ("scratch" in curdir): + curdir_root="scratch" + curdir_proj = curdir[2] + elif ("g" in curdir and "data" in curdir): + curdir_root="gdata" + curdir_proj = curdir[3] + else: + print("Current directory structure unknown on Gadi") + sys.exit(1) + + f = open(qsub_fname, "w") + + f.write("#!/bin/bash\n") + f.write("\n") + f.write("#PBS -l wd\n") + f.write("#PBS -l ncpus=%d\n" % (ncpus)) + f.write("#PBS -l mem=%s\n" % (mem)) + f.write("#PBS -l walltime=%s\n" % (wall_time)) + f.write("#PBS -q normal\n") + f.write("#PBS -P %s\n" % (project)) + f.write("#PBS -j oe\n") + f.write("#PBS -M %s\n" % (email_address)) + f.write(f"#PBS -l storage=gdata/ks32+gdata/wd9+gdata/hh5+gdata/{project}+{curdir_root}/{curdir_proj}\n") + f.write("\n") + f.write("\n") + f.write("\n") + f.write("\n") + f.write("module purge\n") + f.write("module use /g/data/hh5/public/modules\n") + f.write("module load conda/analysis3-unstable\n") + f.write("module add netcdf/4.7.1\n") + f.write(f"benchsiterun --config={config} --science_config={science_config}\n") + f.write("\n") + + f.close() + + os.chmod(qsub_fname, 0o755) + def merge_two_dicts(x, y): """Given two dicts, merge them into a new dict as a shallow copy.""" @@ -272,7 +318,7 @@ def merge_two_dicts(x, y): sci_config = {} # ------------------------------------------- # - C = RunCable( + C = RunCableSite( met_dir=met_dir, log_dir=log_dir, output_dir=output_dir, diff --git a/benchcab/set_default_paths.py b/benchcab/set_default_paths.py new file mode 100755 index 00000000..f8fc5f56 --- /dev/null +++ b/benchcab/set_default_paths.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python + +""" +Set default paths on various machines... + +That's all folks. +""" + +__author__ = "Martin De Kauwe" +__version__ = "1.0 (06.06.2020)" +__email__ = "mdekauwe@gmail.com" + +import os +import sys +import subprocess +from pathlib import Path + + +def set_paths(nodename, ModToLoad:list): + + + if "Mac" in nodename or "imac" in nodename: + NCDIR = "/opt/local/lib/" + NCMOD = "/opt/local/include/" + FC = "gfortran" + FCMPI = "gfortran-mp-9" + CFLAGS = "-O2" + LD = "'-lnetcdf -lnetcdff'" + LDFLAGS = "'-L/opt/local/lib -O2'" + + # + ## Met paths ... 
+ # + met_dir = "/Users/mdekauwe/research/plumber_test" + + elif "unsw" in nodename: + cmd = "module load netcdf-c/4.4.1.1-intel" + cmd = "module load netcdf-f/4.4.4-intel" + error = subprocess.call(cmd, shell=True) + if error == 1: + raise ("Error loading netcdf libs") + + NCDIR = "/share/apps/netcdf-f/intel/4.4.4/lib" + NCMOD = "/share/apps/netcdf-f/intel/4.4.4/include" + + FC = "ifort" + FCMPI = "mpif90" + CFLAGS = "-O2" + LD = "'-lnetcdf -lnetcdff'" + LDFLAGS = "'-L/opt/local/lib -O2'" + + # + ## Met paths ... + met_dir = ( + "/srv/ccrc/data04/z3509830/Fluxnet_data/" + "All_flux_sites_processed_PLUMBER2/" + "Post-processed_PLUMBER2_outputs/Nc_files/Met" + ) + + elif "gadi" in nodename: + assert len(ModToLoad) > 0, \ + "Please add the modules you want to use to the configuration file" + # Load modules + MODULESHOME = Path(os.environ["MODULESHOME"]) + sys.path.append(str(MODULESHOME / "init")) + import python as mod + + mod.module("purge") + for modname in ModToLoad: + mod.module("load", modname.rstrip()) + + # Setup variables for compilation + # FC is setup by the modules + NCBASE = Path(os.environ["NETCDF"]) # Set when loading the netcdf module + NCDIR = NCBASE / "lib" + NCMOD = NCBASE / "include" + FCMPI = "mpif90" + FC = os.environ["FC"] + CFLAGS = "-O2" + LD = "'-lnetcdf -lnetcdff'" + LDFLAGS = "'-L'$NCDIR' -O0'" + met_dir = Path( + "/g/data/ks32/CLEX_Data/PLUMBER2/v1-0/Met/" + ) + + else: + raise("Machine unknown. This case needs to be defined in set_default_paths.") + + compilation_opt={ + "met_dir":met_dir, + "NCDIR": NCDIR, + "NCMOD": NCMOD, + "FC": FC, + "FCMPI": FCMPI, + "CFLAGS": CFLAGS, + "LD":LD, + "LDFLAGS":LDFLAGS, + } + return compilation_opt diff --git a/benchcab/setup/__init__.py b/benchcab/setup/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/setup/cases_fluxsites.py b/benchcab/setup/cases_fluxsites.py similarity index 64% rename from setup/cases_fluxsites.py rename to benchcab/setup/cases_fluxsites.py index b257ce7d..4644917c 100644 --- a/setup/cases_fluxsites.py +++ b/benchcab/setup/cases_fluxsites.py @@ -1,16 +1,3 @@ -# -## Met files ... 
-# -# met_subset = ['FI-Hyy_1996-2014_FLUXNET2015_Met.nc',\ -# 'AU-Tum_2002-2017_OzFlux_Met.nc'] -# met_subset = ['TumbaFluxnet.1.4_met.nc'] - -# Till fixed -# met_dir = "/g/data/w35/mgk576/research/CABLE_runs/met/Ozflux" -# met_subset = ["AU-Tum_2002-2017_OzFlux_Met.nc", "AU-How_2003-2017_OzFlux_Met.nc"] -met_subset = [] # if empty...run all the files in the met_dir - - # ## science configs # diff --git a/setup/pbs_fluxsites.py b/benchcab/setup/pbs_fluxsites.py similarity index 58% rename from setup/pbs_fluxsites.py rename to benchcab/setup/pbs_fluxsites.py index c29416a3..c30ba80e 100644 --- a/setup/pbs_fluxsites.py +++ b/benchcab/setup/pbs_fluxsites.py @@ -1,10 +1,7 @@ -from user_options import user - qsub_fname = "benchmark_cable_qsub.sh" -ncpus = 16 -mem = "15GB" -wall_time = "1:30:00" -email_address = f"{user}@nci.org.au" +ncpus = 18 +mem = "30GB" +wall_time = "6:00:00" # ## MPI stuff diff --git a/gadi_env.sh b/gadi_env.sh deleted file mode 100755 index d2ba1c1e..00000000 --- a/gadi_env.sh +++ /dev/null @@ -1,3 +0,0 @@ -intel-compiler/2021.1.1 -openmpi/4.1.0 -netcdf/4.7.4 \ No newline at end of file diff --git a/initialise_site_runs.py b/initialise_site_runs.py deleted file mode 100755 index 25a41cd2..00000000 --- a/initialise_site_runs.py +++ /dev/null @@ -1,98 +0,0 @@ -#!/usr/bin/env python - -""" -Checkout CABLE repositories, build executables and generate a qsub script to -wrap cable benchmarking scripts when used on raijin. - -We can't use the run_comparison script as raijin nodes have no internet -connection. - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (16.06.2020)" -__email__ = "mdekauwe@gmail.com" - -import os -import sys -import datetime -import subprocess -import argparse - -from user_options import * -from setup.machine_inits import * -from setup.pbs_fluxsites import * -from setup.repo_init import * -from setup.directory_structure import * - -from scripts.get_cable import GetCable -from scripts.build_cable import BuildCable -from scripts.generate_qsub_script import create_qsub_script - - -# i.e. if on NCI -if "nci" in nodename: - # if ("Mac" not in nodename and - # "MacBook" not in nodename and - # "imac" not in nodename and - # "unsw" not in nodename): - - create_qsub_script(qsub_fname, ncpus, mem, wall_time, project, email_address) - -parser = argparse.ArgumentParser() -parser.add_argument( - "-s", "--skipbuild", action="store_true", default=False, help="Rebuild src?" -) -parser.add_argument( - "-g", "--skipget", action="store_true", default=False, help="Get src?" -) - -args = parser.parse_args() - - -if args.skipget == False: - - # - ## Get CABLE ... - # - G = GetCable(src_dir=src_dir, user=user) - G.main(repo_name=repos[0], trunk=trunk) # Default is True - - # Run on a users branch, not integration - if repos[1] != "integration": - get_user_branch = True - else: - get_user_branch = False - - if share_branch: - get_user_branch = False - - G.main( - repo_name=repos[1], - trunk=False, - user_branch=get_user_branch, - share_branch=share_branch, - ) # integration branch - -if args.skipbuild == False: - - # - ## Build CABLE ... 
- # - B = BuildCable( - src_dir=src_dir, - NCDIR=NCDIR, - NCMOD=NCMOD, - FC=FC, - CFLAGS=CFLAGS, - LD=LD, - LDFLAGS=LDFLAGS, - ) - B.main(repo_name=repos[0]) - - if share_branch: - # print(os.path.basename(repos[1])) - B.main(repo_name=os.path.basename(repos[1])) - else: - B.main(repo_name=repos[1]) diff --git a/initialise_spatial_runs.py b/initialise_spatial_runs.py deleted file mode 100755 index a30edf4b..00000000 --- a/initialise_spatial_runs.py +++ /dev/null @@ -1,96 +0,0 @@ -#!/usr/bin/env python - -""" -Checkout CABLE repositories, build executables and generate a qsub script to -wrap cable benchmarking scripts when used on raijin. - -We can't use the run_comparison script as raijin nodes have no internet -connection. - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (12.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import sys -import datetime -import subprocess -from argparse import ArgumentParser - -from user_options import * - -from scripts.get_cable import GetCable -from scripts.build_cable import BuildCable -from scripts.run_cable_spatial import RunCable - -parser = ArgumentParser() -parser.add_argument( - "-s", "--skipbuild", action="store_true", default=False, help="Rebuild src?" -) -parser.add_argument( - "-g", "--skipget", action="store_true", default=False, help="Get src?" -) - -args = parser.parse_args() - - -if args.skipget == False: - # - ## Get CABLE ... - # - G = GetCable(src_dir=src_dir, user=user) - G.main(repo_name=repos[0], trunk=trunk) # Default is True - G.main(repo_name=repos[1], trunk=False) # integration branch - -elif args.skipbuild == False: - - # - ## Build CABLE ... - # - B = BuildCable( - src_dir=src_dir, - NCDIR=NCDIR, - NCMOD=NCMOD, - FC=FC, - CFLAGS=CFLAGS, - LD=LD, - LDFLAGS=LDFLAGS, - ) - B.main(repo_name=repos[0]) - B.main(repo_name=repos[1]) - - -# ------------- Change stuff ------------- # -tmp_ancillary_dir = "global_files" # GSWP3 grid/mask file, temporarily - -met_dir = "/g/data/wd9/MetForcing/Global/GSWP3_2017/" -start_yr = 1901 -end_yr = 1901 -walltime = "0:30:00" -qsub_fname = "qsub_wrapper_script_simulation.sh" -# ------------- Change stuff ------------- # - -cable_aux = os.path.join("../", aux_dir) -for repo_id, repo in enumerate(repos): - cable_src = os.path.join(os.path.join("../", src_dir), repo) - for sci_id, sci_config in enumerate(sci_configs): - - R = RunCable( - met_dir=met_dir, - log_dir=log_dir, - output_dir=output_dir, - restart_dir=restart_dir, - aux_dir=aux_dir, - spin_up=spin_up, - cable_src=cable_src, - qsub_fname=qsub_fname, - met_data=met_data, - nml_fname=nml_fname, - walltime=walltime, - tmp_ancillary_dir=tmp_ancillary_dir, - ) - R.initialise_stuff() - R.setup_nml_file() diff --git a/make_seasonal_plots.py b/make_seasonal_plots.py deleted file mode 100755 index daa0711b..00000000 --- a/make_seasonal_plots.py +++ /dev/null @@ -1,41 +0,0 @@ -#!/usr/bin/env python - -""" -Get CABLE, build the executables, setup the run directory, run CABLE ... this is -a wrapper around the actual scripts (see scripts directory) - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (09.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import shutil -import sys -import glob -import datetime - -import numpy as np -from user_options import * - -from scripts.benchmark_seasonal_plot import main as seas_plot - -# -## Make seasonal plots ... 
-# -if not os.path.exists(plot_dir): - os.makedirs(plot_dir) - -ofdir = os.path.join(run_dir, output_dir) -all_files = glob.glob(os.path.join(ofdir, "*.nc")) -sites = np.unique([os.path.basename(f).split(".")[0].split("_")[0] for f in all_files]) -for site in sites: - print(site) - for sci_id, sci_config in enumerate(sci_configs): - - old_fname = glob.glob("%s/%s_*_R%d_S%d_out.nc" % (ofdir, site, 0, sci_id))[0] - new_fname = glob.glob("%s/%s_*_R%d_S%d_out.nc" % (ofdir, site, 1, sci_id))[0] - plot_fname = os.path.join(plot_dir, "%s_S%d.png" % (site, sci_id)) - seas_plot(old_fname, new_fname, plot_fname) diff --git a/meta.yaml b/meta.yaml new file mode 100644 index 00000000..544f6a2b --- /dev/null +++ b/meta.yaml @@ -0,0 +1,29 @@ +{% set version = "0.1.2" %} + +package: + name: benchcab + version: {{ version }} + +source: + path: . + + +build: + script: "{{ PYTHON }} -m pip install . --no-deps" + noarch: python + number: {{ GIT_DESCRIBE_NUMBER }} + + +requirements: + host: + - python >=3.9 + - pip + - pbr + run: + - python >=3.9 + - xarray + - numpy + - pandas + - matplotlib + - netCDF4 + - PyYAML \ No newline at end of file diff --git a/run_site_comparison.py b/run_site_comparison.py deleted file mode 100755 index 1da001ea..00000000 --- a/run_site_comparison.py +++ /dev/null @@ -1,124 +0,0 @@ -#!/usr/bin/env python - -""" -Get CABLE, build the executables, setup the run directory, run CABLE ... this is -a wrapper around the actual scripts (see scripts directory) - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (09.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import shutil -import sys -import glob -import datetime -import subprocess -import numpy as np -import argparse -from user_options import * -from setup.machine_inits import * -from setup.pbs_fluxsites import * -from setup.repo_init import * -from setup.directory_structure import * -from setup.cases_fluxsites import * - -from scripts.get_cable import GetCable -from scripts.build_cable import BuildCable -from scripts.run_cable_site import RunCable - - -parser = argparse.ArgumentParser() -parser.add_argument( - "--qsub", action="store_true", default=False, help="Run qsub script?" -) -parser.add_argument( - "-s", "--skipsrc", action="store_true", default=False, help="Rebuild src?" -) - -args = parser.parse_args() - -if args.qsub == False and args.skipsrc == False: - - # - ## Get CABLE ... - # - G = GetCable(src_dir=src_dir, user=user) - G.main(repo_name=repos[0], trunk=trunk) # Default is True - - # Run on a users branch, not integration - if repos[1] != "integration": - get_user_branch = True - else: - get_user_branch = False - - if share_branch: - get_user_branch = False - - G.main( - repo_name=repos[1], - trunk=False, - user_branch=get_user_branch, - share_branch=share_branch, - ) # integration branch - - # - ## Build CABLE ... 
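# --- Illustrative sketch (not part of the diff) -----------------------------
# make_seasonal_plots.py pairs model outputs via the naming convention
# <site>_*_R<repo_id>_S<sci_id>_out.nc, where R0 is the first repository
# (typically the trunk), R1 the branch under test, and S<n> the science
# configuration index. A small sketch of that pairing; the "outputs" directory
# and site name are placeholders.
import glob
import os

ofdir = "outputs"
site = "AU-Tum"
sci_id = 0

old_pattern = os.path.join(ofdir, "%s_*_R%d_S%d_out.nc" % (site, 0, sci_id))
new_pattern = os.path.join(ofdir, "%s_*_R%d_S%d_out.nc" % (site, 1, sci_id))

old_matches = glob.glob(old_pattern)
new_matches = glob.glob(new_pattern)
if old_matches and new_matches:
    print("pairing", old_matches[0], "with", new_matches[0])
# -----------------------------------------------------------------------------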
- # - B = BuildCable( - src_dir=src_dir, - NCDIR=NCDIR, - NCMOD=NCMOD, - FC=FC, - CFLAGS=CFLAGS, - LD=LD, - LDFLAGS=LDFLAGS, - mpi=mpi, - ) - B.main(repo_name=repos[0]) - - if share_branch: - # print(os.path.basename(repos[1])) - B.main(repo_name=os.path.basename(repos[1])) - else: - B.main(repo_name=repos[1]) - - -# - -# -## Run CABLE for each science config, for each repo -# - -if not os.path.exists(run_dir): - os.makedirs(run_dir) -os.chdir(run_dir) - -cable_aux = os.path.join("../", aux_dir) -for repo_id, repo in enumerate(repos): - cable_src = os.path.join(os.path.join("../", src_dir), repo) - - # Define the name for the executable: cable for serial, cable-mpi for mpi runs - cable_exe = f"cable{'-mpi'*mpi}" - R = RunCable( - met_dir=met_dir, - log_dir=log_dir, - output_dir=output_dir, - restart_dir=restart_dir, - aux_dir=cable_aux, - namelist_dir=namelist_dir, - met_subset=met_subset, - cable_src=cable_src, - mpi=mpi, - num_cores=num_cores, - cable_exe=cable_exe, - multiprocess=multiprocess, - ) - for sci_id, sci_config in enumerate(sci_configs): - R.main(sci_config, repo_id, sci_id) - - -os.chdir(cwd) diff --git a/run_spatial_comparison.py b/run_spatial_comparison.py deleted file mode 100755 index 27890507..00000000 --- a/run_spatial_comparison.py +++ /dev/null @@ -1,110 +0,0 @@ -#!/usr/bin/env python - -""" -Get CABLE, build the executables, setup the run directory, run CABLE ... this is -a wrapper around the actual scripts (see scripts directory) - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (09.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import shutil -import sys -import glob -import datetime -import subprocess -import numpy as np -from argparse import ArgumentParser - - -from user_options import * - -from scripts.get_cable import GetCable -from scripts.build_cable import BuildCable - -# Shouldn't this import run_cable_spatial? -from scripts.run_cable_site import RunCable - - -parser = ArgumentParser() -parser.add_argument( - "--qsub", action="store_true", default=False, help="Run qsub script?" -) -parser.add_argument( - "-s", "--skipsrc", action="store_true", default=False, help="Rebuild src?" -) - -args = parser.parse_args() - -if args.qsub == False and args.skipsrc == False: - - # - ## Get CABLE ... - # - G = GetCable(src_dir=src_dir, user=user) - G.main(repo_name=repos[0], trunk=trunk) # Default is True - G.main(repo_name=repos[1], trunk=False) # integration branch - - # - ## Build CABLE ... 
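# --- Illustrative sketch (not part of the diff) -----------------------------
# run_site_comparison.py derives the executable name from the mpi flag by
# multiplying a string by a bool: False yields "cable", True yields "cable-mpi".
for mpi in (False, True):
    cable_exe = f"cable{'-mpi' * mpi}"
    print(mpi, "->", cable_exe)  # False -> cable, True -> cable-mpi
# -----------------------------------------------------------------------------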
- # - B = BuildCable( - src_dir=src_dir, - NCDIR=NCDIR, - NCMOD=NCMOD, - FC=FCMPI, - CFLAGS=CFLAGS, - LD=LD, - LDFLAGS=LDFLAGS, - mpi=True, - ) - B.main(repo_name=repos[0]) - B.main(repo_name=repos[1]) - - -# -## Run CABLE for each science config, for each repo -# - -if not os.path.exists(run_dir): - os.makedirs(run_dir) -os.chdir(run_dir) - -# ------------- Change stuff ------------- # -tmp_ancillary_dir = "global_files" # GSWP3 grid/mask file, temporarily - -met_dir = "/g/data/wd9/MetForcing/Global/GSWP3_2017/" -start_yr = 1901 -end_yr = 1901 -walltime = "0:30:00" -qsub_fname = "qsub_wrapper_script_simulation.sh" -# ------------- Change stuff ------------- # - -cable_aux = os.path.join("../", aux_dir) -for repo_id, repo in enumerate(repos): - cable_src = os.path.join(os.path.join("../", src_dir), repo) - for sci_id, sci_config in enumerate(sci_configs): - - R = RunCable( - met_dir=met_dir, - log_dir=log_dir, - output_dir=output_dir, - restart_dir=restart_dir, - aux_dir=aux_dir, - spin_up=spin_up, - cable_src=cable_src, - qsub_fname=qsub_fname, - met_data=met_data, - nml_fname=nml_fname, - walltime=walltime, - tmp_ancillary_dir=tmp_ancillary_dir, - ) - R.initialise_stuff() - R.setup_nml_file() - R.run_qsub_script(start_yr, end_yr) - -os.chdir(cwd) diff --git a/scripts/benchmark_seasonal_plot.py b/scripts/benchmark_seasonal_plot.py deleted file mode 100644 index fa38fe90..00000000 --- a/scripts/benchmark_seasonal_plot.py +++ /dev/null @@ -1,160 +0,0 @@ -#!/usr/bin/env python - -""" -Plot visual benchmark (average seasonal cycle) of old vs new model runs. - -That's all folks. -""" -__author__ = "Martin De Kauwe" -__version__ = "1.0 (18.10.2017)" -__email__ = "mdekauwe@gmail.com" - -import xarray as xr -import matplotlib.pyplot as plt -import sys -import datetime as dt -import pandas as pd -import numpy as np -from matplotlib.ticker import FixedLocator - - -def main(old_fname, new_fname, plot_fname): - - df_old = read_cable_file(old_fname) - df_old = resample_to_seasonal_cycle(df_old) - - df_new = read_cable_file(new_fname) - df_new = resample_to_seasonal_cycle(df_new) - - fig = plt.figure(figsize=(6, 9)) - fig.subplots_adjust(hspace=0.3) - fig.subplots_adjust(wspace=0.2) - plt.rcParams["text.usetex"] = False - plt.rcParams["font.family"] = "sans-serif" - plt.rcParams["font.sans-serif"] = "Helvetica" - plt.rcParams["axes.labelsize"] = 12 - plt.rcParams["font.size"] = 12 - plt.rcParams["legend.fontsize"] = 12 - plt.rcParams["xtick.labelsize"] = 12 - plt.rcParams["ytick.labelsize"] = 12 - - ax1 = fig.add_subplot(3, 2, 1) - ax2 = fig.add_subplot(3, 2, 2) - ax3 = fig.add_subplot(3, 2, 3) - ax4 = fig.add_subplot(3, 2, 4) - ax5 = fig.add_subplot(3, 2, 5) - ax6 = fig.add_subplot(3, 2, 6) - - axes = [ax1, ax2, ax3, ax4, ax5, ax6] - vars = ["GPP", "NEE", "Qle", "Qh", "TVeg", "ESoil"] - for a, v in zip(axes, vars): - a.plot(df_old.month, df_old[v], c="black", lw=2.0, ls="-", label="Old") - a.plot(df_new.month, df_new[v], c="red", lw=2.0, ls="-", label="New") - - labels = [ - "GPP (g C m$^{-2}$ d$^{-1}$)", - "NEE (g C m$^{-2}$ d$^{-1}$)", - "Qle (W m$^{-2}$)", - "Qh (W m$^{-2}$)", - "TVeg (mm d$^{-1}$)", - "Esoil (mm d$^{-1}$)", - ] - for a, l in zip(axes, labels): - a.set_title(l, fontsize=12) - - xtickagaes_minor = FixedLocator([2, 3, 4, 5, 7, 8, 9, 10, 11]) - for i, a in enumerate(axes): - a.set_xticks([1, 6, 12]) - if i != 1: - a.set_ylim(bottom=0) - a.xaxis.set_minor_locator(xtickagaes_minor) - a.set_xticklabels(["Jan", "Jun", "Dec"]) - if i < 4: - plt.setp(a.get_xticklabels(), visible=False) - - 
ax1.legend(loc="best", numpoints=1) - fig.savefig(plot_fname, bbox_inches="tight", pad_inches=0.1) - - -def read_cable_file(fname): - # works with new cftime update - ds = xr.open_dataset(fname, decode_times=True) - vars_to_keep = ["GPP", "Qle", "Qh", "TVeg", "ESoil", "NEE", "time"] - df = ds[vars_to_keep].squeeze(dim=["x", "y"], drop=True).to_dataframe() - - # f = nc.Dataset(fname) - # time = nc.num2date(f.variables['time'][:], - # f.variables['time'].units) - # df = pd.DataFrame(f.variables['GPP'][:,0,0], columns=['GPP']) - # df['Qle'] = f.variables['Qle'][:,0,0] - # df['Qh'] = f.variables['Qh'][:,0,0] - # df['TVeg'] = f.variables['TVeg'][:,0,0] - # df['ESoil'] = f.variables['ESoil'][:,0,0] - # df['NEE'] = f.variables['NEE'][:,0,0] - # - # df['dates'] = time - # df = df.set_index('dates') - - return df - - -def resample_to_seasonal_cycle(df, OBS=False): - - UMOL_TO_MOL = 1e-6 - MOL_C_TO_GRAMS_C = 12.0 - SEC_2_DAY = 86400.0 - - # umol/m2/s -> g/C/d - df["GPP"] *= UMOL_TO_MOL * MOL_C_TO_GRAMS_C * SEC_2_DAY - df["NEE"] *= UMOL_TO_MOL * MOL_C_TO_GRAMS_C * SEC_2_DAY - - # kg/m2/s -> mm/d - df["TVeg"] *= SEC_2_DAY - df["ESoil"] *= SEC_2_DAY - - method = { - "GPP": "mean", - "NEE": "mean", - "Qle": "mean", - "Qh": "mean", - "TVeg": "mean", - "ESoil": "mean", - } - df = df.resample("M").agg(method).groupby(lambda x: x.month).mean() - df["month"] = np.arange(1, 13) - - return df - - -if __name__ == "__main__": - - from argparse import ArgumentParser - - parser = ArgumentParser() - parser.add_argument( - "-o", - "--old_fname", - dest="old_fname", - action="store", - help="Old CABLE output filename", - type="string", - ) - parser.add_argument( - "-n", - "--new_fname", - dest="new_fname", - action="store", - help="New CABLE output filename", - type="string", - ) - parser.add_argument( - "-p", - "--plot_fname", - dest="plot_fname", - action="store", - help="Benchmark plot filename", - type="string", - ) - args = parser.parse_args() - - main(args.old_fname, args.new_fname, args.plot_fname) diff --git a/scripts/config.py b/scripts/config.py deleted file mode 100644 index c3468e20..00000000 --- a/scripts/config.py +++ /dev/null @@ -1,3 +0,0 @@ -# Set some default values for environment files -# for different systems -default_envfiles = {"gadi": "gadi_env.sh"} diff --git a/scripts/generate_qsub_script.py b/scripts/generate_qsub_script.py deleted file mode 100755 index 7bcdf61c..00000000 --- a/scripts/generate_qsub_script.py +++ /dev/null @@ -1,61 +0,0 @@ -#!/usr/bin/env python - -""" -Generate qsub script - -That's all folks. 
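# --- Worked example (not part of the diff) -----------------------------------
# Sanity check on the conversion factors in resample_to_seasonal_cycle():
# umol CO2 m-2 s-1 -> g C m-2 d-1 is 1e-6 mol/umol * 12 g C/mol * 86400 s/d,
# while kg m-2 s-1 -> mm d-1 only needs the factor 86400.
UMOL_TO_MOL = 1e-6
MOL_C_TO_GRAMS_C = 12.0
SEC_2_DAY = 86400.0

gpp_umol_m2_s = 5.0  # illustrative daytime GPP value
gpp_g_c_m2_d = gpp_umol_m2_s * UMOL_TO_MOL * MOL_C_TO_GRAMS_C * SEC_2_DAY
print(round(gpp_g_c_m2_d, 2))  # ~5.18 g C m-2 d-1
# -----------------------------------------------------------------------------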
-""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (09.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import sys -import subprocess -import datetime - - -def create_qsub_script(ofname, ncpus, mem, wall_time, project, email_address): - - f = open(ofname, "w") - - f.write("#!/bin/bash\n") - f.write("\n") - f.write("#PBS -l wd\n") - f.write("#PBS -l ncpus=%d\n" % (ncpus)) - f.write("#PBS -l mem=%s\n" % (mem)) - f.write("#PBS -l walltime=%s\n" % (wall_time)) - f.write("#PBS -q normal\n") - f.write("#PBS -P %s\n" % (project)) - f.write("#PBS -j oe\n") - f.write("#PBS -M %s\n" % (email_address)) - f.write("#PBS -l storage=gdata/w35+gdata/wd9+gdata/hh5\n") - f.write("\n") - f.write("\n") - f.write("\n") - f.write("\n") - f.write("module purge\n") - f.write("module use /g/data/hh5/public/modules\n") - f.write("module load conda\n") - f.write("module add netcdf/4.7.1\n") - f.write("python ./run_site_comparison.py --qsub\n") - f.write("\n") - - f.close() - - os.chmod(ofname, 0o755) - - -if __name__ == "__main__": - - # ------------- Change stuff ------------- # - project = "w35" - ofname = "benchmark_cable_qsub.sh" - ncpus = 2 - mem = "32GB" - wall_time = "00:30:00" - email_address = "mdekauwe@gmail.com" - # ------------------------------------------- # - - create_qsub_script(ofname, ncpus, mem, wall_time, project, email_address) diff --git a/scripts/run_cable_spatial.py b/scripts/run_cable_spatial.py deleted file mode 100755 index a378802f..00000000 --- a/scripts/run_cable_spatial.py +++ /dev/null @@ -1,376 +0,0 @@ -#!/usr/bin/env python - -""" -Run CABLE spatially, i.e. global GSWP3 run. - -Ensure you run it with "-s" if you want to do a spin-up run, i.e. - -./run_cable_spatial.py -s - -Once this is complete just run without the "-s" flag - -The script does a few things internally: -- creates a qsub script. -- after the spin-up step it renames the final restart file to be the first - simulation year restart file so that we can run a longer simulation -- submits the qsub script - -That's all folks. 
-""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (14.07.2019)" -__email__ = "mdekauwe@gmail.com" - -import subprocess -import sys -import os -import glob -import shutil -import tempfile -import argparse - -from cable_utils import adjust_nml_file -from cable_utils import generate_spatial_qsub_script - - -def cmd_line_parser(): - - p = argparse.ArgumentParser() - p.add_argument("-s", action="store_true", default=False, help="Spinup model") - p.add_argument( - "-a", action="store_true", default=False, help="Adjust namelist file" - ) - p.add_argument("-t", action="store_true", default=False, help="Sort restart files") - p.add_argument("-y", default="1900", help="year") - p.add_argument("-l", default="", help="log filename") - p.add_argument("-o", default="", help="out filename") - p.add_argument("-i", default="", help="restart in filename") - p.add_argument("-r", default="", help="restart out filename") - p.add_argument("-c", default="400.0", help="CO2 concentration") - p.add_argument("-n", default=None, help="nml_fname") - args = p.parse_args() - - return ( - args.l, - args.o, - args.i, - args.r, - int(args.y), - float(args.c), - args.n, - args.s, - args.a, - args.t, - ) - - -class RunCable(object): - def __init__( - self, - met_dir=None, - log_dir=None, - output_dir=None, - restart_dir=None, - aux_dir=None, - cable_src=None, - nml_fname=None, - spin_up=False, - qsub_fname=None, - tmp_ancillary_dir=None, - spinup_dir="spinup_restart", - namelist_dir="namelists", - soil_fname="def_soil_params.txt", - veg_fname="def_veg_params_zr_clitt_albedo_fix.txt", - co2_fname="Annual_CO2_concentration_until_2010.txt", - grid_fname="gridinfo_mmy_MD_elev_orig_std_avg-sand_mask.nc", - mask_fname="gridinfo_mmy_MD_elev_orig_std_avg-sand_landmask.nc", - met_data="GSWP3", - cable_exe="cable-mpi", - walltime=None, - mem="64GB", - ncpus="48", - ): - - self.met_dir = met_dir - self.log_dir = log_dir - self.output_dir = output_dir - self.aux_dir = aux_dir - self.restart_dir = restart_dir - self.spinup_dir = spinup_dir - self.grid_dir = os.path.join(self.aux_dir, "offline") - self.soil_fname = soil_fname - self.biogeophys_dir = os.path.join(self.aux_dir, "core/biogeophys") - self.biogeochem_dir = os.path.join(self.aux_dir, "core/biogeochem/") - self.veg_fname = os.path.join(self.biogeophys_dir, veg_fname) - self.soil_fname = os.path.join(self.biogeophys_dir, soil_fname) - self.grid_fname = os.path.join(tmp_ancillary_dir, grid_fname) - self.mask_fname = os.path.join(tmp_ancillary_dir, mask_fname) - self.namelist_dir = namelist_dir - self.co2_fname = os.path.join(tmp_ancillary_dir, co2_fname) - self.qsub_fname = qsub_fname - self.cable_src = cable_src - self.cable_exe = os.path.join(cable_src, "offline/%s" % (cable_exe)) - self.spin_up = spin_up - self.met_data = met_data - - if nml_fname is None: - nml_fname = "cable.nml" - base_nml_file = os.path.join(self.grid_dir, "%s" % (nml_fname)) - shutil.copyfile(base_nml_file, nml_fname) - self.nml_fname = nml_fname - else: - self.nml_fname = nml_fname - - # qsub stuff - self.walltime = walltime - self.mem = mem - self.ncpus = ncpus - - def initialise_stuff(self): - - if not os.path.exists(self.restart_dir): - os.makedirs(self.restart_dir) - - if not os.path.exists(self.output_dir): - os.makedirs(self.output_dir) - - if not os.path.exists(self.log_dir): - os.makedirs(self.log_dir) - - if not os.path.exists(self.namelist_dir): - os.makedirs(self.namelist_dir) - - # delete local executable, copy a local copy and use that - local_exe = os.path.basename(self.cable_exe) - 
if os.path.isfile(local_exe): - os.remove(local_exe) - shutil.copy(self.cable_exe, local_exe) - - def setup_nml_file(self): - - replace_dict = { - "filename%met": "''", # not needed for GSWP3 run - "filename%type": "'%s'" % (self.grid_fname), - "filename%veg": "'%s'" % (self.veg_fname), - "filename%soil": "'%s'" % (self.soil_fname), - "gswpfile%mask": "'%s'" % (self.mask_fname), - "output%averaging": "'monthly'", - "spinup": ".FALSE.", - "cable_user%FWSOIL_SWITCH": "'standard'", - "cable_user%GS_SWITCH": "'medlyn'", - "cable_user%GW_MODEL": ".FALSE.", - "cable_user%or_evap": ".FALSE.", - "cable_user%GSWP3": ".TRUE.", - "cable_user%MetType": "'gswp3'", - "verbose": ".FALSE.", - } - adjust_nml_file(self.nml_fname, replace_dict) - - def run_qsub_script(self, start_yr, end_yr): - - # Create a qsub script for simulations if missing, there is one of spinup - # and one for simulations, so two qsub_fnames - if not os.path.isfile(self.qsub_fname): - generate_spatial_qsub_script( - self.qsub_fname, - self.walltime, - self.mem, - self.ncpus, - spin_up=self.spin_up, - ) - - # Run qsub script - qs_cmd = "qsub -v start_yr=%d,end_yr=%d,co2_fname=%s %s" % ( - start_yr, - end_yr, - self.co2_fname, - self.qsub_fname, - ) - error = subprocess.call(qs_cmd, shell=True) - if error is 1: - raise ("Job failed to submit\n") - - def create_new_nml_file( - self, log_fname, out_fname, restart_in_fname, restart_out_fname, year, co2_conc - ): - - out_log_fname = os.path.join(self.log_dir, log_fname) - out_fname = os.path.join(self.output_dir, out_fname) - - # i.e. no restart file for first spinup year - if restart_in_fname == "missing": - restart_in_fname = "" - else: - restart_in_fname = os.path.join(self.restart_dir, restart_in_fname) - restart_out_fname = os.path.join(self.restart_dir, restart_out_fname) - - if self.met_data == "GSWP3": - rainf_fn = os.path.join( - self.met_dir, "Rainf/GSWP3.BC.Rainf.3hrMap.%s.nc" % (year) - ) - snowf_fn = os.path.join( - self.met_dir, "Snowf/GSWP3.BC.Snowf.3hrMap.%s.nc" % (year) - ) - lwdown_fn = os.path.join( - self.met_dir, "LWdown/GSWP3.BC.LWdown.3hrMap.%s.nc" % (year) - ) - swdown_fn = os.path.join( - self.met_dir, "SWdown/GSWP3.BC.SWdown.3hrMap.%s.nc" % (year) - ) - psurf_fn = os.path.join( - self.met_dir, "PSurf/GSWP3.BC.PSurf.3hrMap.%s.nc" % (year) - ) - qair_fn = os.path.join( - self.met_dir, "Qair/GSWP3.BC.Qair.3hrMap.%s.nc" % (year) - ) - wind_fn = os.path.join( - self.met_dir, "Wind/GSWP3.BC.Wind.3hrMap.%s.nc" % (year) - ) - tair_fn = os.path.join( - self.met_dir, "Tair/GSWP3.BC.Tair.3hrMap.%s.nc" % (year) - ) - elif self.met_data == "AWAP": - rainf_fn = os.path.join(self.met_dir, "Rainf/AWAP.Rainf.3hr.%s.nc" % (year)) - snowf_fn = os.path.join(self.met_dir, "Snowf/AWAP.Snowf.3hr.%s.nc" % (year)) - lwdown_fn = os.path.join( - self.met_dir, "LWdown/AWAP.LWdown.3hr.%s.nc" % (year) - ) - swdown_fn = os.path.join( - self.met_dir, "SWdown/AWAP.SWdown.3hr.%s.nc" % (year) - ) - psurf_fn = os.path.join(self.met_dir, "PSurf/AWAP.PSurf.3hr.%s.nc" % (year)) - qair_fn = os.path.join(self.met_dir, "Qair/AWAP.Qair.3hr.%s.nc" % (year)) - wind_fn = os.path.join(self.met_dir, "Wind/AWAP.Wind.3hr.%s.nc" % (year)) - tair_fn = os.path.join(self.met_dir, "Tair/AWAP.Tair.3hr.%s.nc" % (year)) - - replace_dict = { - "filename%log": "'%s'" % (out_log_fname), - "filename%out": "'%s'" % (out_fname), - "filename%restart_in": "'%s'" % (restart_in_fname), - "filename%restart_out": "'%s'" % (restart_out_fname), - "fixedCO2": "%f" % (co2_conc), - "ncciy": "%s" - % (year), # 0 for not using gswp; 
4-digit year input for year of gswp met - "CABLE_USER%YearStart": "0", # needs to be 0 so the ncciy is set - "CABLE_USER%YearEnd": "0", # needs to be 0 so the ncciy is set - "gswpfile%rainf": "'%s'" % (rainf_fn), - "gswpfile%snowf": "'%s'" % (snowf_fn), - "gswpfile%LWdown": "'%s'" % (lwdown_fn), - "gswpfile%SWdown": "'%s'" % (swdown_fn), - "gswpfile%PSurf": "'%s'" % (psurf_fn), - "gswpfile%Qair": "'%s'" % (qair_fn), - "gswpfile%Tair": "'%s'" % (tair_fn), - "gswpfile%wind": "'%s'" % (wind_fn), - } - adjust_nml_file(self.nml_fname, replace_dict) - - # save copy as we go for debugging - remove later - shutil.copyfile( - self.nml_fname, os.path.join(self.namelist_dir, "cable_%d.nml" % (year)) - ) - - def sort_restart_files(self, start_yr, end_yr): - - if not os.path.exists(self.spinup_dir): - os.makedirs(self.spinup_dir) - - # Copy the last spinup restart file to the backup dir and rename - # it as if it was the first year - fn_in = "restart_%d.nc" % (end_yr) - fn_out = "restart_%d.nc" % (start_yr) - - restart_in_fname = os.path.join(self.restart_dir, fn_in) - restart_out_fname = os.path.join(self.spinup_dir, fn_out) - - shutil.copyfile(restart_in_fname, restart_out_fname) - - # remove the restart dir and remake it with the equilibrium file - shutil.rmtree(self.restart_dir, ignore_errors=True) - - if not os.path.exists(self.restart_dir): - os.makedirs(self.restart_dir) - - fn_in = "restart_%d.nc" % (start_yr) - restart_in_fname = os.path.join(self.spinup_dir, fn_in) - restart_out_fname = os.path.join(self.restart_dir, fn_in) - shutil.copyfile(restart_in_fname, restart_out_fname) - - -if __name__ == "__main__": - - # ------------- Change stuff ------------- # - # met_data = "AWAP" - met_data = "GSWP3" - if met_data == "GSWP3": - met_dir = "/g/data/wd9/MetForcing/Global/GSWP3_2017/" - elif met_data == "AWAP": - met_dir = "/g/data1a/w35/mgk576/research/AWAP_interpolation/interpolated" - - log_dir = "logs" - output_dir = "outputs" - restart_dir = "restarts" - aux_dir = "/g/data/w35/mgk576/research/CABLE_runs/src/CABLE-AUX" - # cable_src = "../../src/trunk/trunk/" - cable_src = "../../src/trunk_DESICA_PFTs/trunk_DESICA_PFTs/" - spinup_start_yr = 1995 - # spinup_end_yr = 1995 - spinup_end_yr = 2000 - run_start_yr = 1901 - run_end_yr = 1901 - # ------------------------------------------- # - - ( - log_fname, - out_fname, - restart_in_fname, - restart_out_fname, - year, - co2_conc, - nml_fname, - spin_up, - adjust_nml, - sort_restarts, - ) = cmd_line_parser() - - if spin_up: - start_yr = spinup_start_yr - end_yr = spinup_end_yr - walltime = "4:00:00" - qsub_fname = "qsub_wrapper_script_spinup.sh" - else: - start_yr = run_start_yr - end_yr = run_end_yr - walltime = "7:30:00" - qsub_fname = "qsub_wrapper_script_simulation.sh" - - C = RunCable( - met_dir=met_dir, - log_dir=log_dir, - output_dir=output_dir, - restart_dir=restart_dir, - aux_dir=aux_dir, - spin_up=spin_up, - cable_src=cable_src, - qsub_fname=qsub_fname, - met_data=met_data, - nml_fname=nml_fname, - walltime=walltime, - ) - - # Sort the restart files out before we run simulations "-t" - if sort_restarts: - C.sort_restart_files(spinup_start_yr, spinup_end_yr) - sys.exit("Restart files fixed up, run simulation") - - # Setup initial namelist file and submit qsub job - if adjust_nml == False: - C.initialise_stuff() - C.setup_nml_file() - C.run_qsub_script(start_yr, end_yr) - - # qsub script is adjusting namelist file, i.e. 
for a different year - else: - C.create_new_nml_file( - log_fname, out_fname, restart_in_fname, restart_out_fname, year, co2_conc - ) diff --git a/scripts/set_default_paths.py b/scripts/set_default_paths.py deleted file mode 100755 index 64b66166..00000000 --- a/scripts/set_default_paths.py +++ /dev/null @@ -1,144 +0,0 @@ -#!/usr/bin/env python - -""" -Set default paths on various machines... - -That's all folks. -""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (06.06.2020)" -__email__ = "mdekauwe@gmail.com" - -import os -import sys -import subprocess -import datetime -import shlex -from pathlib import Path - -from scripts.config import * - - -def return_machine_name(nodename: str, default_envfiles: dict): - """Return the key in default_envfiles that correspond to nodename if any""" - machine_name = "" - for key in default_envfiles.keys(): - if key in nodename: - machine_name = key - - return machine_name - - -def set_paths(nodename, envfile=""): - - # Get name of the machine if listed in default_envfiles - # default_envfiles is a dictionary stored in config.py - machine_name = return_machine_name(nodename, default_envfiles) - - # Set envfile using the nodename if not given at call - if not envfile and machine_name: - envfile = default_envfiles[machine_name] - - if "Mac" in nodename or "imac" in nodename: - NCDIR = "/opt/local/lib/" - NCMOD = "/opt/local/include/" - FC = "gfortran" - FCMPI = "gfortran-mp-9" - CFLAGS = "-O2" - LD = "'-lnetcdf -lnetcdff'" - LDFLAGS = "'-L/opt/local/lib -O2'" - - # - ## Met paths ... - # - # met_dir = "/Users/mdekauwe/research/CABLE_runs/met_data/plumber_met" - met_dir = "/Users/mdekauwe/research/plumber_test" - - elif "unsw" in nodename: - cmd = "module load netcdf-c/4.4.1.1-intel" - cmd = "module load netcdf-f/4.4.4-intel" - error = subprocess.call(cmd, shell=True) - if error == 1: - raise ("Error loading netcdf libs") - - # NCDIR = '/share/apps/netcdf/intel/4.1.3/lib' - # NCMOD = '/share/apps/netcdf/intel/4.1.3/include' - NCDIR = "/share/apps/netcdf-f/intel/4.4.4/lib" - NCMOD = "/share/apps/netcdf-f/intel/4.4.4/include" - - FC = "ifort" - FCMPI = "mpif90" - CFLAGS = "-O2" - LD = "'-lnetcdf -lnetcdff'" - LDFLAGS = "'-L/opt/local/lib -O2'" - - # - ## Met paths ... 
- # - # met_dir = ("/srv/ccrc/data45/z3509830/CABLE_runs/Inputs/" - # "PLUMBER_sites/met") - met_dir = ( - "/srv/ccrc/data04/z3509830/Fluxnet_data/" - "All_flux_sites_processed_PLUMBER2/" - "Post-processed_PLUMBER2_outputs/Nc_files/Met" - ) - - elif "gadi" in nodename: - # Load modules - MODULESHOME = Path(os.environ["MODULESHOME"]) - sys.path.append(str(MODULESHOME / "init")) - import python as mod - - with open(f"./{envfile}") as rfile: - ModToLoad = rfile.readlines() - - mod.module("purge") - for modname in ModToLoad: - mod.module("load", modname.rstrip()) - - # Setup variables for compilation - # FC is setup by the modules - NCBASE = Path(os.environ["NETCDF"]) # Set when loading the netcdf module - NCDIR = NCBASE / "lib" - NCMOD = NCBASE / "include" - FCMPI = "mpif90" - FC = os.environ["FC"] - CFLAGS = "-O2" - LD = "'-lnetcdf -lnetcdff'" - LDFLAGS = "'-L'$NCDIR' -O0'" - met_dir = Path( - "/g/data/w35/Shared_data/Observations/Fluxnet_data/" - "Post-processed_PLUMBER2_outputs/Nc_files/Met" - ) - - else: - sys.path.append("/opt/Modules/v4.3.0/init/") - import python as mod - - # exec(open('/opt/Modules/v4.3.0/init/python.py').read()) - ver = "4.7.1" - mod.module("unload", "netcdf") - mod.module("unload", "openmpi") - mod.module("load", "netcdf/%s" % (ver)) - mod.module("load", "intel-compiler/2019.3.199") - mod.module("load", "intel-mpi/2019.6.166") - - NCDIR = "/apps/netcdf/%s/lib" % (ver) - NCMOD = "/apps/netcdf/%s/include" % (ver) - FCMPI = "mpif90" - FC = "ifort" - CFLAGS = "-O2" - LD = "'-lnetcdf -lnetcdff'" - LDFLAGS = "'-L/opt/local/lib -O2'" - # - ## Met paths ... - # - # met_dir = ("/g/data1/w35/Shared_data/Observations/Fluxnet_data/" - # "FLUXNET2015/Processed_data/Missing_10%_Gapfill_20%/Daily") - met_dir = ( - "/g/data/w35/Shared_data/Observations/Fluxnet_data/" - "Post-processed_PLUMBER2_outputs/Nc_files/Met" - ) - - return (met_dir, NCDIR, NCMOD, FC, FCMPI, CFLAGS, LD, LDFLAGS) diff --git a/scripts/setup_integration_branch.py b/scripts/setup_integration_branch.py deleted file mode 100755 index ae32c43e..00000000 --- a/scripts/setup_integration_branch.py +++ /dev/null @@ -1,29 +0,0 @@ -#!/usr/bin/env python - -""" -Create an integration branch in the shared space. - -That's all folks. 
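# --- Illustrative sketch (not part of the diff) -----------------------------
# On gadi, set_paths() reads an environment file with one module per line
# (the removed gadi_env.sh listed intel-compiler/2021.1.1, openmpi/4.1.0 and
# netcdf/4.7.4) and loads each entry after a purge. load_module() below is a
# stand-in for the NCI environment-modules Python bindings (mod.module).
def load_module(*args):
    print("module", *args)  # placeholder for mod.module(*args)

modules_to_load = ["intel-compiler/2021.1.1", "openmpi/4.1.0", "netcdf/4.7.4"]

load_module("purge")
for modname in modules_to_load:
    load_module("load", modname.rstrip())
# -----------------------------------------------------------------------------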
-""" - -__author__ = "Martin De Kauwe" -__version__ = "1.0 (09.03.2019)" -__email__ = "mdekauwe@gmail.com" - -import os -import sys -import subprocess -import datetime - -root = "https://trac.nci.org.au/svn/cable" -msg = '"setup integration branch"' - -cmd = "svn copy %s/branches/Users/ccc561/Metvar-lookup1 %s/branches/Share/integration -m %s" % ( - root, - root, - msg, -) -print(cmd) -error = subprocess.call(cmd, shell=True) -if error == 1: - raise ("Error copying repo") diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 00000000..f16e7c87 --- /dev/null +++ b/setup.cfg @@ -0,0 +1,26 @@ +[metadata] +name=benchcab +summary= Software to run a benchmarking suite for CABLE LSM +version=0.1.2 +description=To benchmark CABLE simulations +url=https://github.com/CABLE-LSM/benchcab +author=Claire Carouge +author_email=c.carouge@unsw.edu.au +license=Apache 2.0 +classifier= + Development Status :: 2 - Pre-Alpha + Intended Audience :: Science/Research + License :: OSI Approved :: Apache Software License + Operating System :: POSIX :: Linux + Programming Language :: Python :: 3.9 + +[files] +packages = benchcab + +[entry_points] +console_scripts = + benchcab=benchcab.benchcab:main_argv + benchsiterun=benchcab.benchsiterun:main_argv + +[tool:pytest] +addopts = --doctest-modules --doctest-glob='*.rst' --ignore setup.py --ignore conftest.py --ignore docs/conf.py \ No newline at end of file diff --git a/setup.py b/setup.py new file mode 100644 index 00000000..84088388 --- /dev/null +++ b/setup.py @@ -0,0 +1,8 @@ +#!/usr/bin/env python + +from setuptools import setup + +setup( + setup_requires=['setuptools', 'pbr'], + pbr=True, +) \ No newline at end of file diff --git a/setup/directory_structure.py b/setup/directory_structure.py deleted file mode 100644 index 44cb2b1c..00000000 --- a/setup/directory_structure.py +++ /dev/null @@ -1,16 +0,0 @@ -import os - -# -## user directories ... -# -src_dir = "src" -aux_dir = "src/CABLE-AUX" -run_dir = "runs" -log_dir = "logs" -plot_dir = "plots" -output_dir = "outputs" -restart_dir = "restart_files" -namelist_dir = "namelists" - -if not os.path.exists(src_dir): - os.makedirs(src_dir) diff --git a/setup/machine_inits.py b/setup/machine_inits.py deleted file mode 100644 index 620f30e9..00000000 --- a/setup/machine_inits.py +++ /dev/null @@ -1,13 +0,0 @@ -import user_options -import datetime -import os -import sys -import shutil -from scripts.set_default_paths import set_paths - -now = datetime.datetime.now() -date = now.strftime("%d_%m_%Y") -cwd = os.getcwd() -(sysname, nodename, release, version, machine) = os.uname() - -(met_dir, NCDIR, NCMOD, FC, FCMPI, CFLAGS, LD, LDFLAGS) = set_paths(nodename) diff --git a/setup/repo_init.py b/setup/repo_init.py deleted file mode 100644 index fb808961..00000000 --- a/setup/repo_init.py +++ /dev/null @@ -1,11 +0,0 @@ -from user_options import repo2 -from setup.machine_inits import date - -# -## Repositories to test, default is head of the trunk against personal repo. 
-## But if trunk is false, repo1 could be anything -# -trunk = True -repo1 = f"Trunk_{date}" -share_branch = False -repos = [repo1, repo2] diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..60750a14 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,51 @@ +import pytest +import os +import yaml +from pathlib import Path + +from benchcab import bench_config +from benchcab import get_cable + +@pytest.fixture +def testconfig(): + # Test config + conf = { + "use_branches":["user_branch","trunk"], + "user_branch":{ + "name": "v3.0-YP-changes", + "trunk": False, + "share_branch": False, + }, + "trunk":{ + "name": "trunk", + "trunk": True, + "share_branch": False, + }, + "share":{ + "name": "integration", + "trunk": False, + "share_branch": True, + }, + "user":os.environ["USER"], + "project":os.environ["PROJECT"], + "modules":[ + "intel-compiler/2021.1.1", + "openmpi/4.1.0", + "netcdf/4.7.4", + ], + } + return conf + +@pytest.fixture +def create_testconfig(testconfig, tmp_path): + + os.chdir(tmp_path) + with open("config.yaml", "w") as fout: + yaml.dump(testconfig,fout) + + TestSetup=bench_config.BenchSetup("config.yaml") + # return the config options, compilation options and directory tree: + # opt, compilation_opt, benchdirs + opt, compilation_opt, benchdirs = TestSetup.setup_bench() + + return tmp_path, opt, compilation_opt, benchdirs diff --git a/tests/test_benchcab.py b/tests/test_benchcab.py new file mode 100644 index 00000000..9e408ffd --- /dev/null +++ b/tests/test_benchcab.py @@ -0,0 +1,53 @@ +#!/usr/bin/env python +import yaml +from pathlib import Path +import tempfile +import os +import pytest + +from benchcab import benchcab +from benchcab import build_cable +from tests.test_workdir import checkout_branch + +mydir = Path.cwd() + +def test_run_sites(create_testconfig): + + os.chdir(create_testconfig[0]) + mess = "Running the single sites tests " + assert benchcab.main_parse_args(["-f"]) == mess + + os.chdir(mydir) + +def test_run_spatial(create_testconfig): + + os.chdir(create_testconfig[0]) + mess = "Running the spatial tests " + assert benchcab.main_parse_args(["-w"]) == mess + + os.chdir(mydir) + + +def test_run_all(create_testconfig): + + os.chdir(create_testconfig[0]) + mess = "Running the single sites tests "+"Running the spatial tests " + assert benchcab.main_parse_args([]) == mess + + os.chdir(mydir) + +# def test_compilation(create_testconfig): + +# os.chdir(create_testconfig) + +# checkout_branch("user", create_testconfig) +# B = build_cable.BuildCable( +# src_dir=benchdirs.src_dir, +# ModToLoad=opt["modules"], +# **compilation_opt, +# mpi=True, +# ) +# B.main(repo_name=branch1["name"]) + + + diff --git a/tests/test_workdir.py b/tests/test_workdir.py new file mode 100644 index 00000000..4fb66d40 --- /dev/null +++ b/tests/test_workdir.py @@ -0,0 +1,65 @@ +from pathlib import Path +import tempfile +import os +import yaml + +from benchcab import benchtree +from benchcab import bench_config +from benchcab import get_cable + + +def checkout_branch(branch_type:str, opt:dict,tb:benchtree.BenchTree): + + # Check if the branch_type exists in file? 
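# --- Illustrative sketch (not part of the diff) -----------------------------
# The fixtures in tests/conftest.py require USER and PROJECT in the
# environment before they can write the test config.yaml into tmp_path.
# Running plain "pytest" from the repository root picks up the addopts set in
# setup.cfg; the snippet below does the same programmatically. The USER value
# is a placeholder; the project code follows the removed user_options.py.
import os
import pytest

os.environ.setdefault("USER", "abc123")   # placeholder NCI username
os.environ.setdefault("PROJECT", "w35")
exit_code = pytest.main(["tests"])
# -----------------------------------------------------------------------------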
+ locbranch = opt[branch_type] + tr = get_cable.GetCable(src_dir=tb.src_dir, user=opt["user"]) + tr.main(**locbranch) + + print(Path.cwd()) + assert Path(f"src/{locbranch['name']}").is_dir(), "Directory does not exist" + assert len(list(Path(f"src/{locbranch['name']}").iterdir())) > 0, "Directory is empty" + +def test_create_minbenchtree(create_testconfig): + """Test the min. directory tree is created""" + + td = create_testconfig[0] + # Get into the temporary directory to test creating the directory structure + os.chdir(td) + + tb = benchtree.BenchTree(Path(td)) + tb.create_minbenchtree() + + paths_to_create=[ + (td/"src").is_dir(), + (td/"runs").is_dir(), + ] + assert all(paths_to_create) + + +def test_read_config(create_testconfig, testconfig): + + os.chdir(create_testconfig[0]) + TestSetup=bench_config.BenchSetup("config.yaml") + # Get the branch information from the testconfig + opt = TestSetup.read_config() + + assert opt == testconfig + +def test_checkout_trunk(create_testconfig): + + td = create_testconfig[0] + os.chdir(td) + # Setup the benchmarking + opt, _, tb = create_testconfig[1:] + + checkout_branch("trunk", opt, tb) + +# def test_checkout_user(create_testconfig, checkout_branch): + +# checkout_branch("user_branch", create_testconfig) + +# def test_checkout_share(create_testconfig, checkout_branch): + +# checkout_branch("share", create_testconfig) + + diff --git a/user_options.py b/user_options.py deleted file mode 100644 index 245c3bd3..00000000 --- a/user_options.py +++ /dev/null @@ -1,5 +0,0 @@ -# ------------- User set stuff ------------- # -project = "w35" -user = "ccc561" -repo2 = "v3.0-YP-changes" -# ------------------------------------------ #
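Taken together, these changes replace the ad hoc wrapper scripts with an installable package: setup.cfg and setup.py build benchcab with pbr and expose the benchcab and benchsiterun console scripts, while the options previously hard-coded in user_options.py move into a config.yaml read at run time. Below is a minimal sketch of such a configuration, mirroring the structure of the testconfig fixture in tests/conftest.py; the username and branch details are placeholders, not prescribed values.

# Write a minimal config.yaml in the work directory; the structure mirrors the
# testconfig fixture in tests/conftest.py. Values marked "placeholder" are
# illustrative assumptions only.
import yaml

config = {
    "use_branches": ["user_branch", "trunk"],
    "user_branch": {"name": "v3.0-YP-changes", "trunk": False, "share_branch": False},
    "trunk": {"name": "trunk", "trunk": True, "share_branch": False},
    "user": "abc123",    # placeholder NCI username
    "project": "w35",
    "modules": ["intel-compiler/2021.1.1", "openmpi/4.1.0", "netcdf/4.7.4"],
}

with open("config.yaml", "w") as fout:
    yaml.dump(config, fout)

# After "pip install ." the console scripts defined in setup.cfg
# ("benchcab" and "benchsiterun") can then be run from this directory.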