Update to instruments
akashdhruv committed Nov 3, 2023
1 parent 8fe9e7e commit 4806627
Showing 6 changed files with 155 additions and 7 deletions.
1 change: 0 additions & 1 deletion .gitignore
@@ -3,6 +3,5 @@ __pycache__
 *.rst
 .sphinx/build/*
 .sphinx/source/media/*
-jobrunner/options.py
 !.sphinx/source/index.rst
 !./README.rst
4 changes: 2 additions & 2 deletions README.rst
@@ -48,7 +48,7 @@ customization specific to instruments.
 
 pip install PyJobruner --user --install-option="--with-instruments"
 
-This allow for the use of the ``instrument:`` field in the Jobfile
+This allow for the use of the ``instrument`` field in the Jobfile
 
 There maybe situations where users may want to install Jobrunner in
 development mode $\\textemdash$ to design new features, debug, or
@@ -225,7 +225,7 @@ The ``Jobfile`` at this node assigns the use of ``environment.sh``,
 
 # file: Project/Jobfile
 
-instrument: flashx
+instrument: Flash-X
 
 # Scripts to include during jobrunner setup and submit commands
 
 job:
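The ``--with-instruments`` install option is what enables the ``instrument`` field, and the ``_filetools.py`` change further down gates the Flash-X hooks on ``options.INSTRUMENTS == 1``. A minimal sketch of what a generated ``jobrunner/options.py`` could look like is given below; its contents and the install-time switch are assumptions for illustration, not part of this commit.

    # Hypothetical sketch of jobrunner/options.py (not part of this commit).
    # The only fact taken from the diff is that _filetools.py checks
    # options.INSTRUMENTS == 1 before calling the instruments.flashx hooks.
    INSTRUMENTS = 1  # assumed: set to 1 when installed with --with-instruments, 0 otherwise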
2 changes: 1 addition & 1 deletion jobrunner/__meta__.py
@@ -1,7 +1,7 @@
"""Metadata for jobrunner"""

__pkgname__ = "PyJobRunner"
__version__ = "2023.11.01"
__version__ = "2023.11.2"
__authors__ = "Akash Dhruv"
__license__ = "Apache Software License"
__copyright__ = "Copyright (c) Akash Dhruv 2023. All Rights Reserved."
2 changes: 1 addition & 1 deletion jobrunner/api/_commands.py
@@ -75,11 +75,11 @@ def submit(dirlist, verbose=False):
     config = lib.ParseJobConfig(basedir, workdir)
 
     # Build inputfile
-    lib.CreateInputFile(config)
     if config.job.input:
         print(f"\n{lib.Color.purple}INPUT: {lib.Color.end}")
         for value in config.job.input:
             print(f'{" "*4}- {value.replace(basedir,"<ROOT>")}')
+    lib.CreateInputFile(config)
 
     # Build targetfile
     lib.CreateTargetFile(config)
150 changes: 149 additions & 1 deletion jobrunner/instruments/flashx.py
@@ -2,6 +2,9 @@
 
 import os
 import toml
+import numpy
+import h5py
+from scipy.stats import qmc
 
 
 def CreateParfile(workdir):
@@ -29,6 +32,9 @@ def CreateParfile(workdir):
     # Loop over keys in the input dictionary to and start building the parfile
     for group in input_dict:
 
+        if not group.isupper():
+            raise ValueError(f'[jobrunner] Group "{group}" should be uppercase')
+
         # Indicate which key the following runtime parameters belong to
         parfile.write(f"\n# Runtime parameters for {group}\n")
 
@@ -39,7 +45,7 @@
             # more complex configuration which will not be handled.
             if type(value) == dict:
                 print(
-                    f"[jobrunner] {group}.{key} is a dictionary and will not be handled "
+                    f'{" "*4}[jobrunner] {group}.{key} is a dictionary and will not be handled '
                     + "during Flash-X parfile generation"
                 )
 
@@ -55,3 +61,145 @@
             # Deal with rest
             else:
                 parfile.write(f"{key} = {value}\n")
+
+
+def CreateHeater(workdir):
+    """
+    Create hdf5 input files based on heater configuration. This functionality
+    is only available when "instrument" is defined in the Jobfile, and jobrunner
+    is installed with the option --with-instrument
+    """
+    # Load TOML dictionary for JobWorkDir/job.input, JobWorkDir is a
+    # reserved environment variable for the working directory of the job
+    input_dict = toml.load(workdir + os.sep + "job.input")
+
+    # Return immediately if HEATER not present in input dictionary
+    if "HEATER" not in input_dict.keys():
+        return
+
+    # If we are here then HEATER is present in the input dictionary
+    # and we can safely load the corresponding heater dictionary
+    heater_dict = input_dict["HEATER"]
+
+    # Set a counter to track how many heater files are being written
+    # and then loop over items in heater dictionary
+    num_heaters = 0
+    for key, info in heater_dict.items():
+
+        # if info is of type dictionary we have hit a heater configuration
+        # that needs to be written to a file. Start implementing that logic
+        if type(info) == dict:
+
+            # Increase heater counter to track number of heaters
+            num_heaters = num_heaters + 1
+
+            # Set filename and open the hdf5 file in write mode
+            filename = (
+                workdir + os.sep + heater_dict["sim_heaterName"] + "_hdf5_htr_" + key
+            )
+            hfile = h5py.File(filename, "w")
+
+            xsite = numpy.ndarray([info["numSites"]], dtype=float)
+            ysite = numpy.ndarray([info["numSites"]], dtype=float)
+            zsite = numpy.ndarray([info["numSites"]], dtype=float)
+            radii = numpy.ndarray([info["numSites"]], dtype=float)
+
+            if info["numSites"] == 1:
+                xsite[:] = 0.0
+                ysite[:] = 1e-13
+                zsite[:] = 0.0
+                radii[:] = 0.2
+
+            else:
+                halton = qmc.Halton(d=2, seed=1)
+                sample = halton.random(info["numSites"])
+
+                xsite[:] = info["xmin"] + sample[:, 0] * (info["xmax"] - info["xmin"])
+                ysite[:] = 1e-13
+                radii[:] = 0.2
+
+                if info["dim"] == 1:
+                    zsite[:] = 0.0
+                elif info["dim"] == 2:
+                    zsite[:] = info["zmin"] + sample[:, 1] * (
+                        info["zmax"] - info["zmin"]
+                    )
+                else:
+                    raise ValueError(f"[jobrunner] Error in HEATER.{key}.dim")
+
+            hfile.create_dataset(
+                "heater/xMin", data=info["xmin"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/xMax", data=info["xmax"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/zMin", data=info["zmin"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/zMax", data=info["zmax"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/yMin", data=info["ymin"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/yMax", data=info["ymax"], shape=(1), dtype="float32"
+            )
+            hfile.create_dataset(
+                "heater/wallTemp",
+                data=info["wallTemp"],
+                shape=(1),
+                dtype="float32",
+            )
+            hfile.create_dataset(
+                "heater/advAngle",
+                data=info["advAngle"],
+                shape=(1),
+                dtype="float32",
+            )
+            hfile.create_dataset(
+                "heater/rcdAngle",
+                data=info["rcdAngle"],
+                shape=(1),
+                dtype="float32",
+            )
+            hfile.create_dataset(
+                "heater/velContact",
+                data=info["velContact"],
+                shape=(1),
+                dtype="float32",
+            )
+            hfile.create_dataset(
+                "heater/nucWaitTime",
+                data=info["nucWaitTime"],
+                shape=(1),
+                dtype="float32",
+            )
+            hfile.create_dataset(
+                "site/num", data=info["numSites"], shape=(1), dtype="int32"
+            )
+            hfile.create_dataset(
+                "site/x", data=xsite, shape=(info["numSites"]), dtype="float32"
+            )
+            hfile.create_dataset(
+                "site/y", data=ysite, shape=(info["numSites"]), dtype="float32"
+            )
+            hfile.create_dataset(
+                "site/z", data=zsite, shape=(info["numSites"]), dtype="float32"
+            )
+            hfile.create_dataset(
+                "init/radii",
+                data=radii,
+                shape=(info["numSites"]),
+                dtype="float32",
+            )
+            hfile.close()
+
+            print(
+                f'{" "*4}[jobrunner] Wrote heater information to file {filename.replace(workdir + os.sep,"")}'
+            )
+
+    if num_heaters != heater_dict["sim_numHeaters"]:
+        raise ValueError(
+            f"[jobrunner] Number of heater files not equal to sim_numHeaters"
+        )
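To make the new heater machinery concrete, below is a hedged sketch of a ``JobWorkDir/job.input`` file that ``CreateParfile`` and ``CreateHeater`` could consume. The group and key names mirror the reads in the code above (groups must be uppercase; ``HEATER`` carries ``sim_heaterName``, ``sim_numHeaters``, and one sub-table per heater); the values and the heater label ``0001`` are illustrative assumptions.

    # Hypothetical job.input (TOML) -- all values are placeholders
    [HEATER]
    sim_heaterName = "heater"   # prefixes the generated <name>_hdf5_htr_<key> files
    sim_numHeaters = 1          # must equal the number of heater sub-tables below

    [HEATER.0001]               # written to heater_hdf5_htr_0001
    dim = 2
    numSites = 20               # values > 1 trigger Halton sampling of site positions
    xmin = -5.0
    xmax = 5.0
    ymin = 0.0
    ymax = 10.0
    zmin = -5.0
    zmax = 5.0
    wallTemp = 1.0
    advAngle = 45.0
    rcdAngle = 30.0
    velContact = 0.2
    nucWaitTime = 0.4

If ``sim_numHeaters`` does not match the number of heater sub-tables, ``CreateHeater`` raises the ``ValueError`` added at the end of the function, while ``CreateParfile`` writes the non-dictionary keys of every uppercase group into the Flash-X parfile.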
3 changes: 2 additions & 1 deletion jobrunner/lib/_filetools.py
@@ -108,8 +108,9 @@ def CreateInputFile(config):
     # else:
     #     inputfile.write(f'{" "*2}{variable} = {value}\n')
 
-    if config.instrument == "flashx" and options.INSTRUMENTS == 1:
+    if config.instrument == "Flash-X" and options.INSTRUMENTS == 1:
         instruments.flashx.CreateParfile(config.job.workdir)
+        instruments.flashx.CreateHeater(config.job.workdir)
 
 
 def CreateTargetFile(config):
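As a quick usage sketch, the two hooks wired in above can also be driven by hand on a prepared working directory. The import path follows the ``jobrunner/instruments/flashx.py`` location in this diff and assumes an installation built with ``--with-instruments``; the directory name is made up for the example.

    # Illustrative only: run the Flash-X instrument hooks directly on a workdir
    # that already contains a job.input file (the path below is hypothetical).
    import os

    from jobrunner.instruments import flashx

    workdir = os.path.join(os.getcwd(), "Project/simulation")  # hypothetical job work directory
    flashx.CreateParfile(workdir)  # writes Flash-X runtime parameters parsed from job.input
    flashx.CreateHeater(workdir)   # writes <sim_heaterName>_hdf5_htr_* files when a HEATER group exists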
