Commit b6dd9da

Update to account for missing configuration defaults (#12)
* Update to account for missing configuration defaults

* Update job submission config
nsmith- committed Sep 7, 2022
1 parent b08e3c7 commit b6dd9da
Showing 3 changed files with 19 additions and 23 deletions.
25 changes: 5 additions & 20 deletions src/lpcjobqueue/cluster.py
@@ -57,9 +57,9 @@ def __init__(
         if ship_env:
             base_class_kwargs["python"] = ".env/bin/python"
             base_class_kwargs.setdefault(
-                "extra", list(dask.config.get("jobqueue.%s.extra" % self.config_name))
+                "worker_extra_args", list(dask.config.get("jobqueue.%s.worker_extra_args" % self.config_name))
             )
-            base_class_kwargs["extra"].extend(["--preload", "lpcjobqueue.patch"])
+            base_class_kwargs["worker_extra_args"].extend(["--preload", "lpcjobqueue.patch"])
         else:
             base_class_kwargs["python"] = "python"
         super().__init__(scheduler=scheduler, name=name, **base_class_kwargs)
@@ -81,21 +81,6 @@ def __init__(
             }
         )
 
-    def job_script(self):
-        """Construct a job submission script"""
-        quoted_arguments = quote_arguments(self._command_template.split(" "))
-        quoted_environment = quote_environment(self.env_dict)
-        job_header_lines = "\n".join(
-            "%s = %s" % (k, v) for k, v in self.job_header_dict.items()
-        )
-        return self._script_template % {
-            "shebang": self.shebang,
-            "job_header": job_header_lines,
-            "quoted_environment": quoted_environment,
-            "quoted_arguments": quoted_arguments,
-            "executable": self.executable,
-        }
-
     async def start(self):
         """Start workers and point them to our local scheduler"""
         logger.info("Starting worker: %s", self.name)
@@ -298,9 +283,9 @@ async def _start(self):
         prepared_input_files = await self.loop.run_in_executor(
             None, self._build_scratch
         )
-        self._job_kwargs.setdefault("job_extra", {})
-        self._job_kwargs["job_extra"]["initialdir"] = self.scratch_area.name
-        self._job_kwargs["job_extra"]["transfer_input_files"] = ",".join(
+        self._job_kwargs.setdefault("job_extra_directives", {})
+        self._job_kwargs["job_extra_directives"]["initialdir"] = self.scratch_area.name
+        self._job_kwargs["job_extra_directives"]["transfer_input_files"] = ",".join(
             prepared_input_files
         )

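The renames above track the dask-jobqueue 0.8 API, where the extra keyword argument became worker_extra_args and job_extra became job_extra_directives. A minimal usage sketch, not part of this commit, assuming the cluster class is exported as lpcjobqueue.LPCCondorCluster and using purely illustrative option values:

    # Hypothetical usage sketch: pass the renamed dask-jobqueue >= 0.8
    # keyword arguments through the LPC cluster class.
    from lpcjobqueue import LPCCondorCluster

    cluster = LPCCondorCluster(
        # formerly `extra`: appended to the dask-worker command line and
        # merged with the worker-extra-args defaults from config.yaml
        worker_extra_args=["--nthreads", "1"],
        # formerly `job_extra`: extra attributes written into the HTCondor
        # submit description (illustrative attribute only)
        job_extra_directives={"request_disk": "2GB"},
    )
    cluster.scale(jobs=4)
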
4 changes: 3 additions & 1 deletion src/lpcjobqueue/condor_exec.exe
@@ -1,2 +1,4 @@
 #!/bin/bash
-${@}
+# htcondor jobqueue executable is /bin/sh, so
+# we emulate /bin/sh -c "command" behavior
+$2
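The change from ${@} to $2 follows from dropping the job_script override in cluster.py above. The stock HTCondorJob.job_script in dask-jobqueue quotes the worker command roughly as the argument pair ["-c", command], as if the executable were /bin/sh, so inside this wrapper $1 is "-c" and $2 is the whole command string; the old override instead split the command on spaces into many arguments, hence ${@}. A small sketch, not part of this commit and with an illustrative command string, of the quoting the wrapper now expects:

    # Sketch of how stock dask-jobqueue (>= 0.8) builds the Arguments value;
    # the command string below is illustrative.
    from dask_jobqueue.htcondor import quote_arguments

    command = "/srv/.env/bin/python -m distributed.cli.dask_worker tcp://scheduler:8786"
    print(quote_arguments(["-c", command]))
    # The submit file gets a single Arguments value whose second token is the
    # quoted command, which condor_exec.exe then runs as "$2".
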
13 changes: 11 additions & 2 deletions src/lpcjobqueue/config.yaml
@@ -10,13 +10,22 @@ jobqueue:
     interface: null # Network interface to use like eth0 or ib0
     death-timeout: 60 # Number of seconds to wait if a worker can not find a scheduler
     local-directory: /srv # Location of fast local storage like /scratch or $TMPDIR
-    extra: ["--worker-port 10000:10070", "--nanny-port 10070:10100", "--no-dashboard"]
+    shared-temp-directory: null
+    extra: null
+    worker-extra-args: ["--worker-port 10000:10070", "--nanny-port 10070:10100", "--no-dashboard"]
 
     # HTCondor Resource Manager options
     disk: 200MB # Total amount of disk per job
-    env-extra: []
-    job-extra: {} # Extra submit attributes
-    log-directory: null
+    env-extra: null
+    job-script-prologue: []
+    job-extra: null # Extra submit attributes
+    job-extra-directives: {} # Extra submit attributes
+    job-directives-skip: []
+    submit-command-extra: [] # Extra condor_submit arguments
+    cancel-command-extra: [] # Extra condor_rm arguments
+    log-directory: null
     shebang: "#!/usr/bin/env condor_submit" # doesn't matter
 
     # Scheduler options
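The added keys matter because dask-jobqueue 0.8 reads each of them from the jobqueue.<section> configuration when a cluster or job is constructed, so any key missing from this file surfaces as a missing-configuration error. The deprecated keys (extra, env-extra, job-extra) are kept with null values so the compatibility checks can still look them up. A sketch, not part of this commit, of how these defaults are consumed; the section name lpccondor is an assumption here:

    # Load the packaged defaults into dask's config and read back the keys
    # that cluster.py looks up (section name "lpccondor" is an assumption).
    import dask.config
    import yaml

    with open("src/lpcjobqueue/config.yaml") as f:
        dask.config.update_defaults(yaml.safe_load(f))

    print(dask.config.get("jobqueue.lpccondor.worker-extra-args"))
    print(dask.config.get("jobqueue.lpccondor.job-extra-directives"))
    # Before this commit, lookups like these failed under dask-jobqueue 0.8
    # because the keys were absent from config.yaml.
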
