Merge pull request #295 from lsst/tickets/DM-45113
DM-45113: Re-enable skipping and clobbering with LimitedButler
mfisherlevine authored Jul 26, 2024
Commit 454af29 (2 parents: 4fa1592 + e07a1af)
Showing 3 changed files with 13 additions and 7 deletions.
python/lsst/ctrl/mpexec/cmdLineFwk.py (3 changes: 1 addition & 2 deletions)
@@ -994,8 +994,7 @@ def runGraphQBB(self, task_factory: TaskFactory, args: SimpleNamespace) -> None:
     exitOnKnownError=args.fail_fast,
     limited_butler_factory=_butler_factory,
     resources=resources,
-    clobberOutputs=True,
-    skipExisting=True,
+    assumeNoExistingOutputs=True,
 )

 timeout = self.MP_TIMEOUT if args.timeout is None else args.timeout
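For orientation, here is a minimal sketch of how the quantum-backed-butler path in cmdLineFwk.py ends up configuring the executor after this change. This is not the actual runGraphQBB code: the helper name and the way the factory and resources objects are obtained are placeholders, and only the keyword arguments visible in the diff above are taken from the source.

from lsst.ctrl.mpexec import SingleQuantumExecutor


def _make_qbb_executor(task_factory, butler_factory, resources, fail_fast):
    # Hypothetical helper: mirrors the keyword arguments shown in the diff above.
    return SingleQuantumExecutor(
        butler=None,  # quantum-backed execution supplies a LimitedButler per quantum
        taskFactory=task_factory,
        exitOnKnownError=fail_fast,
        limited_butler_factory=butler_factory,
        resources=resources,
        # The RUN collection is known to be new, so existence checks are skipped
        # entirely instead of passing clobberOutputs=True / skipExisting=True.
        assumeNoExistingOutputs=True,
    )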
python/lsst/ctrl/mpexec/dotTools.py (6 changes: 3 additions & 3 deletions)
@@ -113,7 +113,7 @@ def _renderQuantumNode(
     labels = [f"{quantumNode.nodeId}", html.escape(taskDef.label)]
     dataId = quantumNode.quantum.dataId
     assert dataId is not None, "Quantum DataId cannot be None"
-    labels.extend(f"{key} = {dataId[key]}" for key in sorted(dataId.keys()))
+    labels.extend(f"{key} = {dataId[key]}" for key in sorted(dataId.required.keys()))
     _renderNode(file, nodeName, "quantum", labels)


@@ -128,7 +128,7 @@ def _renderDSTypeNode(name: str, dimensions: list[str], file: io.TextIOBase) ->
 def _renderDSNode(nodeName: str, dsRef: DatasetRef, file: io.TextIOBase) -> None:
     """Render GV node for a dataset"""
     labels = [html.escape(dsRef.datasetType.name), f"run: {dsRef.run!r}"]
-    labels.extend(f"{key} = {dsRef.dataId[key]}" for key in sorted(dsRef.dataId.keys()))
+    labels.extend(f"{key} = {dsRef.dataId[key]}" for key in sorted(dsRef.dataId.required.keys()))
     _renderNode(file, nodeName, "dataset", labels)


@@ -144,7 +144,7 @@ def _renderEdge(fromName: str, toName: str, file: io.TextIOBase, **kwargs: Any)
 def _datasetRefId(dsRef: DatasetRef) -> str:
     """Make an identifying string for given ref"""
     dsId = [dsRef.datasetType.name]
-    dsId.extend(f"{key} = {dsRef.dataId[key]}" for key in sorted(dsRef.dataId.keys()))
+    dsId.extend(f"{key} = {dsRef.dataId[key]}" for key in sorted(dsRef.dataId.required.keys()))
     return ":".join(dsId)


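All three dotTools.py hunks make the same mechanical substitution: the dimension names of a data ID are now taken from its ``required`` mapping rather than from ``keys()`` on the data ID itself, while item lookup (``dataId[key]``) is unchanged. A sketch of the shared pattern, with a hypothetical helper name that is not part of the module:

def _data_id_labels(data_id) -> list[str]:
    # ``data_id`` is assumed to be a daf_butler DataCoordinate: indexing by a
    # dimension name still works, but the names now come from ``.required``.
    return [f"{key} = {data_id[key]}" for key in sorted(data_id.required.keys())]

This produces one ``key = value`` label per required dimension, in sorted key order.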
python/lsst/ctrl/mpexec/singleQuantumExecutor.py (11 changes: 9 additions & 2 deletions)
@@ -123,6 +123,12 @@ class SingleQuantumExecutor(QuantumExecutor):
         Unlike ``skipExistingIn``, this works with limited butlers as well as
         full butlers. Always set to `True` if ``skipExistingIn`` matches
         ``butler.run``.
+    assumeNoExistingOutputs : `bool`, optional
+        If `True`, assume preexisting outputs are impossible (e.g. because this
+        is known by higher-level code to be a new ``RUN`` collection), and do
+        not look for them. This causes the ``skipExisting`` and
+        ``clobberOutputs`` options to be ignored, but unlike just setting both
+        of those to `False`, it also avoids all dataset existence checks.
     """

     def __init__(
@@ -136,6 +142,7 @@ def __init__(
         limited_butler_factory: Callable[[Quantum], LimitedButler] | None = None,
         resources: ExecutionResources | None = None,
         skipExisting: bool = False,
+        assumeNoExistingOutputs: bool = False,
     ):
         self.butler = butler
         self.taskFactory = taskFactory
@@ -144,6 +151,7 @@ def __init__(
         self.exitOnKnownError = exitOnKnownError
         self.limited_butler_factory = limited_butler_factory
         self.resources = resources
+        self.assumeNoExistingOutputs = assumeNoExistingOutputs

         if self.butler is None:
             assert limited_butler_factory is not None, "limited_butler_factory is needed when butler is None"
@@ -345,8 +353,7 @@ def checkExistingOutputs(
         """
         task_node = self._conform_task_def(task_node)

-        if not self.butler:
-            # Skip/prune logic only works for full butler.
+        if self.assumeNoExistingOutputs:
            return False

         if self.skipExisting:
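Taken together, the singleQuantumExecutor.py hunks mean the skip/clobber decision now short-circuits on the new flag before any dataset lookups. The following is a condensed, illustrative rendering of that control flow, not the real checkExistingOutputs (which also queries the butler for the actual outputs and handles partial-output clobbering):

def _should_skip(executor, outputs_complete: bool) -> bool:
    # Condensed sketch; ``executor`` stands in for a SingleQuantumExecutor.
    if executor.assumeNoExistingOutputs:
        # New behaviour: trust the caller that the RUN collection is fresh, so
        # perform no existence checks and ignore skipExisting/clobberOutputs.
        return False
    if executor.skipExisting and outputs_complete:
        # With skipExisting, a quantum whose outputs already exist is skipped.
        return True
    # Otherwise clobbering or error handling takes over (not shown here).
    return False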
