
Commit

Merge pull request #432 from lsst/tickets/DM-45536
DM-45536: address edge cases in task mocking system
TallJimbo committed Aug 2, 2024
2 parents 5d287ca + 36fc00e commit 5079685
Showing 3 changed files with 8 additions and 9 deletions.
1 change: 1 addition & 0 deletions doc/changes/DM-45536.bugfix.md
@@ -0,0 +1 @@
+Fix support for task metadata as inputs in the `PipelineTask` mocking system.
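For context, "task metadata as inputs" means a downstream task declaring an upstream task's automatically written metadata dataset as one of its input connections. A hypothetical declaration, assuming the standard `lsst.pipe.base` connections API; the names `ExampleConnections` and `someTask_metadata` are made up for illustration:

```python
# Hypothetical illustration of "task metadata as inputs": a downstream task
# declares an upstream task's automatically written metadata dataset as an
# input connection.  Names like `someTask_metadata` and `ExampleConnections`
# are invented for this example.
import lsst.pipe.base.connectionTypes as cT
from lsst.pipe.base import PipelineTaskConnections


class ExampleConnections(PipelineTaskConnections, dimensions=("instrument", "visit")):
    upstream_metadata = cT.Input(
        doc="Metadata written by an upstream task, consumed as an input here.",
        name="someTask_metadata",
        storageClass="TaskMetadata",
        dimensions=("instrument", "visit"),
    )
```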
11 changes: 2 additions & 9 deletions python/lsst/pipe/base/connections.py
@@ -493,15 +493,8 @@ class DeferredDatasetRef:
 
     datasetRef: DatasetRef
 
-    @property
-    def datasetType(self) -> DatasetType:
-        """The dataset type for this dataset."""
-        return self.datasetRef.datasetType
-
-    @property
-    def dataId(self) -> DataCoordinate:
-        """The data ID for this dataset."""
-        return self.datasetRef.dataId
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self.datasetRef, name)
 
 
 class PipelineTaskConnections(metaclass=PipelineTaskConnectionsMetaclass):
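The removed properties forwarded only `datasetType` and `dataId`; the new `__getattr__` forwards any attribute lookup to the wrapped `DatasetRef`. A minimal standalone sketch of that delegation pattern, using a hypothetical `_FakeRef` stand-in rather than the real `lsst.daf.butler.DatasetRef`:

```python
# Minimal, self-contained sketch of the delegation pattern adopted above.
# `_FakeRef` is a hypothetical stand-in for `lsst.daf.butler.DatasetRef`;
# only the `__getattr__` forwarding mirrors the actual change.
import dataclasses
from typing import Any


@dataclasses.dataclass(frozen=True)
class _FakeRef:
    datasetType: str
    dataId: dict


@dataclasses.dataclass(frozen=True)
class DeferredDatasetRef:
    datasetRef: _FakeRef

    def __getattr__(self, name: str) -> Any:
        # Called only when normal attribute lookup fails, so every attribute
        # of the wrapped ref (datasetType, dataId, ...) is reachable, not
        # just the two that previously had explicit properties.
        return getattr(self.datasetRef, name)


deferred = DeferredDatasetRef(_FakeRef(datasetType="calexp", dataId={"visit": 42}))
assert deferred.datasetType == "calexp"   # resolved via __getattr__
assert deferred.dataId == {"visit": 42}
```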
5 changes: 5 additions & 0 deletions python/lsst/pipe/base/tests/mocks/_pipeline_task.py
@@ -425,6 +425,11 @@ def __init__(self, *, config: MockPipelineTaskConfig):
                 raise ValueError(
                     f"Unmocked dataset type {connection.name!r} cannot be used as an init-output."
                 )
+            elif connection.name.endswith("_metadata") and connection.storageClass == "TaskMetadata":
+                # Task metadata does not use a mock storage class, because it's
+                # written by the system, but it does end up with the _mock_*
+                # prefix because the task label does.
+                connection = dataclasses.replace(connection, name=get_mock_name(connection.name))
             setattr(self, name, connection)
 
     def getSpatialBoundsConnections(self) -> Iterable[str]:
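A sketch of what the added branch does, assuming (as the in-diff comment suggests) that `get_mock_name` simply prepends the `_mock_` prefix; the `Connection` dataclass here is a hypothetical stand-in for the real connection classes:

```python
# Sketch of the added branch, under the assumption (supported by the in-diff
# comment) that get_mock_name() just prepends the "_mock_" prefix.
# `Connection` is a hypothetical stand-in for the real connection classes.
import dataclasses


def get_mock_name(original: str) -> str:
    return f"_mock_{original}"


@dataclasses.dataclass
class Connection:
    name: str
    storageClass: str


connection = Connection(name="someTask_metadata", storageClass="TaskMetadata")
if connection.name.endswith("_metadata") and connection.storageClass == "TaskMetadata":
    # Metadata is written by the execution system itself, so it keeps its real
    # storage class; only the dataset type name picks up the mock prefix.
    connection = dataclasses.replace(connection, name=get_mock_name(connection.name))

print(connection.name)  # -> _mock_someTask_metadata
```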
