Merge pull request #878 from lsst/tickets/DM-40366
DM-40366: Use default pydantic model for testing
timj authored Aug 12, 2023
2 parents 42f2c38 + 7c5e2c1 commit 41038f7
Showing 2 changed files with 24 additions and 8 deletions.
python/lsst/daf/butler/formatters/yaml.py (21 changes: 16 additions, 5 deletions)
@@ -152,15 +152,26 @@ def _toBytes(self, inMemoryDataset: Any) -> bytes:
         This will fail for data structures that have complex python classes
         without a registered YAML representer.
         """
+        converted = False
         if hasattr(inMemoryDataset, "model_dump") and hasattr(inMemoryDataset, "model_dump_json"):
-            # Pydantic-like model if both dump() and json() exist.
+            # Pydantic v2-like model if both model_dump() and model_dump_json()
+            # exist.
             with contextlib.suppress(Exception):
                 inMemoryDataset = inMemoryDataset.model_dump()
+                converted = True

+        if not converted and hasattr(inMemoryDataset, "dict") and hasattr(inMemoryDataset, "json"):
+            # Pydantic v1-like model if both dict() and json() exist.
+            with contextlib.suppress(Exception):
+                inMemoryDataset = inMemoryDataset.dict()
+                converted = True
+
+        if not converted:
+            if dataclasses.is_dataclass(inMemoryDataset):
+                inMemoryDataset = dataclasses.asdict(inMemoryDataset)
+            elif hasattr(inMemoryDataset, "_asdict"):
+                inMemoryDataset = inMemoryDataset._asdict()
-        if dataclasses.is_dataclass(inMemoryDataset):
-            inMemoryDataset = dataclasses.asdict(inMemoryDataset)
-        elif hasattr(inMemoryDataset, "_asdict"):
-            inMemoryDataset = inMemoryDataset._asdict()

         unsafe_dump = self.writeParameters.get("unsafe_dump", False)
         if unsafe_dump:
             serialized = yaml.dump(inMemoryDataset)
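For illustration, a minimal standalone sketch of the conversion order the revised _toBytes follows: try a pydantic v2-like model_dump() first, then a pydantic v1-like dict(), and only if neither converts the object fall through to dataclasses and namedtuples, after which the plain dictionary can be dumped with PyYAML. The to_plain_dict helper and Point dataclass below are hypothetical names for this sketch, not part of the commit.

# Illustrative only: mirrors the duck-typed conversion order used by _toBytes.
import contextlib
import dataclasses
from typing import Any

import yaml


def to_plain_dict(obj: Any) -> Any:
    converted = False
    if hasattr(obj, "model_dump") and hasattr(obj, "model_dump_json"):
        # Pydantic v2-like model.
        with contextlib.suppress(Exception):
            obj = obj.model_dump()
            converted = True
    if not converted and hasattr(obj, "dict") and hasattr(obj, "json"):
        # Pydantic v1-like model.
        with contextlib.suppress(Exception):
            obj = obj.dict()
            converted = True
    if not converted:
        # Plain dataclasses and namedtuples.
        if dataclasses.is_dataclass(obj):
            obj = dataclasses.asdict(obj)
        elif hasattr(obj, "_asdict"):
            obj = obj._asdict()
    return obj


@dataclasses.dataclass
class Point:
    x: int
    y: int


print(yaml.safe_dump(to_plain_dict(Point(x=1, y=2))))  # prints "x: 1" and "y: 2"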
python/lsst/daf/butler/tests/_examplePythonTypes.py (11 changes: 8 additions, 3 deletions)
@@ -44,7 +44,7 @@
 from typing import TYPE_CHECKING, Any

 from lsst.daf.butler import StorageClass, StorageClassDelegate
-from lsst.daf.butler._compat import _BaseModelCompat
+from pydantic import BaseModel

 if TYPE_CHECKING:
     from lsst.daf.butler import Butler, Datastore, FormatterFactory
@@ -264,7 +264,7 @@ def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExample:
         return cls(exportDict["summary"], exportDict["output"], data)


-class MetricsExampleModel(_BaseModelCompat):
+class MetricsExampleModel(BaseModel):
     """A variant of `MetricsExample` based on model."""

     summary: dict[str, Any] | None = None
@@ -274,7 +274,12 @@ class MetricsExampleModel(_BaseModelCompat):
     @classmethod
     def from_metrics(cls, metrics: MetricsExample) -> MetricsExampleModel:
         """Create a model based on an example."""
-        return cls.model_validate(metrics.exportAsDict())
+        d = metrics.exportAsDict()
+        # Assume pydantic v2 but fall back to v1.
+        try:
+            return cls.model_validate(d)
+        except AttributeError:
+            return cls.parse_obj(d)


 @dataclasses.dataclass
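For illustration, a minimal self-contained sketch of the same compatibility pattern from_metrics now uses: call the pydantic v2 entry point model_validate() and fall back to the v1 classmethod parse_obj() when it does not exist, so the same test code runs against either major version of pydantic. The ExampleModel class and validate_dict helper below are hypothetical names for this sketch, not part of the commit; pydantic (either major version) must be installed.

# Illustrative only: validate a plain dict with pydantic v2 or v1.
from __future__ import annotations

from typing import Any

from pydantic import BaseModel


class ExampleModel(BaseModel):
    summary: dict[str, Any] | None = None
    output: list[Any] | None = None


def validate_dict(data: dict[str, Any]) -> ExampleModel:
    try:
        # pydantic v2 spelling.
        return ExampleModel.model_validate(data)
    except AttributeError:
        # pydantic v1 has no model_validate(); use the old classmethod.
        return ExampleModel.parse_obj(data)


print(validate_dict({"summary": {"n": 1}, "output": [1, 2, 3]}))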
