Skip to content

Commit

Permalink
Initial attempt to support pydantic v2
Browse files Browse the repository at this point in the history
Still some failures. Lots of deprecation warnings.
  • Loading branch information
timj committed Jul 14, 2023
1 parent 0bd4a79 commit d29b608
Show file tree
Hide file tree
Showing 16 changed files with 243 additions and 143 deletions.
112 changes: 112 additions & 0 deletions python/lsst/daf/butler/_compat.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Code to support backwards compatibility."""

__all__ = ["PYDANTIC_V2", "_BaseModelCompat"]

from collections.abc import Callable, Mapping
from typing import Any, Self

from pydantic import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION

PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")


if PYDANTIC_V2:

    class _BaseModelCompat(BaseModel):
        """Methods from pydantic v1 that we want to emulate in v2.

        Some of these methods are provided by v2 but issue deprecation
        warnings. We need to decide whether we are also okay with deprecating
        them or want to support them without the deprecation message.
        """

        def json(
            self,
            *,
            include: set[int | str] | Mapping[int | str, Any] | None = None,
            exclude: set[int | str] | Mapping[int | str, Any] | None = None,
            by_alias: bool = False,
            skip_defaults: bool | None = None,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            encoder: Callable[[Any], Any] | None = None,
            models_as_dict: bool = True,
            **dumps_kwargs: Any,
        ) -> str:
            """Serialize the model to a JSON string using the pydantic v1
            ``json()`` interface, delegating to v2 ``model_dump_json()``.

            Raises
            ------
            TypeError
                Raised if a v1-only option with no v2 equivalent
                (``**dumps_kwargs``, ``encoder``, ``models_as_dict=False``)
                is given.
            """
            if dumps_kwargs:
                raise TypeError("dumps_kwargs no longer supported.")
            if encoder is not None:
                raise TypeError("json encoder is no longer supported.")
            if models_as_dict is not True:
                # v2 always serializes nested models as dicts; refuse rather
                # than silently produce different output than v1 would have.
                raise TypeError("models_as_dict=False is no longer supported.")
            if skip_defaults is not None:
                # In pydantic v1, skip_defaults was a deprecated alias for
                # exclude_unset; honor it instead of silently dropping it.
                exclude_unset = skip_defaults
            # Can catch warnings and call BaseModel.json() directly.
            return self.model_dump_json(
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
                exclude_unset=exclude_unset,
            )

        @classmethod
        def parse_obj(cls, obj: Any) -> Self:
            """Validate ``obj`` and return a model instance (pydantic v1
            name for v2 ``model_validate``).
            """
            # Catch warnings and call BaseModel.parse_obj directly?
            return cls.model_validate(obj)

else:

class _BaseModelCompat(BaseModel):
@classmethod
def model_validate(
cls,
obj: Any,
*,
strict: bool | None = None,
from_attributes: bool | None = None,
context: dict[str, Any] | None = None,
) -> Self:
return cls.parse_obj(obj)

def model_dump_json(
self,
*,
indent: int | None = None,
include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
by_alias: bool = False,
exclude_unset: bool = False,
exclude_defaults: bool = False,
exclude_none: bool = False,
round_trip: bool = False,
warnings: bool = True,
) -> str:
return self.json(
include=include,
exclude=exclude,
by_alias=by_alias,
exclude_unset=exclude_unset,
exclude_defaults=exclude_defaults,
exclude_none=exclude_none,
)
8 changes: 2 additions & 6 deletions python/lsst/daf/butler/_quantum_backed.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,13 +31,9 @@
from typing import TYPE_CHECKING, Any

from deprecated.sphinx import deprecated
from lsst.daf.butler._compat import _BaseModelCompat
from lsst.resources import ResourcePathExpression

try:
from pydantic.v1 import BaseModel
except ModuleNotFoundError:
from pydantic import BaseModel # type: ignore

from ._butlerConfig import ButlerConfig
from ._deferredDatasetHandle import DeferredDatasetHandle
from ._limited_butler import LimitedButler
Expand Down Expand Up @@ -597,7 +593,7 @@ def extract_provenance_data(self) -> QuantumProvenanceData:
)


class QuantumProvenanceData(BaseModel):
class QuantumProvenanceData(_BaseModelCompat):
"""A serializable struct for per-quantum provenance information and
datastore records.
Expand Down
9 changes: 3 additions & 6 deletions python/lsst/daf/butler/core/datasets/ref.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,12 +35,9 @@
from collections.abc import Iterable
from typing import TYPE_CHECKING, Any, ClassVar, Protocol, runtime_checkable

from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils.classes import immutable

try:
from pydantic.v1 import BaseModel, StrictStr, validator
except ModuleNotFoundError:
from pydantic import BaseModel, StrictStr, validator # type: ignore
from pydantic import StrictStr, validator

from ..configSupport import LookupKey
from ..dimensions import DataCoordinate, DimensionGraph, DimensionUniverse, SerializedDataCoordinate
Expand Down Expand Up @@ -173,7 +170,7 @@ def makeDatasetId(
_serializedDatasetRefFieldsSet = {"id", "datasetType", "dataId", "run", "component"}


class SerializedDatasetRef(BaseModel):
class SerializedDatasetRef(_BaseModelCompat):
"""Simplified model of a `DatasetRef` suitable for serialization."""

id: uuid.UUID
Expand Down
8 changes: 3 additions & 5 deletions python/lsst/daf/butler/core/datasets/type.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,10 +29,8 @@
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, ClassVar

try:
from pydantic.v1 import BaseModel, StrictBool, StrictStr
except ModuleNotFoundError:
from pydantic import BaseModel, StrictBool, StrictStr # type: ignore
from lsst.daf.butler._compat import _BaseModelCompat
from pydantic import StrictBool, StrictStr

from ..configSupport import LookupKey
from ..dimensions import DimensionGraph, SerializedDimensionGraph
Expand All @@ -51,7 +49,7 @@ def _safeMakeMappingProxyType(data: Mapping | None) -> Mapping:
return MappingProxyType(data)


class SerializedDatasetType(BaseModel):
class SerializedDatasetType(_BaseModelCompat):
"""Simplified model of a `DatasetType` suitable for serialization."""

name: StrictStr
Expand Down
11 changes: 4 additions & 7 deletions python/lsst/daf/butler/core/datastoreCacheManager.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,12 +45,9 @@
from random import Random
from typing import TYPE_CHECKING

from lsst.daf.butler._compat import _BaseModelCompat
from lsst.resources import ResourcePath

try:
from pydantic.v1 import BaseModel, PrivateAttr
except ModuleNotFoundError:
from pydantic import BaseModel, PrivateAttr # type: ignore
from pydantic import PrivateAttr

from .config import ConfigSubset
from .configSupport import processLookupConfigs
Expand Down Expand Up @@ -124,7 +121,7 @@ def _parse_cache_name(cached_location: str) -> tuple[uuid.UUID, str | None, str
return id_, component, ext


class CacheEntry(BaseModel):
class CacheEntry(_BaseModelCompat):
"""Represent an entry in the cache."""

name: str
Expand Down Expand Up @@ -172,7 +169,7 @@ class _MarkerEntry(CacheEntry):
pass


class CacheRegistry(BaseModel):
class CacheRegistry(_BaseModelCompat):
"""Collection of cache entries."""

_size: int = PrivateAttr(0)
Expand Down
8 changes: 2 additions & 6 deletions python/lsst/daf/butler/core/datastoreRecordData.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,14 +30,10 @@
from collections.abc import Mapping
from typing import TYPE_CHECKING, Any

from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils import doImportType
from lsst.utils.introspection import get_full_type_name

try:
from pydantic.v1 import BaseModel
except ModuleNotFoundError:
from pydantic import BaseModel # type: ignore

from .datasets import DatasetId
from .dimensions import DimensionUniverse
from .persistenceContext import PersistenceContextVars
Expand All @@ -49,7 +45,7 @@
_Record = dict[str, Any]


class SerializedDatastoreRecordData(BaseModel):
class SerializedDatastoreRecordData(_BaseModelCompat):
"""Representation of a `DatastoreRecordData` suitable for serialization."""

dataset_ids: list[uuid.UUID]
Expand Down
8 changes: 2 additions & 6 deletions python/lsst/daf/butler/core/dimensions/_coordinate.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,13 +34,9 @@
from typing import TYPE_CHECKING, Any, ClassVar, Literal, overload

from deprecated.sphinx import deprecated
from lsst.daf.butler._compat import _BaseModelCompat
from lsst.sphgeom import IntersectionRegion, Region

try:
from pydantic.v1 import BaseModel
except ModuleNotFoundError:
from pydantic import BaseModel # type: ignore

from ..json import from_json_pydantic, to_json_pydantic
from ..named import NamedKeyDict, NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
from ..persistenceContext import PersistenceContextVars
Expand All @@ -65,7 +61,7 @@
"""


class SerializedDataCoordinate(BaseModel):
class SerializedDataCoordinate(_BaseModelCompat):
"""Simplified model for serializing a `DataCoordinate`."""

dataId: dict[str, DataIdValue]
Expand Down
8 changes: 2 additions & 6 deletions python/lsst/daf/butler/core/dimensions/_graph.py
Original file line number Diff line number Diff line change
Expand Up @@ -28,13 +28,9 @@
from types import MappingProxyType
from typing import TYPE_CHECKING, Any, ClassVar

from lsst.daf.butler._compat import _BaseModelCompat
from lsst.utils.classes import cached_getter, immutable

try:
from pydantic.v1 import BaseModel
except ModuleNotFoundError:
from pydantic import BaseModel # type: ignore

from .._topology import TopologicalFamily, TopologicalSpace
from ..json import from_json_pydantic, to_json_pydantic
from ..named import NamedValueAbstractSet, NamedValueSet
Expand All @@ -46,7 +42,7 @@
from ._universe import DimensionUniverse


class SerializedDimensionGraph(BaseModel):
class SerializedDimensionGraph(_BaseModelCompat):
"""Simplified model of a `DimensionGraph` suitable for serialization."""

names: list[str]
Expand Down
51 changes: 21 additions & 30 deletions python/lsst/daf/butler/core/dimensions/_records.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,20 +26,9 @@
from typing import TYPE_CHECKING, Any, ClassVar, Optional, Tuple

import lsst.sphgeom
from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat
from lsst.utils.classes import immutable

try:
from pydantic.v1 import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr, create_model
except ModuleNotFoundError:
from pydantic import ( # type: ignore
BaseModel,
Field,
StrictBool,
StrictFloat,
StrictInt,
StrictStr,
create_model,
)
from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr, create_model

from ..json import from_json_pydantic, to_json_pydantic
from ..persistenceContext import PersistenceContextVars
Expand Down Expand Up @@ -78,7 +67,7 @@ def _subclassDimensionRecord(definition: DimensionElement) -> type[DimensionReco
return type(definition.name + ".RecordClass", (DimensionRecord,), d)


class SpecificSerializedDimensionRecord(BaseModel, extra="forbid"):
class SpecificSerializedDimensionRecord(_BaseModelCompat, extra="forbid"):
"""Base model for a specific serialized record content."""


Expand Down Expand Up @@ -128,7 +117,7 @@ def _createSimpleRecordSubclass(definition: DimensionElement) -> type[SpecificSe
return model


class SerializedDimensionRecord(BaseModel):
class SerializedDimensionRecord(_BaseModelCompat):
"""Simplified model for serializing a `DimensionRecord`."""

definition: str = Field(
Expand All @@ -147,22 +136,24 @@ class SerializedDimensionRecord(BaseModel):
},
)

class Config:
"""Local configuration overrides for model."""

schema_extra = {
"example": {
"definition": "detector",
"record": {
"instrument": "HSC",
"id": 72,
"full_name": "0_01",
"name_in_raft": "01",
"raft": "0",
"purpose": "SCIENCE",
},
if not PYDANTIC_V2:

class Config:
"""Local configuration overrides for model."""

schema_extra = {
"example": {
"definition": "detector",
"record": {
"instrument": "HSC",
"id": 72,
"full_name": "0_01",
"name_in_raft": "01",
"raft": "0",
"purpose": "SCIENCE",
},
}
}
}

@classmethod
def direct(
Expand Down
Loading

0 comments on commit d29b608

Please sign in to comment.