diff --git a/python/lsst/daf/butler/_compat.py b/python/lsst/daf/butler/_compat.py
new file mode 100644
index 0000000000..8f16d452e5
--- /dev/null
+++ b/python/lsst/daf/butler/_compat.py
@@ -0,0 +1,112 @@
+# This file is part of daf_butler.
+#
+# Developed for the LSST Data Management System.
+# This product includes software developed by the LSST Project
+# (https://www.lsst.org).
+# See the COPYRIGHT file at the top-level directory of this distribution
+# for details of code ownership.
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+"""Code to support backwards compatibility."""
+
+__all__ = ["PYDANTIC_V2", "_BaseModelCompat"]
+
+from collections.abc import Callable, Mapping
+from typing import Any, Self
+
+from pydantic import BaseModel
+from pydantic.version import VERSION as PYDANTIC_VERSION
+
+PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")
+
+
+if PYDANTIC_V2:
+
+    class _BaseModelCompat(BaseModel):
+        """Methods from pydantic v1 that we want to emulate in v2.
+
+        Some of these methods are provided by v2 but issue deprecation
+        warnings. We need to decide whether we are also okay with deprecating
+        them or want to support them without the deprecation message.
+        """
+
+        def json(
+            self,
+            *,
+            include: set[int | str] | Mapping[int | str, Any] | None = None,
+            exclude: set[int | str] | Mapping[int | str, Any] | None = None,
+            by_alias: bool = False,
+            skip_defaults: bool | None = None,
+            exclude_unset: bool = False,
+            exclude_defaults: bool = False,
+            exclude_none: bool = False,
+            encoder: Callable[[Any], Any] | None = None,
+            models_as_dict: bool = True,
+            **dumps_kwargs: Any,
+        ) -> str:
+            if dumps_kwargs:
+                raise TypeError("dumps_kwargs no longer supported.")
+            if encoder is not None:
+                raise TypeError("json encoder is no longer supported.")
+            # Can catch warnings and call BaseModel.json() directly.
+            return self.model_dump_json(
+                include=include,
+                exclude=exclude,
+                by_alias=by_alias,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+                exclude_unset=exclude_unset,
+            )
+
+        @classmethod
+        def parse_obj(cls, obj: Any) -> Self:
+            # Catch warnings and call BaseModel.parse_obj directly?
+            return cls.model_validate(obj)
+
+else:
+
+    class _BaseModelCompat(BaseModel):
+        @classmethod
+        def model_validate(
+            cls,
+            obj: Any,
+            *,
+            strict: bool | None = None,
+            from_attributes: bool | None = None,
+            context: dict[str, Any] | None = None,
+        ) -> Self:
+            return cls.parse_obj(obj)
+
+        def model_dump_json(
+            self,
+            *,
+            indent: int | None = None,
+            include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
+            exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
+            by_alias: bool = False,
+            exclude_unset: bool = False,
+            exclude_defaults: bool = False,
+            exclude_none: bool = False,
+            round_trip: bool = False,
+            warnings: bool = True,
+        ) -> str:
+            return self.json(
+                include=include,
+                exclude=exclude,
+                by_alias=by_alias,
+                exclude_unset=exclude_unset,
+                exclude_defaults=exclude_defaults,
+                exclude_none=exclude_none,
+            )
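
The shim is selected once at import time, so a model inheriting from `_BaseModelCompat` answers to both the v1 and v2 spellings no matter which pydantic major is installed. A minimal sketch of the intended call pattern (assuming the patched `lsst.daf.butler` is importable; `_Example` is a made-up model, not part of the patch):

```python
from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat


class _Example(_BaseModelCompat):
    """Toy model used only to exercise the compatibility surface."""

    name: str
    value: int = 0


data = {"name": "a", "value": 1}
# The v2 spelling works under v1 via the shim, and natively under v2 ...
m1 = _Example.model_validate(data)
# ... and the v1 spelling keeps working under v2.
m2 = _Example.parse_obj(data)
assert m1 == m2
print(f"pydantic v2: {PYDANTIC_V2}", m1.model_dump_json())
```
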
diff --git a/python/lsst/daf/butler/_quantum_backed.py b/python/lsst/daf/butler/_quantum_backed.py
index 62c33eaa56..9f2aabb4a2 100644
--- a/python/lsst/daf/butler/_quantum_backed.py
+++ b/python/lsst/daf/butler/_quantum_backed.py
@@ -31,13 +31,9 @@
 from typing import TYPE_CHECKING, Any
 
 from deprecated.sphinx import deprecated
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.resources import ResourcePathExpression
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from ._butlerConfig import ButlerConfig
 from ._deferredDatasetHandle import DeferredDatasetHandle
 from ._limited_butler import LimitedButler
@@ -597,7 +593,7 @@ def extract_provenance_data(self) -> QuantumProvenanceData:
         )
 
 
-class QuantumProvenanceData(BaseModel):
+class QuantumProvenanceData(_BaseModelCompat):
     """A serializable struct for per-quantum provenance information and
     datastore records.
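
The `# Can catch warnings ...` comments in `_compat.py` point at the alternative that was not taken: keep calling the deprecated v2 methods and silence the warning. A sketch of what that would look like (v2 only; `_Quiet` and `json_quiet` are hypothetical names):

```python
import warnings

from pydantic import BaseModel


class _Quiet(BaseModel):
    x: int = 0

    def json_quiet(self) -> str:
        # BaseModel.json() still exists in pydantic v2 but emits a
        # DeprecationWarning subclass; silence it for this one call.
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", DeprecationWarning)
            return self.json()
```
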
diff --git a/python/lsst/daf/butler/core/datasets/ref.py b/python/lsst/daf/butler/core/datasets/ref.py
index d036c8857f..030c97e5a0 100644
--- a/python/lsst/daf/butler/core/datasets/ref.py
+++ b/python/lsst/daf/butler/core/datasets/ref.py
@@ -35,12 +35,9 @@
 from collections.abc import Iterable
 from typing import TYPE_CHECKING, Any, ClassVar, Protocol, runtime_checkable
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.utils.classes import immutable
-
-try:
-    from pydantic.v1 import BaseModel, StrictStr, validator
-except ModuleNotFoundError:
-    from pydantic import BaseModel, StrictStr, validator  # type: ignore
+from pydantic import StrictStr, validator
 
 from ..configSupport import LookupKey
 from ..dimensions import DataCoordinate, DimensionGraph, DimensionUniverse, SerializedDataCoordinate
@@ -173,7 +170,7 @@ def makeDatasetId(
 _serializedDatasetRefFieldsSet = {"id", "datasetType", "dataId", "run", "component"}
 
 
-class SerializedDatasetRef(BaseModel):
+class SerializedDatasetRef(_BaseModelCompat):
     """Simplified model of a `DatasetRef` suitable for serialization."""
 
     id: uuid.UUID
diff --git a/python/lsst/daf/butler/core/datasets/type.py b/python/lsst/daf/butler/core/datasets/type.py
index f8b5293e6d..80e5f31ecb 100644
--- a/python/lsst/daf/butler/core/datasets/type.py
+++ b/python/lsst/daf/butler/core/datasets/type.py
@@ -29,10 +29,8 @@
 from types import MappingProxyType
 from typing import TYPE_CHECKING, Any, ClassVar
 
-try:
-    from pydantic.v1 import BaseModel, StrictBool, StrictStr
-except ModuleNotFoundError:
-    from pydantic import BaseModel, StrictBool, StrictStr  # type: ignore
+from lsst.daf.butler._compat import _BaseModelCompat
+from pydantic import StrictBool, StrictStr
 
 from ..configSupport import LookupKey
 from ..dimensions import DimensionGraph, SerializedDimensionGraph
@@ -51,7 +49,7 @@ def _safeMakeMappingProxyType(data: Mapping | None) -> Mapping:
     return MappingProxyType(data)
 
 
-class SerializedDatasetType(BaseModel):
+class SerializedDatasetType(_BaseModelCompat):
     """Simplified model of a `DatasetType` suitable for serialization."""
 
     name: StrictStr
diff --git a/python/lsst/daf/butler/core/datastoreCacheManager.py b/python/lsst/daf/butler/core/datastoreCacheManager.py
index 37b67bf695..e8d03e6add 100644
--- a/python/lsst/daf/butler/core/datastoreCacheManager.py
+++ b/python/lsst/daf/butler/core/datastoreCacheManager.py
@@ -45,12 +45,9 @@
 from random import Random
 from typing import TYPE_CHECKING
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.resources import ResourcePath
-
-try:
-    from pydantic.v1 import BaseModel, PrivateAttr
-except ModuleNotFoundError:
-    from pydantic import BaseModel, PrivateAttr  # type: ignore
+from pydantic import PrivateAttr
 
 from .config import ConfigSubset
 from .configSupport import processLookupConfigs
@@ -124,7 +121,7 @@ def _parse_cache_name(cached_location: str) -> tuple[uuid.UUID, str | None, str
     return id_, component, ext
 
 
-class CacheEntry(BaseModel):
+class CacheEntry(_BaseModelCompat):
     """Represent an entry in the cache."""
 
     name: str
@@ -172,7 +169,7 @@ class _MarkerEntry(CacheEntry):
     pass
 
 
-class CacheRegistry(BaseModel):
+class CacheRegistry(_BaseModelCompat):
     """Collection of cache entries."""
 
     _size: int = PrivateAttr(0)
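
The three files above follow the mechanical recipe repeated throughout this patch: only `BaseModel` moves to the shim, while field-level helpers (`StrictStr`, `validator`, `PrivateAttr`, ...) are still imported from `pydantic` directly, because both majors export those names. The recipe on a toy model (illustrative only, not from the patch):

```python
from lsst.daf.butler._compat import _BaseModelCompat
from pydantic import PrivateAttr, StrictStr


class _ToyEntry(_BaseModelCompat):
    """Stand-in for models like CacheEntry; not part of the patch."""

    name: StrictStr
    # Private attributes are declared the same way under v1 and v2.
    _hits: int = PrivateAttr(0)
```
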
diff --git a/python/lsst/daf/butler/core/datastoreRecordData.py b/python/lsst/daf/butler/core/datastoreRecordData.py
index 744af46006..c2a13c8fe8 100644
--- a/python/lsst/daf/butler/core/datastoreRecordData.py
+++ b/python/lsst/daf/butler/core/datastoreRecordData.py
@@ -30,14 +30,10 @@
 from collections.abc import Mapping
 from typing import TYPE_CHECKING, Any
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.utils import doImportType
 from lsst.utils.introspection import get_full_type_name
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from .datasets import DatasetId
 from .dimensions import DimensionUniverse
 from .persistenceContext import PersistenceContextVars
@@ -49,7 +45,7 @@
 _Record = dict[str, Any]
 
 
-class SerializedDatastoreRecordData(BaseModel):
+class SerializedDatastoreRecordData(_BaseModelCompat):
     """Representation of a `DatastoreRecordData` suitable for serialization."""
 
     dataset_ids: list[uuid.UUID]
diff --git a/python/lsst/daf/butler/core/dimensions/_coordinate.py b/python/lsst/daf/butler/core/dimensions/_coordinate.py
index 175e52a855..6c66194ad7 100644
--- a/python/lsst/daf/butler/core/dimensions/_coordinate.py
+++ b/python/lsst/daf/butler/core/dimensions/_coordinate.py
@@ -34,13 +34,9 @@
 from typing import TYPE_CHECKING, Any, ClassVar, Literal, overload
 
 from deprecated.sphinx import deprecated
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.sphgeom import IntersectionRegion, Region
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from ..json import from_json_pydantic, to_json_pydantic
 from ..named import NamedKeyDict, NamedKeyMapping, NamedValueAbstractSet, NameLookupMapping
 from ..persistenceContext import PersistenceContextVars
@@ -65,7 +61,7 @@
 """
 
 
-class SerializedDataCoordinate(BaseModel):
+class SerializedDataCoordinate(_BaseModelCompat):
     """Simplified model for serializing a `DataCoordinate`."""
 
     dataId: dict[str, DataIdValue]
diff --git a/python/lsst/daf/butler/core/dimensions/_graph.py b/python/lsst/daf/butler/core/dimensions/_graph.py
index c5233aba33..d98749fe03 100644
--- a/python/lsst/daf/butler/core/dimensions/_graph.py
+++ b/python/lsst/daf/butler/core/dimensions/_graph.py
@@ -28,13 +28,9 @@
 from types import MappingProxyType
 from typing import TYPE_CHECKING, Any, ClassVar
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.utils.classes import cached_getter, immutable
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from .._topology import TopologicalFamily, TopologicalSpace
 from ..json import from_json_pydantic, to_json_pydantic
 from ..named import NamedValueAbstractSet, NamedValueSet
@@ -46,7 +42,7 @@
 from ._universe import DimensionUniverse
 
 
-class SerializedDimensionGraph(BaseModel):
+class SerializedDimensionGraph(_BaseModelCompat):
     """Simplified model of a `DimensionGraph` suitable for serialization."""
 
     names: list[str]
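
With the base class swapped, these serialized dimension models round-trip identically through either API spelling. A smoke-test sketch against one of the models just converted (its only field visible in the hunk above is `names`; assumes the patched package is on the path):

```python
from lsst.daf.butler.core.dimensions._graph import SerializedDimensionGraph

payload = {"names": ["instrument", "detector"]}
graph = SerializedDimensionGraph.model_validate(payload)
# The v1 spelling produces the same serialization via the shim.
assert graph.model_dump_json() == SerializedDimensionGraph.parse_obj(payload).json()
```
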
diff --git a/python/lsst/daf/butler/core/dimensions/_records.py b/python/lsst/daf/butler/core/dimensions/_records.py
index b8fdbde73b..71c2ab27b7 100644
--- a/python/lsst/daf/butler/core/dimensions/_records.py
+++ b/python/lsst/daf/butler/core/dimensions/_records.py
@@ -26,20 +26,9 @@
 from typing import TYPE_CHECKING, Any, ClassVar, Optional, Tuple
 
 import lsst.sphgeom
+from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat
 from lsst.utils.classes import immutable
 
-try:
-    from pydantic.v1 import BaseModel, Field, StrictBool, StrictFloat, StrictInt, StrictStr, create_model
-except ModuleNotFoundError:
-    from pydantic import (  # type: ignore
-        BaseModel,
-        Field,
-        StrictBool,
-        StrictFloat,
-        StrictInt,
-        StrictStr,
-        create_model,
-    )
+from pydantic import Field, StrictBool, StrictFloat, StrictInt, StrictStr, create_model
 
 from ..json import from_json_pydantic, to_json_pydantic
 from ..persistenceContext import PersistenceContextVars
@@ -78,7 +67,7 @@ def _subclassDimensionRecord(definition: DimensionElement) -> type[DimensionReco
     return type(definition.name + ".RecordClass", (DimensionRecord,), d)
 
 
-class SpecificSerializedDimensionRecord(BaseModel, extra="forbid"):
+class SpecificSerializedDimensionRecord(_BaseModelCompat, extra="forbid"):
     """Base model for a specific serialized record content."""
 
 
@@ -128,7 +117,7 @@ def _createSimpleRecordSubclass(definition: DimensionElement) -> type[SpecificSe
     return model
 
 
-class SerializedDimensionRecord(BaseModel):
+class SerializedDimensionRecord(_BaseModelCompat):
     """Simplified model for serializing a `DimensionRecord`."""
 
     definition: str = Field(
@@ -147,22 +136,24 @@ class SerializedDimensionRecord(BaseModel):
         },
     )
 
-    class Config:
-        """Local configuration overrides for model."""
-
-        schema_extra = {
-            "example": {
-                "definition": "detector",
-                "record": {
-                    "instrument": "HSC",
-                    "id": 72,
-                    "full_name": "0_01",
-                    "name_in_raft": "01",
-                    "raft": "0",
-                    "purpose": "SCIENCE",
-                },
+    if not PYDANTIC_V2:
+
+        class Config:
+            """Local configuration overrides for model."""
+
+            schema_extra = {
+                "example": {
+                    "definition": "detector",
+                    "record": {
+                        "instrument": "HSC",
+                        "id": 72,
+                        "full_name": "0_01",
+                        "name_in_raft": "01",
+                        "raft": "0",
+                        "purpose": "SCIENCE",
+                    },
+                }
             }
-        }
 
     @classmethod
     def direct(
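
Guarding the inner `Config` class means the JSON-schema example is simply dropped when running under v2, where `schema_extra` is no longer honored. If the example were wanted under v2 as well, the equivalent spelling would be something like the following (a v2-only sketch, not part of the patch):

```python
from pydantic import BaseModel, ConfigDict  # ConfigDict exists only in v2


class _WithExample(BaseModel):
    definition: str

    model_config = ConfigDict(
        json_schema_extra={"example": {"definition": "detector"}}
    )
```
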
""" - return cls(__root__=list(records)) + if PYDANTIC_V2: + return cls(list(records)) + else: + return cls(__root__=list(records)) @classmethod def from_file(cls, filename: str) -> "ButlerLogRecords": @@ -461,16 +476,16 @@ def set_log_format(self, format: str | None) -> str | None: return previous def __len__(self) -> int: - return len(self.__root__) + return len(self.root) # The signature does not match the one in BaseModel but that is okay # if __root__ is being used. # See https://pydantic-docs.helpmanual.io/usage/models/#custom-root-types def __iter__(self) -> Iterator[ButlerLogRecord]: # type: ignore - return iter(self.__root__) + return iter(self.root) def __setitem__(self, index: int, value: Record) -> None: - self.__root__[index] = self._validate_record(value) + self.root[index] = self._validate_record(value) @overload def __getitem__(self, index: int) -> ButlerLogRecord: @@ -483,21 +498,24 @@ def __getitem__(self, index: slice) -> "ButlerLogRecords": def __getitem__(self, index: slice | int) -> "Union[ButlerLogRecords, ButlerLogRecord]": # Handles slices and returns a new collection in that # case. - item = self.__root__[index] + item = self.root[index] if isinstance(item, list): - return type(self)(__root__=item) + if PYDANTIC_V2: + return type(self)(item) + else: + return type(self)(__root__=item) else: return item def __reversed__(self) -> Iterator[ButlerLogRecord]: - return self.__root__.__reversed__() + return self.root.__reversed__() def __delitem__(self, index: slice | int) -> None: - del self.__root__[index] + del self.root[index] def __str__(self) -> str: # Ensure that every record uses the same format string. - return "\n".join(record.format(self.log_format) for record in self.__root__) + return "\n".join(record.format(self.log_format) for record in self.root) def _validate_record(self, record: Record) -> ButlerLogRecord: if isinstance(record, ButlerLogRecord): @@ -509,23 +527,23 @@ def _validate_record(self, record: Record) -> ButlerLogRecord: return record def insert(self, index: int, value: Record) -> None: - self.__root__.insert(index, self._validate_record(value)) + self.root.insert(index, self._validate_record(value)) def append(self, value: Record) -> None: value = self._validate_record(value) - self.__root__.append(value) + self.root.append(value) def clear(self) -> None: - self.__root__.clear() + self.root.clear() def extend(self, records: Iterable[Record]) -> None: - self.__root__.extend(self._validate_record(record) for record in records) + self.root.extend(self._validate_record(record) for record in records) def pop(self, index: int = -1) -> ButlerLogRecord: - return self.__root__.pop(index) + return self.root.pop(index) def reverse(self) -> None: - self.__root__.reverse() + self.root.reverse() class ButlerLogRecordHandler(StreamHandler): @@ -533,7 +551,10 @@ class ButlerLogRecordHandler(StreamHandler): def __init__(self) -> None: super().__init__() - self.records = ButlerLogRecords(__root__=[]) + if PYDANTIC_V2: + self.records = ButlerLogRecords([]) + else: + self.records = ButlerLogRecords(__root__=[]) def emit(self, record: LogRecord) -> None: self.records.append(record) diff --git a/python/lsst/daf/butler/core/quantum.py b/python/lsst/daf/butler/core/quantum.py index a488cdf448..96dfaefa1e 100644 --- a/python/lsst/daf/butler/core/quantum.py +++ b/python/lsst/daf/butler/core/quantum.py @@ -28,14 +28,10 @@ from collections.abc import Iterable, Mapping, MutableMapping, Sequence from typing import Any +from lsst.daf.butler._compat import _BaseModelCompat 
diff --git a/python/lsst/daf/butler/core/quantum.py b/python/lsst/daf/butler/core/quantum.py
index a488cdf448..96dfaefa1e 100644
--- a/python/lsst/daf/butler/core/quantum.py
+++ b/python/lsst/daf/butler/core/quantum.py
@@ -28,14 +28,10 @@
 from collections.abc import Iterable, Mapping, MutableMapping, Sequence
 from typing import Any
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.utils import doImportType
 from lsst.utils.introspection import find_outside_stacklevel
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from .datasets import DatasetRef, DatasetType, SerializedDatasetRef, SerializedDatasetType
 from .datastoreRecordData import DatastoreRecordData, SerializedDatastoreRecordData
 from .dimensions import (
@@ -73,7 +69,7 @@ def _reconstructDatasetRef(
     return rebuiltDatasetRef
 
 
-class SerializedQuantum(BaseModel):
+class SerializedQuantum(_BaseModelCompat):
     """Simplified model of a `Quantum` suitable for serialization."""
 
     taskName: str | None = None
diff --git a/python/lsst/daf/butler/core/serverModels.py b/python/lsst/daf/butler/core/serverModels.py
index f8f2b56397..113fe20d75 100644
--- a/python/lsst/daf/butler/core/serverModels.py
+++ b/python/lsst/daf/butler/core/serverModels.py
@@ -33,12 +33,9 @@
 from collections.abc import Mapping
 from typing import Any, ClassVar
 
+from lsst.daf.butler._compat import _BaseModelCompat
 from lsst.utils.iteration import ensure_iterable
 
-try:
-    from pydantic.v1 import BaseModel, Field, validator
-except ModuleNotFoundError:
-    from pydantic import BaseModel, Field, validator  # type: ignore
+from pydantic import Field, validator
 
 from .dimensions import DataIdValue, SerializedDataCoordinate
 from .utils import globToRegex
@@ -53,7 +50,7 @@
 SimpleDataId = Mapping[str, DataIdValue]
 
 
-class ExpressionQueryParameter(BaseModel):
+class ExpressionQueryParameter(_BaseModelCompat):
     """Represents a specification for an expression query.
 
     Generally used for collection or dataset type expressions. This
@@ -198,7 +195,7 @@ class DatasetsQueryParameter(ExpressionQueryParameter):
     )
 
 
-class QueryBaseModel(BaseModel):
+class QueryBaseModel(_BaseModelCompat):
     """Base model for all query models."""
 
     @validator("keyword_args", check_fields=False)
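
Note that `validator` is deliberately still imported from `pydantic` here: v2 re-exports it as a deprecated alias, so `QueryBaseModel` keeps working at the cost of a deprecation warning at class-definition time. The v2-native replacement, should the warning become unacceptable, is `field_validator`; a v2-only sketch with an invented field and check, not the patch's code:

```python
from pydantic import BaseModel, field_validator


class _Query(BaseModel):
    keyword_args: dict[str, str] = {}

    @field_validator("keyword_args", check_fields=False)
    @classmethod
    def _drop_empty_keys(cls, value: dict[str, str]) -> dict[str, str]:
        # Hypothetical check, standing in for the real keyword_args validator.
        return {k: v for k, v in value.items() if k}
```
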
""" @@ -68,7 +66,7 @@ class ExtraColumnConfig(BaseModel): """Documentation string for this column.""" -class DatasetTypeConfig(BaseModel): +class DatasetTypeConfig(_BaseModelCompat): """Configuration describing dataset type-related options.""" dataproduct_type: str @@ -107,7 +105,7 @@ class DatasetTypeConfig(BaseModel): values, or ExtraColumnConfig mappings.""" -class SpatialPluginConfig(BaseModel): +class SpatialPluginConfig(_BaseModelCompat): """Configuration class for a spatial plugin.""" cls: str @@ -117,7 +115,7 @@ class SpatialPluginConfig(BaseModel): """Configuration object passed to plugin ``initialize()`` method.""" -class ObsCoreConfig(BaseModel): +class ObsCoreConfig(_BaseModelCompat): """Configuration which controls conversion of Registry datasets into obscore records. diff --git a/python/lsst/daf/butler/registry/wildcards.py b/python/lsst/daf/butler/registry/wildcards.py index 8affcc2b7a..4d371df9a4 100644 --- a/python/lsst/daf/butler/registry/wildcards.py +++ b/python/lsst/daf/butler/registry/wildcards.py @@ -34,13 +34,9 @@ from typing import Any from deprecated.sphinx import deprecated +from lsst.daf.butler._compat import PYDANTIC_V2 from lsst.utils.iteration import ensure_iterable -try: - from pydantic.v1 import BaseModel -except ModuleNotFoundError: - from pydantic import BaseModel # type: ignore - from ..core import DatasetType from ..core.utils import globToRegex from ._exceptions import CollectionExpressionError, DatasetTypeExpressionError @@ -262,12 +258,29 @@ def process(element: Any, alreadyCoerced: bool = False) -> EllipsisType | None: """ +if PYDANTIC_V2: + from pydantic import RootModel + + class _CollectionSearch(RootModel, Sequence[str]): + root: tuple[str, ...] + +else: + from pydantic import BaseModel + + class _CollectionSearch(BaseModel, Sequence[str]): + __root_: tuple[str, ...] + + @property + def root(self) -> tuple[str, ...]: + return self.__root__ + + @deprecated( reason="Tuples of string collection names are now preferred. Will be removed after v26.", version="v25.0", category=FutureWarning, ) -class CollectionSearch(BaseModel, Sequence[str]): +class CollectionSearch(_CollectionSearch): """An ordered search path of collections. The `fromExpression` method should almost always be used to construct @@ -297,8 +310,6 @@ class CollectionSearch(BaseModel, Sequence[str]): how different the original expressions appear. """ - __root__: tuple[str, ...] 
diff --git a/python/lsst/daf/butler/registry/wildcards.py b/python/lsst/daf/butler/registry/wildcards.py
index 8affcc2b7a..4d371df9a4 100644
--- a/python/lsst/daf/butler/registry/wildcards.py
+++ b/python/lsst/daf/butler/registry/wildcards.py
@@ -34,13 +34,9 @@
 from typing import Any
 
 from deprecated.sphinx import deprecated
+from lsst.daf.butler._compat import PYDANTIC_V2
 from lsst.utils.iteration import ensure_iterable
 
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
 from ..core import DatasetType
 from ..core.utils import globToRegex
 from ._exceptions import CollectionExpressionError, DatasetTypeExpressionError
@@ -262,12 +258,29 @@
     """
 
 
+if PYDANTIC_V2:
+    from pydantic import RootModel
+
+    class _CollectionSearch(RootModel, Sequence[str]):
+        root: tuple[str, ...]
+
+else:
+    from pydantic import BaseModel
+
+    class _CollectionSearch(BaseModel, Sequence[str]):
+        __root__: tuple[str, ...]
+
+        @property
+        def root(self) -> tuple[str, ...]:
+            return self.__root__
+
+
 @deprecated(
     reason="Tuples of string collection names are now preferred. Will be removed after v26.",
     version="v25.0",
     category=FutureWarning,
 )
-class CollectionSearch(BaseModel, Sequence[str]):
+class CollectionSearch(_CollectionSearch):
     """An ordered search path of collections.
 
     The `fromExpression` method should almost always be used to construct
@@ -297,8 +310,6 @@
     how different the original expressions appear.
     """
 
-    __root__: tuple[str, ...]
-
     @classmethod
     def fromExpression(cls, expression: Any) -> CollectionSearch:
         """Process a general expression to construct a `CollectionSearch`
@@ -342,31 +353,35 @@ def fromExpression(cls, expression: Any) -> CollectionSearch:
                 for name in wildcard.strings:
                     if name not in deduplicated:
                         deduplicated.append(name)
-        return cls(__root__=tuple(deduplicated))
+        if PYDANTIC_V2:
+            model = cls(tuple(deduplicated))
+        else:
+            model = cls(__root__=tuple(deduplicated))
+        return model
 
     def explicitNames(self) -> Iterator[str]:
         """Iterate over collection names that were specified explicitly."""
-        yield from self.__root__
+        yield from self.root
 
     def __iter__(self) -> Iterator[str]:  # type: ignore
-        yield from self.__root__
+        yield from self.root
 
     def __len__(self) -> int:
-        return len(self.__root__)
+        return len(self.root)
 
     def __getitem__(self, index: Any) -> str:
-        return self.__root__[index]
+        return self.root[index]
 
     def __eq__(self, other: Any) -> bool:
         if isinstance(other, CollectionSearch):
-            return self.__root__ == other.__root__
+            return self.root == other.root
         return False
 
     def __str__(self) -> str:
         return "[{}]".format(", ".join(self))
 
     def __repr__(self) -> str:
-        return f"CollectionSearch({self.__root__!r})"
+        return f"CollectionSearch({self.root!r})"
 
 
 @dataclasses.dataclass(frozen=True)
diff --git a/python/lsst/daf/butler/tests/_examplePythonTypes.py b/python/lsst/daf/butler/tests/_examplePythonTypes.py
index c7599dac6d..96a9bd01f1 100644
--- a/python/lsst/daf/butler/tests/_examplePythonTypes.py
+++ b/python/lsst/daf/butler/tests/_examplePythonTypes.py
@@ -44,11 +44,7 @@
 from typing import TYPE_CHECKING, Any
 
 from lsst.daf.butler import StorageClass, StorageClassDelegate
-
-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
+from lsst.daf.butler._compat import _BaseModelCompat
 
 if TYPE_CHECKING:
     from lsst.daf.butler import Butler, Datastore, FormatterFactory
@@ -268,7 +264,7 @@ def makeFromDict(cls, exportDict: dict[str, list | dict | None]) -> MetricsExamp
     return cls(exportDict["summary"], exportDict["output"], data)
 
 
-class MetricsExampleModel(BaseModel):
+class MetricsExampleModel(_BaseModelCompat):
     """A variant of `MetricsExample` based on model."""
 
     summary: dict[str, Any] | None = None
diff --git a/python/lsst/daf/butler/tests/dict_convertible_model.py b/python/lsst/daf/butler/tests/dict_convertible_model.py
index ca8b206420..fb67fa4052 100644
--- a/python/lsst/daf/butler/tests/dict_convertible_model.py
+++ b/python/lsst/daf/butler/tests/dict_convertible_model.py
@@ -25,13 +25,11 @@
 from collections.abc import Mapping
 
-try:
-    from pydantic.v1 import BaseModel, Field
-except ModuleNotFoundError:
-    from pydantic import BaseModel, Field  # type: ignore
+from lsst.daf.butler._compat import _BaseModelCompat
+from pydantic import Field
 
 
-class DictConvertibleModel(BaseModel):
+class DictConvertibleModel(_BaseModelCompat):
     """A pydantic model to/from dict conversion in which the dict
     representation is intentionally different from pydantic's own dict
     conversions.
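
A natural regression test for the whole patch is a single body that exercises both spellings and must pass under either installed pydantic major. A sketch (toy model, not included in the diff):

```python
import unittest

from lsst.daf.butler._compat import _BaseModelCompat


class _Model(_BaseModelCompat):
    summary: dict[str, int] | None = None


class CompatTestCase(unittest.TestCase):
    """Same assertions regardless of which pydantic major is installed."""

    def test_round_trip(self) -> None:
        payload = {"summary": {"n": 1}}
        v2_style = _Model.model_validate(payload)
        v1_style = _Model.parse_obj(payload)
        self.assertEqual(v2_style, v1_style)
        self.assertEqual(v2_style.model_dump_json(), v1_style.json())


if __name__ == "__main__":
    unittest.main()
```
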