DM-40002: Try to support pydantic v1 and v2 #866

Merged: 18 commits, Jul 19, 2023
14 changes: 8 additions & 6 deletions .github/workflows/build.yaml
@@ -27,16 +27,18 @@ jobs:
          channels: conda-forge,defaults
          channel-priority: strict
          show-channel-urls: true
+         miniforge-variant: Mambaforge
+         use-mamba: true

      - name: Update pip/wheel infrastructure
        shell: bash -l {0}
        run: |
-         conda install -y -q pip wheel
+         mamba install -y -q pip wheel

      - name: Install sqlite
        shell: bash -l {0}
        run: |
-         conda install -y -q sqlite
+         mamba install -y -q sqlite

      # Postgres-14 is already installed from official postgres repo, but we
      # also need pgsphere which is not installed. The repo is not in the list,
@@ -52,13 +54,13 @@ jobs:
      - name: Install postgresql Python packages
        shell: bash -l {0}
        run: |
-         conda install -y -q psycopg2
+         mamba install -y -q psycopg2
          pip install testing.postgresql

      - name: Install cryptography package for moto
        shell: bash -l {0}
        run: |
-         conda install -y -q cryptography
+         mamba install -y -q cryptography

      - name: Install dependencies
        shell: bash -l {0}
@@ -69,13 +71,13 @@ jobs:
      - name: Install pytest packages
        shell: bash -l {0}
        run: |
-         conda install -y -q \
+         mamba install -y -q \
            pytest pytest-xdist pytest-openfiles pytest-cov

      - name: List installed packages
        shell: bash -l {0}
        run: |
-         conda list
+         mamba list
          pip list -v

      - name: Build and install
2 changes: 1 addition & 1 deletion .github/workflows/build_docs.yaml
@@ -18,7 +18,7 @@ jobs:
      - name: Set up Python
        uses: actions/setup-python@v4
        with:
-         python-version: '3.10'
+         python-version: '3.11'
          cache: "pip"
          cache-dependency-path: "setup.cfg"

2 changes: 1 addition & 1 deletion mypy.ini
@@ -71,7 +71,7 @@ disallow_untyped_defs = True
disallow_incomplete_defs = True
strict_equality = True
warn_unreachable = True
-warn_unused_ignores = True
+warn_unused_ignores = False

# ...except the modules and subpackages below (can't find a way to do line
# breaks in the lists of modules).
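
Relaxing warn_unused_ignores is what lets a single source tree type-check against both pydantic majors: a "# type: ignore" that is required when mypy runs against one major can be reported as unused when it runs against the other. A minimal sketch of the pattern (a hypothetical Model, condensed from the _compat.py shim added below):

    from pydantic import BaseModel
    from pydantic.version import VERSION as PYDANTIC_VERSION

    if PYDANTIC_VERSION.startswith("2."):

        class Model(BaseModel):
            """The pydantic v2 flavour of the class."""

    else:

        class Model(BaseModel):  # type: ignore[no-redef]
            """The v1 flavour; mypy needs the ignore when it sees
            both definitions as a redefinition."""

    # With warn_unused_ignores = True, any configuration in which such an
    # ignore turns out to be unnecessary fails the build; with False, both
    # pydantic majors can be checked from one code base.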
162 changes: 162 additions & 0 deletions python/lsst/daf/butler/_compat.py
@@ -0,0 +1,162 @@
# This file is part of daf_butler.
#
# Developed for the LSST Data Management System.
# This product includes software developed by the LSST Project
# (https://www.lsst.org).
# See the COPYRIGHT file at the top-level directory of this distribution
# for details of code ownership.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.

"""Code to support backwards compatibility."""

__all__ = ["PYDANTIC_V2", "_BaseModelCompat"]

import sys
from collections.abc import Callable
from typing import TYPE_CHECKING, Any

from pydantic import BaseModel
from pydantic.version import VERSION as PYDANTIC_VERSION

if sys.version_info >= (3, 11, 0):
    from typing import Self
else:
    from typing import TypeVar

    Self = TypeVar("Self", bound="_BaseModelCompat")  # type: ignore


PYDANTIC_V2 = PYDANTIC_VERSION.startswith("2.")


if PYDANTIC_V2:

    class _BaseModelCompat(BaseModel):
        """Methods from pydantic v1 that we want to emulate in v2.

        Some of these methods are provided by v2 but issue deprecation
        warnings. We need to decide whether we are also okay with deprecating
        them or want to support them without the deprecation message.
        """

        def json(
            self,
            *,
            include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,  # type: ignore
            exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,  # type: ignore
            by_alias: bool = False,
            skip_defaults: bool | None = None,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            encoder: Callable[[Any], Any] | None = None,
            models_as_dict: bool = True,
            **dumps_kwargs: Any,
        ) -> str:
            if dumps_kwargs:
                raise TypeError("dumps_kwargs no longer supported.")
            if encoder is not None:
                raise TypeError("json encoder is no longer supported.")
            # Can catch warnings and call BaseModel.json() directly.
            return self.model_dump_json(
                include=include,
                exclude=exclude,
                by_alias=by_alias,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
                exclude_unset=exclude_unset,
            )

        @classmethod
        def parse_obj(cls, obj: Any) -> Self:
            # Catch warnings and call BaseModel.parse_obj directly?
            return cls.model_validate(obj)

        if TYPE_CHECKING and not PYDANTIC_V2:
            # mypy sees the first definition of a class and ignores any
            # redefinition. This means that if mypy is run with pydantic v1
            # it will not see the classes defined in the else block below.

            @classmethod
            def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:
                return cls()

            @classmethod
            def model_validate(
                cls,
                obj: Any,
                *,
                strict: bool | None = None,
                from_attributes: bool | None = None,
                context: dict[str, Any] | None = None,
            ) -> Self:
                return cls()

            def model_dump_json(
                self,
                *,
                indent: int | None = None,
                include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
                exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
                by_alias: bool = False,
                exclude_unset: bool = False,
                exclude_defaults: bool = False,
                exclude_none: bool = False,
                round_trip: bool = False,
                warnings: bool = True,
            ) -> str:
                return ""

else:

    class _BaseModelCompat(BaseModel):  # type:ignore[no-redef]
"""Methods from pydantic v2 that can be used in pydantic v1."""

@classmethod

        def model_validate(
            cls,
            obj: Any,
            *,
            strict: bool | None = None,
            from_attributes: bool | None = None,
            context: dict[str, Any] | None = None,
        ) -> Self:
            return cls.parse_obj(obj)

        def model_dump_json(
            self,
            *,
            indent: int | None = None,
            include: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
            exclude: set[int] | set[str] | dict[int, Any] | dict[str, Any] | None = None,
            by_alias: bool = False,
            exclude_unset: bool = False,
            exclude_defaults: bool = False,
            exclude_none: bool = False,
            round_trip: bool = False,
            warnings: bool = True,
        ) -> str:
            return self.json(
                include=include,  # type: ignore
                exclude=exclude,  # type: ignore
                by_alias=by_alias,
                exclude_unset=exclude_unset,
                exclude_defaults=exclude_defaults,
                exclude_none=exclude_none,
            )

        @classmethod  # type: ignore
        def model_construct(cls, _fields_set: set[str] | None = None, **values: Any) -> Self:
            return cls.construct(_fields_set=_fields_set, **values)
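
Taken together, the shim lets downstream code be written against the pydantic v2 method names whichever major is installed. A minimal usage sketch (the Config model and its fields are hypothetical, not part of this PR):

    from lsst.daf.butler._compat import _BaseModelCompat

    class Config(_BaseModelCompat):
        name: str
        level: int = 0

    # v2-style calls: native under v2, emulated via parse_obj()/json()
    # under v1.
    cfg = Config.model_validate({"name": "demo"})
    text = cfg.model_dump_json(exclude_defaults=True)

    # v1-style calls keep working too: native under v1, emulated via
    # model_validate()/model_dump_json() under v2.
    cfg2 = Config.parse_obj({"name": "demo", "level": 2})
    text2 = cfg2.json()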

59 changes: 38 additions & 21 deletions python/lsst/daf/butler/_quantum_backed.py
@@ -31,13 +31,9 @@
from typing import TYPE_CHECKING, Any

from deprecated.sphinx import deprecated
+from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat
from lsst.resources import ResourcePathExpression

-try:
-    from pydantic.v1 import BaseModel
-except ModuleNotFoundError:
-    from pydantic import BaseModel  # type: ignore
-
from ._butlerConfig import ButlerConfig
from ._deferredDatasetHandle import DeferredDatasetHandle
from ._limited_butler import LimitedButler
@@ -597,7 +593,7 @@
)


-class QuantumProvenanceData(BaseModel):
+class QuantumProvenanceData(_BaseModelCompat):
"""A serializable struct for per-quantum provenance information and
datastore records.

@@ -749,19 +745,40 @@
"""
return {uuid.UUID(id) if isinstance(id, str) else id for id in uuids}

-        data = QuantumProvenanceData.__new__(cls)
-        setter = object.__setattr__
-        setter(data, "predicted_inputs", _to_uuid_set(predicted_inputs))
-        setter(data, "available_inputs", _to_uuid_set(available_inputs))
-        setter(data, "actual_inputs", _to_uuid_set(actual_inputs))
-        setter(data, "predicted_outputs", _to_uuid_set(predicted_outputs))
-        setter(data, "actual_outputs", _to_uuid_set(actual_outputs))
-        setter(
-            data,
-            "datastore_records",
-            {
-                key: SerializedDatastoreRecordData.direct(**records)
-                for key, records in datastore_records.items()
-            },
-        )
+        if PYDANTIC_V2:
+            data = cls.model_construct(
+                _fields_set={
+                    "predicted_inputs",
+                    "available_inputs",
+                    "actual_inputs",
+                    "predicted_outputs",
+                    "actual_outputs",
+                    "datastore_records",
+                },
+                predicted_inputs=_to_uuid_set(predicted_inputs),
+                available_inputs=_to_uuid_set(available_inputs),
+                actual_inputs=_to_uuid_set(actual_inputs),
+                predicted_outputs=_to_uuid_set(predicted_outputs),
+                actual_outputs=_to_uuid_set(actual_outputs),
+                datastore_records={
+                    key: SerializedDatastoreRecordData.direct(**records)
+                    for key, records in datastore_records.items()
+                },
+            )
+        else:
+            data = QuantumProvenanceData.__new__(cls)
+            setter = object.__setattr__
+            setter(data, "predicted_inputs", _to_uuid_set(predicted_inputs))
+            setter(data, "available_inputs", _to_uuid_set(available_inputs))
+            setter(data, "actual_inputs", _to_uuid_set(actual_inputs))
+            setter(data, "predicted_outputs", _to_uuid_set(predicted_outputs))
+            setter(data, "actual_outputs", _to_uuid_set(actual_outputs))
+            setter(
+                data,
+                "datastore_records",
+                {
+                    key: SerializedDatastoreRecordData.direct(**records)
+                    for key, records in datastore_records.items()
+                },
+            )
        return data
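
The branch is needed because the two majors expose different back doors for building a model from trusted data without running validation: v2 provides model_construct(), while the established v1 idiom bypasses __init__ and assigns attributes (including __fields_set__) with object.__setattr__, since v1 models are pseudo-immutable. A condensed sketch of the pattern, using a hypothetical two-field model:

    import uuid

    from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat

    class Record(_BaseModelCompat):
        ident: uuid.UUID
        name: str

    def record_direct(ident: str, name: str) -> Record:
        # Trusted inputs only: neither path runs any validators.
        if PYDANTIC_V2:
            return Record.model_construct(
                _fields_set={"ident", "name"},
                ident=uuid.UUID(ident),
                name=name,
            )
        rec = Record.__new__(Record)
        object.__setattr__(rec, "ident", uuid.UUID(ident))
        object.__setattr__(rec, "name", name)
        object.__setattr__(rec, "__fields_set__", {"ident", "name"})
        return rec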
47 changes: 28 additions & 19 deletions python/lsst/daf/butler/core/datasets/ref.py
@@ -33,14 +33,11 @@
import sys
import uuid
from collections.abc import Iterable
-from typing import TYPE_CHECKING, Any, ClassVar, Protocol, runtime_checkable
+from typing import TYPE_CHECKING, Any, ClassVar, Protocol, TypeAlias, runtime_checkable

+from lsst.daf.butler._compat import PYDANTIC_V2, _BaseModelCompat
from lsst.utils.classes import immutable

-try:
-    from pydantic.v1 import BaseModel, StrictStr, validator
-except ModuleNotFoundError:
-    from pydantic import BaseModel, StrictStr, validator  # type: ignore
+from pydantic import StrictStr, validator

from ..configSupport import LookupKey
from ..dimensions import DataCoordinate, DimensionGraph, DimensionUniverse, SerializedDataCoordinate
@@ -173,7 +170,7 @@
_serializedDatasetRefFieldsSet = {"id", "datasetType", "dataId", "run", "component"}


-class SerializedDatasetRef(BaseModel):
+class SerializedDatasetRef(_BaseModelCompat):
"""Simplified model of a `DatasetRef` suitable for serialization."""

id: uuid.UUID
@@ -224,22 +221,34 @@

        This method should only be called when the inputs are trusted.
        """
-        node = SerializedDatasetRef.__new__(cls)
-        setter = object.__setattr__
-        setter(node, "id", uuid.UUID(id))
-        setter(
-            node,
-            "datasetType",
-            datasetType if datasetType is None else SerializedDatasetType.direct(**datasetType),
+        serialized_datasetType = (
+            SerializedDatasetType.direct(**datasetType) if datasetType is not None else None
         )
-        setter(node, "dataId", dataId if dataId is None else SerializedDataCoordinate.direct(**dataId))
-        setter(node, "run", sys.intern(run))
-        setter(node, "component", component)
-        setter(node, "__fields_set__", _serializedDatasetRefFieldsSet)
+        serialized_dataId = SerializedDataCoordinate.direct(**dataId) if dataId is not None else None
+
+        if PYDANTIC_V2:
+            node = cls.model_construct(
+                _fields_set=_serializedDatasetRefFieldsSet,
+                id=uuid.UUID(id),
+                datasetType=serialized_datasetType,
+                dataId=serialized_dataId,
+                run=sys.intern(run),
+                component=component,
+            )
+        else:
+            node = SerializedDatasetRef.__new__(cls)
+            setter = object.__setattr__
+            setter(node, "id", uuid.UUID(id))
+            setter(node, "datasetType", serialized_datasetType)
+            setter(node, "dataId", serialized_dataId)
+            setter(node, "run", sys.intern(run))
+            setter(node, "component", component)
+            setter(node, "__fields_set__", _serializedDatasetRefFieldsSet)

        return node


-DatasetId = uuid.UUID
+DatasetId: TypeAlias = uuid.UUID
"""A type-annotation alias for dataset ID providing typing flexibility.
"""
