Add from __future__ import annotations
This is for files with pre-3.9 annotations, with a view to migrating them.
Incidentally, isort was also run (see unionai-oss#1295)

Signed-off-by: Nathan McDougall <[email protected]>
nathanjmcdougall committed Aug 7, 2023
1 parent 57d8269 commit 3c42df9
Showing 38 changed files with 86 additions and 39 deletions.
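
For context (not part of the commit itself): from __future__ import annotations (PEP 563) turns every annotation in a module into a string that is never evaluated at runtime, so files that still use pre-3.9 constructs such as typing.List and typing.Optional can later be migrated to the built-in generic and union syntax while remaining importable on Python 3.7 and 3.8. A minimal, hypothetical sketch of that migration; the function names and types below are illustrative and do not appear in the diff:

from __future__ import annotations

from typing import List, Optional  # pre-3.9 spellings, shown only for comparison


def first_positive_old(values: List[int]) -> Optional[int]:
    # Current style in these files: typing.List / typing.Optional.
    return next((v for v in values if v > 0), None)


def first_positive_new(values: list[int]) -> int | None:
    # Target style after migration: valid on Python 3.7/3.8 only because the
    # future import defers annotation evaluation (PEP 563).
    return next((v for v in values if v > 0), None)

The import reshuffles in the hunks below are consistent with isort's default section ordering: standard library, then third-party, then first-party imports, each section alphabetized (see unionai-oss#1295). noxfile.py carries an # isort: skip_file directive, presumably because setuptools must be imported before distutils, as its inline comment notes.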
11 changes: 5 additions & 6 deletions noxfile.py
@@ -1,22 +1,21 @@
"""Nox sessions."""
from __future__ import annotations

# isort: skip_file
import os
import re
import shutil
import sys
from distutils.core import run_setup
from typing import Dict, List

import nox

# setuptools must be imported before distutils !
import setuptools
from distutils.core import (
run_setup,
)

import nox
from nox import Session
from pkg_resources import Requirement, parse_requirements


nox.options.sessions = (
"requirements",
"mypy",
1 change: 1 addition & 0 deletions pandera/accessors/pandas_accessor.py
@@ -1,4 +1,5 @@
"""Register pandas accessor for pandera schema metadata."""
from __future__ import annotations

from typing import Optional, Union

1 change: 1 addition & 0 deletions pandera/accessors/pyspark_sql_accessor.py
@@ -1,5 +1,6 @@
"""Custom accessor functionality for PySpark.Sql. Register pyspark accessor for pandera schema metadata.
"""
from __future__ import annotations

import warnings
from typing import Optional
1 change: 1 addition & 0 deletions pandera/api/base/checks.py
@@ -1,4 +1,5 @@
"""Data validation base check."""
from __future__ import annotations

import inspect
from itertools import chain
1 change: 1 addition & 0 deletions pandera/api/base/model.py
@@ -1,4 +1,5 @@
"""Base classes for model api."""
from __future__ import annotations

import os
from typing import (
1 change: 1 addition & 0 deletions pandera/api/base/model_components.py
@@ -1,4 +1,5 @@
"""Model component base classes."""
from __future__ import annotations

from typing import (
Any,
1 change: 1 addition & 0 deletions pandera/api/base/model_config.py
@@ -1,4 +1,5 @@
"""Class-based dataframe model API configuration."""
from __future__ import annotations

from typing import Any, Optional

5 changes: 3 additions & 2 deletions pandera/api/base/schema.py
@@ -4,15 +4,16 @@
data validation. These operations are exposed as methods that are composed
together to implement the pandera schema specification.
"""
from __future__ import annotations

import inspect
from abc import ABC
from functools import wraps
from typing import Any, Dict, Tuple, Type, Optional, Union
from typing import Any, Dict, Optional, Tuple, Type, Union

from pandera.backends.base import BaseSchemaBackend
from pandera.errors import BackendNotFoundError
from pandera.dtypes import DataType
from pandera.errors import BackendNotFoundError

DtypeInputTypes = Union[str, type, DataType, Type]

1 change: 1 addition & 0 deletions pandera/api/checks.py
@@ -1,4 +1,5 @@
"""Data validation check definition."""
from __future__ import annotations

import re
from typing import (
1 change: 1 addition & 0 deletions pandera/api/extensions.py
@@ -1,4 +1,5 @@
"""Extensions module."""
from __future__ import annotations

import inspect
import warnings
1 change: 1 addition & 0 deletions pandera/api/hypotheses.py
@@ -1,4 +1,5 @@
"""Data validation checks for hypothesis testing."""
from __future__ import annotations

from typing import Any, Callable, Dict, List, Optional, TypeVar, Union

1 change: 1 addition & 0 deletions pandera/api/pandas/array.py
@@ -1,4 +1,5 @@
"""Core pandas array specification."""
from __future__ import annotations

import copy
import warnings
1 change: 1 addition & 0 deletions pandera/api/pandas/components.py
@@ -1,4 +1,5 @@
"""Core pandas schema component specifications."""
from __future__ import annotations

import warnings
from typing import Any, Dict, Iterable, List, Optional, Tuple, Union, cast
1 change: 1 addition & 0 deletions pandera/api/pandas/model.py
@@ -1,4 +1,5 @@
"""Class-based api for pandas models."""
from __future__ import annotations

import copy
import inspect
2 changes: 2 additions & 0 deletions pandera/api/pandas/model_components.py
@@ -1,4 +1,6 @@
"""DataFrameModel components"""
from __future__ import annotations

from typing import (
Any,
Callable,
1 change: 1 addition & 0 deletions pandera/api/pandas/model_config.py
@@ -1,4 +1,5 @@
"""Class-based dataframe model API configuration for pandas."""
from __future__ import annotations

from typing import Any, Callable, Dict, List, Optional, Union

1 change: 1 addition & 0 deletions pandera/api/pyspark/column_schema.py
@@ -1,4 +1,5 @@
"""Core pyspark column specification."""
from __future__ import annotations

import copy
from typing import Any, List, Optional, TypeVar, cast
1 change: 1 addition & 0 deletions pandera/api/pyspark/components.py
@@ -1,4 +1,5 @@
"""Core pyspark schema component specifications."""
from __future__ import annotations

from typing import Any, Dict, Iterable, Optional, Tuple, Union

1 change: 1 addition & 0 deletions pandera/api/pyspark/error_handler.py
@@ -1,4 +1,5 @@
"""Handle schema errors."""
from __future__ import annotations

from collections import defaultdict
from enum import Enum
2 changes: 2 additions & 0 deletions pandera/api/pyspark/model_components.py
@@ -1,4 +1,6 @@
"""DataFrameModel components"""
from __future__ import annotations

from typing import (
Any,
Callable,
4 changes: 3 additions & 1 deletion pandera/backends/pandas/builtin_hypotheses.py
@@ -1,11 +1,13 @@
# pylint: disable=missing-function-docstring
"""Pandas implementation of built-in hypotheses."""
from __future__ import annotations

from typing import Tuple

from pandera.api.extensions import register_builtin_hypothesis
from pandera.backends.pandas.builtin_checks import PandasData

# pylint: disable=missing-function-docstring


@register_builtin_hypothesis(
error="failed two sample ttest between '{sample1}' and '{sample2}'",
14 changes: 8 additions & 6 deletions pandera/backends/pandas/components.py
@@ -1,5 +1,5 @@
"""Backend implementation for pandas schema components."""
# pylint: disable=too-many-locals
from __future__ import annotations

import traceback
from copy import copy, deepcopy
@@ -8,24 +8,26 @@
import numpy as np
import pandas as pd

from pandera.backends.base import CoreCheckResult
from pandera.backends.pandas.array import ArraySchemaBackend
from pandera.backends.pandas.container import DataFrameSchemaBackend
from pandera.api.pandas.types import (
is_field,
is_index,
is_multiindex,
is_table,
)
from pandera.backends.base import CoreCheckResult
from pandera.backends.pandas.array import ArraySchemaBackend
from pandera.backends.pandas.container import DataFrameSchemaBackend
from pandera.backends.pandas.error_formatters import scalar_failure_case
from pandera.error_handlers import SchemaErrorHandler
from pandera.errors import (
SchemaDefinitionError,
SchemaError,
SchemaErrors,
SchemaErrorReason,
SchemaDefinitionError,
SchemaErrors,
)

# pylint: disable=too-many-locals


class ColumnBackend(ArraySchemaBackend):
"""Backend implementation for pandas dataframe columns."""
7 changes: 4 additions & 3 deletions pandera/backends/pandas/container.py
@@ -1,22 +1,23 @@
"""Pandas Parsing, Validation, and Error Reporting Backends."""
from __future__ import annotations

import copy
import itertools
import traceback
from typing import Any, Callable, List, Optional, Tuple, Dict
from typing import Any, Callable, Dict, List, Optional, Tuple

import pandas as pd
from pydantic import BaseModel

from pandera.backends.base import CoreCheckResult
from pandera.api.pandas.types import is_table
from pandera.backends.base import CoreCheckResult
from pandera.backends.pandas.base import ColumnInfo, PandasSchemaBackend
from pandera.backends.pandas.error_formatters import (
reshape_failure_cases,
scalar_failure_case,
)
from pandera.engines import pandas_engine
from pandera.backends.pandas.utils import convert_uniquesettings
from pandera.engines import pandas_engine
from pandera.error_handlers import SchemaErrorHandler
from pandera.errors import (
ParserError,
1 change: 1 addition & 0 deletions pandera/backends/pandas/hypotheses.py
@@ -1,4 +1,5 @@
"""Hypothesis backend for pandas."""
from __future__ import annotations

from functools import partial
from typing import Any, Callable, Dict, Union, cast
1 change: 1 addition & 0 deletions pandera/backends/pyspark/base.py
@@ -1,4 +1,5 @@
"""pyspark Parsing, Validation, and Error Reporting Backends."""
from __future__ import annotations

import warnings
from typing import (
1 change: 1 addition & 0 deletions pandera/backends/pyspark/checks.py
@@ -1,4 +1,5 @@
"""Check backend for pyspark."""
from __future__ import annotations

from functools import partial
from typing import Dict, List, Optional
5 changes: 3 additions & 2 deletions pandera/backends/pyspark/container.py
@@ -1,17 +1,18 @@
"""Pyspark Parsing, Validation, and Error Reporting Backends."""
from __future__ import annotations

import copy
import traceback
import warnings
from typing import Any, List, Optional, Dict
from typing import Any, Dict, List, Optional

from pyspark.sql import DataFrame
from pyspark.sql.functions import col

from pandera.api.pyspark.error_handler import ErrorCategory, ErrorHandler
from pandera.api.pyspark.types import is_table
from pandera.backends.pyspark.base import ColumnInfo, PysparkSchemaBackend
from pandera.backends.pyspark.decorators import validate_scope, ValidationScope
from pandera.backends.pyspark.decorators import ValidationScope, validate_scope
from pandera.backends.pyspark.error_formatters import scalar_failure_case
from pandera.config import CONFIG
from pandera.errors import (
2 changes: 2 additions & 0 deletions pandera/decorators.py
@@ -1,4 +1,6 @@
"""Decorators for integrating pandera into existing data pipelines."""
from __future__ import annotations

import functools
import inspect
import sys
3 changes: 2 additions & 1 deletion pandera/engines/engine.py
@@ -1,4 +1,6 @@
"""Data types engine interface."""
from __future__ import annotations

# https://github.com/PyCQA/pylint/issues/3268
# pylint:disable=no-value-for-parameter
import functools
@@ -25,7 +27,6 @@

from pandera.dtypes import DataType


# register different TypedDict type depending on python version
if sys.version_info >= (3, 9):
from typing import TypedDict
15 changes: 9 additions & 6 deletions pandera/engines/pandas_engine.py
@@ -1,11 +1,6 @@
"""Pandas engine and data types."""
# pylint:disable=too-many-ancestors

# docstrings are inherited
# pylint:disable=missing-class-docstring
from __future__ import annotations

# pylint doesn't know about __init__ generated with dataclass
# pylint:disable=unexpected-keyword-arg,no-value-for-parameter
import builtins
import dataclasses
import datetime
@@ -42,6 +37,14 @@
from pandera.engines.utils import pandas_version
from pandera.system import FLOAT_128_AVAILABLE

# pylint doesn't know about __init__ generated with dataclass
# pylint:disable=unexpected-keyword-arg,no-value-for-parameter
# pylint:disable=too-many-ancestors

# docstrings are inherited
# pylint:disable=missing-class-docstring


try:
import pyarrow # pylint: disable=unused-import

21 changes: 12 additions & 9 deletions pandera/engines/pyspark_engine.py
@@ -1,18 +1,12 @@
"""PySpark engine and data types."""
# pylint:disable=too-many-ancestors,no-member

# docstrings are inherited
# pylint:disable=missing-class-docstring

# pylint doesn't know about __init__ generated with dataclass
# pylint:disable=unexpected-keyword-arg,no-value-for-parameter
from __future__ import annotations

import dataclasses
import inspect
import re
import warnings
from typing import Any, Iterable, Union, Optional
import sys
import warnings
from typing import Any, Iterable, Optional, Union

import pyspark.sql.types as pst

@@ -21,6 +15,15 @@
from pandera.engines import engine
from pandera.engines.type_aliases import PysparkObject

# pylint:disable=too-many-ancestors,no-member

# docstrings are inherited
# pylint:disable=missing-class-docstring

# pylint doesn't know about __init__ generated with dataclass
# pylint:disable=unexpected-keyword-arg,no-value-for-parameter


try:
import pyarrow # pylint:disable=unused-import

1 change: 1 addition & 0 deletions pandera/engines/utils.py
@@ -1,4 +1,5 @@
"""Engine module utilities."""
from __future__ import annotations

from typing import Any, Union

3 changes: 2 additions & 1 deletion pandera/error_handlers.py
@@ -1,8 +1,9 @@
"""Handle schema errors."""
from __future__ import annotations

from typing import List, Optional

from pandera.errors import SchemaError, SchemaErrors, SchemaErrorReason
from pandera.errors import SchemaError, SchemaErrorReason, SchemaErrors


class SchemaErrorHandler:
1 change: 1 addition & 0 deletions pandera/errors.py
@@ -1,4 +1,5 @@
"""pandera-specific errors."""
from __future__ import annotations

import warnings
from enum import Enum
1 change: 1 addition & 0 deletions pandera/mypy.py
@@ -1,4 +1,5 @@
"""Pandera mypy plugin."""
from __future__ import annotations

from typing import Callable, Optional, Union, cast
