diff --git a/.github/workflows/CICD.yaml b/.github/workflows/CICD.yaml index 60280365..b7eadfee 100644 --- a/.github/workflows/CICD.yaml +++ b/.github/workflows/CICD.yaml @@ -163,10 +163,7 @@ jobs: with: name: pytest-and-coverage-report path: | - pytest.xml - cov.xml - .coverage - coverage/ + reports/pytest/ retention-days: 1 if-no-files-found: error @@ -219,14 +216,14 @@ jobs: with: name: bandit-sast-report path: | - bandit.sarif + reports/bandit.sarif retention-days: 1 if-no-files-found: error - name: Upload bandit report to CodeQL uses: github/codeql-action/upload-sarif@v3 with: - sarif_file: bandit.sarif + sarif_file: reports/bandit.sarif Style: name: Style and formatting diff --git a/Makefile b/Makefile index 6b653bd0..bb092064 100644 --- a/Makefile +++ b/Makefile @@ -23,9 +23,16 @@ GITHUB_REF ?= 00000000-0000-0000-0000-000000000000 # Can be overridden. GITHUB_WORKSPACE ?= $(CURDIR) +# What repository to publish packages to. +# `testpypi` and `pypi` are valid values. PYPI_REPO ?= testpypi +# The directory to write ephemeral reports to, +# such as pytest coverage reports. +REPORTS_DIR ?= reports BANDIT_REPORT := bandit.sarif +PYTEST_REPORT := pytest + # Can be overridden. This is used to change the prereqs # of some supporting targets, like `format-ruff`. @@ -70,8 +77,7 @@ PYPROJECT_FILES=./pyproject.toml $(wildcard src/*/pyproject.toml) PACKAGE_PATHS=$(subst /pyproject.toml,,$(PYPROJECT_FILES)) PACKAGES=$(subst /pyproject.toml,,$(subst src/,BL_Python.,$(wildcard src/*/pyproject.toml))) -# Rather than duplicating BL_Python.all, -# just prereq it. 
+.PHONY: dev dev : $(VENV) $(SETUP_DEPENDENCIES) $(MAKE) _dev_build DEFAULT_TARGET=dev _dev_configure : $(VENV) $(PYPROJECT_FILES) @@ -108,6 +114,7 @@ _cicd_build : _cicd_configure @$(REPORT_VENV_USAGE) +BL_Python.all: $(DEFAULT_TARGET) $(PACKAGES) : BL_Python.%: src/%/pyproject.toml $(VENV) $(CONFIGURE_TARGET) $(PYPROJECT_FILES) @if [ -d $(call package_to_dist,$*) ]; then @echo "Package $@ is already built, skipping..." @@ -165,6 +172,7 @@ format-ruff : $(VENV) $(BUILD_TARGET) ruff format --preview --respect-gitignore +.PHONY: format format-ruff format-isort format : $(VENV) $(BUILD_TARGET) format-isort format-ruff @@ -201,22 +209,32 @@ test-bandit : $(VENV) $(BUILD_TARGET) # while testing bandit. -bandit -c pyproject.toml \ --format sarif \ - --output $(BANDIT_REPORT) \ + --output $(REPORTS_DIR)/$(BANDIT_REPORT) \ -r . test-pytest : $(VENV) $(BUILD_TARGET) $(ACTIVATE_VENV) - pytest $(PYTEST_FLAGS) + pytest $(PYTEST_FLAGS) \ + && PYTEST_EXIT_CODE=0 \ + || PYTEST_EXIT_CODE=$$? + + -coverage html --data-file=$(REPORTS_DIR)/$(PYTEST_REPORT)/.coverage + -junit2html $(REPORTS_DIR)/$(PYTEST_REPORT)/pytest.xml $(REPORTS_DIR)/$(PYTEST_REPORT)/pytest.html - coverage html -d coverage + exit $$PYTEST_EXIT_CODE +.PHONY: test test-pytest test-bandit test-pyright test-ruff test-isort +_test : $(VENV) $(BUILD_TARGET) test-isort test-ruff test-pyright test-bandit test-pytest test : CMD_PREFIX=@ -test : $(VENV) $(BUILD_TARGET) clean-test test-isort test-ruff test-pyright test-bandit test-pytest +test : clean-test + $(MAKE) -j --keep-going _test +.PHONY: publish-all # Publishing should use a real install, which `cicd` fulfills publish-all : REWRITE_DEPENDENCIES=false +# Publishing should use a real install. Reset the build env. publish-all : reset $(VENV) $(ACTIVATE_VENV) @@ -224,26 +242,31 @@ publish-all : reset $(VENV) clean-build : - find . -type d \( \ + find . 
-type d \ + \( \ + -path ./$(VENV) \ + -o -path ./.git \ + \) -prune -false \ + -o \( \ -name build \ + -o -name dist \ -o -name __pycache__ \ -o -name \*.egg-info \ -o -name .pytest-cache \ \) -prune -exec rm -rf {} \; clean-test : - $(CMD_PREFIX)rm -rf cov.xml \ - pytest.xml \ - coverage \ - .coverage \ - $(BANDIT_REPORT) - + $(CMD_PREFIX)rm -rf \ + $(REPORTS_DIR)/$(PYTEST_REPORT) \ + $(REPORTS_DIR)/$(BANDIT_REPORT) +.PHONY: clean clean-test clean-build clean : clean-build clean-test rm -rf $(VENV) @echo '\nDeactivate your venv with `deactivate`' +.PHONY: remake remake : $(MAKE) clean $(MAKE) @@ -253,5 +276,6 @@ reset-check: @echo -n "This will make destructive changes! Considering stashing changes first.\n" @( read -p "Are you sure? [y/N]: " response && case "$$response" in [yY]) true;; *) false;; esac ) +.PHONY: reset reset-check reset : reset-check clean git checkout -- $(PYPROJECT_FILES) diff --git a/pyproject.toml b/pyproject.toml index 9d90c88b..6ad06880 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -83,6 +83,8 @@ dev-dependencies = [ "pytest-mock", "mock", "pytest-cov ~= 4.1", + "coverage ~= 7.4", + "junit2html ~= 30.1", "pyright ~= 1.1", "isort ~= 5.13", "ruff ~= 0.3", @@ -138,6 +140,7 @@ reportUninitializedInstanceVariable = "information" reportUnnecessaryTypeIgnoreComment = "information" reportUnusedCallResult = "information" reportMissingTypeStubs = "information" +reportWildcardImportFromLibrary = "warning" [tool.pytest.ini_options] pythonpath = [ @@ -173,9 +176,9 @@ addopts = [ # and # https://github.com/microsoft/vscode-python/issues/21845 "--cov=.", - "--junitxml=pytest.xml", + "--junitxml=reports/pytest/pytest.xml", "-o=junit_family=xunit2", - "--cov-report=xml:cov.xml", + "--cov-report=xml:reports/pytest/cov.xml", "--cov-report=term-missing", ] @@ -187,9 +190,11 @@ norecursedirs = "__pycache__ build .pytest_cache *.egg-info .venv .github-venv" include_namespace_packages = true [tool.coverage.html] +directory = "reports/pytest/coverage" 
show_contexts = true [tool.coverage.run] +data_file = "reports/pytest/.coverage" dynamic_context = "test_function" relative_files = true omit = [ diff --git a/src/database/BL_Python/database/config.py b/src/database/BL_Python/database/config.py index c2de07b2..9dd2371d 100644 --- a/src/database/BL_Python/database/config.py +++ b/src/database/BL_Python/database/config.py @@ -1,13 +1,47 @@ +from typing import Any + from BL_Python.programming.config import AbstractConfig from pydantic import BaseModel +from pydantic.config import ConfigDict class DatabaseConnectArgsConfig(BaseModel): + # allow any values, as this type is not + # specifically the type to be used elsewhere + model_config = ConfigDict(extra="allow") + + +class PostgreSQLDatabaseConnectArgsConfig(DatabaseConnectArgsConfig): + # ignore anything that DatabaseConnectArgsConfig + # allowed to be set, except for any other attributes + # of this class, which will end up assigned through + # the instantiation of the __init__ override of DatabaseConfig + model_config = ConfigDict(extra="ignore") + sslmode: str = "" options: str = "" +class SQLiteDatabaseConnectArgsConfig(DatabaseConnectArgsConfig): + model_config = ConfigDict(extra="ignore") + + class DatabaseConfig(BaseModel, AbstractConfig): + def __init__(self, **data: Any): + super().__init__(**data) + + model_data = self.connect_args.model_dump() if self.connect_args else {} + if self.connection_string.startswith("sqlite://"): + self.connect_args = SQLiteDatabaseConnectArgsConfig(**model_data) + elif self.connection_string.startswith("postgresql://"): + self.connect_args = PostgreSQLDatabaseConnectArgsConfig(**model_data) + connection_string: str = "sqlite:///:memory:" sqlalchemy_echo: bool = 
False + # the static field allows Pydantic to store + # values from a dictionary connect_args: DatabaseConnectArgsConfig | None = None + + +class Config(BaseModel, AbstractConfig): + database: DatabaseConfig diff --git a/src/database/BL_Python/database/migrations/__init__.py b/src/database/BL_Python/database/migrations/__init__.py index ce824a72..e69de29b 100644 --- a/src/database/BL_Python/database/migrations/__init__.py +++ b/src/database/BL_Python/database/migrations/__init__.py @@ -1,115 +0,0 @@ -from typing import TYPE_CHECKING, List, Optional, Protocol, cast, final - -from sqlalchemy.orm import DeclarativeMeta - -MetaBaseType = Type[DeclarativeMeta] - -if TYPE_CHECKING: - from typing import Dict, Protocol, Type, TypeVar, Union - - from sqlalchemy.engine import Dialect - - TBase = TypeVar("TBase") - - class TableNameCallback(Protocol): - def __call__( - self, - dialect_schema: "Union[str, None]", - full_table_name: str, - base_table: str, - meta_base: MetaBaseType, - ) -> None: ... - - class Connection(Protocol): - dialect: Dialect - - class Op(Protocol): - @staticmethod - def get_bind() -> Connection: ... - - -@final -class DialectHelper: - """ - Utilities to get database schema and table names - for different SQL dialects and database engines. - - For example, PostgreSQL supports schemas. This means: - * get_dialect_schema(meta) returns a schema name, if there is one, e.g. "cap" - * get_full_table_name(table_name, meta) returns the schema name, followed by the table name, e.g. " cap.assay_plate " - - SQLite does not support schemas. This means: - * get_dialect_schema(meta) returns None - * get_full_table_name(table_name, meta) returns the table name, with the schema name prepended to it, e.g. " 'cap.assay_plate' " - The key difference is that there is no schema, and the table name comes from the SQLite - engine instantiation, which prepends the "schema" to the table name. 
- """ - - dialect: "Dialect" - dialect_supports_schemas: bool - - def __init__(self, dialect: "Dialect"): - self.dialect = dialect - # right now we only care about SQLite and PSQL, - # so if the dialect is PSQL, then we consider the - # dialect to support schemas, otherwise it does not. - self.dialect_supports_schemas = dialect.name == "postgresql" - - @staticmethod - def get_schema(meta: "MetaBaseType"): - table_args = cast( - Optional[dict[str, str]], getattr(meta, "__table_args__", None) - ) - if table_args is None: - return None - return table_args.get("schema") - - def get_dialect_schema(self, meta: "MetaBaseType"): - """Get the database schema as a string, or None if the dialect does not support schemas.""" - if not self.dialect_supports_schemas: - return None - return DialectHelper.get_schema(meta) - - def get_full_table_name(self, table_name: str, meta: "MetaBaseType"): - """ - If the dialect supports schemas, then the table name does not have the schema prepended. - In dialects that don't support schemas, e.g., SQLite, the table name has the schema prepended. - This is because, when schemas are supported, the dialect automatically handles which schema - to use, while non-schema dialects do not reference any schemas. - """ - if self.get_dialect_schema(meta): - return table_name - else: - return f"{DialectHelper.get_schema(meta)}.{table_name}" - - def get_timestamp_sql(self): - timestamp_default_sql = "now()" - if self.dialect.name == "sqlite": - timestamp_default_sql = "CURRENT_TIMESTAMP" - return timestamp_default_sql - - @staticmethod - def iterate_table_names( - op: "Op", - schema_tables: "Dict[MetaBaseType, List[str]]", - table_name_callback: "TableNameCallback", - ): - """ - Call `table_name_callback` once for every table in every Base. - - op: The `op` object from Alembic. - schema_tables: A dictionary of the tables this call applies to for every Base. - table_name_callback: A callback executed for every table in `schema_tables`. 
- """ - dialect: Dialect = op.get_bind().dialect - schema = DialectHelper(dialect) - get_full_table_name = schema.get_full_table_name - get_dialect_schema = schema.get_dialect_schema - - for meta_base, schema_base_tables in schema_tables.items(): - dialect_schema = get_dialect_schema(meta_base) - for base_table in schema_base_tables: - full_table_name = get_full_table_name(base_table, meta_base) - table_name_callback( - dialect_schema, full_table_name, base_table, meta_base - ) diff --git a/src/database/BL_Python/database/migrations/alembic/README b/src/database/BL_Python/database/migrations/alembic/README new file mode 100644 index 00000000..e69de29b diff --git a/src/database/BL_Python/database/migrations/alembic/__init__.py b/src/database/BL_Python/database/migrations/alembic/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/src/database/BL_Python/database/migrations/alembic/__main__.py b/src/database/BL_Python/database/migrations/alembic/__main__.py new file mode 100644 index 00000000..68fe9dcd --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/__main__.py @@ -0,0 +1,31 @@ +import logging +from os import environ + +# this is Alembic's main entry point +from .bl_alembic import BLAlembic + + +def bl_alembic( + argv: list[str] | None = None, + log_level: int | str | None = None, + allow_overwrite: bool | None = None, +) -> None: + """ + A method to support the `bl-alembic` command, which replaces `alembic. 
+ + :param list[str] | None argv: CLI arguments, defaults to None + :param int | str | None log_level: An integer log level to configure logging verbosity, defaults to None + """ + logging.basicConfig(level=logging.INFO) + if not log_level: + log_level = environ.get(BLAlembic.LOG_LEVEL_NAME) + log_level = int(log_level) if log_level else logging.INFO + + logger = logging.getLogger() + logger.setLevel(log_level) + + BLAlembic(argv, logger).run() + + +if __name__ == "__main__": + bl_alembic() diff --git a/src/database/BL_Python/database/migrations/alembic/_replacement_env.py b/src/database/BL_Python/database/migrations/alembic/_replacement_env.py new file mode 100644 index 00000000..2915ec9b --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/_replacement_env.py @@ -0,0 +1,6 @@ +from pathlib import Path + +from BL_Python.database.migrations.alembic.env import run_migrations + +# TODO replace with your MetaBase types and config file path +run_migrations(bases=[], config_filename=Path("config.toml")) diff --git a/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py b/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py new file mode 100644 index 00000000..a9474781 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/_replacement_env_setup.py @@ -0,0 +1 @@ +from BL_Python.database.migrations.alembic.env_setup import * diff --git a/src/database/BL_Python/database/migrations/alembic/alembic.ini b/src/database/BL_Python/database/migrations/alembic/alembic.ini new file mode 100644 index 00000000..4233cf8e --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/alembic.ini @@ -0,0 +1,41 @@ +[alembic] +script_location = migrations +prepend_sys_path = . 
+version_path_separator = os +sqlalchemy.url = driver://user:pass@localhost/dbname + +[post_write_hooks] + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S \ No newline at end of file diff --git a/src/database/BL_Python/database/migrations/alembic/bl_alembic.py b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py new file mode 100644 index 00000000..695e38bc --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/bl_alembic.py @@ -0,0 +1,352 @@ +import sys +from logging import Logger +from pathlib import Path +from types import TracebackType +from typing import Callable + +import alembic.util.messaging + +# this is Alembic's main entry point +from alembic.config import CommandLine, Config +from alembic.config import main as alembic_main +from attr import dataclass +from typing_extensions import final + + +@final +class BLAlembic: + DEFAULT_CONFIG_NAME: str = "alembic.ini" + LOG_LEVEL_NAME: str = "LOG_LEVEL" + + _run: Callable[[], None] + _log: Logger + + @dataclass + class FileCopy: + source: Path + destination: Path + + def __init__(self, argv: list[str] | None, logger: Logger) -> None: + """ + _summary_ + + :param list[str] | None argv: The command line arguments to be parsed by ArgumentParser. + If None, this will use `sys.argv[1:]` to use CLI arguments. + :param Logger logger: A logger for writing messages. 
+ """ + self._log = logger + + if not argv: + argv = sys.argv[1:] + + args = set(argv) + + if not args or "-h" in args or "--help" in args: + self._log.debug(f"Empty or 'help' args passed from Alembic: {args}") + self._run = lambda: self._run_with_alembic_defaults(argv) + elif "-c" in args or "--config" in args: + self._log.debug(f"'config' args passed from Alembic: {args}") + self._run = lambda: self._run_with_specified_config(argv) + else: + self._log.debug(f"Execution-only args passed from Alembic: {args}") + self._run = lambda: self._run_with_config(argv) + + def _get_config(self, argv: list[str]) -> Config: + """ + Get a parsed Alembic INI file as a `Config` object. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return Config: The `Config` object with options set from an INI file. + """ + # needs to open the config and return it + # so we can get the alembic migration directory + alembic_cli = CommandLine() + parsed_args = alembic_cli.parser.parse_args(argv) + self._log.debug(f"Parsed arguments: {parsed_args}") + config = Config(parsed_args.config) + self._log.debug(f"Instantiated config: {repr(config)}") + return config + + def _run_with_alembic_defaults(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when no command line arguments, or `-h` or `--help`, are specified. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running unmodified `alembic` command.") + return alembic_main(argv) + + def _run_with_specified_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically. + + Used when `-c` or `--config` are specified. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. 
+ :return None: + """ + self._log.debug("Running unmodified `alembic` command.") + self._execute_alembic(argv) + + def _run_with_config(self, argv: list[str]) -> None: + """ + Calls `alembic` programmatically either: + - if the file 'alembic.ini' exists in the same working + directory in which the command is run. + - if the file 'alembic.ini' does not exist and after creating + a temporary configuration file from the BL_Python default Alembic + config, and forcing the temporary configuration file to be used + by `alembic`. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + self._log.debug("Running `alembic` with modified command.") + self._write_bl_alembic_config() + argv = ["-c", BLAlembic.DEFAULT_CONFIG_NAME] + argv + + self._execute_alembic(argv) + + def _execute_alembic(self, argv: list[str]) -> None: + """ + Programmatically run `alembic`. + + :param list[str] argv: The command line arguments to be parsed by ArgumentParser. + :return None: + """ + config = self._get_config(argv) + + with self._initialize_alembic(config) as msg_capture: + try: + return alembic_main(argv) + except SystemExit as e: + self._log.error(e) + # If SystemExit is from anything other than + # needing to create the init dir, then crash. + # This is doable/reliable because Alembic first writes + # a message that the directory needs to be created, + # then calls `sys.exit(-1)`. + if not msg_capture.seen: + raise + + self._log.debug( + f"The Alembic initialization error was seen. Ignoring `{SystemExit.__name__}` exception." + ) + + def _initialize_alembic(self, config: Config): + """ + Set up Alembic to run `alembic init` programmatically if it is needed. + + :param Config config: The config, parsed from an Alembic INI configuration file. + :return MsgCaptureCtxManager: A type indicating whether an expected message was + written by Alembic. 
In the case of this method, if the "use the 'init'" + message is seen, then `alembic init` is executed. This type can be used to + determine whether `alembic init` was executed. + :return MsgCaptureCtxManager: + """ + script_location = config.get_main_option("script_location") or "alembic" + + def _msg_new(msg: Callable[[str, bool, bool, bool], None]): + nonlocal script_location + self._log.debug("Executing `alembic init`.") + msg( + "'alembic' migration directory does not exist. Creating it.", + # these bool values are defaults for Alembic msg function + True, + False, + False, + ) + alembic_main(["init", script_location]) + + self._overwrite_alembic_env_files(config) + + return self._alembic_msg_capture( + "use the 'init' command to create a new scripts folder", _msg_new + ) + + def _overwrite_alembic_env_files(self, config: Config) -> None: + """ + Overwrite env.py and env_setup.py in an Alembic migrations directory. + Currently this only runs if `alembic init` is executed, and care must + be taken if we intend to change this to overwrite the files if they exist. + The files will exist if `alembic init` was executed prior to this tool. + + :param Config config: The config, parsed from an Alembic INI configuration file. + :return None: + """ + script_location = config.get_main_option("script_location") or "alembic" + bl_python_alembic_file_dir = Path(__file__).resolve().parent + + files = [ + BLAlembic.FileCopy( + Path(bl_python_alembic_file_dir, f"_replacement_{basename}.py"), + Path(Path.cwd(), Path(script_location, f"{basename}.py")), + ) + for basename in ["env", "env_setup"] + ] + + self._log.debug(f"Rewriting base Alembic files: '{files}'") + # force the overwrite because Alembic creates the + # files that we want to replace. + self._copy_files(files, force_overwrite=True) + + def _write_bl_alembic_config( + self, + ) -> None: + """ + Write the BL_Python Alembic tool's default configuration file to a temp file. 
+ + :yield Generator[tempfile._TemporaryFileWrapper[bytes], Any, None]: The temp file. + """ + config_file_destination = Path(Path.cwd(), BLAlembic.DEFAULT_CONFIG_NAME) + if config_file_destination.exists(): + self._log.debug( + f"Configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' exists. Will not attempt to create it." + ) + return + + # copy the default alembic.ini + # to the directory in which bl-alembic is executed. + self._log.debug( + f"Writing configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}'." + ) + self._copy_files([ + BLAlembic.FileCopy( + Path(Path(__file__).resolve().parent, BLAlembic.DEFAULT_CONFIG_NAME), + config_file_destination, + ) + ]) + + def _copy_files(self, files: list[FileCopy], force_overwrite: bool = False): + for file in files: + write_mode = "w+b" if force_overwrite else "x+b" + try: + with ( + open(file.source, "r") as source, + open(file.destination, write_mode) as destination, + ): + destination.writelines(source.buffer) + except FileExistsError as e: + if e.filename != str(file.destination): + raise + + self._log.debug( + f"The file '{file.destination}' already exists. Refusing to overwrite, but ignoring exception." + ) + + def _alembic_msg_capture( + self, + msg_to_capture: str, + callback: Callable[[Callable[[str, bool, bool, bool], None]], None], + ): + """ + Capture a specific message written by Alembic, and call `callback` if it matches. + + This method override's Alembic's `msg` function and restores it when the + context is closed. + + :param str msg_to_capture: The specific message to monitor in Alembic's writes. + :param Callable[[Callable[[str, bool, bool, bool], None]], None] callback: + A callable that receives Alembic's `msg` function as a parameter. 
+        :return MsgCaptureCtxManager: + """ + + OVERRIDDEN_ORIGINAL_ATTR_NAME = "_overridden_original" + if hasattr(alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME): + # if the attr exists that means we have already overridden it, + # so we set `_msg_original` to the real original. + self._log.debug( + f"`alembic.util.messaging.msg` has already been overwritten. Using `{OVERRIDDEN_ORIGINAL_ATTR_NAME}` attribute to get the original method." + ) + _msg_original = getattr( + alembic.util.messaging.msg, OVERRIDDEN_ORIGINAL_ATTR_NAME + ) + else: + self._log.debug( + f"`alembic.util.messaging.msg` has not been overridden. Using it as the original method." + ) + # if the attr does not exist, then we assume `msg` is + # the original Alembic `msg` function. + _msg_original: Callable[[str, bool, bool, bool], None] = ( + alembic.util.messaging.msg + ) + + @dataclass + class MessageSeen: + seen: bool = False + + @final + class MsgCaptureCtxManager: + _msg_seen: MessageSeen = MessageSeen() + _log: Logger + + def __init__(self, logger: Logger) -> None: + self._log = logger + + def __enter__(self) -> MessageSeen: + """ + Replace Alembic's `msg` function in order to execute + a callback when certain messages are seen. + + :return MessageSeen: An object whose `seen` attribute becomes True once the watched message is written. + """ + self._log.debug(f"Entering `{MsgCaptureCtxManager.__name__}` context.") + + def _msg_new( + msg: str, + newline: bool = True, + flush: bool = False, + quiet: bool = False, + ): + if msg_to_capture in msg: + self._log.debug( + f"The msg '{msg_to_capture}' was written by Alembic." + ) + callback(_msg_original) + self._msg_seen.seen = True + else: + _msg_original(msg, newline, flush, quiet) + + setattr( + _msg_new, OVERRIDDEN_ORIGINAL_ATTR_NAME, alembic.util.messaging.msg + ) + + self._log.debug( + f"Overwritting `alembic.util.messaging.msg` with `{repr(_msg_new)}`." 
+ ) + alembic.util.messaging.msg = _msg_new + + return self._msg_seen + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> bool: + """ + Revert replacing Alembic's `msg` function by restoring the original. + + :param type[BaseException] | None exc_type: + :param BaseException | None exc_val: + :param TracebackType | None exc_tb: + :return bool: + """ + self._log.debug(f"Exiting `{MsgCaptureCtxManager.__name__}` context.") + alembic.util.messaging.msg = _msg_original + return True + + return MsgCaptureCtxManager(self._log) + + def run(self) -> None: + """ + Run Alembic migrations, initializing Alembic if necessary. + + :return None: + """ + self._log.debug("Bootstrapping and executing `alembic` process.") + # FIXME this ends up logging ERROR:root:-1 in some cases + return self._run() diff --git a/src/database/BL_Python/database/migrations/alembic/env.py b/src/database/BL_Python/database/migrations/alembic/env.py new file mode 100644 index 00000000..0a86eac7 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/env.py @@ -0,0 +1,28 @@ +from pathlib import Path + +from alembic import context +from BL_Python.database.config import Config, DatabaseConfig +from BL_Python.database.migrations.alembic.env_setup import AlembicEnvSetup +from BL_Python.database.types import MetaBase +from BL_Python.programming.config import ConfigBuilder, load_config +from BL_Python.programming.dependency_injection import ConfigModule +from injector import Injector + + +def run_migrations(bases: list[MetaBase], config_filename: Path | None = None): + if config_filename is None: + config_filename = Path("config.toml") + + config_type = ConfigBuilder[Config]().with_root_config(Config).build() + config = load_config(config_type, config_filename) + config_module = ConfigModule(config, Config) + database_config_module = ConfigModule(config.database, DatabaseConfig) + + ioc_container = 
Injector([config_module, database_config_module]) + + alembic_env = ioc_container.create_object(AlembicEnvSetup) + + if context.is_offline_mode(): + alembic_env.run_migrations_offline(bases) + else: + alembic_env.run_migrations_online(bases) diff --git a/src/database/BL_Python/database/migrations/alembic/env_setup.py b/src/database/BL_Python/database/migrations/alembic/env_setup.py new file mode 100644 index 00000000..7d30c3f5 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/env_setup.py @@ -0,0 +1,254 @@ +# import logging +# from configparser import ConfigParser +from dataclasses import dataclass +from functools import lru_cache +from logging.config import fileConfig +from typing import Any, List, Literal, Protocol, cast, final + +from alembic import context +from BL_Python.database.config import DatabaseConfig +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from BL_Python.database.schema.sqlite import SQLiteDialect +from BL_Python.database.types import MetaBase +from injector import inject + +# TODO only do this when using PostgreSQL, +# and detect if the module is installed +# so we can show a helpful error message +from psycopg2.errors import UndefinedTable +from sqlalchemy import MetaData, Table, engine_from_config, pool +from sqlalchemy.engine import Connectable, Connection, Engine +from sqlalchemy.exc import ProgrammingError +from sqlalchemy.schema import SchemaItem + +# from AWS import load_ssm_application_parameters + + +class type_include_object(Protocol): + # self, object: Table, name: str, type_: str, reflected: Any, compare_to: Any + def __call__( + self, + object: SchemaItem, + name: str | None, + type_: Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + reflected: bool, + compare_to: SchemaItem | None, + ) -> bool: ... + + +class type_include_schemas(Protocol): + def __call__(self, names: List[str]) -> type_include_object: ... 
+ + +@dataclass +class type_metadata: + include_schemas: type_include_schemas + target_metadata: List[MetaData] + schemas: List[str] + + +@final +class AlembicEnvSetup: + _config: DatabaseConfig + + @inject + def __init__(self, config: DatabaseConfig) -> None: + self._config = config + + @lru_cache(maxsize=1) + def get_config(self): + # TODO re-integrate AWS SSM at a later time + # aws_ssm_config = ConfigParser() + # loaded_config_files = aws_ssm_config.read("aws-ssm.ini") + # if loaded_config_files: + # load_ssm_application_parameters(aws_ssm_config) + # else: + # logging.getLogger().info( + # "Could not read aws-ssm.ini config file. Skipping SSM parameter lookup." + # ) + + # this is the Alembic Config object, which provides + # access to the values within the .ini file in use. + config = context.config + + # Interpret the config file for Python logging. + # This line sets up loggers basically. + if config.config_file_name is not None: + # raise Exception("Config file is missing.") + fileConfig(config.config_file_name) + + # FIXME why is this here? 
+ config.set_main_option("sqlalchemy.url", self._config.connection_string) + + return config + + _type_metadata: type_metadata | None = None + + def get_metadata(self, bases: list[MetaBase]): + if self._type_metadata is not None: + return self._type_metadata + + def include_schemas(names: List[str]): + def include_object( + object: SchemaItem | Table, + name: str | None, + type_: Literal[ + "schema", + "table", + "column", + "index", + "unique_constraint", + "foreign_key_constraint", + ], + reflected: bool, + compare_to: SchemaItem | None, + ) -> bool: + if type_ == "table" and isinstance(object, Table): + return object.schema in names + return True + + return include_object + + target_metadata = [base.metadata for base in bases] + schemas = [ + base.__table_args__["schema"] + for base in bases + if hasattr(base, "__table_args__") + and isinstance(base.__table_args__, dict) + and base.__table_args__["schema"] is not None + ] + + self._type_metadata = type_metadata(include_schemas, target_metadata, schemas) + return self._type_metadata + + def _configure_context( + self, bases: list[MetaBase], connection: Connection | Connectable | Engine + ): + metadata = self.get_metadata(bases) + target_metadata = metadata.target_metadata + include_schemas = metadata.include_schemas + schemas = metadata.schemas + + if connection.engine is None: + raise Exception("Unknown error. Connection engine is not set.") + + if not isinstance(connection, Connection): + raise Exception( + f"Unknown error. Connection is not a connection; it is a `{type(connection).__name__}`." 
+ ) + + if connection.engine.name == SQLiteDialect.DIALECT_NAME: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + render_as_batch=True, + ) + elif connection.engine.name == PostgreSQLDialect.DIALECT_NAME: + context.configure( + connection=connection, + target_metadata=target_metadata, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + ) + else: + raise Exception( + f"Unsupported database dialect `{connection.engine.name}`. Expected one of {[SQLiteDialect.DIALECT_NAME, PostgreSQLDialect.DIALECT_NAME]}" + ) + + def _run_migrations( + self, bases: list[MetaBase], connection: Connection | Connectable | Engine + ): + if connection.engine is None: + raise Exception( + "SQLAlchemy Session is not bound to an engine. This is not supported." + ) + + metadata = self.get_metadata(bases) + schemas = metadata.schemas + with context.begin_transaction(): + try: + if connection.engine.name == "postgresql": + _ = connection.execute( + f"SET search_path TO {','.join(schemas)},public;" + ) + context.run_migrations() + except ProgrammingError as error: + # This occurs when downgrading from the very last version + # because the `alembic_version` table is dropped. The exception + # can be safely ignored because the migration commits the transaction + # before the failure, and there is nothing left for Alembic to do. + if not ( + type(error.orig) is UndefinedTable + and "DELETE FROM alembic_version" in error.statement + ): + raise + + def run_migrations_offline(self, bases: list[MetaBase]): + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. 
+ + """ + config = self.get_config() + metadata = self.get_metadata(bases) + target_metadata = metadata.target_metadata + include_schemas = metadata.include_schemas + schemas = metadata.schemas + + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + compare_type=True, + include_schemas=True, + include_object=include_schemas(schemas), + ) + + with context.begin_transaction(): + context.run_migrations() + + def run_migrations_online(self, bases: list[MetaBase]): + """Run migrations in 'online' mode. + + In this scenario we need to create an Engine + and associate a connection with the context. + + """ + config = self.get_config() + + connectable: Connectable | None = cast(dict[str, Any], config.attributes).get( + "connection", None + ) + + if connectable: + self._configure_context(bases, connectable) + self._run_migrations(bases, connectable) + else: + connectable = engine_from_config( + config.get_section(config.config_ini_section), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + with connectable.connect() as connection: + self._configure_context(bases, connection) + self._run_migrations(bases, connection) diff --git a/src/database/BL_Python/database/migrations/alembic/script.py.mako b/src/database/BL_Python/database/migrations/alembic/script.py.mako new file mode 100644 index 00000000..2c015630 --- /dev/null +++ b/src/database/BL_Python/database/migrations/alembic/script.py.mako @@ -0,0 +1,24 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision = ${repr(up_revision)} +down_revision = ${repr(down_revision)} +branch_labels = ${repr(branch_labels)} +depends_on = ${repr(depends_on)} + + +def upgrade(): + ${upgrades if upgrades else "pass"} + + +def downgrade(): + ${downgrades if downgrades else "pass"} diff --git a/src/database/BL_Python/database/schema/__init__.py b/src/database/BL_Python/database/schema/__init__.py new file mode 100644 index 00000000..3ede3bb6 --- /dev/null +++ b/src/database/BL_Python/database/schema/__init__.py @@ -0,0 +1,21 @@ +from BL_Python.database.types import Op +from sqlalchemy.engine import Dialect + +from .postgresql import PostgreSQLDialect +from .sqlite import SQLiteDialect + +_dialect_type_map = {"sqlite": SQLiteDialect, "postgresql": PostgreSQLDialect} + + +def get_type_from_dialect(dialect: Dialect): + if not _dialect_type_map.get(dialect.name): + raise ValueError( + f"Unexpected dialect with name `{dialect.name}`. Expected one of {list(_dialect_type_map.keys())}." + ) + + return _dialect_type_map[dialect.name](dialect) + + +def get_type_from_op(op: Op): + dialect: Dialect = op.get_bind().dialect + return get_type_from_dialect(dialect) diff --git a/src/database/BL_Python/database/schema/dialect.py b/src/database/BL_Python/database/schema/dialect.py new file mode 100644 index 00000000..d30554bd --- /dev/null +++ b/src/database/BL_Python/database/schema/dialect.py @@ -0,0 +1,67 @@ +from abc import ABC, abstractmethod + +from BL_Python.database.types import MetaBase, TableNameCallback +from sqlalchemy.engine import Dialect + + +class DialectBase(ABC): + supports_schemas: bool = False + + @staticmethod + def get_schema(meta: MetaBase): + table_args = hasattr(meta, "__table_args__") and meta.__table_args__ or None + + if isinstance(table_args, dict): + return table_args.get("schema") + + return None + + @staticmethod + def iterate_table_names( + dialect: "DialectBase", + schema_tables: dict[MetaBase, list[str]], + table_name_callback: TableNameCallback, + ): + """ + 
Call `table_name_callback` once for every table in every Base.
+
+        dialect: The `DialectBase` whose naming rules are used to resolve schemas and full table names.
+        schema_tables: A dictionary of the tables this call applies to for every Base.
+        table_name_callback: A callback executed for every table in `schema_tables`.
+        """
+        get_full_table_name = dialect.get_full_table_name
+        get_dialect_schema = dialect.get_dialect_schema
+
+        for meta_base, schema_base_tables in schema_tables.items():
+            dialect_schema = get_dialect_schema(meta_base)
+            for base_table in schema_base_tables:
+                full_table_name = get_full_table_name(base_table, meta_base)
+                table_name_callback(
+                    dialect_schema, full_table_name, base_table, meta_base
+                )
+
+    def get_dialect_schema(self, meta: MetaBase):
+        if self.supports_schemas:
+            return DialectBase.get_schema(meta)
+
+        return None
+
+    def get_full_table_name(self, table_name: str, meta: MetaBase):
+        """
+        If the dialect supports schemas, then the table name does not have the schema prepended.
+        In dialects that don't support schemas, e.g., SQLite, the table name has the schema prepended.
+        This is because, when schemas are supported, the dialect automatically handles which schema
+        to use, while non-schema dialects do not reference any schemas.
+        """
+        if self.get_dialect_schema(meta):
+            return table_name
+        else:
+            return f"{DialectBase.get_schema(meta)}.{table_name}"
+
+    @property
+    @abstractmethod
+    def dialect(self) -> Dialect: ... # pragma: nocover
+
+    @property
+    @abstractmethod
+    def timestamp_sql(self) -> str: ...
# pragma: nocover diff --git a/src/database/BL_Python/database/schema/postgresql.py b/src/database/BL_Python/database/schema/postgresql.py new file mode 100644 index 00000000..3a3609e7 --- /dev/null +++ b/src/database/BL_Python/database/schema/postgresql.py @@ -0,0 +1,29 @@ +from typing import final + +from BL_Python.database.schema.dialect import DialectBase +from sqlalchemy.engine import Dialect +from typing_extensions import override + + +@final +class PostgreSQLDialect(DialectBase): + DIALECT_NAME = "postgresql" + _dialect: Dialect + supports_schemas: bool = True + + def __init__(self, dialect: Dialect) -> None: + if dialect.name != PostgreSQLDialect.DIALECT_NAME: + raise ValueError( + f"Invalid Dialect with name `{dialect.name}` provided for `{PostgreSQLDialect.__name__}`. Expected `{self.DIALECT_NAME}`." + ) + self._dialect = dialect + + @property + @override + def dialect(self) -> Dialect: + return self._dialect + + @property + @override + def timestamp_sql(self): + return "now()" diff --git a/src/database/BL_Python/database/schema/sqlite.py b/src/database/BL_Python/database/schema/sqlite.py new file mode 100644 index 00000000..f62972ba --- /dev/null +++ b/src/database/BL_Python/database/schema/sqlite.py @@ -0,0 +1,30 @@ +from typing import final + +from BL_Python.database.schema.dialect import DialectBase +from sqlalchemy.engine import Dialect +from typing_extensions import override + + +@final +class SQLiteDialect(DialectBase): + DIALECT_NAME = "sqlite" + _dialect: Dialect + supports_schemas: bool = False + + def __init__(self, dialect: Dialect) -> None: + if dialect.name != SQLiteDialect.DIALECT_NAME: + raise ValueError( + f"Invalid Dialect with name `{dialect.name}` provided for `{SQLiteDialect.__name__}`. Expected `{self.DIALECT_NAME}`." 
+ ) + + self._dialect = dialect + + @property + @override + def dialect(self) -> Dialect: + return self._dialect + + @property + @override + def timestamp_sql(self): + return "CURRENT_TIMESTAMP" diff --git a/src/database/BL_Python/database/types.py b/src/database/BL_Python/database/types.py index fbb5d70c..115574c9 100644 --- a/src/database/BL_Python/database/types.py +++ b/src/database/BL_Python/database/types.py @@ -1,6 +1,10 @@ -from typing import Protocol, TypedDict +from typing import Protocol, TypedDict, TypeVar from sqlalchemy import Constraint, MetaData +from sqlalchemy.engine import Dialect + +TBase = TypeVar("TBase") + TableArgsDict = TypedDict("TableArgsDict", {"schema": str | None}) @@ -9,3 +13,22 @@ class MetaBase(Protocol): metadata: MetaData __tablename__: str __table_args__: tuple[Constraint | TableArgsDict, ...] | TableArgsDict + + +class TableNameCallback(Protocol): + def __call__( # pragma: nocover + self, + dialect_schema: str | None, + full_table_name: str, + base_table: str, + meta_base: MetaBase, + ) -> None: ... + + +class Connection(Protocol): + dialect: Dialect + + +class Op(Protocol): # pragma: nocover + @staticmethod + def get_bind() -> Connection: ... 
diff --git a/src/database/MANIFEST.in b/src/database/MANIFEST.in index cfda70e0..795e1fd6 100644 --- a/src/database/MANIFEST.in +++ b/src/database/MANIFEST.in @@ -1,2 +1,3 @@ +graft BL_Python/database/migrations/alembic/ global-include *.pyi global-include py.typed diff --git a/src/database/pyproject.toml b/src/database/pyproject.toml index ec2f38a2..81c90173 100644 --- a/src/database/pyproject.toml +++ b/src/database/pyproject.toml @@ -57,6 +57,9 @@ exclude = ["build*"] [tool.setuptools.package-data] "BL_Python.database" = ["py.typed"] +[project.scripts] +bl-alembic = "BL_Python.database.migrations.alembic.__main__:bl_alembic" + [project.optional-dependencies] postgres = [ "psycopg2 ~= 2.9" diff --git a/src/database/test/unit/migrations/alembic/test_bl_alembic.py b/src/database/test/unit/migrations/alembic/test_bl_alembic.py new file mode 100644 index 00000000..7f2a7114 --- /dev/null +++ b/src/database/test/unit/migrations/alembic/test_bl_alembic.py @@ -0,0 +1,225 @@ +from typing import Any, Generator, Protocol +from unittest.mock import MagicMock + +import alembic +import alembic.util +import pytest +from BL_Python.database.migrations.alembic.bl_alembic import BLAlembic +from mock import MagicMock +from pytest_mock import MockerFixture +from pytest_mock.plugin import MockType + + +class MockArgv(Protocol): + def __call__(self, args: list[str]) -> MockType: ... 
+ + +@pytest.fixture +def mock_argv(mocker: MockerFixture) -> Generator[MockArgv, Any, None]: + argv = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.sys.argv") + + def set_args(args: list[str]): + argv.__getitem__ = ( + lambda _argv, _slice: (["bl-alembic"] + args)[_slice] # pyright: ignore[reportUnknownLambdaType] + ) + return argv + + yield set_args + + +def mock_alembic(mocker: MockerFixture): + return mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.alembic_main") + + +def mock_config(mocker: MockerFixture): + return mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + + +@pytest.mark.parametrize( + "args", + [ + [], + ["-h"], + ["--help"], + ["-c", "test-config.ini"], + ["--config", "test-config.ini"], + ["-c", "alembic.ini", "upgrade", "head"], + ], +) +def test__BLAlembic__passes_through_to_alembic_with_correct_args( + args: list[str], mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(args) + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + alembic_main = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.alembic_main" + ) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert alembic_main.called + alembic_main.assert_called_once_with(args) + + +def test__BLAlembic__passes_through_to_alembic_with_default_config_when_not_specified( + mock_argv: MockArgv, + mocker: MockerFixture, +): + args = ["upgrade", "head"] + _ = mock_argv(args) + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Config") + alembic_main = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.alembic_main" + ) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert alembic_main.called + alembic_main.assert_called_once_with(["-c", BLAlembic.DEFAULT_CONFIG_NAME] + args) + + +def test__BLAlembic__creates_default_config(mock_argv: MockArgv, mocker: MockerFixture): + _ = mock_alembic(mocker) + _ = mock_config(mocker) + 
_ = mock_argv(["upgrade", "head"]) + + def path_se(*args: Any, **kwargs: Any): + # set the call args for the Path mocks that are passed + # into the FileCopy mock so we can examine them when FileCopy + # is called + return MagicMock(args=args, exists=MagicMock(return_value=False)) + + def file_copy_se(*args: Any, **kwargs: Any): + # set a mocked FileCopy whose src/dest are strings (filenames) + return MagicMock(source=args[0].args[1], destination=args[1].args[1]) + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", side_effect=path_se + ) + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.BLAlembic.FileCopy", + side_effect=file_copy_se, + ) + open_mock = mocker.patch("builtins.open", mocker.mock_open()) + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert open_mock.called + call_args = [call[0] for call in open_mock.call_args_list] + assert (BLAlembic.DEFAULT_CONFIG_NAME, "r") in call_args + assert (BLAlembic.DEFAULT_CONFIG_NAME, "x+b") in call_args + + +def test__BLAlembic__does_not_overwrite_existing_config( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_alembic(mocker) + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", + return_value=MagicMock(exists=MagicMock(return_value=True)), + ) + _ = mocker.patch("builtins.open", mocker.mock_open()) + log_mock = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Logger") + + bl_alembic = BLAlembic(None, log_mock) + + try: + bl_alembic.run() + except: + pass + + assert [ + True + for call in log_mock.mock_calls + if call.args[0].startswith( + f"Configuration file '{BLAlembic.DEFAULT_CONFIG_NAME}' exists." 
+ ) + ] + + +def test__BLAlembic__crashes_when_overwriting_unexpected_file( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_alembic(mocker) + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Path", + return_value=MagicMock(exists=MagicMock(return_value=False)), + ) + open_mock = mocker.patch("builtins.open", mocker.mock_open()) + + def raise_file_exists_error(*args: Any, **kwargs: Any): + raise FileExistsError() + + open_mock.side_effect = raise_file_exists_error + + with pytest.raises(FileExistsError): + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + +def test__BLAlembic__initializes_alembic_if_not_already_initialized( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + _ = mocker.patch("builtins.open", mocker.mock_open()) + + _ = mock_config(mocker) + _mock_alembic = mock_alembic(mocker) + + def write_init_message(*args: Any, **kwargs: Any): + _mock_alembic.side_effect = None + alembic.util.messaging.msg( + "use the 'init' command to create a new scripts folder" + ) + + _mock_alembic.side_effect = write_init_message + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert "init" in [call[0][0][0] for call in _mock_alembic.call_args_list] + + +def test__BLAlembic__initializes_alembic_into_correct_directory_if_not_already_initialized( + mock_argv: MockArgv, mocker: MockerFixture +): + _ = mock_argv(["upgrade", "head"]) + + _ = mocker.patch("BL_Python.database.migrations.alembic.bl_alembic.Path") + _ = mocker.patch("builtins.open", mocker.mock_open()) + _mock_alembic = mock_alembic(mocker) + + # get_main_option_mock = MagicMock() + def get_main_option(option: str): + if option == "script_location": + return "migrations/" + return MagicMock() + + _ = mocker.patch( + "BL_Python.database.migrations.alembic.bl_alembic.Config", + 
return_value=MagicMock(get_main_option=get_main_option), + ) + + def write_init_message(*args: Any, **kwargs: Any): + _mock_alembic.side_effect = None + alembic.util.messaging.msg( + "use the 'init' command to create a new scripts folder" + ) + + _mock_alembic.side_effect = write_init_message + + bl_alembic = BLAlembic(None, MagicMock()) + bl_alembic.run() + + assert ["init", "migrations/"] in [ + call[0][0] for call in _mock_alembic.call_args_list + ] diff --git a/src/database/test/unit/migrations/alembic/test_env.py b/src/database/test/unit/migrations/alembic/test_env.py new file mode 100644 index 00000000..3ddb2f57 --- /dev/null +++ b/src/database/test/unit/migrations/alembic/test_env.py @@ -0,0 +1,59 @@ +from pathlib import Path + +import pytest +from BL_Python.database.migrations.alembic.env import run_migrations +from BL_Python.programming.str import get_random_str +from mock import MagicMock +from pytest_mock import MockerFixture + + +def test__run_migrations__sets_default_config_filename(mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Injector") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.load_config") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.context") + path_mock = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + + run_migrations(MagicMock()) + + path_mock.assert_called_once_with("config.toml") + + +def test__run_migrations__uses_specified_config_filename(mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Injector") + config_mock = mocker.patch("BL_Python.database.migrations.alembic.env.Config") + load_config_mock = mocker.patch( + "BL_Python.database.migrations.alembic.env.load_config" + ) + _ = mocker.patch("BL_Python.database.migrations.alembic.env.context") + + config_filename = Path(get_random_str()) + run_migrations(MagicMock(), 
config_filename=config_filename) + + load_config_mock.assert_called_once_with(config_mock, config_filename) + + +@pytest.mark.parametrize("mode", ["online", "offline"]) +def test__run_migrations__runs_correct_migration_mode(mode: str, mocker: MockerFixture): + _ = mocker.patch("BL_Python.database.migrations.alembic.env.load_config") + _ = mocker.patch("BL_Python.database.migrations.alembic.env.Path") + _ = mocker.patch( + "BL_Python.database.migrations.alembic.env.context", + is_offline_mode=MagicMock(return_value=mode == "offline"), + ) + alembic_env_setup_mock = MagicMock( + run_migrations_offline=MagicMock(), run_migrations_online=MagicMock() + ) + _ = mocker.patch( + "BL_Python.database.migrations.alembic.env.Injector", + return_value=MagicMock( + create_object=MagicMock(return_value=alembic_env_setup_mock) + ), + ) + + run_migrations(MagicMock()) + + if mode == "offline": + alembic_env_setup_mock.run_migrations_offline.assert_called_once() + else: + alembic_env_setup_mock.run_migrations_online.assert_called_once() diff --git a/src/database/test/unit/schema/test_dialect.py b/src/database/test/unit/schema/test_dialect.py new file mode 100644 index 00000000..f8a85b9b --- /dev/null +++ b/src/database/test/unit/schema/test_dialect.py @@ -0,0 +1,169 @@ +from typing import ClassVar + +import pytest +from BL_Python.database.schema.dialect import DialectBase +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from BL_Python.database.schema.sqlite import SQLiteDialect +from BL_Python.database.types import MetaBase +from BL_Python.programming.str import get_random_str +from mock import MagicMock +from sqlalchemy import Column, Integer +from sqlalchemy.engine import Dialect +from sqlalchemy.ext.declarative import declarative_base + + +def get_test_table(schema_name: str | None = "foo_schema"): + if schema_name is None: + + class _Base: ... 
# pyright: ignore[reportRedeclaration] + else: + + class _Base: + __table_args__ = {"schema": schema_name} + + Base = declarative_base(cls=_Base) + + class Foo(Base): # pyright: ignore[reportUntypedBaseClass] + __tablename__: ClassVar = "foo" + foo_id = Column("foo_id", Integer, primary_key=True) + + class Bar(Base): # pyright: ignore[reportUntypedBaseClass] + __tablename__: ClassVar = "bar" + bar_id = Column("bar_id", Integer, primary_key=True) + + return (schema_name, [Foo, Bar], _Base) + + +@pytest.fixture +def test_table(schema_name: str | None = "foo_schema"): + return get_test_table(schema_name) + + +@pytest.mark.parametrize("schema_name", ["foo_schema", None]) +def test__DialectBase__get_schema__returns_correct_table_schema_name( + schema_name: str | None, +): + (table_schema_name, tables, _) = get_test_table(schema_name) + + schema = DialectBase.get_schema(tables[0]) + + assert schema == table_schema_name + + +@pytest.mark.parametrize("dialect_type", [SQLiteDialect, PostgreSQLDialect]) +def test__DialectBase__init__raises_error_when_wrong_dialect_used( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], +): + dialect = Dialect() + dialect.name = get_random_str() + with pytest.raises( + ValueError, match=rf"Invalid Dialect with name `{dialect.name}`.+" + ): + _ = dialect_type(dialect) + + +@pytest.mark.parametrize( + "dialect_type,expected_schema_name", + [(SQLiteDialect, None), (PostgreSQLDialect, "foo_schema")], +) +def test__DialectBase__get_dialect_schema__returns_expected_schema_name( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_schema_name: str, + test_table: tuple[str, list[MetaBase], MetaBase], +): + (_, tables, _) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + + schema = test_dialect.get_dialect_schema(tables[0]) + + assert schema == expected_schema_name + + +@pytest.mark.parametrize( + "dialect_type,expected_table_name", + 
[(SQLiteDialect, "foo_schema.foo"), (PostgreSQLDialect, "foo")], +) +def test__DialectBase__get_full_table_name__returns_expected_table_name( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_table_name: str, + test_table: tuple[str, list[MetaBase], MetaBase], +): + (_, tables, _) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + + table_name = test_dialect.get_full_table_name("foo", tables[0]) + + assert table_name == expected_table_name + + +@pytest.mark.parametrize( + "dialect_type", + [SQLiteDialect, PostgreSQLDialect], +) +def test__DialectBase__iterate_table_names__calls_callback_for_every_table_in_metabase( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + test_table: tuple[str, list[MetaBase], MetaBase], +): + (schema_name, tables, meta_base) = test_table + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + schema_tables = {meta_base: [table.__tablename__ for table in tables]} + table_name_callback = MagicMock() + + test_dialect.iterate_table_names(test_dialect, schema_tables, table_name_callback) + + if test_dialect.supports_schemas: + table_name_callback.assert_any_call( + schema_name, tables[0].__tablename__, tables[0].__tablename__, meta_base + ) + table_name_callback.assert_any_call( + schema_name, tables[1].__tablename__, tables[1].__tablename__, meta_base + ) + else: + table_name_callback.assert_any_call( + None, + f"{schema_name}.{tables[0].__tablename__}", + tables[0].__tablename__, + meta_base, + ) + table_name_callback.assert_any_call( + None, + f"{schema_name}.{tables[1].__tablename__}", + tables[1].__tablename__, + meta_base, + ) + + +@pytest.mark.parametrize( + "dialect_name,dialect_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_dialect__dialect_type_uses_correct_sqlalchemy_dialect( + dialect_name: str, dialect_type: 
type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + assert test_dialect.dialect.name == dialect_name + + +@pytest.mark.parametrize( + "dialect_type,expected_sql", + [ + (SQLiteDialect, "CURRENT_TIMESTAMP"), + (PostgreSQLDialect, "now()"), + ], +) +def test__get_type_from_dialect__dialect_type_uses_correct_timestamp_sql( + dialect_type: type[SQLiteDialect] | type[PostgreSQLDialect], + expected_sql: str, +): + dialect = Dialect() + dialect.name = dialect_type.DIALECT_NAME + test_dialect = dialect_type(dialect) + assert test_dialect.timestamp_sql == expected_sql diff --git a/src/database/test/unit/test_engine.py b/src/database/test/unit/test_engine.py index afa6acbd..c22fcb68 100644 --- a/src/database/test/unit/test_engine.py +++ b/src/database/test/unit/test_engine.py @@ -129,6 +129,17 @@ def test__PostgreSQLScopedSession__create__uses_correct_connection_pool_type( assert isinstance(session.bind.pool, connection_pool_type) # type: ignore[reportUnknownMemberType,reportAttributeAccessIssue,reportOptionalMemberAccess] +def test__PostgreSQLScopedSession__create__verifies_dependencies_installed( + mocker: MockerFixture, +): + _ = mocker.patch( + "BL_Python.database.engine.postgresql.find_spec", return_value=None + ) + + with pytest.raises(ModuleNotFoundError): + _ = PostgreSQLScopedSession.create(POSTGRESQL_TEST_CONNECTION_STR) + + @patch.object(MetaData, "reflect", MagicMock()) @pytest.mark.parametrize( "session_type,connection_string", diff --git a/src/database/test/unit/test_schema.py b/src/database/test/unit/test_schema.py new file mode 100644 index 00000000..225f19e4 --- /dev/null +++ b/src/database/test/unit/test_schema.py @@ -0,0 +1,59 @@ +import pytest +from alembic.operations.base import Operations +from BL_Python.database.schema import get_type_from_dialect, get_type_from_op +from BL_Python.database.schema.postgresql import PostgreSQLDialect +from 
BL_Python.database.schema.sqlite import SQLiteDialect +from mock import MagicMock +from sqlalchemy.engine import Dialect + + +@pytest.mark.parametrize( + "dialect_name,expected_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_dialect__returns_correct_dialect_instance( + dialect_name: str, expected_type: type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_name + dialect_type = get_type_from_dialect(dialect) + assert isinstance(dialect_type, expected_type) + + +def test__get_type_from_dialect__raises_exception_when_given_unknown_dialect(): + dialect_name = "mssql" + dialect = Dialect() + dialect.name = dialect_name + + with pytest.raises( + ValueError, match=rf"Unexpected dialect with name `{dialect_name}`.+" + ): + _ = get_type_from_dialect(dialect) + + +@pytest.mark.parametrize( + "dialect_name,expected_type", + [("sqlite", SQLiteDialect), ("postgresql", PostgreSQLDialect)], +) +def test__get_type_from_op__returns_correct_dialect_instance( + dialect_name: str, expected_type: type[SQLiteDialect] | type[PostgreSQLDialect] +): + dialect = Dialect() + dialect.name = dialect_name + migration_context = MagicMock(impl=MagicMock(bind=MagicMock(dialect=dialect))) + op = Operations(migration_context) + dialect_type = get_type_from_op(op) + assert isinstance(dialect_type, expected_type) + + +def test__get_type_from_op__raises_exception_when_given_unknown_dialect(): + dialect_name = "mssql" + dialect = Dialect() + dialect.name = dialect_name + migration_context = MagicMock(impl=MagicMock(bind=MagicMock(dialect=dialect))) + op = Operations(migration_context) + + with pytest.raises( + ValueError, match=rf"Unexpected dialect with name `{dialect_name}`.+" + ): + _ = get_type_from_op(op) diff --git a/typings/alembic/__init__.pyi b/src/database/typings/alembic/__init__.pyi similarity index 90% rename from typings/alembic/__init__.pyi rename to src/database/typings/alembic/__init__.pyi 
index c8d63214..cb635d53 100644 --- a/typings/alembic/__init__.pyi +++ b/src/database/typings/alembic/__init__.pyi @@ -2,7 +2,6 @@ This type stub file was generated by pyright. """ -import sys from . import context, op __version__ = ... diff --git a/typings/alembic/__main__.pyi b/src/database/typings/alembic/__main__.pyi similarity index 100% rename from typings/alembic/__main__.pyi rename to src/database/typings/alembic/__main__.pyi diff --git a/src/database/typings/alembic/autogenerate/__init__.pyi b/src/database/typings/alembic/autogenerate/__init__.pyi new file mode 100644 index 00000000..bff3dfff --- /dev/null +++ b/src/database/typings/alembic/autogenerate/__init__.pyi @@ -0,0 +1,9 @@ +""" +This type stub file was generated by pyright. +""" + +from .api import RevisionContext as RevisionContext, _render_migration_diffs as _render_migration_diffs, compare_metadata as compare_metadata, produce_migrations as produce_migrations, render_python_code as render_python_code +from .compare import _produce_net_changes as _produce_net_changes, comparators as comparators +from .render import render_op_text as render_op_text, renderers as renderers +from .rewriter import Rewriter as Rewriter + diff --git a/typings/alembic/autogenerate/api.pyi b/src/database/typings/alembic/autogenerate/api.pyi similarity index 89% rename from typings/alembic/autogenerate/api.pyi rename to src/database/typings/alembic/autogenerate/api.pyi index e1f90a5f..57092137 100644 --- a/typings/alembic/autogenerate/api.pyi +++ b/src/database/typings/alembic/autogenerate/api.pyi @@ -2,15 +2,17 @@ This type stub file was generated by pyright. """ -from typing import Any, Callable, Dict, Iterator, Optional, Sequence, Set, TYPE_CHECKING, Union +from typing import Any, Dict, Iterator, List, Optional, Sequence, Set, TYPE_CHECKING, Union from .. 
import util +from ..util import sqla_compat from sqlalchemy.engine import Connection, Dialect, Inspector -from sqlalchemy.sql.schema import MetaData, SchemaItem +from sqlalchemy.sql.schema import MetaData, SchemaItem, Table from ..config import Config from ..operations.ops import DowngradeOps, MigrationScript, UpgradeOps -from ..runtime.environment import NameFilterParentNames, NameFilterType, RenderItemFn +from ..runtime.environment import NameFilterParentNames, NameFilterType, ProcessRevisionDirectiveFn, RenderItemFn from ..runtime.migration import MigrationContext from ..script.base import Script, ScriptDirectory +from ..script.revision import _GetRevArg if TYPE_CHECKING: ... @@ -182,7 +184,7 @@ class AutogenContext: dialect: Optional[Dialect] = ... imports: Set[str] = ... migration_context: MigrationContext = ... - def __init__(self, migration_context: MigrationContext, metadata: Optional[MetaData] = ..., opts: Optional[dict] = ..., autogenerate: bool = ...) -> None: + def __init__(self, migration_context: MigrationContext, metadata: Optional[MetaData] = ..., opts: Optional[Dict[str, Any]] = ..., autogenerate: bool = ...) -> None: ... @util.memoized_property @@ -202,7 +204,7 @@ class AutogenContext: """ ... - def run_object_filters(self, object_: SchemaItem, name: Optional[str], type_: NameFilterType, reflected: bool, compare_to: Optional[SchemaItem]) -> bool: + def run_object_filters(self, object_: SchemaItem, name: sqla_compat._ConstraintName, type_: NameFilterType, reflected: bool, compare_to: Optional[SchemaItem]) -> bool: """Run the context's object filters and return True if the targets should be part of the autogenerate operation. @@ -217,7 +219,7 @@ class AutogenContext: run_filters = ... @util.memoized_property - def sorted_tables(self): # -> list[Unknown]: + def sorted_tables(self) -> List[Table]: """Return an aggregate of the :attr:`.MetaData.sorted_tables` collection(s). @@ -230,7 +232,7 @@ class AutogenContext: ... 
@util.memoized_property - def table_key_to_table(self): # -> dict[Unknown, Unknown]: + def table_key_to_table(self) -> Dict[str, Table]: """Return an aggregate of the :attr:`.MetaData.tables` dictionaries. The :attr:`.MetaData.tables` collection is a dictionary of table key @@ -248,13 +250,15 @@ class AutogenContext: class RevisionContext: """Maintains configuration and state that's specific to a revision file generation operation.""" - def __init__(self, config: Config, script_directory: ScriptDirectory, command_args: Dict[str, Any], process_revision_directives: Optional[Callable] = ...) -> None: + generated_revisions: List[MigrationScript] + process_revision_directives: Optional[ProcessRevisionDirectiveFn] + def __init__(self, config: Config, script_directory: ScriptDirectory, command_args: Dict[str, Any], process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> None: ... - def run_autogenerate(self, rev: tuple, migration_context: MigrationContext) -> None: + def run_autogenerate(self, rev: _GetRevArg, migration_context: MigrationContext) -> None: ... - def run_no_autogenerate(self, rev: tuple, migration_context: MigrationContext) -> None: + def run_no_autogenerate(self, rev: _GetRevArg, migration_context: MigrationContext) -> None: ... def generate_scripts(self) -> Iterator[Optional[Script]]: diff --git a/src/database/typings/alembic/autogenerate/compare.pyi b/src/database/typings/alembic/autogenerate/compare.pyi new file mode 100644 index 00000000..9e5668cf --- /dev/null +++ b/src/database/typings/alembic/autogenerate/compare.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, Mapping, TYPE_CHECKING, TypeVar, Union +from sqlalchemy.sql.schema import ForeignKeyConstraint, Index, UniqueConstraint + +if TYPE_CHECKING: + ... +log = ... +comparators = ... +_IndexColumnSortingOps: Mapping[str, Any] = ... 
+_C = TypeVar("_C", bound=Union[UniqueConstraint, ForeignKeyConstraint, Index]) diff --git a/typings/alembic/autogenerate/render.pyi b/src/database/typings/alembic/autogenerate/render.pyi similarity index 100% rename from typings/alembic/autogenerate/render.pyi rename to src/database/typings/alembic/autogenerate/render.pyi diff --git a/typings/alembic/autogenerate/rewriter.pyi b/src/database/typings/alembic/autogenerate/rewriter.pyi similarity index 76% rename from typings/alembic/autogenerate/rewriter.pyi rename to src/database/typings/alembic/autogenerate/rewriter.pyi index 0936ce63..ed4efa76 100644 --- a/typings/alembic/autogenerate/rewriter.pyi +++ b/src/database/typings/alembic/autogenerate/rewriter.pyi @@ -2,13 +2,14 @@ This type stub file was generated by pyright. """ -from typing import Callable, List, Optional, TYPE_CHECKING, Type, Union -from alembic.operations.ops import AddColumnOp, AlterColumnOp, CreateTableOp, MigrateOperation, MigrationScript, ModifyTableOps -from alembic.runtime.migration import MigrationContext -from alembic.script.revision import Revision +from typing import Any, Callable, List, TYPE_CHECKING, Tuple, Type, Union +from ..operations.ops import AddColumnOp, AlterColumnOp, CreateTableOp, MigrateOperation, MigrationScript, ModifyTableOps +from ..runtime.migration import MigrationContext +from ..script.revision import _GetRevArg if TYPE_CHECKING: ... +ProcessRevisionDirectiveFn = Callable[["MigrationContext", "_GetRevArg", List["MigrationScript"]], None] class Rewriter: """A helper object that allows easy 'rewriting' of ops streams. @@ -35,14 +36,14 @@ class Rewriter: """ _traverse = ... - _chained: Optional[Rewriter] = ... + _chained: Tuple[Union[ProcessRevisionDirectiveFn, Rewriter], ...] = ... def __init__(self) -> None: ... - def chain(self, other: Rewriter) -> Rewriter: + def chain(self, other: Union[ProcessRevisionDirectiveFn, Rewriter,]) -> Rewriter: """Produce a "chain" of this :class:`.Rewriter` to another. 
- This allows two rewriters to operate serially on a stream, + This allows two or more rewriters to operate serially on a stream, e.g.:: writer1 = autogenerate.Rewriter() @@ -71,7 +72,7 @@ class Rewriter: """ ... - def rewrites(self, operator: Union[Type[AddColumnOp], Type[MigrateOperation], Type[AlterColumnOp], Type[CreateTableOp], Type[ModifyTableOps],]) -> Callable: + def rewrites(self, operator: Union[Type[AddColumnOp], Type[MigrateOperation], Type[AlterColumnOp], Type[CreateTableOp], Type[ModifyTableOps],]) -> Callable[..., Any]: """Register a function as rewriter for a given type. The function should receive three arguments, which are @@ -86,10 +87,10 @@ class Rewriter: """ ... - def __call__(self, context: MigrationContext, revision: Revision, directives: List[MigrationScript]) -> None: + def __call__(self, context: MigrationContext, revision: _GetRevArg, directives: List[MigrationScript]) -> None: ... - def process_revision_directives(self, context: MigrationContext, revision: Revision, directives: List[MigrationScript]) -> None: + def process_revision_directives(self, context: MigrationContext, revision: _GetRevArg, directives: List[MigrationScript]) -> None: ... diff --git a/typings/alembic/command.pyi b/src/database/typings/alembic/command.pyi similarity index 89% rename from typings/alembic/command.pyi rename to src/database/typings/alembic/command.pyi index a7fa7a1d..5bf17b66 100644 --- a/typings/alembic/command.pyi +++ b/src/database/typings/alembic/command.pyi @@ -6,10 +6,11 @@ from typing import List, Optional, TYPE_CHECKING, Union from .runtime.environment import ProcessRevisionDirectiveFn from alembic.config import Config from alembic.script.base import Script +from alembic.script.revision import _RevIdType if TYPE_CHECKING: ... -def list_templates(config: Config): # -> None: +def list_templates(config: Config) -> None: """List available templates. :param config: a :class:`.Config` object. 
@@ -30,13 +31,10 @@ def init(config: Config, directory: str, template: str = ..., package: bool = .. :param package: when True, write ``__init__.py`` files into the environment location as well as the versions/ location. - .. versionadded:: 1.2 - - """ ... -def revision(config: Config, message: Optional[str] = ..., autogenerate: bool = ..., sql: bool = ..., head: str = ..., splice: bool = ..., branch_label: Optional[str] = ..., version_path: Optional[str] = ..., rev_id: Optional[str] = ..., depends_on: Optional[str] = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> Union[Optional[Script], List[Optional[Script]]]: +def revision(config: Config, message: Optional[str] = ..., autogenerate: bool = ..., sql: bool = ..., head: str = ..., splice: bool = ..., branch_label: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., rev_id: Optional[str] = ..., depends_on: Optional[str] = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ...) -> Union[Optional[Script], List[Optional[Script]]]: """Create a new revision file. :param config: a :class:`.Config` object. @@ -93,7 +91,7 @@ def check(config: Config) -> None: """ ... -def merge(config: Config, revisions: str, message: Optional[str] = ..., branch_label: Optional[str] = ..., rev_id: Optional[str] = ...) -> Optional[Script]: +def merge(config: Config, revisions: _RevIdType, message: Optional[str] = ..., branch_label: Optional[_RevIdType] = ..., rev_id: Optional[str] = ...) -> Optional[Script]: """Merge two revisions together. Creates a new migration file. :param config: a :class:`.Config` instance @@ -200,7 +198,7 @@ def current(config: Config, verbose: bool = ...) -> None: """ ... -def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = ..., purge: bool = ...) -> None: +def stamp(config: Config, revision: _RevIdType, sql: bool = ..., tag: Optional[str] = ..., purge: bool = ...) 
-> None: """'stamp' the revision table with the given revision; don't run any migrations. @@ -212,9 +210,6 @@ def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = . .. note:: this parameter is called "revisions" in the command line interface. - .. versionchanged:: 1.2 The revision may be a single revision or - list of revisions when stamping multiple branch heads. - :param sql: use ``--sql`` mode :param tag: an arbitrary "tag" that can be intercepted by custom @@ -223,8 +218,6 @@ def stamp(config: Config, revision: str, sql: bool = ..., tag: Optional[str] = . :param purge: delete all entries in the version table before stamping. - .. versionadded:: 1.2 - """ ... diff --git a/typings/alembic/config.pyi b/src/database/typings/alembic/config.pyi similarity index 92% rename from typings/alembic/config.pyi rename to src/database/typings/alembic/config.pyi index a986a131..3875e344 100644 --- a/typings/alembic/config.pyi +++ b/src/database/typings/alembic/config.pyi @@ -2,11 +2,13 @@ This type stub file was generated by pyright. """ -from configparser import ConfigParser import os -from argparse import Namespace -from typing import Any, Dict, Mapping, Optional, TextIO, Union, overload +from argparse import ArgumentParser, Namespace +from configparser import ConfigParser +from typing import Any, Dict, Mapping, Optional, Sequence, TextIO, Union, overload + from typing_extensions import TypedDict + from . 
import util class Config: @@ -20,7 +22,7 @@ class Config: some_param = context.config.get_main_option("my option") - When invoking Alembic programatically, a new + When invoking Alembic programmatically, a new :class:`.Config` can be created by passing the name of an .ini file to the constructor:: @@ -81,7 +83,7 @@ class Config: :ref:`connection_sharing` """ - def __init__(self, file_: Union[str, os.PathLike[str], None] = ..., ini_section: str = ..., output_buffer: Optional[TextIO] = ..., stdout: TextIO = ..., cmd_opts: Optional[Namespace] = ..., config_args: Mapping[str, Any] = ..., attributes: Optional[dict] = ...) -> None: + def __init__(self, file_: Union[str, os.PathLike[str], None] = ..., ini_section: str = ..., output_buffer: Optional[TextIO] = ..., stdout: TextIO = ..., cmd_opts: Optional[Namespace] = ..., config_args: Mapping[str, Any] = ..., attributes: Optional[Dict[str, Any]] = ...) -> None: """Construct a new :class:`.Config`""" ... @@ -89,7 +91,7 @@ class Config: config_file_name: Union[str, os.PathLike[str], None] = ... config_ini_section: str = ... @util.memoized_property - def attributes(self): -> dict[Any, Any]: + def attributes(self) -> Dict[str, Any]: """A Python dictionary for storage of additional state. @@ -108,7 +110,7 @@ class Config: """ ... - def print_stdout(self, text: str, *arg) -> None: + def print_stdout(self, text: str, *arg: Any) -> None: """Render a message to standard out. When :meth:`.Config.print_stdout` is called with additional args @@ -221,7 +223,7 @@ class Config: def get_main_option(self, name: str, default: Optional[str] = ...) -> Optional[str]: ... - def get_main_option(self, name, default=...): # -> str | None: + def get_main_option(self, name: str, default: Optional[str] = ...) -> Optional[str]: """Return an option from the 'main' section of the .ini file. 
This defaults to being a key from the ``[alembic]`` @@ -244,18 +246,20 @@ class MessagingOptions(TypedDict, total=False): class CommandLine: + parser: ArgumentParser + def __init__(self, prog: Optional[str] = ...) -> None: ... def run_cmd(self, config: Config, options: Namespace) -> None: ... - def main(self, argv=...): # -> None: + def main(self, argv: Optional[Sequence[str]] = ...) -> None: ... -def main(argv=..., prog=..., **kwargs): # -> None: +def main(argv: Optional[Sequence[str]] = ..., prog: Optional[str] = ..., **kwargs: Any) -> None: """The console runner function for Alembic.""" ... diff --git a/typings/alembic/context.pyi b/src/database/typings/alembic/context.pyi similarity index 87% rename from typings/alembic/context.pyi rename to src/database/typings/alembic/context.pyi index b6404878..5f74e561 100644 --- a/typings/alembic/context.pyi +++ b/src/database/typings/alembic/context.pyi @@ -2,38 +2,20 @@ This type stub file was generated by pyright. """ -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Collection, - ContextManager, - Dict, - List, - Literal, - Mapping, - MutableMapping, - Optional, - TextIO, - Tuple, - Union, - overload, -) - -from sqlalchemy import Table +from typing import Any, Callable, Collection, ContextManager, Dict, Iterable, List, Literal, Mapping, MutableMapping, Optional, Sequence, TYPE_CHECKING, TextIO, Tuple, Union, overload from sqlalchemy.engine.base import Connection from sqlalchemy.engine.url import URL -from sqlalchemy.sql.elements import ClauseElement +from sqlalchemy.sql import Executable from sqlalchemy.sql.schema import Column, FetchedValue, MetaData, SchemaItem - +from sqlalchemy.sql.type_api import TypeEngine from .autogenerate.api import AutogenContext from .config import Config -from .operations.ops import MigrateOperation +from .operations.ops import MigrationScript from .runtime.migration import MigrationContext, MigrationInfo, _ProxyTransaction from .script import ScriptDirectory -if TYPE_CHECKING: 
... - +if TYPE_CHECKING: + ... def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: """Return a context manager that will enclose an operation within a "transaction", @@ -80,102 +62,7 @@ def begin_transaction() -> Union[_ProxyTransaction, ContextManager[None]]: ... config: Config - -def configure( - connection: Optional[Connection] = ..., - url: Union[str, URL, None] = ..., - dialect_name: Optional[str] = ..., - dialect_opts: Optional[Dict[str, Any]] = ..., - transactional_ddl: Optional[bool] = ..., - transaction_per_migration: bool = ..., - output_buffer: Optional[TextIO] = ..., - starting_rev: Optional[str] = ..., - tag: Optional[str] = ..., - template_args: Optional[Dict[str, Any]] = ..., - render_as_batch: bool = ..., - # Alembic documents and supports list[MetaData] - # despite the typehint not including it in the - # library - target_metadata: Optional[MetaData | list[MetaData]] = ..., - include_name: Optional[ - Callable[ - [ - Optional[str], - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - MutableMapping[ - Literal[ - "schema_name", - "table_name", - "schema_qualified_table_name", - ], - Optional[str], - ], - ], - bool, - ] - ] = ..., - include_object: Optional[ - Callable[ - [ - Table, - str, - Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", - ], - bool, - Optional[SchemaItem], - ], - bool, - ] - ] = ..., - include_schemas: bool = ..., - process_revision_directives: Optional[ - Callable[[MigrationContext, Tuple[str, str], List[MigrateOperation]], None] - ] = ..., - compare_type: bool = ..., - compare_server_default: Union[ - bool, - Callable[ - [ - MigrationContext, - Column[Any], - Column[Any], - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], - ], - ] = ..., - render_item: Optional[ - Callable[[str, Any, AutogenContext], Union[str, Literal[False]]] - ] = ..., - literal_binds: 
bool = ..., - upgrade_token: str = ..., - downgrade_token: str = ..., - alembic_module_prefix: str = ..., - sqlalchemy_module_prefix: str = ..., - user_module_prefix: Optional[str] = ..., - on_version_apply: Optional[ - Callable[ - [MigrationContext, MigrationInfo, Collection[Any], Mapping[str, Any]], - None, - ] - ] = ..., - **kw: Any -) -> None: +def configure(connection: Optional[Connection] = ..., url: Union[str, URL, None] = ..., dialect_name: Optional[str] = ..., dialect_opts: Optional[Dict[str, Any]] = ..., transactional_ddl: Optional[bool] = ..., transaction_per_migration: bool = ..., output_buffer: Optional[TextIO] = ..., starting_rev: Optional[str] = ..., tag: Optional[str] = ..., template_args: Optional[Dict[str, Any]] = ..., render_as_batch: bool = ..., target_metadata: Union[MetaData, Sequence[MetaData], None] = ..., include_name: Optional[Callable[[Optional[str], Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",], MutableMapping[Literal["schema_name", "table_name", "schema_qualified_table_name",], Optional[str],]], bool,]] = ..., include_object: Optional[Callable[[SchemaItem, Optional[str], Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",], bool, Optional[SchemaItem]], bool,]] = ..., include_schemas: bool = ..., process_revision_directives: Optional[Callable[[MigrationContext, Union[str, Iterable[Optional[str]], Iterable[str]], List[MigrationScript]], None,]] = ..., compare_type: Union[bool, Callable[[MigrationContext, Column[Any], Column[Any], TypeEngine[Any], TypeEngine[Any]], Optional[bool],],] = ..., compare_server_default: Union[bool, Callable[[MigrationContext, Column[Any], Column[Any], Optional[str], Optional[FetchedValue], Optional[str]], Optional[bool],],] = ..., render_item: Optional[Callable[[str, Any, AutogenContext], Union[str, Literal[False]]]] = ..., literal_binds: bool = ..., upgrade_token: str = ..., downgrade_token: str = ..., 
alembic_module_prefix: str = ..., sqlalchemy_module_prefix: str = ..., user_module_prefix: Optional[str] = ..., on_version_apply: Optional[Callable[[MigrationContext, MigrationInfo, Collection[Any], Mapping[str, Any]], None,]] = ..., **kw: Any) -> None: """Configure a :class:`.MigrationContext` within this :class:`.EnvironmentContext` which will provide database connectivity and other configuration to a series of @@ -220,9 +107,6 @@ def configure( ``connection`` and ``url`` are not passed. :param dialect_opts: dictionary of options to be passed to dialect constructor. - - .. versionadded:: 1.0.12 - :param transactional_ddl: Force the usage of "transactional" DDL on or off; this otherwise defaults to whether or not the dialect in @@ -305,12 +189,16 @@ def configure( to produce candidate upgrade/downgrade operations. :param compare_type: Indicates type comparison behavior during an autogenerate - operation. Defaults to ``False`` which disables type - comparison. Set to - ``True`` to turn on default type comparison, which has varied - accuracy depending on backend. See :ref:`compare_types` + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` for an example as well as information on other type - comparison options. + comparison options. Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. .. seealso:: @@ -394,8 +282,6 @@ def configure( include_name = include_name ) - .. versionadded:: 1.5 - .. seealso:: :ref:`autogenerate_include_hooks` @@ -611,9 +497,7 @@ def configure( """ ... -def execute( - sql: Union[ClauseElement, str], execution_options: Optional[dict] = ... 
-) -> None: +def execute(sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute the given SQL using the current change context. The behavior of :meth:`.execute` is the same @@ -727,9 +611,13 @@ def get_tag_argument() -> Optional[str]: ... @overload -def get_x_argument(as_dictionary: Literal[False]) -> List[str]: ... +def get_x_argument(as_dictionary: Literal[False]) -> List[str]: + ... + @overload -def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: ... +def get_x_argument(as_dictionary: Literal[True]) -> Dict[str, str]: + ... + @overload def get_x_argument(as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: """Return the value(s) passed for the ``-x`` argument, if any. @@ -741,7 +629,11 @@ def get_x_argument(as_dictionary: bool = ...) -> Union[List[str], Dict[str, str] The return value is a list, returned directly from the ``argparse`` structure. If ``as_dictionary=True`` is passed, the ``x`` arguments are parsed using ``key=value`` format into a dictionary that is - then returned. + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. For example, to support passing a database URL on the command line, the standard ``env.py`` script can be modified like this:: @@ -785,7 +677,7 @@ def is_offline_mode() -> bool: """ ... -def is_transactional_ddl(): +def is_transactional_ddl() -> bool: """Return True if the context is configured to expect a transactional DDL capable backend. @@ -822,7 +714,6 @@ def run_migrations(**kw: Any) -> None: ... script: ScriptDirectory - def static_output(text: str) -> None: """Emit text directly to the "offline" SQL stream. @@ -833,3 +724,4 @@ def static_output(text: str) -> None: """ ... 
+ diff --git a/typings/alembic/ddl/__init__.pyi b/src/database/typings/alembic/ddl/__init__.pyi similarity index 71% rename from typings/alembic/ddl/__init__.pyi rename to src/database/typings/alembic/ddl/__init__.pyi index 6cc9d0d4..490cfb62 100644 --- a/typings/alembic/ddl/__init__.pyi +++ b/src/database/typings/alembic/ddl/__init__.pyi @@ -3,5 +3,5 @@ This type stub file was generated by pyright. """ from . import mssql, mysql, oracle, postgresql, sqlite -from .impl import DefaultImpl +from .impl import DefaultImpl as DefaultImpl diff --git a/src/database/typings/alembic/ddl/_autogen.pyi b/src/database/typings/alembic/ddl/_autogen.pyi new file mode 100644 index 00000000..a9ada48a --- /dev/null +++ b/src/database/typings/alembic/ddl/_autogen.pyi @@ -0,0 +1,162 @@ +""" +This type stub file was generated by pyright. +""" + +from typing import Any, ClassVar, Dict, Generic, Literal, NamedTuple, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union +from sqlalchemy.sql.schema import Constraint, ForeignKeyConstraint, Index, UniqueConstraint +from typing_extensions import TypeGuard +from .. import util +from ..util import sqla_compat +from alembic.autogenerate.api import AutogenContext +from alembic.ddl.impl import DefaultImpl + +if TYPE_CHECKING: + ... +CompareConstraintType = Union[Constraint, Index] +_C = TypeVar("_C", bound=CompareConstraintType) +_clsreg: Dict[str, Type[_constraint_sig]] = ... +class ComparisonResult(NamedTuple): + status: Literal["equal", "different", "skip"] + message: str + @property + def is_equal(self) -> bool: + ... + + @property + def is_different(self) -> bool: + ... + + @property + def is_skip(self) -> bool: + ... + + @classmethod + def Equal(cls) -> ComparisonResult: + """the constraints are equal.""" + ... + + @classmethod + def Different(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraints are different for the provided reason(s).""" + ... 
+ + @classmethod + def Skip(cls, reason: Union[str, Sequence[str]]) -> ComparisonResult: + """the constraint cannot be compared for the provided reason(s). + + The message is logged, but the constraints will be otherwise + considered equal, meaning that no migration command will be + generated. + """ + ... + + + +class _constraint_sig(Generic[_C]): + const: _C + _sig: Tuple[Any, ...] + name: Optional[sqla_compat._ConstraintNameDefined] + impl: DefaultImpl + _is_index: ClassVar[bool] = ... + _is_fk: ClassVar[bool] = ... + _is_uq: ClassVar[bool] = ... + _is_metadata: bool + def __init_subclass__(cls) -> None: + ... + + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: _C) -> None: + ... + + def compare_to_reflected(self, other: _constraint_sig[Any]) -> ComparisonResult: + ... + + @classmethod + def from_constraint(cls, is_metadata: bool, impl: DefaultImpl, constraint: _C) -> _constraint_sig[_C]: + ... + + def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: + ... + + @util.memoized_property + def is_named(self): # -> bool: + ... + + @util.memoized_property + def unnamed(self) -> Tuple[Any, ...]: + ... + + @util.memoized_property + def unnamed_no_options(self) -> Tuple[Any, ...]: + ... + + def __eq__(self, other) -> bool: + ... + + def __ne__(self, other) -> bool: + ... + + def __hash__(self) -> int: + ... + + + +class _uq_constraint_sig(_constraint_sig[UniqueConstraint]): + _is_uq = ... + is_unique = ... + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: UniqueConstraint) -> None: + ... + + @property + def column_names(self) -> Tuple[str, ...]: + ... + + + +class _ix_constraint_sig(_constraint_sig[Index]): + _is_index = ... + name: sqla_compat._ConstraintName + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: Index) -> None: + ... + + @util.memoized_property + def has_expressions(self): # -> bool: + ... + + @util.memoized_property + def column_names(self) -> Tuple[str, ...]: + ... 
+ + @util.memoized_property + def column_names_optional(self) -> Tuple[Optional[str], ...]: + ... + + @util.memoized_property + def is_named(self): # -> Literal[True]: + ... + + @util.memoized_property + def unnamed(self): # -> tuple[bool, *tuple[str | None, ...]]: + ... + + + +class _fk_constraint_sig(_constraint_sig[ForeignKeyConstraint]): + _is_fk = ... + def __init__(self, is_metadata: bool, impl: DefaultImpl, const: ForeignKeyConstraint) -> None: + ... + + @util.memoized_property + def unnamed_no_options(self): # -> tuple[Any, Any, tuple[Any, ...], Any, Any, tuple[Any, ...]]: + ... + + + +def is_index_sig(sig: _constraint_sig) -> TypeGuard[_ix_constraint_sig]: + ... + +def is_uq_sig(sig: _constraint_sig) -> TypeGuard[_uq_constraint_sig]: + ... + +def is_fk_sig(sig: _constraint_sig) -> TypeGuard[_fk_constraint_sig]: + ... + diff --git a/typings/alembic/ddl/base.pyi b/src/database/typings/alembic/ddl/base.pyi similarity index 100% rename from typings/alembic/ddl/base.pyi rename to src/database/typings/alembic/ddl/base.pyi diff --git a/typings/alembic/ddl/impl.pyi b/src/database/typings/alembic/ddl/impl.pyi similarity index 81% rename from typings/alembic/ddl/impl.pyi rename to src/database/typings/alembic/ddl/impl.pyi index fc2fca8d..7579c7ec 100644 --- a/typings/alembic/ddl/impl.pyi +++ b/src/database/typings/alembic/ddl/impl.pyi @@ -2,9 +2,11 @@ This type stub file was generated by pyright. 
""" -from typing import Any, Dict, List, Literal, Optional, Sequence, Set, TYPE_CHECKING, TextIO, Tuple, Type, Union +from typing import Any, Dict, List, Literal, NamedTuple, Optional, Sequence, Set, TYPE_CHECKING, TextIO, Tuple, Type, Union +from ._autogen import ComparisonResult as ComparisonResult from sqlalchemy.engine import Connection, Dialect -from sqlalchemy.sql.elements import ClauseElement, ColumnElement, quoted_name +from sqlalchemy.sql import ClauseElement, Executable +from sqlalchemy.sql.elements import ColumnElement, quoted_name from sqlalchemy.sql.schema import Column, Constraint, ForeignKeyConstraint, Index, Table, UniqueConstraint from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine @@ -14,6 +16,7 @@ from ..operations.batch import ApplyBatchImpl, BatchOperationsImpl if TYPE_CHECKING: ... +log = ... class ImplMeta(type): def __init__(cls, classname: str, bases: Tuple[Type[DefaultImpl]], dict_: Dict[str, Any]) -> None: ... @@ -21,7 +24,6 @@ class ImplMeta(type): _impls: Dict[str, Type[DefaultImpl]] = ... -Params = ... class DefaultImpl(metaclass=ImplMeta): """Provide the entrypoint for major migration operations, including database-specific behavioral variances. @@ -75,7 +77,7 @@ class DefaultImpl(metaclass=ImplMeta): def bind(self) -> Optional[Connection]: ... - def execute(self, sql: Union[ClauseElement, str], execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sql: Union[Executable, str], execution_options: Optional[dict[str, Any]] = ...) -> None: ... 
def alter_column(self, table_name: str, column_name: str, nullable: Optional[bool] = ..., server_default: Union[_ServerDefault, Literal[False]] = ..., name: Optional[str] = ..., type_: Optional[TypeEngine] = ..., schema: Optional[str] = ..., autoincrement: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., existing_comment: Optional[str] = ..., existing_type: Optional[TypeEngine] = ..., existing_server_default: Optional[_ServerDefault] = ..., existing_nullable: Optional[bool] = ..., existing_autoincrement: Optional[bool] = ..., **kw: Any) -> None: @@ -102,7 +104,7 @@ class DefaultImpl(metaclass=ImplMeta): def drop_table(self, table: Table) -> None: ... - def create_index(self, index: Index) -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def create_table_comment(self, table: Table) -> None: @@ -114,7 +116,7 @@ class DefaultImpl(metaclass=ImplMeta): def create_column_comment(self, column: ColumnElement[Any]) -> None: ... - def drop_index(self, index: Index) -> None: + def drop_index(self, index: Index, **kw: Any) -> None: ... def bulk_insert(self, table: Union[TableClause, Table], rows: List[dict], multiinsert: bool = ...) -> None: @@ -140,8 +142,6 @@ class DefaultImpl(metaclass=ImplMeta): """Render a SQL expression that is typically a server default, index expression, etc. - .. versionadded:: 1.0.11 - """ ... @@ -190,8 +190,34 @@ class DefaultImpl(metaclass=ImplMeta): def render_type(self, type_obj: TypeEngine, autogen_context: AutogenContext) -> Union[str, Literal[False]]: ... - def create_index_sig(self, index: Index) -> Tuple[Any, ...]: + def compare_indexes(self, metadata_index: Index, reflected_index: Index) -> ComparisonResult: + """Compare two indexes by comparing the signature generated by + ``create_index_sig``. + + This method returns a ``ComparisonResult``. + """ ... 
+ def compare_unique_constraint(self, metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) -> ComparisonResult: + """Compare two unique constraints by comparing the two signatures. + + The arguments are two tuples that contain the unique constraint and + the signatures generated by ``create_unique_constraint_sig``. + + This method returns a ``ComparisonResult``. + """ + ... + + def adjust_reflected_dialect_options(self, reflected_object: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... + + + +class Params(NamedTuple): + token0: str + tokens: List[str] + args: List[str] + kwargs: Dict[str, str] + ... diff --git a/typings/alembic/ddl/mssql.pyi b/src/database/typings/alembic/ddl/mssql.pyi similarity index 90% rename from typings/alembic/ddl/mssql.pyi rename to src/database/typings/alembic/ddl/mssql.pyi index cd43ddfa..76c040bb 100644 --- a/typings/alembic/ddl/mssql.pyi +++ b/src/database/typings/alembic/ddl/mssql.pyi @@ -2,13 +2,13 @@ This type stub file was generated by pyright. 
""" -from typing import Any, List, Literal, Optional, TYPE_CHECKING, Union -from sqlalchemy.ext.compiler import compiles +from typing import Any, Dict, List, Literal, Optional, TYPE_CHECKING, Union from sqlalchemy.schema import Column from sqlalchemy.sql.base import Executable from sqlalchemy.sql.elements import ClauseElement from .base import AddColumn, ColumnDefault, ColumnName, ColumnNullable, ColumnType, RenameTable, _ServerDefault from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.dialects.mssql.base import MSDDLCompiler from sqlalchemy.sql.schema import Index, Table from sqlalchemy.sql.selectable import TableClause @@ -34,7 +34,7 @@ class MSSQLImpl(DefaultImpl): def alter_column(self, table_name: str, column_name: str, nullable: Optional[bool] = ..., server_default: Optional[Union[_ServerDefault, Literal[False]]] = ..., name: Optional[str] = ..., type_: Optional[TypeEngine] = ..., schema: Optional[str] = ..., existing_type: Optional[TypeEngine] = ..., existing_server_default: Optional[_ServerDefault] = ..., existing_nullable: Optional[bool] = ..., **kw: Any) -> None: ... - def create_index(self, index: Index) -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def bulk_insert(self, table: Union[TableClause, Table], rows: List[dict], **kw: Any) -> None: @@ -46,6 +46,9 @@ class MSSQLImpl(DefaultImpl): def compare_server_default(self, inspector_column, metadata_column, rendered_metadata_default, rendered_inspector_default): # -> bool: ... + def adjust_reflected_dialect_options(self, reflected_object: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... 
+ class _ExecDropConstraint(Executable, ClauseElement): diff --git a/typings/alembic/ddl/mysql.pyi b/src/database/typings/alembic/ddl/mysql.pyi similarity index 100% rename from typings/alembic/ddl/mysql.pyi rename to src/database/typings/alembic/ddl/mysql.pyi diff --git a/typings/alembic/ddl/oracle.pyi b/src/database/typings/alembic/ddl/oracle.pyi similarity index 97% rename from typings/alembic/ddl/oracle.pyi rename to src/database/typings/alembic/ddl/oracle.pyi index 67f54112..d29e2b5b 100644 --- a/typings/alembic/ddl/oracle.pyi +++ b/src/database/typings/alembic/ddl/oracle.pyi @@ -3,9 +3,9 @@ This type stub file was generated by pyright. """ from typing import Any, TYPE_CHECKING -from sqlalchemy.ext.compiler import compiles from .base import AddColumn, ColumnComment, ColumnDefault, ColumnName, ColumnNullable, ColumnType, IdentityColumnDefault, RenameTable from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.dialects.oracle.base import OracleDDLCompiler from sqlalchemy.sql.schema import Column diff --git a/typings/alembic/ddl/postgresql.pyi b/src/database/typings/alembic/ddl/postgresql.pyi similarity index 79% rename from typings/alembic/ddl/postgresql.pyi rename to src/database/typings/alembic/ddl/postgresql.pyi index cc0b228d..99b93987 100644 --- a/typings/alembic/ddl/postgresql.pyi +++ b/src/database/typings/alembic/ddl/postgresql.pyi @@ -2,15 +2,16 @@ This type stub file was generated by pyright. 
""" -from typing import Any, Literal, Optional, Sequence, TYPE_CHECKING, Tuple, Union -from sqlalchemy import Index +from typing import Any, Dict, Literal, Optional, Sequence, TYPE_CHECKING, Tuple, Union +from sqlalchemy import Index, UniqueConstraint from sqlalchemy.dialects.postgresql import ExcludeConstraint -from sqlalchemy.sql.elements import BinaryExpression, ColumnClause, quoted_name -from .base import AlterColumn, ColumnComment, IdentityColumnDefault, RenameTable, _ServerDefault, compiles -from .impl import DefaultImpl +from sqlalchemy.sql.elements import ClauseElement, ColumnClause, ColumnElement, quoted_name +from .base import AlterColumn, ColumnComment, IdentityColumnDefault, RenameTable, _ServerDefault +from .impl import ComparisonResult, DefaultImpl from ..operations import ops from ..operations.base import BatchOperations, Operations from ..util import sqla_compat +from ..util.sqla_compat import compiles from sqlalchemy.dialects.postgresql.base import PGDDLCompiler from sqlalchemy.sql.schema import Table from sqlalchemy.sql.type_api import TypeEngine @@ -24,8 +25,7 @@ class PostgresqlImpl(DefaultImpl): __dialect__ = ... transactional_ddl = ... type_synonyms = ... - identity_attrs_ignore = ... - def create_index(self, index): # -> None: + def create_index(self, index: Index, **kw: Any) -> None: ... def prep_table_for_batch(self, batch_impl, table): # -> None: @@ -43,7 +43,21 @@ class PostgresqlImpl(DefaultImpl): def correct_for_autogen_constraints(self, conn_unique_constraints, conn_indexes, metadata_unique_constraints, metadata_indexes): # -> None: ... - def create_index_sig(self, index: Index) -> Tuple[Any, ...]: + _default_modifiers_re = ... + def compare_indexes(self, metadata_index: Index, reflected_index: Index) -> ComparisonResult: + ... + + def compare_unique_constraint(self, metadata_constraint: UniqueConstraint, reflected_constraint: UniqueConstraint) -> ComparisonResult: + ... 
+ + def adjust_reflected_dialect_options(self, reflected_options: Dict[str, Any], kind: str) -> Dict[str, Any]: + ... + + def render_ddl_sql_expr(self, expr: ClauseElement, is_server_default: bool = ..., is_index: bool = ..., **kw: Any) -> str: + """Render a SQL expression that is typically a server default, + index expression, etc. + + """ ... def render_type(self, type_: TypeEngine, autogen_context: AutogenContext) -> Union[str, Literal[False]]: @@ -79,7 +93,7 @@ def visit_identity_column(element: IdentityColumnDefault, compiler: PGDDLCompile class CreateExcludeConstraintOp(ops.AddConstraintOp): """Represent a create exclude constraint operation.""" constraint_type = ... - def __init__(self, constraint_name: sqla_compat._ConstraintName, table_name: Union[str, quoted_name], elements: Union[Sequence[Tuple[str, str]], Sequence[Tuple[ColumnClause[Any], str]],], where: Optional[Union[BinaryExpression, str]] = ..., schema: Optional[str] = ..., _orig_constraint: Optional[ExcludeConstraint] = ..., **kw) -> None: + def __init__(self, constraint_name: sqla_compat._ConstraintName, table_name: Union[str, quoted_name], elements: Union[Sequence[Tuple[str, str]], Sequence[Tuple[ColumnClause[Any], str]],], where: Optional[Union[ColumnElement[bool], str]] = ..., schema: Optional[str] = ..., _orig_constraint: Optional[ExcludeConstraint] = ..., **kw) -> None: ... @classmethod @@ -128,7 +142,7 @@ class CreateExcludeConstraintOp(ops.AddConstraintOp): ... @classmethod - def batch_create_exclude_constraint(cls, operations: BatchOperations, constraint_name: str, *elements: Any, **kw: Any): # -> Any: + def batch_create_exclude_constraint(cls, operations: BatchOperations, constraint_name: str, *elements: Any, **kw: Any) -> Optional[Table]: """Issue a "create exclude constraint" instruction using the current batch migration context. 
diff --git a/typings/alembic/ddl/sqlite.pyi b/src/database/typings/alembic/ddl/sqlite.pyi similarity index 97% rename from typings/alembic/ddl/sqlite.pyi rename to src/database/typings/alembic/ddl/sqlite.pyi index 039835a2..1787b38b 100644 --- a/typings/alembic/ddl/sqlite.pyi +++ b/src/database/typings/alembic/ddl/sqlite.pyi @@ -3,9 +3,9 @@ This type stub file was generated by pyright. """ from typing import Any, Dict, Optional, TYPE_CHECKING, Union -from sqlalchemy.ext.compiler import compiles from .base import RenameTable from .impl import DefaultImpl +from ..util.sqla_compat import compiles from sqlalchemy.engine.reflection import Inspector from sqlalchemy.sql.compiler import DDLCompiler from sqlalchemy.sql.elements import Cast, ClauseElement diff --git a/typings/alembic/environment.pyi b/src/database/typings/alembic/environment.pyi similarity index 100% rename from typings/alembic/environment.pyi rename to src/database/typings/alembic/environment.pyi diff --git a/typings/alembic/migration.pyi b/src/database/typings/alembic/migration.pyi similarity index 100% rename from typings/alembic/migration.pyi rename to src/database/typings/alembic/migration.pyi diff --git a/typings/alembic/op.pyi b/src/database/typings/alembic/op.pyi similarity index 92% rename from typings/alembic/op.pyi rename to src/database/typings/alembic/op.pyi index 835d2a9d..75783e36 100644 --- a/typings/alembic/op.pyi +++ b/src/database/typings/alembic/op.pyi @@ -3,20 +3,23 @@ This type stub file was generated by pyright. 
""" from contextlib import contextmanager -from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union -from sqlalchemy.sql.expression import TableClause, Update +from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union, overload from sqlalchemy.engine import Connection -from sqlalchemy.sql.elements import BinaryExpression, TextClause, conv +from sqlalchemy.sql import Executable +from sqlalchemy.sql.elements import ColumnElement, TextClause, conv +from sqlalchemy.sql.expression import TableClause from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column, Computed, Identity, SchemaItem, Table from sqlalchemy.sql.type_api import TypeEngine -from .operations.ops import BatchOperations, MigrateOperation +from .operations.base import BatchOperations +from .operations.ops import AddColumnOp, AddConstraintOp, AlterColumnOp, AlterTableOp, BulkInsertOp, CreateIndexOp, CreateTableCommentOp, CreateTableOp, DropColumnOp, DropConstraintOp, DropIndexOp, DropTableCommentOp, DropTableOp, ExecuteSQLOp, MigrateOperation from .runtime.migration import MigrationContext from .util.sqla_compat import _literal_bindparam if TYPE_CHECKING: ... _T = TypeVar("_T") +_C = TypeVar("_C", bound=Callable[..., Any]) def add_column(table_name: str, column: Column[Any], *, schema: Optional[str] = ...) -> None: """Issue an "add column" instruction using the current migration context. @@ -97,7 +100,7 @@ def add_column(table_name: str, column: Column[Any], *, schema: Optional[str] = """ ... 
-def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: +def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -136,9 +139,6 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] Set to ``None`` to have the default removed. :param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -155,7 +155,7 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. 
:class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -172,9 +172,6 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -189,7 +186,7 @@ def alter_column(table_name: str, column_name: str, *, nullable: Optional[bool] ... @contextmanager -def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[tuple] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: +def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[Tuple[Any, ...]] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: """Invoke a series of per-table migrations in batch. Batch mode allows a series of operations specific to a table @@ -319,8 +316,6 @@ def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Li set is undefined. Therefore it is best to specify the complete ordering of all columns for best results. - .. versionadded:: 1.4.0 - .. note:: batch mode requires SQLAlchemy 0.8 or above. .. 
seealso:: @@ -330,7 +325,7 @@ def batch_alter_table(table_name: str, schema: Optional[str] = ..., recreate: Li """ ... -def bulk_insert(table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: +def bulk_insert(table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: """Issue a "bulk insert" operation using the current migration context. @@ -428,7 +423,7 @@ def bulk_insert(table: Union[Table, TableClause], rows: List[dict], *, multiinse """ ... -def create_check_constraint(constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: +def create_check_constraint(constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -560,7 +555,7 @@ def create_foreign_key(constraint_name: Optional[str], source_table: str, refere """ ... -def create_index(index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: +def create_index(index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -588,20 +583,24 @@ def create_index(index_name: Optional[str], table_name: str, columns: Sequence[U :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. 
When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -726,8 +725,6 @@ def create_table(table_name: str, *columns: SchemaItem, **kw: Any) -> Table: def create_table_comment(table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -839,7 +836,7 @@ def drop_constraint(constraint_name: str, table_name: str, type_: Optional[str] """ ... 
-def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: +def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -854,11 +851,17 @@ def drop_index(index_name: str, table_name: Optional[str] = ..., *, schema: Opti quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -887,8 +890,6 @@ def drop_table_comment(table_name: str, *, existing_comment: Optional[str] = ... """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param existing_comment: An optional string value of a comment already registered on the specified table. @@ -902,7 +903,7 @@ def drop_table_comment(table_name: str, *, existing_comment: Optional[str] = ... """ ... -def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: +def execute(sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. 
The given SQL can be a plain string, e.g.:: @@ -955,7 +956,7 @@ def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optio ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -968,9 +969,8 @@ def execute(sqltext: Union[str, TextClause, Update], *, execution_options: Optio * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1043,7 +1043,7 @@ def get_context() -> MigrationContext: """ ... -def implementation_for(op_cls: Any) -> Callable[..., Any]: +def implementation_for(op_cls: Any) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. This is part of the operation extensibility API. @@ -1055,7 +1055,7 @@ def implementation_for(op_cls: Any) -> Callable[..., Any]: """ ... -def inline_literal(value: Union[str, int], type_: Optional[TypeEngine] = ...) -> _literal_bindparam: +def inline_literal(value: Union[str, int], type_: Optional[TypeEngine[Any]] = ...) -> _literal_bindparam: r"""Produce an 'inline literal' expression, suitable for using in an INSERT, UPDATE, or DELETE statement. @@ -1099,6 +1099,15 @@ def inline_literal(value: Union[str, int], type_: Optional[TypeEngine] = ...) -> """ ... +@overload +def invoke(operation: CreateTableOp) -> Table: + ... 
+ +@overload +def invoke(operation: Union[AddConstraintOp, DropConstraintOp, CreateIndexOp, DropIndexOp, AddColumnOp, AlterColumnOp, AlterTableOp, CreateTableCommentOp, DropTableCommentOp, DropColumnOp, BulkInsertOp, DropTableOp, ExecuteSQLOp,]) -> None: + ... + +@overload def invoke(operation: MigrateOperation) -> Any: """Given a :class:`.MigrateOperation`, invoke it in terms of this :class:`.Operations` instance. @@ -1106,7 +1115,7 @@ def invoke(operation: MigrateOperation) -> Any: """ ... -def register_operation(name: str, sourcename: Optional[str] = ...) -> Callable[..., Any]: +def register_operation(name: str, sourcename: Optional[str] = ...) -> Callable[[Type[_T]], Type[_T]]: """Register a new operation for this class. This method is normally used to add new operations diff --git a/typings/alembic/operations/__init__.pyi b/src/database/typings/alembic/operations/__init__.pyi similarity index 69% rename from typings/alembic/operations/__init__.pyi rename to src/database/typings/alembic/operations/__init__.pyi index 83bb8ce7..5399e0ba 100644 --- a/typings/alembic/operations/__init__.pyi +++ b/src/database/typings/alembic/operations/__init__.pyi @@ -4,6 +4,6 @@ This type stub file was generated by pyright. from . 
import toimpl from .base import AbstractOperations, BatchOperations, Operations -from .ops import MigrateOperation +from .ops import MigrateOperation, MigrationScript -__all__ = ["AbstractOperations", "Operations", "BatchOperations", "MigrateOperation"] +__all__ = ["AbstractOperations", "Operations", "BatchOperations", "MigrateOperation", "MigrationScript"] diff --git a/typings/alembic/operations/base.pyi b/src/database/typings/alembic/operations/base.pyi similarity index 93% rename from typings/alembic/operations/base.pyi rename to src/database/typings/alembic/operations/base.pyi index 91c28397..406004ed 100644 --- a/typings/alembic/operations/base.pyi +++ b/src/database/typings/alembic/operations/base.pyi @@ -3,18 +3,19 @@ This type stub file was generated by pyright. """ from contextlib import contextmanager -from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union +from typing import Any, Awaitable, Callable, Dict, Iterator, List, Literal, Mapping, Optional, Sequence, TYPE_CHECKING, Tuple, Type, TypeVar, Union, overload from sqlalchemy.sql.elements import conv from .. 
import util from ..util.sqla_compat import _literal_bindparam from sqlalchemy import Table from sqlalchemy.engine import Connection -from sqlalchemy.sql.expression import BinaryExpression, TableClause, TextClause, Update +from sqlalchemy.sql import Executable +from sqlalchemy.sql.expression import ColumnElement, TableClause, TextClause from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import Column, Computed, Identity, SchemaItem from sqlalchemy.types import TypeEngine from .batch import BatchOperationsImpl -from .ops import MigrateOperation +from .ops import AddColumnOp, AddConstraintOp, AlterColumnOp, AlterTableOp, BulkInsertOp, CreateIndexOp, CreateTableCommentOp, CreateTableOp, DropColumnOp, DropConstraintOp, DropIndexOp, DropTableCommentOp, DropTableOp, ExecuteSQLOp, MigrateOperation from ..ddl import DefaultImpl from ..runtime.migration import MigrationContext @@ -22,6 +23,7 @@ if TYPE_CHECKING: ... __all__ = ("Operations", "BatchOperations") _T = TypeVar("_T") +_C = TypeVar("_C", bound=Callable[..., Any]) class AbstractOperations(util.ModuleClsProxy): """Base class for Operations and BatchOperations. @@ -40,7 +42,7 @@ class AbstractOperations(util.ModuleClsProxy): ... @classmethod - def register_operation(cls, name: str, sourcename: Optional[str] = ...) -> Callable[..., Any]: + def register_operation(cls, name: str, sourcename: Optional[str] = ...) -> Callable[[Type[_T]], Type[_T]]: """Register a new operation for this class. This method is normally used to add new operations @@ -59,7 +61,7 @@ class AbstractOperations(util.ModuleClsProxy): ... @classmethod - def implementation_for(cls, op_cls: Any) -> Callable[..., Any]: + def implementation_for(cls, op_cls: Any) -> Callable[[_C], _C]: """Register an implementation for a given :class:`.MigrateOperation`. This is part of the operation extensibility API. @@ -77,7 +79,7 @@ class AbstractOperations(util.ModuleClsProxy): ... 
@contextmanager - def batch_alter_table(self, table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[tuple] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: + def batch_alter_table(self, table_name: str, schema: Optional[str] = ..., recreate: Literal["auto", "always", "never"] = ..., partial_reordering: Optional[Tuple[Any, ...]] = ..., copy_from: Optional[Table] = ..., table_args: Tuple[Any, ...] = ..., table_kwargs: Mapping[str, Any] = ..., reflect_args: Tuple[Any, ...] = ..., reflect_kwargs: Mapping[str, Any] = ..., naming_convention: Optional[Dict[str, str]] = ...) -> Iterator[BatchOperations]: """Invoke a series of per-table migrations in batch. Batch mode allows a series of operations specific to a table @@ -207,8 +209,6 @@ class AbstractOperations(util.ModuleClsProxy): set is undefined. Therefore it is best to specify the complete ordering of all columns for best results. - .. versionadded:: 1.4.0 - .. note:: batch mode requires SQLAlchemy 0.8 or above. .. seealso:: @@ -225,6 +225,18 @@ class AbstractOperations(util.ModuleClsProxy): """ ... + @overload + def invoke(self, operation: CreateTableOp) -> Table: + ... + + @overload + def invoke(self, operation: Union[AddConstraintOp, DropConstraintOp, CreateIndexOp, DropIndexOp, AddColumnOp, AlterColumnOp, AlterTableOp, CreateTableCommentOp, DropTableCommentOp, DropColumnOp, BulkInsertOp, DropTableOp, ExecuteSQLOp,]) -> None: + ... + + @overload + def invoke(self, operation: MigrateOperation) -> Any: + ... + def invoke(self, operation: MigrateOperation) -> Any: """Given a :class:`.MigrateOperation`, invoke it in terms of this :class:`.Operations` instance. @@ -462,7 +474,7 @@ class Operations(AbstractOperations): """ ... 
- def alter_column(self, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: + def alter_column(self, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -501,9 +513,6 @@ class Operations(AbstractOperations): Set to ``None`` to have the default removed. :param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -520,7 +529,7 @@ class Operations(AbstractOperations): don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. 
:class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -537,9 +546,6 @@ class Operations(AbstractOperations): :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -553,7 +559,7 @@ class Operations(AbstractOperations): """ ... - def bulk_insert(self, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def bulk_insert(self, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: """Issue a "bulk insert" operation using the current migration context. @@ -651,7 +657,7 @@ class Operations(AbstractOperations): """ ... - def create_check_constraint(self, constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: + def create_check_constraint(self, constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -783,7 +789,7 @@ class Operations(AbstractOperations): """ ... 
- def create_index(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def create_index(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -811,20 +817,24 @@ class Operations(AbstractOperations): :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. 
+ See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -949,8 +959,6 @@ class Operations(AbstractOperations): def create_table_comment(self, table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -1062,7 +1070,7 @@ class Operations(AbstractOperations): """ ... - def drop_index(self, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: + def drop_index(self, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -1077,11 +1085,17 @@ class Operations(AbstractOperations): quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -1110,8 +1124,6 @@ class Operations(AbstractOperations): """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. 
:param existing_comment: An optional string value of a comment already registered on the specified table. @@ -1125,7 +1137,7 @@ class Operations(AbstractOperations): """ ... - def execute(self, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. The given SQL can be a plain string, e.g.:: @@ -1178,7 +1190,7 @@ class Operations(AbstractOperations): ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -1191,9 +1203,8 @@ class Operations(AbstractOperations): * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1253,7 +1264,7 @@ class BatchOperations(AbstractOperations): """ ... 
- def alter_column(self, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_type: Union[TypeEngine, Type[TypeEngine], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: + def alter_column(self, column_name: str, *, nullable: Optional[bool] = ..., comment: Union[str, Literal[False], None] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_type: Union[TypeEngine[Any], Type[TypeEngine[Any]], None] = ..., existing_server_default: Union[str, bool, Identity, Computed, None] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: """Issue an "alter column" instruction using the current batch migration context. @@ -1263,8 +1274,6 @@ class BatchOperations(AbstractOperations): :param insert_before: String name of an existing column which this column should be placed before, when creating the new table. - .. versionadded:: 1.4.0 - :param insert_after: String name of an existing column which this column should be placed after, when creating the new table. If both :paramref:`.BatchOperations.alter_column.insert_before` @@ -1272,8 +1281,6 @@ class BatchOperations(AbstractOperations): omitted, the column is inserted after the last existing column in the table. - .. versionadded:: 1.4.0 - .. seealso:: :meth:`.Operations.alter_column` @@ -1282,7 +1289,7 @@ class BatchOperations(AbstractOperations): """ ... 
- def create_check_constraint(self, constraint_name: str, condition: Union[str, BinaryExpression, TextClause], **kw: Any) -> None: + def create_check_constraint(self, constraint_name: str, condition: Union[str, ColumnElement[bool], TextClause], **kw: Any) -> None: """Issue a "create check constraint" instruction using the current batch migration context. @@ -1296,7 +1303,7 @@ class BatchOperations(AbstractOperations): """ ... - def create_exclude_constraint(self, constraint_name: str, *elements: Any, **kw: Any): # -> None: + def create_exclude_constraint(self, constraint_name: str, *elements: Any, **kw: Any) -> Optional[Table]: """Issue a "create exclude constraint" instruction using the current batch migration context. @@ -1363,8 +1370,6 @@ class BatchOperations(AbstractOperations): """Emit a COMMENT ON operation to set the comment for a table using the current batch migration context. - .. versionadded:: 1.6.0 - :param comment: string value of the comment being registered against the specified table. :param existing_comment: String value of a comment @@ -1430,15 +1435,13 @@ class BatchOperations(AbstractOperations): remove an existing comment set on a table using the current batch operations context. - .. versionadded:: 1.6.0 - :param existing_comment: An optional string value of a comment already registered on the specified table. """ ... - def execute(self, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: """Execute the given SQL using the current migration context. .. 
seealso:: diff --git a/typings/alembic/operations/batch.pyi b/src/database/typings/alembic/operations/batch.pyi similarity index 96% rename from typings/alembic/operations/batch.pyi rename to src/database/typings/alembic/operations/batch.pyi index 66d2e080..a8059672 100644 --- a/typings/alembic/operations/batch.pyi +++ b/src/database/typings/alembic/operations/batch.pyi @@ -4,10 +4,10 @@ This type stub file was generated by pyright. from typing import Any, Dict, Literal, Optional, TYPE_CHECKING, Union from sqlalchemy import Column, Index, Table +from sqlalchemy.sql.schema import Constraint from sqlalchemy.engine import Dialect from sqlalchemy.sql.elements import ColumnClause from sqlalchemy.sql.functions import Function -from sqlalchemy.sql.schema import Constraint from sqlalchemy.sql.type_api import TypeEngine from ..ddl.impl import DefaultImpl @@ -46,10 +46,10 @@ class BatchOperationsImpl: def rename_table(self, *arg, **kw): # -> None: ... - def create_index(self, idx: Index) -> None: + def create_index(self, idx: Index, **kw: Any) -> None: ... - def drop_index(self, idx: Index) -> None: + def drop_index(self, idx: Index, **kw: Any) -> None: ... def create_table_comment(self, table): # -> None: diff --git a/typings/alembic/operations/ops.pyi b/src/database/typings/alembic/operations/ops.pyi similarity index 92% rename from typings/alembic/operations/ops.pyi rename to src/database/typings/alembic/operations/ops.pyi index 97e57266..746e3bec 100644 --- a/typings/alembic/operations/ops.pyi +++ b/src/database/typings/alembic/operations/ops.pyi @@ -3,21 +3,24 @@ This type stub file was generated by pyright. 
""" from abc import abstractmethod -from typing import Any, Callable, FrozenSet, List, Literal, MutableMapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Type, Union +from typing import Any, Callable, Dict, FrozenSet, List, Literal, MutableMapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Type, TypeVar, Union from .base import BatchOperations, Operations from .. import util from ..util import sqla_compat -from sqlalchemy.sql.dml import Insert, Update -from sqlalchemy.sql.elements import BinaryExpression, ColumnElement, TextClause, conv, quoted_name +from sqlalchemy.sql import Executable +from sqlalchemy.sql.elements import ColumnElement, TextClause, conv, quoted_name from sqlalchemy.sql.functions import Function from sqlalchemy.sql.schema import CheckConstraint, Column, Computed, Constraint, ForeignKeyConstraint, Identity, Index, MetaData, PrimaryKeyConstraint, SchemaItem, Table, UniqueConstraint from sqlalchemy.sql.selectable import TableClause from sqlalchemy.sql.type_api import TypeEngine from ..autogenerate.rewriter import Rewriter from ..runtime.migration import MigrationContext +from ..script.revision import _RevIdType if TYPE_CHECKING: ... +_T = TypeVar("_T", bound=Any) +_AC = TypeVar("_AC", bound="AddConstraintOp") class MigrateOperation: """base class for migration command and organization objects. @@ -33,7 +36,7 @@ class MigrateOperation: """ @util.memoized_property - def info(self): # -> dict[Unknown, Unknown]: + def info(self) -> Dict[Any, Any]: """A dictionary that may be used to store arbitrary information along with this :class:`.MigrateOperation` object. @@ -53,11 +56,11 @@ class AddConstraintOp(MigrateOperation): """Represent an add constraint operation.""" add_constraint_ops = ... @property - def constraint_type(self): + def constraint_type(self) -> str: ... @classmethod - def register_add_constraint(cls, type_: str) -> Callable: + def register_add_constraint(cls, type_: str) -> Callable[[Type[_AC]], Type[_AC]]: ... 
@classmethod @@ -392,7 +395,7 @@ class CreateCheckConstraintOp(AddConstraintOp): ... @classmethod - def create_check_constraint(cls, operations: Operations, constraint_name: Optional[str], table_name: str, condition: Union[str, BinaryExpression, TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: + def create_check_constraint(cls, operations: Operations, constraint_name: Optional[str], table_name: str, condition: Union[str, ColumnElement[bool], TextClause], *, schema: Optional[str] = ..., **kw: Any) -> None: """Issue a "create check constraint" instruction using the current migration context. @@ -436,7 +439,7 @@ class CreateCheckConstraintOp(AddConstraintOp): ... @classmethod - def batch_create_check_constraint(cls, operations: BatchOperations, constraint_name: str, condition: Union[str, BinaryExpression, TextClause], **kw: Any) -> None: + def batch_create_check_constraint(cls, operations: BatchOperations, constraint_name: str, condition: Union[str, ColumnElement[bool], TextClause], **kw: Any) -> None: """Issue a "create check constraint" instruction using the current batch migration context. @@ -456,7 +459,7 @@ class CreateCheckConstraintOp(AddConstraintOp): @BatchOperations.register_operation("create_index", "batch_create_index") class CreateIndexOp(MigrateOperation): """Represent a create index operation.""" - def __init__(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def __init__(self, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, ColumnElement[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: ... def reverse(self) -> DropIndexOp: @@ -473,7 +476,7 @@ class CreateIndexOp(MigrateOperation): ... 
@classmethod - def create_index(cls, operations: Operations, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., **kw: Any) -> None: + def create_index(cls, operations: Operations, index_name: Optional[str], table_name: str, columns: Sequence[Union[str, TextClause, Function[Any]]], *, schema: Optional[str] = ..., unique: bool = ..., if_not_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "create index" instruction using the current migration context. @@ -501,20 +504,24 @@ class CreateIndexOp(MigrateOperation): :class:`~sqlalchemy.sql.elements.quoted_name`. :param unique: If True, create a unique index. - :param quote: - Force quoting of this column's name on or off, corresponding - to ``True`` or ``False``. When left at its default - of ``None``, the column identifier will be quoted according to - whether the name is case sensitive (identifiers with at least one - upper case character are treated as case sensitive), or if it's a - reserved word. This flag is only needed to force quoting of a - reserved word which is not known by the SQLAlchemy dialect. + :param quote: Force quoting of this column's name on or off, + corresponding to ``True`` or ``False``. When left at its default + of ``None``, the column identifier will be quoted according to + whether the name is case sensitive (identifiers with at least one + upper case character are treated as case sensitive), or if it's a + reserved word. This flag is only needed to force quoting of a + reserved word which is not known by the SQLAlchemy dialect. + + :param if_not_exists: If True, adds IF NOT EXISTS operator when + creating the new index. + + .. versionadded:: 1.12.0 :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. 
- See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. + dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -537,7 +544,7 @@ class CreateIndexOp(MigrateOperation): @BatchOperations.register_operation("drop_index", "batch_drop_index") class DropIndexOp(MigrateOperation): """Represent a drop index operation.""" - def __init__(self, index_name: Union[quoted_name, str, conv], table_name: Optional[str] = ..., *, schema: Optional[str] = ..., _reverse: Optional[CreateIndexOp] = ..., **kw: Any) -> None: + def __init__(self, index_name: Union[quoted_name, str, conv], table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., _reverse: Optional[CreateIndexOp] = ..., **kw: Any) -> None: ... def to_diff_tuple(self) -> Tuple[str, Index]: @@ -554,7 +561,7 @@ class DropIndexOp(MigrateOperation): ... @classmethod - def drop_index(cls, operations: Operations, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., **kw: Any) -> None: + def drop_index(cls, operations: Operations, index_name: str, table_name: Optional[str] = ..., *, schema: Optional[str] = ..., if_exists: Optional[bool] = ..., **kw: Any) -> None: r"""Issue a "drop index" instruction using the current migration context. @@ -569,11 +576,17 @@ class DropIndexOp(MigrateOperation): quoting of the schema outside of the default behavior, use the SQLAlchemy construct :class:`~sqlalchemy.sql.elements.quoted_name`. + + :param if_exists: If True, adds IF EXISTS operator when + dropping the index. + + .. versionadded:: 1.12.0 + :param \**kw: Additional keyword arguments not mentioned above are - dialect specific, and passed in the form - ``_``. - See the documentation regarding an individual dialect at - :ref:`dialect_toplevel` for detail on documented arguments. 
+ dialect specific, and passed in the form + ``_``. + See the documentation regarding an individual dialect at + :ref:`dialect_toplevel` for detail on documented arguments. """ ... @@ -776,8 +789,6 @@ class CreateTableCommentOp(AlterTableOp): def create_table_comment(cls, operations: Operations, table_name: str, comment: Optional[str], *, existing_comment: Optional[str] = ..., schema: Optional[str] = ...) -> None: """Emit a COMMENT ON operation to set the comment for a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param comment: string value of the comment being registered against the specified table. @@ -800,8 +811,6 @@ class CreateTableCommentOp(AlterTableOp): """Emit a COMMENT ON operation to set the comment for a table using the current batch migration context. - .. versionadded:: 1.6.0 - :param comment: string value of the comment being registered against the specified table. :param existing_comment: String value of a comment @@ -812,14 +821,14 @@ class CreateTableCommentOp(AlterTableOp): """ ... - def reverse(self): # -> Any: + def reverse(self) -> Union[CreateTableCommentOp, DropTableCommentOp]: """Reverses the COMMENT ON operation against a table.""" ... - def to_table(self, migration_context=...): # -> Table: + def to_table(self, migration_context: Optional[MigrationContext] = ...) -> Table: ... - def to_diff_tuple(self): # -> tuple[Literal['add_table_comment'], Table, str | None]: + def to_diff_tuple(self) -> Tuple[Any, ...]: ... @@ -836,8 +845,6 @@ class DropTableCommentOp(AlterTableOp): """Issue a "drop table comment" operation to remove an existing comment set on a table. - .. versionadded:: 1.0.6 - :param table_name: string name of the target table. :param existing_comment: An optional string value of a comment already registered on the specified table. @@ -857,22 +864,20 @@ class DropTableCommentOp(AlterTableOp): remove an existing comment set on a table using the current batch operations context. - .. 
versionadded:: 1.6.0 - :param existing_comment: An optional string value of a comment already registered on the specified table. """ ... - def reverse(self): # -> Any: + def reverse(self) -> CreateTableCommentOp: """Reverses the COMMENT ON operation against a table.""" ... - def to_table(self, migration_context=...): # -> Table: + def to_table(self, migration_context: Optional[MigrationContext] = ...) -> Table: ... - def to_diff_tuple(self): # -> tuple[Literal['remove_table_comment'], Table]: + def to_diff_tuple(self) -> Tuple[Any, ...]: ... @@ -894,7 +899,7 @@ class AlterColumnOp(AlterTableOp): ... @classmethod - def alter_column(cls, operations: Operations, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: + def alter_column(cls, operations: Operations, table_name: str, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_type: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., schema: Optional[str] = ..., **kw: Any) -> None: r"""Issue an "alter column" instruction using the current migration context. @@ -933,9 +938,6 @@ class AlterColumnOp(AlterTableOp): Set to ``None`` to have the default removed. 
:param comment: optional string text of a new comment to add to the column. - - .. versionadded:: 1.0.6 - :param new_column_name: Optional; specify a string name here to indicate the new name within a column rename operation. :param type\_: Optional; a :class:`~sqlalchemy.types.TypeEngine` @@ -952,7 +954,7 @@ class AlterColumnOp(AlterTableOp): don't otherwise specify a new type, as well as for when nullability is being changed on a SQL Server column. It is also used if the type is a so-called - SQLlchemy "schema" type which may define a constraint (i.e. + SQLAlchemy "schema" type which may define a constraint (i.e. :class:`~sqlalchemy.types.Boolean`, :class:`~sqlalchemy.types.Enum`), so that the constraint can be dropped. @@ -969,9 +971,6 @@ class AlterColumnOp(AlterTableOp): :param existing_comment: string text of the existing comment on the column to be maintained. Required on MySQL if the existing comment on the column is not being changed. - - .. versionadded:: 1.0.6 - :param schema: Optional schema name to operate within. To control quoting of the schema outside of the default behavior, use the SQLAlchemy construct @@ -986,7 +985,7 @@ class AlterColumnOp(AlterTableOp): ... 
@classmethod - def batch_alter_column(cls, operations: BatchOperations, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_type: Optional[Union[TypeEngine, Type[TypeEngine]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: + def batch_alter_column(cls, operations: BatchOperations, column_name: str, *, nullable: Optional[bool] = ..., comment: Optional[Union[str, Literal[False]]] = ..., server_default: Any = ..., new_column_name: Optional[str] = ..., type_: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_type: Optional[Union[TypeEngine[Any], Type[TypeEngine[Any]]]] = ..., existing_server_default: Optional[Union[str, bool, Identity, Computed]] = ..., existing_nullable: Optional[bool] = ..., existing_comment: Optional[str] = ..., insert_before: Optional[str] = ..., insert_after: Optional[str] = ..., **kw: Any) -> None: """Issue an "alter column" instruction using the current batch migration context. @@ -996,8 +995,6 @@ class AlterColumnOp(AlterTableOp): :param insert_before: String name of an existing column which this column should be placed before, when creating the new table. - .. versionadded:: 1.4.0 - :param insert_after: String name of an existing column which this column should be placed after, when creating the new table. If both :paramref:`.BatchOperations.alter_column.insert_before` @@ -1005,8 +1002,6 @@ class AlterColumnOp(AlterTableOp): omitted, the column is inserted after the last existing column in the table. - .. versionadded:: 1.4.0 - .. 
seealso:: :meth:`.Operations.alter_column` @@ -1030,11 +1025,11 @@ class AddColumnOp(AlterTableOp): def to_diff_tuple(self) -> Tuple[str, Optional[str], str, Column[Any]]: ... - def to_column(self) -> Column: + def to_column(self) -> Column[Any]: ... @classmethod - def from_column(cls, col: Column) -> AddColumnOp: + def from_column(cls, col: Column[Any]) -> AddColumnOp: ... @classmethod @@ -1153,7 +1148,7 @@ class DropColumnOp(AlterTableOp): def from_column_and_tablename(cls, schema: Optional[str], tname: str, col: Column[Any]) -> DropColumnOp: ... - def to_column(self, migration_context: Optional[MigrationContext] = ...) -> Column: + def to_column(self, migration_context: Optional[MigrationContext] = ...) -> Column[Any]: ... @classmethod @@ -1213,11 +1208,11 @@ class DropColumnOp(AlterTableOp): @Operations.register_operation("bulk_insert") class BulkInsertOp(MigrateOperation): """Represent a bulk insert operation.""" - def __init__(self, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def __init__(self, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: ... @classmethod - def bulk_insert(cls, operations: Operations, table: Union[Table, TableClause], rows: List[dict], *, multiinsert: bool = ...) -> None: + def bulk_insert(cls, operations: Operations, table: Union[Table, TableClause], rows: List[Dict[str, Any]], *, multiinsert: bool = ...) -> None: """Issue a "bulk insert" operation using the current migration context. @@ -1321,11 +1316,11 @@ class BulkInsertOp(MigrateOperation): @BatchOperations.register_operation("execute", "batch_execute") class ExecuteSQLOp(MigrateOperation): """Represent an execute SQL operation.""" - def __init__(self, sqltext: Union[Update, str, Insert, TextClause], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def __init__(self, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: ... 
@classmethod - def execute(cls, operations: Operations, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def execute(cls, operations: Operations, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: r"""Execute the given SQL using the current migration context. The given SQL can be a plain string, e.g.:: @@ -1378,7 +1373,7 @@ class ExecuteSQLOp(MigrateOperation): ) Additionally, when passing the statement as a plain string, it is first - coerceed into a :func:`sqlalchemy.sql.expression.text` construct + coerced into a :func:`sqlalchemy.sql.expression.text` construct before being passed along. In the less likely case that the literal SQL string contains a colon, it must be escaped with a backslash, as:: @@ -1391,9 +1386,8 @@ class ExecuteSQLOp(MigrateOperation): * a string * a :func:`sqlalchemy.sql.expression.text` construct. * a :func:`sqlalchemy.sql.expression.insert` construct. - * a :func:`sqlalchemy.sql.expression.update`, - :func:`sqlalchemy.sql.expression.insert`, - or :func:`sqlalchemy.sql.expression.delete` construct. + * a :func:`sqlalchemy.sql.expression.update` construct. + * a :func:`sqlalchemy.sql.expression.delete` construct. * Any "executable" described in SQLAlchemy Core documentation, noting that no result set is returned. @@ -1410,7 +1404,7 @@ class ExecuteSQLOp(MigrateOperation): ... @classmethod - def batch_execute(cls, operations: Operations, sqltext: Union[str, TextClause, Update], *, execution_options: Optional[dict[str, Any]] = ...) -> None: + def batch_execute(cls, operations: Operations, sqltext: Union[Executable, str], *, execution_options: Optional[dict[str, Any]] = ...) -> None: """Execute the given SQL using the current migration context. .. seealso:: @@ -1420,6 +1414,9 @@ class ExecuteSQLOp(MigrateOperation): """ ... + def to_diff_tuple(self) -> Tuple[str, Union[Executable, str]]: + ... 
+ class OpContainer(MigrateOperation): @@ -1477,7 +1474,7 @@ class DowngradeOps(OpContainer): def __init__(self, ops: Sequence[MigrateOperation] = ..., downgrade_token: str = ...) -> None: ... - def reverse(self): # -> UpgradeOps: + def reverse(self) -> UpgradeOps: ... @@ -1506,11 +1503,13 @@ class MigrationScript(MigrateOperation): """ _needs_render: Optional[bool] - def __init__(self, rev_id: Optional[str], upgrade_ops: UpgradeOps, downgrade_ops: DowngradeOps, *, message: Optional[str] = ..., imports: Set[str] = ..., head: Optional[str] = ..., splice: Optional[bool] = ..., branch_label: Optional[str] = ..., version_path: Optional[str] = ..., depends_on: Optional[Union[str, Sequence[str]]] = ...) -> None: + _upgrade_ops: List[UpgradeOps] + _downgrade_ops: List[DowngradeOps] + def __init__(self, rev_id: Optional[str], upgrade_ops: UpgradeOps, downgrade_ops: DowngradeOps, *, message: Optional[str] = ..., imports: Set[str] = ..., head: Optional[str] = ..., splice: Optional[bool] = ..., branch_label: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ...) -> None: ... @property - def upgrade_ops(self): # -> Any | None: + def upgrade_ops(self) -> Optional[UpgradeOps]: """An instance of :class:`.UpgradeOps`. .. seealso:: @@ -1520,11 +1519,11 @@ class MigrationScript(MigrateOperation): ... @upgrade_ops.setter - def upgrade_ops(self, upgrade_ops): # -> None: + def upgrade_ops(self, upgrade_ops: Union[UpgradeOps, List[UpgradeOps]]) -> None: ... @property - def downgrade_ops(self): # -> Any | None: + def downgrade_ops(self) -> Optional[DowngradeOps]: """An instance of :class:`.DowngradeOps`. .. seealso:: @@ -1534,7 +1533,7 @@ class MigrationScript(MigrateOperation): ... @downgrade_ops.setter - def downgrade_ops(self, downgrade_ops): # -> None: + def downgrade_ops(self, downgrade_ops: Union[DowngradeOps, List[DowngradeOps]]) -> None: ... 
@property diff --git a/typings/alembic/operations/schemaobj.pyi b/src/database/typings/alembic/operations/schemaobj.pyi similarity index 100% rename from typings/alembic/operations/schemaobj.pyi rename to src/database/typings/alembic/operations/schemaobj.pyi diff --git a/typings/alembic/operations/toimpl.pyi b/src/database/typings/alembic/operations/toimpl.pyi similarity index 100% rename from typings/alembic/operations/toimpl.pyi rename to src/database/typings/alembic/operations/toimpl.pyi diff --git a/typings/alembic/runtime/__init__.pyi b/src/database/typings/alembic/runtime/__init__.pyi similarity index 100% rename from typings/alembic/runtime/__init__.pyi rename to src/database/typings/alembic/runtime/__init__.pyi diff --git a/typings/alembic/runtime/environment.pyi b/src/database/typings/alembic/runtime/environment.pyi similarity index 89% rename from typings/alembic/runtime/environment.pyi rename to src/database/typings/alembic/runtime/environment.pyi index 7b9d5a4f..12c9f4f0 100644 --- a/typings/alembic/runtime/environment.pyi +++ b/src/database/typings/alembic/runtime/environment.pyi @@ -2,74 +2,31 @@ This type stub file was generated by pyright. """ -from typing import ( - TYPE_CHECKING, - Any, - Callable, - Collection, - ContextManager, - Dict, - List, - Mapping, - MutableMapping, - Optional, - TextIO, - Tuple, - Union, - overload, -) - -from sqlalchemy.engine import URL -from sqlalchemy.engine.base import Connection -from sqlalchemy.sql.elements import ClauseElement +from typing import Any, Callable, Collection, ContextManager, Dict, List, Mapping, MutableMapping, Optional, Sequence, TYPE_CHECKING, TextIO, Tuple, Union, overload from sqlalchemy.sql.schema import FetchedValue, MetaData from typing_extensions import Literal - +from .migration import MigrationContext, _ProxyTransaction from .. 
import util +from ..script.revision import _GetRevArg +from sqlalchemy.engine import URL +from sqlalchemy.engine.base import Connection +from sqlalchemy.sql import Executable from ..config import Config from ..ddl import DefaultImpl from ..script.base import ScriptDirectory -from .migration import MigrationContext, _ProxyTransaction -if TYPE_CHECKING: ... +if TYPE_CHECKING: + ... _RevNumber = Optional[Union[str, Tuple[str, ...]]] -ProcessRevisionDirectiveFn = Callable[ - [MigrationContext, Tuple[str, str], List["MigrateOperation"]], None -] +ProcessRevisionDirectiveFn = Callable[[MigrationContext, _GetRevArg, List["MigrationScript"]], None] RenderItemFn = Callable[[str, Any, "AutogenContext"], Union[str, Literal[False]]] -NameFilterType = Literal[ - "schema", - "table", - "column", - "index", - "unique_constraint", - "foreign_key_constraint", -] -NameFilterParentNames = MutableMapping[ - Literal["schema_name", "table_name", "schema_qualified_table_name"], - Optional[str], -] +NameFilterType = Literal["schema", "table", "column", "index", "unique_constraint", "foreign_key_constraint",] +NameFilterParentNames = MutableMapping[Literal["schema_name", "table_name", "schema_qualified_table_name"], Optional[str],] IncludeNameFn = Callable[[Optional[str], NameFilterType, NameFilterParentNames], bool] -IncludeObjectFn = Callable[ - ["SchemaItem", Optional[str], NameFilterType, bool, Optional["SchemaItem"]], - bool, -] -OnVersionApplyFn = Callable[ - [MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], - None, -] -CompareServerDefault = Callable[ - [ - MigrationContext, - "Column[Any]", - "Column[Any]", - Optional[str], - Optional[FetchedValue], - Optional[str], - ], - Optional[bool], -] - +IncludeObjectFn = Callable[["SchemaItem", Optional[str], NameFilterType, bool, Optional["SchemaItem"]], bool,] +OnVersionApplyFn = Callable[[MigrationContext, "MigrationInfo", Collection[Any], Mapping[str, Any]], None,] +CompareServerDefault = 
Callable[[MigrationContext, "Column[Any]", "Column[Any]", Optional[str], Optional[FetchedValue], Optional[str]], Optional[bool],] +CompareType = Callable[[MigrationContext, "Column[Any]", "Column[Any]", "TypeEngine[Any]", "TypeEngine[Any]"], Optional[bool],] class EnvironmentContext(util.ModuleClsProxy): """A configurational facade made available in an ``env.py`` script. @@ -133,7 +90,6 @@ class EnvironmentContext(util.ModuleClsProxy): ``env.py`` module present in the migration environment. """ - _migration_context: Optional[MigrationContext] = ... config: Config = ... script: ScriptDirectory = ... @@ -148,6 +104,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def __enter__(self) -> EnvironmentContext: """Establish a context which provides a :class:`.EnvironmentContext` object to @@ -158,7 +115,10 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def __exit__(self, *arg: Any, **kw: Any) -> None: ... + + def __exit__(self, *arg: Any, **kw: Any) -> None: + ... + def is_offline_mode(self) -> bool: """Return True if the current migrations environment is running in "offline mode". @@ -171,7 +131,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def is_transactional_ddl(self): # -> bool: + + def is_transactional_ddl(self) -> bool: """Return True if the context is configured to expect a transactional DDL capable backend. @@ -184,7 +145,10 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def requires_connection(self) -> bool: ... + + def requires_connection(self) -> bool: + ... + def get_head_revision(self) -> _RevNumber: """Return the hex identifier of the 'head' script revision. @@ -199,6 +163,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_head_revisions(self) -> _RevNumber: """Return the hex identifier of the 'heads' script revision(s). @@ -210,6 +175,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... 
+ def get_starting_revision_argument(self) -> _RevNumber: """Return the 'starting revision' argument, if the revision was passed using ``start:end``. @@ -223,6 +189,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_revision_argument(self) -> _RevNumber: """Get the 'destination' revision argument. @@ -238,6 +205,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_tag_argument(self) -> Optional[str]: """Return the value passed for the ``--tag`` argument, if any. @@ -257,17 +225,20 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + @overload - def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: ... + def get_x_argument(self, as_dictionary: Literal[False]) -> List[str]: + ... + @overload - def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]: ... + def get_x_argument(self, as_dictionary: Literal[True]) -> Dict[str, str]: + ... + @overload - def get_x_argument( - self, as_dictionary: bool = ... - ) -> Union[List[str], Dict[str, str]]: ... - def get_x_argument( - self, as_dictionary: bool = ... - ) -> Union[List[str], Dict[str, str]]: + def get_x_argument(self, as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: + ... + + def get_x_argument(self, as_dictionary: bool = ...) -> Union[List[str], Dict[str, str]]: """Return the value(s) passed for the ``-x`` argument, if any. The ``-x`` argument is an open ended flag that allows any user-defined @@ -277,7 +248,11 @@ class EnvironmentContext(util.ModuleClsProxy): The return value is a list, returned directly from the ``argparse`` structure. If ``as_dictionary=True`` is passed, the ``x`` arguments are parsed using ``key=value`` format into a dictionary that is - then returned. + then returned. If there is no ``=`` in the argument, value is an empty + string. + + .. versionchanged:: 1.13.1 Support ``as_dictionary=True`` when + arguments are passed without the ``=`` symbol. 
For example, to support passing a database URL on the command line, the standard ``env.py`` script can be modified like this:: @@ -307,39 +282,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def configure( - self, - connection: Optional[Connection] = ..., - url: Optional[Union[str, URL]] = ..., - dialect_name: Optional[str] = ..., - dialect_opts: Optional[Dict[str, Any]] = ..., - transactional_ddl: Optional[bool] = ..., - transaction_per_migration: bool = ..., - output_buffer: Optional[TextIO] = ..., - starting_rev: Optional[str] = ..., - tag: Optional[str] = ..., - template_args: Optional[Dict[str, Any]] = ..., - render_as_batch: bool = ..., - # Alembic documents and supports list[MetaData] - # despite the typehint not including it in the - # library - target_metadata: Optional[MetaData | list[MetaData]] = ..., - include_name: Optional[IncludeNameFn] = ..., - include_object: Optional[IncludeObjectFn] = ..., - include_schemas: bool = ..., - process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ..., - compare_type: bool = ..., - compare_server_default: Union[bool, CompareServerDefault] = ..., - render_item: Optional[RenderItemFn] = ..., - literal_binds: bool = ..., - upgrade_token: str = ..., - downgrade_token: str = ..., - alembic_module_prefix: str = ..., - sqlalchemy_module_prefix: str = ..., - user_module_prefix: Optional[str] = ..., - on_version_apply: Optional[OnVersionApplyFn] = ..., - **kw: Any - ) -> None: + + def configure(self, connection: Optional[Connection] = ..., url: Optional[Union[str, URL]] = ..., dialect_name: Optional[str] = ..., dialect_opts: Optional[Dict[str, Any]] = ..., transactional_ddl: Optional[bool] = ..., transaction_per_migration: bool = ..., output_buffer: Optional[TextIO] = ..., starting_rev: Optional[str] = ..., tag: Optional[str] = ..., template_args: Optional[Dict[str, Any]] = ..., render_as_batch: bool = ..., target_metadata: Union[MetaData, Sequence[MetaData], None] = ..., include_name: 
Optional[IncludeNameFn] = ..., include_object: Optional[IncludeObjectFn] = ..., include_schemas: bool = ..., process_revision_directives: Optional[ProcessRevisionDirectiveFn] = ..., compare_type: Union[bool, CompareType] = ..., compare_server_default: Union[bool, CompareServerDefault] = ..., render_item: Optional[RenderItemFn] = ..., literal_binds: bool = ..., upgrade_token: str = ..., downgrade_token: str = ..., alembic_module_prefix: str = ..., sqlalchemy_module_prefix: str = ..., user_module_prefix: Optional[str] = ..., on_version_apply: Optional[OnVersionApplyFn] = ..., **kw: Any) -> None: """Configure a :class:`.MigrationContext` within this :class:`.EnvironmentContext` which will provide database connectivity and other configuration to a series of @@ -384,9 +328,6 @@ class EnvironmentContext(util.ModuleClsProxy): ``connection`` and ``url`` are not passed. :param dialect_opts: dictionary of options to be passed to dialect constructor. - - .. versionadded:: 1.0.12 - :param transactional_ddl: Force the usage of "transactional" DDL on or off; this otherwise defaults to whether or not the dialect in @@ -469,12 +410,16 @@ class EnvironmentContext(util.ModuleClsProxy): to produce candidate upgrade/downgrade operations. :param compare_type: Indicates type comparison behavior during an autogenerate - operation. Defaults to ``False`` which disables type - comparison. Set to - ``True`` to turn on default type comparison, which has varied - accuracy depending on backend. See :ref:`compare_types` + operation. Defaults to ``True`` turning on type comparison, which + has good accuracy on most backends. See :ref:`compare_types` for an example as well as information on other type - comparison options. + comparison options. Set to ``False`` which disables type + comparison. A callable can also be passed to provide custom type + comparison, see :ref:`compare_types` for additional details. + + .. 
versionchanged:: 1.12.0 The default value of + :paramref:`.EnvironmentContext.configure.compare_type` has been + changed to ``True``. .. seealso:: @@ -558,8 +503,6 @@ class EnvironmentContext(util.ModuleClsProxy): include_name = include_name ) - .. versionadded:: 1.5 - .. seealso:: :ref:`autogenerate_include_hooks` @@ -774,6 +717,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def run_migrations(self, **kw: Any) -> None: """Run migrations as determined by the current command line configuration @@ -795,9 +739,8 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def execute( - self, sql: Union[ClauseElement, str], execution_options: Optional[dict] = ... - ) -> None: + + def execute(self, sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute the given SQL using the current change context. The behavior of :meth:`.execute` is the same @@ -810,6 +753,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def static_output(self, text: str) -> None: """Emit text directly to the "offline" SQL stream. @@ -820,6 +764,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def begin_transaction(self) -> Union[_ProxyTransaction, ContextManager[None]]: """Return a context manager that will enclose an operation within a "transaction", @@ -864,6 +809,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_context(self) -> MigrationContext: """Return the current :class:`.MigrationContext` object. @@ -872,6 +818,7 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... + def get_bind(self) -> Connection: """Return the current 'bind'. @@ -884,4 +831,9 @@ class EnvironmentContext(util.ModuleClsProxy): """ ... - def get_impl(self) -> DefaultImpl: ... + + def get_impl(self) -> DefaultImpl: + ... 
+ + + diff --git a/typings/alembic/runtime/migration.pyi b/src/database/typings/alembic/runtime/migration.pyi similarity index 98% rename from typings/alembic/runtime/migration.pyi rename to src/database/typings/alembic/runtime/migration.pyi index c975722a..fb1aeea5 100644 --- a/typings/alembic/runtime/migration.pyi +++ b/src/database/typings/alembic/runtime/migration.pyi @@ -6,7 +6,7 @@ from contextlib import contextmanager from typing import Any, Collection, ContextManager, Dict, Iterator, List, Optional, Set, TYPE_CHECKING, Tuple, Union from sqlalchemy.engine import Dialect, URL from sqlalchemy.engine.base import Connection -from sqlalchemy.sql.elements import ClauseElement +from sqlalchemy.sql import Executable from .environment import EnvironmentContext from ..config import Config from ..script.base import Script, ScriptDirectory @@ -151,8 +151,6 @@ class MigrationContext: migrations whether or not one of them has an autocommit block. - .. versionadded:: 1.2.0 - """ ... @@ -271,7 +269,7 @@ class MigrationContext: """ ... - def execute(self, sql: Union[ClauseElement, str], execution_options: Optional[dict] = ...) -> None: + def execute(self, sql: Union[Executable, str], execution_options: Optional[Dict[str, Any]] = ...) -> None: """Execute a SQL construct or string statement. The underlying execution mechanics are used, that is @@ -396,6 +394,11 @@ class MigrationStep: to_revisions_no_deps: Tuple[str, ...] is_upgrade: bool migration_fn: Any + if TYPE_CHECKING: + @property + def doc(self) -> Optional[str]: + ... + @property def name(self) -> str: ... @@ -432,7 +435,7 @@ class RevisionStep(MigrationStep): ... @property - def doc(self) -> str: + def doc(self) -> Optional[str]: ... @property @@ -462,7 +465,7 @@ class RevisionStep(MigrationStep): def merge_branch_idents(self, heads: Set[str]) -> Tuple[List[str], str, str]: ... 
- def unmerge_branch_idents(self, heads: Collection[str]) -> Tuple[str, str, Tuple[str, ...]]: + def unmerge_branch_idents(self, heads: Set[str]) -> Tuple[str, str, Tuple[str, ...]]: ... def should_create_branch(self, heads: Set[str]) -> bool: diff --git a/typings/alembic/script/__init__.pyi b/src/database/typings/alembic/script/__init__.pyi similarity index 100% rename from typings/alembic/script/__init__.pyi rename to src/database/typings/alembic/script/__init__.pyi diff --git a/typings/alembic/script/base.pyi b/src/database/typings/alembic/script/base.pyi similarity index 90% rename from typings/alembic/script/base.pyi rename to src/database/typings/alembic/script/base.pyi index 339ae9c3..e886afdd 100644 --- a/typings/alembic/script/base.pyi +++ b/src/database/typings/alembic/script/base.pyi @@ -3,18 +3,21 @@ This type stub file was generated by pyright. """ from types import ModuleType -from typing import Any, Iterator, List, Mapping, Optional, Sequence, Set, TYPE_CHECKING, Tuple, Union +from typing import Any, Iterator, List, Mapping, Optional, Set, TYPE_CHECKING, Tuple, Union from . import revision +from ..util import compat +from .revision import Revision, _GetRevArg, _RevIdType from ..config import Config, MessagingOptions -from ..script.revision import Revision if TYPE_CHECKING: ... -_RevIdType = Union[str, Sequence[str]] +if compat.py39: + ... +else: + ... _sourceless_rev_file = ... _only_source_rev_file = ... _legacy_rev = ... -_mod_def_re = ... _slug_re = ... _default_file_template = ... _split_on_space_comma = ... @@ -69,17 +72,17 @@ class ScriptDirectory: """ ... - def get_revisions(self, id_: _RevIdType) -> Tuple[Optional[Script], ...]: + def get_revisions(self, id_: _GetRevArg) -> Tuple[Script, ...]: """Return the :class:`.Script` instance with the given rev identifier, symbolic name, or sequence of identifiers. """ ... 
- def get_all_current(self, id_: Tuple[str, ...]) -> Set[Optional[Script]]: + def get_all_current(self, id_: Tuple[str, ...]) -> Set[Script]: ... - def get_revision(self, id_: str) -> Optional[Script]: + def get_revision(self, id_: str) -> Script: """Return the :class:`.Script` instance with the given rev id. .. seealso:: @@ -176,10 +179,10 @@ class ScriptDirectory: ... @property - def env_py_location(self): # -> str: + def env_py_location(self) -> str: ... - def generate_revision(self, revid: str, message: Optional[str], head: Optional[str] = ..., refresh: bool = ..., splice: Optional[bool] = ..., branch_labels: Optional[str] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ..., **kw: Any) -> Optional[Script]: + def generate_revision(self, revid: str, message: Optional[str], head: Optional[_RevIdType] = ..., splice: Optional[bool] = ..., branch_labels: Optional[_RevIdType] = ..., version_path: Optional[str] = ..., depends_on: Optional[_RevIdType] = ..., **kw: Any) -> Optional[Script]: """Generate a new revision file. This runs the ``script.py.mako`` template, given @@ -195,7 +198,6 @@ class ScriptDirectory: :param splice: if True, allow the "head" version to not be an actual head; otherwise, the selected head must be a head (e.g. endpoint) revision. - :param refresh: deprecated. """ ... diff --git a/typings/alembic/script/revision.pyi b/src/database/typings/alembic/script/revision.pyi similarity index 87% rename from typings/alembic/script/revision.pyi rename to src/database/typings/alembic/script/revision.pyi index 3cd91379..92db9ee5 100644 --- a/typings/alembic/script/revision.pyi +++ b/src/database/typings/alembic/script/revision.pyi @@ -2,20 +2,28 @@ This type stub file was generated by pyright. 
""" -from typing import Any, Callable, Collection, Dict, FrozenSet, Iterable, Iterator, Optional, Sequence, Set, TYPE_CHECKING, Tuple, TypeVar, Union, overload +from typing import Any, Callable, Dict, FrozenSet, Iterable, Iterator, List, Optional, Protocol, Sequence, Set, TYPE_CHECKING, Tuple, TypeVar, Union, overload from .. import util if TYPE_CHECKING: ... -_RevIdType = Union[str, Sequence[str]] +_RevIdType = Union[str, List[str], Tuple[str, ...]] +_GetRevArg = Union[str, Iterable[Optional[str]], Iterable[str],] _RevisionIdentifierType = Union[str, Tuple[str, ...], None] _RevisionOrStr = Union["Revision", str] _RevisionOrBase = Union["Revision", "Literal['base']"] _InterimRevisionMapType = Dict[str, "Revision"] _RevisionMapType = Dict[Union[None, str, Tuple[()]], Optional["Revision"]] -_T = TypeVar("_T", bound=Union[str, "Revision"]) +_T = TypeVar("_T") +_TR = TypeVar("_TR", bound=Optional[_RevisionOrStr]) _relative_destination = ... _revision_illegal_chars = ... +class _CollectRevisionsProtocol(Protocol): + def __call__(self, upper: _RevisionIdentifierType, lower: _RevisionIdentifierType, inclusive: bool, implicit_base: bool, assert_relative_length: bool) -> Tuple[Set[Revision], Tuple[Optional[_RevisionOrBase], ...]]: + ... + + + class RevisionError(Exception): ... @@ -137,7 +145,7 @@ class RevisionMap: """ ... - def get_revisions(self, id_: Union[str, Collection[Optional[str]], None]) -> Tuple[Optional[_RevisionOrBase], ...]: + def get_revisions(self, id_: Optional[_GetRevArg]) -> Tuple[Optional[_RevisionOrBase], ...]: """Return the :class:`.Revision` instances with the given rev id or identifiers. @@ -171,7 +179,7 @@ class RevisionMap: """ ... - def filter_for_lineage(self, targets: Iterable[_T], check_against: Optional[str], include_dependencies: bool = ...) -> Tuple[_T, ...]: + def filter_for_lineage(self, targets: Iterable[_TR], check_against: Optional[str], include_dependencies: bool = ...) -> Tuple[_TR, ...]: ... 
def iterate_revisions(self, upper: _RevisionIdentifierType, lower: _RevisionIdentifierType, implicit_base: bool = ..., inclusive: bool = ..., assert_relative_length: bool = ..., select_for_downgrade: bool = ...) -> Iterator[Revision]: @@ -257,14 +265,14 @@ class Revision: @overload -def tuple_rev_as_scalar(rev: Optional[Sequence[str]]) -> Optional[Union[str, Sequence[str]]]: +def tuple_rev_as_scalar(rev: None) -> None: ... @overload -def tuple_rev_as_scalar(rev: Optional[Sequence[Optional[str]]]) -> Optional[Union[Optional[str], Sequence[Optional[str]]]]: +def tuple_rev_as_scalar(rev: Union[Tuple[_T, ...], List[_T]]) -> Union[_T, Tuple[_T, ...], List[_T]]: ... -def tuple_rev_as_scalar(rev): # -> None: +def tuple_rev_as_scalar(rev: Optional[Sequence[_T]]) -> Union[_T, Sequence[_T], None]: ... def is_revision(rev: Any) -> Revision: diff --git a/typings/alembic/script/write_hooks.pyi b/src/database/typings/alembic/script/write_hooks.pyi similarity index 83% rename from typings/alembic/script/write_hooks.pyi rename to src/database/typings/alembic/script/write_hooks.pyi index 031c161a..748c1dd4 100644 --- a/typings/alembic/script/write_hooks.pyi +++ b/src/database/typings/alembic/script/write_hooks.pyi @@ -11,8 +11,6 @@ def register(name: str) -> Callable: See the documentation linked below for an example. - .. versionadded:: 1.2.0 - .. seealso:: :ref:`post_write_hooks_custom` @@ -25,3 +23,7 @@ def register(name: str) -> Callable: def console_scripts(path: str, options: dict, ignore_output: bool = ...) -> None: ... +@register("exec") +def exec_(path: str, options: dict, ignore_output: bool = ...) -> None: + ... 
+ diff --git a/typings/alembic/testing/__init__.pyi b/src/database/typings/alembic/testing/__init__.pyi similarity index 100% rename from typings/alembic/testing/__init__.pyi rename to src/database/typings/alembic/testing/__init__.pyi diff --git a/typings/alembic/testing/assertions.pyi b/src/database/typings/alembic/testing/assertions.pyi similarity index 94% rename from typings/alembic/testing/assertions.pyi rename to src/database/typings/alembic/testing/assertions.pyi index d86a42ae..848def88 100644 --- a/typings/alembic/testing/assertions.pyi +++ b/src/database/typings/alembic/testing/assertions.pyi @@ -45,7 +45,7 @@ def expect_warnings(*messages, **kw): """ ... -def emits_python_deprecation_warning(*messages): # -> (_Fn@decorator) -> _Fn@decorator: +def emits_python_deprecation_warning(*messages): # -> Any: """Decorator form of expect_warnings(). Note that emits_warning does **not** assert that the warnings diff --git a/typings/alembic/testing/env.pyi b/src/database/typings/alembic/testing/env.pyi similarity index 100% rename from typings/alembic/testing/env.pyi rename to src/database/typings/alembic/testing/env.pyi diff --git a/typings/alembic/testing/fixtures.pyi b/src/database/typings/alembic/testing/fixtures.pyi similarity index 92% rename from typings/alembic/testing/fixtures.pyi rename to src/database/typings/alembic/testing/fixtures.pyi index d1be4210..bbd09422 100644 --- a/typings/alembic/testing/fixtures.pyi +++ b/src/database/typings/alembic/testing/fixtures.pyi @@ -21,7 +21,7 @@ class TestBase(SQLAlchemyTestBase): ... @testing.fixture - def connection(self): # -> Generator[db, Any, None]: + def connection(self): # -> Generator[Any, Any, None]: ... @@ -37,7 +37,7 @@ else: __requires__ = ... -def capture_db(dialect=...): # -> tuple[Engine, list[Unknown]]: +def capture_db(dialect=...): # -> tuple[Engine, list[Any]]: ... _engs: Dict[Any, Any] = ... 
diff --git a/typings/alembic/testing/util.pyi b/src/database/typings/alembic/testing/util.pyi similarity index 89% rename from typings/alembic/testing/util.pyi rename to src/database/typings/alembic/testing/util.pyi index cee802b5..c3f3113f 100644 --- a/typings/alembic/testing/util.pyi +++ b/src/database/typings/alembic/testing/util.pyi @@ -2,7 +2,7 @@ This type stub file was generated by pyright. """ -def flag_combinations(*combinations): # -> (_FN@combinations) -> _FN@combinations: +def flag_combinations(*combinations): """A facade around @testing.combinations() oriented towards boolean keyword-based arguments. @@ -43,7 +43,7 @@ def resolve_lambda(__fn, **kw): # -> Any: """ ... -def metadata_fixture(ddl=...): # -> (fn: Unknown) -> Unknown: +def metadata_fixture(ddl=...): # -> Callable[..., Any]: """Provide MetaData for a pytest fixture.""" ... diff --git a/src/database/typings/alembic/util/__init__.pyi b/src/database/typings/alembic/util/__init__.pyi new file mode 100644 index 00000000..5f57f0e8 --- /dev/null +++ b/src/database/typings/alembic/util/__init__.pyi @@ -0,0 +1,13 @@ +""" +This type stub file was generated by pyright. 
+""" + +from .editor import open_in_editor as open_in_editor +from .exc import AutogenerateDiffsDetected as AutogenerateDiffsDetected, CommandError as CommandError +from .langhelpers import Dispatcher as Dispatcher, EMPTY_DICT as EMPTY_DICT, ModuleClsProxy as ModuleClsProxy, _with_legacy_names as _with_legacy_names, asbool as asbool, dedupe_tuple as dedupe_tuple, immutabledict as immutabledict, memoized_property as memoized_property, not_none as not_none, rev_id as rev_id, to_list as to_list, to_tuple as to_tuple, unique_list as unique_list +from .messaging import err as err, format_as_comma as format_as_comma, msg as msg, obfuscate_url_pw as obfuscate_url_pw, status as status, warn as warn, write_outstream as write_outstream +from .pyfiles import coerce_resource_to_filename as coerce_resource_to_filename, load_python_file as load_python_file, pyc_file_from_path as pyc_file_from_path, template_to_file as template_to_file +from .sqla_compat import has_computed as has_computed, sqla_13 as sqla_13, sqla_14 as sqla_14, sqla_2 as sqla_2 + +if not sqla_13: + ... diff --git a/src/database/typings/alembic/util/compat.pyi b/src/database/typings/alembic/util/compat.pyi new file mode 100644 index 00000000..d68cb70f --- /dev/null +++ b/src/database/typings/alembic/util/compat.pyi @@ -0,0 +1,40 @@ +""" +This type stub file was generated by pyright. +""" + +import io +import os +import importlib_resources +import importlib_metadata +from configparser import ConfigParser +from typing import Any, List, Optional, Sequence, Union +from importlib.metadata import EntryPoint as EntryPoint +from importlib_metadata import EntryPoint + +if True: + ... +is_posix = ... +py311 = ... +py310 = ... +py39 = ... +class EncodedIO(io.TextIOWrapper): + def close(self) -> None: + ... + + + +if py39: + importlib_resources = ... + importlib_metadata = ... +else: + ... +def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: + ... 
+ +def formatannotation_fwdref(annotation: Any, base_module: Optional[Any] = ...) -> str: + """vendored from python 3.7""" + ... + +def read_config_parser(file_config: ConfigParser, file_argument: Sequence[Union[str, os.PathLike[str]]]) -> List[str]: + ... + diff --git a/typings/alembic/util/editor.pyi b/src/database/typings/alembic/util/editor.pyi similarity index 100% rename from typings/alembic/util/editor.pyi rename to src/database/typings/alembic/util/editor.pyi diff --git a/typings/alembic/util/exc.pyi b/src/database/typings/alembic/util/exc.pyi similarity index 100% rename from typings/alembic/util/exc.pyi rename to src/database/typings/alembic/util/exc.pyi diff --git a/typings/alembic/util/langhelpers.pyi b/src/database/typings/alembic/util/langhelpers.pyi similarity index 57% rename from typings/alembic/util/langhelpers.pyi rename to src/database/typings/alembic/util/langhelpers.pyi index c466b204..d0de856a 100644 --- a/typings/alembic/util/langhelpers.pyi +++ b/src/database/typings/alembic/util/langhelpers.pyi @@ -2,13 +2,15 @@ This type stub file was generated by pyright. """ -from typing import (Any, Callable, Dict, Mapping, Optional, Tuple, TypeVar, - overload) +from typing import Any, Callable, Dict, List, Mapping, MutableMapping, Optional, Set, Tuple, Type, TypeVar, overload +if True: + ... EMPTY_DICT: Mapping[Any, Any] = ... -_T = TypeVar("_T") +_T = TypeVar("_T", bound=Any) +_C = TypeVar("_C", bound=Callable[..., Any]) class _ModuleClsMeta(type): - def __setattr__(cls, key: str, value: Callable) -> None: + def __setattr__(cls, key: str, value: Callable[..., Any]) -> None: ... @@ -21,9 +23,9 @@ class ModuleClsProxy(metaclass=_ModuleClsMeta): as the methods. """ - _setups: Dict[type, Tuple[set, list]] = ... + _setups: Dict[Type[Any], Tuple[Set[str], List[Tuple[MutableMapping[str, Any], MutableMapping[str, Any]]],],] = ... 
@classmethod - def create_module_class_proxy(cls, globals_, locals_): # -> None: + def create_module_class_proxy(cls, globals_: MutableMapping[str, Any], locals_: MutableMapping[str, Any]) -> None: ... @@ -32,7 +34,7 @@ def rev_id() -> str: ... @overload -def to_tuple(x: Any, default: tuple) -> tuple: +def to_tuple(x: Any, default: Tuple[Any, ...]) -> Tuple[Any, ...]: ... @overload @@ -40,10 +42,10 @@ def to_tuple(x: None, default: Optional[_T] = ...) -> _T: ... @overload -def to_tuple(x: Any, default: Optional[tuple] = ...) -> tuple: +def to_tuple(x: Any, default: Optional[Tuple[Any, ...]] = ...) -> Tuple[Any, ...]: ... -def to_tuple(x, default=...): # -> tuple[str] | tuple[Unknown, ...] | tuple[Unknown] | None: +def to_tuple(x: Any, default: Optional[Tuple[Any, ...]] = ...) -> Optional[Tuple[Any, ...]]: ... def dedupe_tuple(tup: Tuple[str, ...]) -> Tuple[str, ...]: @@ -53,7 +55,7 @@ class Dispatcher: def __init__(self, uselist: bool = ...) -> None: ... - def dispatch_for(self, target: Any, qualifier: str = ...) -> Callable: + def dispatch_for(self, target: Any, qualifier: str = ...) -> Callable[[_C], _C]: ... def dispatch(self, obj: Any, qualifier: str = ...) -> Any: diff --git a/typings/alembic/util/messaging.pyi b/src/database/typings/alembic/util/messaging.pyi similarity index 84% rename from typings/alembic/util/messaging.pyi rename to src/database/typings/alembic/util/messaging.pyi index 83ce7d11..f0bcf425 100644 --- a/typings/alembic/util/messaging.pyi +++ b/src/database/typings/alembic/util/messaging.pyi @@ -4,7 +4,7 @@ This type stub file was generated by pyright. from collections.abc import Iterable from contextlib import contextmanager -from typing import Optional, TextIO, Union +from typing import Iterator, Optional, TextIO, Union log = ... ioctl = ... @@ -14,10 +14,10 @@ def write_outstream(stream: TextIO, *text: Union[str, bytes], quiet: bool = ...) ... 
@contextmanager -def status(status_msg: str, newline: bool = ..., quiet: bool = ...): # -> Generator[None, Any, None]: +def status(status_msg: str, newline: bool = ..., quiet: bool = ...) -> Iterator[None]: ... -def err(message: str, quiet: bool = ...): +def err(message: str, quiet: bool = ...) -> None: ... def obfuscate_url_pw(input_url: str) -> str: diff --git a/typings/alembic/util/pyfiles.pyi b/src/database/typings/alembic/util/pyfiles.pyi similarity index 73% rename from typings/alembic/util/pyfiles.pyi rename to src/database/typings/alembic/util/pyfiles.pyi index d56dcd65..f651fbdd 100644 --- a/typings/alembic/util/pyfiles.pyi +++ b/src/database/typings/alembic/util/pyfiles.pyi @@ -2,9 +2,10 @@ This type stub file was generated by pyright. """ -from typing import Optional +from types import ModuleType +from typing import Any, Optional -def template_to_file(template_file: str, dest: str, output_encoding: str, **kw) -> None: +def template_to_file(template_file: str, dest: str, output_encoding: str, **kw: Any) -> None: ... def coerce_resource_to_filename(fname: str) -> str: @@ -21,10 +22,10 @@ def pyc_file_from_path(path: str) -> Optional[str]: """Given a python source path, locate the .pyc.""" ... -def load_python_file(dir_: str, filename: str): # -> ModuleType: +def load_python_file(dir_: str, filename: str) -> ModuleType: """Load a file from the given path as a Python module.""" ... -def load_module_py(module_id: str, path: str): # -> ModuleType: +def load_module_py(module_id: str, path: str) -> ModuleType: ... diff --git a/typings/alembic/util/sqla_compat.pyi b/src/database/typings/alembic/util/sqla_compat.pyi similarity index 77% rename from typings/alembic/util/sqla_compat.pyi rename to src/database/typings/alembic/util/sqla_compat.pyi index db7d1b70..2a5df80a 100644 --- a/typings/alembic/util/sqla_compat.pyi +++ b/src/database/typings/alembic/util/sqla_compat.pyi @@ -2,28 +2,40 @@ This type stub file was generated by pyright. 
""" -from typing import Optional, TYPE_CHECKING, TypeVar, Union -from sqlalchemy import Index, Table, __version__, sql, types as sqltypes +from typing import Any, Callable, Optional, Protocol, TYPE_CHECKING, Type, TypeVar, Union +from sqlalchemy import ClauseElement, Index, Table, sql, types as sqltypes +from sqlalchemy.sql.base import _NoneName from sqlalchemy.sql.elements import BindParameter, ColumnElement, TextClause from typing_extensions import TypeGuard -from sqlalchemy.sql.base import _NoneName from sqlalchemy.util import symbol as _NoneName if TYPE_CHECKING: ... _CE = TypeVar("_CE", bound=Union["ColumnElement[Any]", "SchemaItem"]) +class _CompilerProtocol(Protocol): + def __call__(self, element: Any, compiler: Any, **kw: Any) -> str: + ... + + + _vers = ... sqla_13 = ... sqla_14 = ... sqla_14_18 = ... sqla_14_26 = ... sqla_2 = ... -sqlalchemy_version = __version__ +sqlalchemy_version = ... class _Unsupported: "Placeholder for unsupported SQLAlchemy classes" ... +if TYPE_CHECKING: + def compiles(element: Type[ClauseElement], *dialects: str) -> Callable[[_CompilerProtocol], _CompilerProtocol]: + ... + +else: + ... if sqla_2: ... else: @@ -66,7 +78,7 @@ class _textual_index_element(sql.ColumnElement): def __init__(self, table: Table, text: TextClause) -> None: ... - def get_children(self): # -> list[Unknown]: + def get_children(self): # -> list[Column[NullType]]: ... @@ -76,7 +88,7 @@ class _literal_bindparam(BindParameter): if sqla_14: - ... + _select = ... else: def create_mock_engine(url, executor, **kw): # -> Engine: ... @@ -84,3 +96,6 @@ else: def is_expression_index(index: Index) -> bool: ... +def is_expression(expr: Any) -> bool: + ... 
+ diff --git a/src/programming/BL_Python/programming/config/__init__.py b/src/programming/BL_Python/programming/config/__init__.py index b1f5b133..fb2e5f5f 100644 --- a/src/programming/BL_Python/programming/config/__init__.py +++ b/src/programming/BL_Python/programming/config/__init__.py @@ -1,4 +1,5 @@ import abc +from pathlib import Path from typing import Any, Generic, TypeVar, cast import toml @@ -59,7 +60,7 @@ def build(self) -> type[TConfig]: def load_config( config_type: type[TConfig], - toml_file_path: str, + toml_file_path: str | Path, config_overrides: AnyDict | None = None, ) -> TConfig: config_dict: dict[str, Any] = toml.load(toml_file_path) diff --git a/typings/alembic b/typings/alembic new file mode 120000 index 00000000..b6eb5412 --- /dev/null +++ b/typings/alembic @@ -0,0 +1 @@ +../src/database/typings/alembic/ \ No newline at end of file diff --git a/typings/alembic/autogenerate/__init__.pyi b/typings/alembic/autogenerate/__init__.pyi deleted file mode 100644 index 6320c04f..00000000 --- a/typings/alembic/autogenerate/__init__.pyi +++ /dev/null @@ -1,9 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -from .api import RevisionContext, _render_migration_diffs, compare_metadata, produce_migrations, render_python_code -from .compare import _produce_net_changes, comparators -from .render import render_op_text, renderers -from .rewriter import Rewriter - diff --git a/typings/alembic/autogenerate/compare.pyi b/typings/alembic/autogenerate/compare.pyi deleted file mode 100644 index eb1032db..00000000 --- a/typings/alembic/autogenerate/compare.pyi +++ /dev/null @@ -1,63 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - -from typing import Any, List, Mapping, Optional, TYPE_CHECKING, Union -from sqlalchemy.sql.elements import quoted_name -from sqlalchemy.sql.schema import ForeignKeyConstraint, Index, UniqueConstraint -from alembic.autogenerate.api import AutogenContext -from alembic.ddl.impl import DefaultImpl - -if TYPE_CHECKING: - ... -log = ... -comparators = ... -_IndexColumnSortingOps: Mapping[str, Any] = ... -class _constraint_sig: - const: Union[UniqueConstraint, ForeignKeyConstraint, Index] - def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: - ... - - def __eq__(self, other) -> bool: - ... - - def __ne__(self, other) -> bool: - ... - - def __hash__(self) -> int: - ... - - - -class _uq_constraint_sig(_constraint_sig): - is_index = ... - is_unique = ... - def __init__(self, const: UniqueConstraint) -> None: - ... - - @property - def column_names(self) -> List[str]: - ... - - - -class _ix_constraint_sig(_constraint_sig): - is_index = ... - def __init__(self, const: Index, impl: DefaultImpl) -> None: - ... - - def md_name_to_sql_name(self, context: AutogenContext) -> Optional[str]: - ... - - @property - def column_names(self) -> Union[List[quoted_name], List[None]]: - ... - - - -class _fk_constraint_sig(_constraint_sig): - def __init__(self, const: ForeignKeyConstraint, include_options: bool = ...) -> None: - ... - - - diff --git a/typings/alembic/util/__init__.pyi b/typings/alembic/util/__init__.pyi deleted file mode 100644 index 71cf917e..00000000 --- a/typings/alembic/util/__init__.pyi +++ /dev/null @@ -1,13 +0,0 @@ -""" -This type stub file was generated by pyright. 
-""" - -from .editor import open_in_editor -from .exc import AutogenerateDiffsDetected, CommandError -from .langhelpers import Dispatcher, EMPTY_DICT, ModuleClsProxy, _with_legacy_names, asbool, dedupe_tuple, immutabledict, memoized_property, not_none, rev_id, to_list, to_tuple, unique_list -from .messaging import err, format_as_comma, msg, obfuscate_url_pw, status, warn, write_outstream -from .pyfiles import coerce_resource_to_filename, load_python_file, pyc_file_from_path, template_to_file -from .sqla_compat import has_computed, sqla_13, sqla_14, sqla_2 - -if notsqla_13: - ... diff --git a/typings/alembic/util/compat.pyi b/typings/alembic/util/compat.pyi deleted file mode 100644 index 3ace0450..00000000 --- a/typings/alembic/util/compat.pyi +++ /dev/null @@ -1,30 +0,0 @@ -""" -This type stub file was generated by pyright. -""" - -import io -from typing import Sequence -from importlib.metadata import EntryPoint -from importlib_metadata import EntryPoint - -is_posix = ... -py311 = ... -py39 = ... -py38 = ... -class EncodedIO(io.TextIOWrapper): - def close(self) -> None: - ... - - - -if py39: - ... -else: - ... -def importlib_metadata_get(group: str) -> Sequence[EntryPoint]: - ... - -def formatannotation_fwdref(annotation, base_module=...): # -> str: - """vendored from python 3.7""" - ... -