diff --git a/.claude/settings.local.json b/.claude/settings.local.json deleted file mode 100644 index ec6f75a..0000000 --- a/.claude/settings.local.json +++ /dev/null @@ -1,11 +0,0 @@ -{ - "permissions": { - "allow": [ - "Bash(dotnet test:*)", - "WebFetch(domain:github.com)", - "Bash(dotnet build:*)" - ], - "deny": [], - "ask": [] - } -} \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 910eb90..e997e11 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -23,10 +23,9 @@ updates: - "ericburcham" assignees: - "ericburcham" - versioning-strategy: increase - package-ecosystem: "pip" # Python packages - directory: "/python" # Location of Python package manifests + directory: "/python" schedule: interval: "weekly" day: "monday" @@ -43,4 +42,3 @@ updates: - "ericburcham" assignees: - "ericburcham" - versioning-strategy: increase diff --git a/.github/workflows/nuke-build.yml b/.github/workflows/dotnet-build.yml similarity index 51% rename from .github/workflows/nuke-build.yml rename to .github/workflows/dotnet-build.yml index c57cc38..808a6a3 100644 --- a/.github/workflows/nuke-build.yml +++ b/.github/workflows/dotnet-build.yml @@ -1,58 +1,69 @@ -# ------------------------------------------------------------------------------ -# -# -# This code was generated. 
-# -# - To turn off auto-generation set: -# -# [GitHubActions (AutoGenerate = false)] -# -# - To trigger manual generation invoke: -# -# nuke --generate-configuration GitHubActions_nuke-build --host GitHubActions -# -# -# ------------------------------------------------------------------------------ - -name: nuke-build - -on: [push] +# Manual .NET build workflow with path filtering +# This supplements the auto-generated nuke-build.yml with path-specific triggers + +name: .NET DataJam Build + +on: + push: + paths: + - 'src/**' + - 'build/**' + - '*.sln' + - '*.props' + - '*.targets' + - 'global.json' + - '.github/workflows/dotnet-build.yml' + pull_request: + paths: + - 'src/**' + - 'build/**' + - '*.sln' + - '*.props' + - '*.targets' + - 'global.json' + - '.github/workflows/dotnet-build.yml' permissions: contents: read actions: write jobs: - ubuntu-latest: - name: ubuntu-latest + dotnet-build: + name: .NET Build and Test runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Checkout code + uses: actions/checkout@v4 with: fetch-depth: 0 - - name: 'Cache: .nuke/temp, ~/.nuget/packages' + + - name: Cache .nuke/temp and NuGet packages uses: actions/cache@v4 with: path: | .nuke/temp ~/.nuget/packages key: ${{ runner.os }}-${{ hashFiles('**/global.json', '**/*.csproj', '**/Directory.Packages.props') }} + - name: Install .NET 8 SDK uses: actions/setup-dotnet@v4.1.0 with: dotnet-version: 8.0.x dotnet-quality: ga + - name: Install .NET 9 SDK uses: actions/setup-dotnet@v4.1.0 with: dotnet-version: 9.0.x dotnet-quality: ga - - name: 'Run: Nuke' + + - name: Run NUKE build run: ./build.cmd Nuke env: NuGetApiKey: ${{ secrets.NUGET_API_KEY }} - - name: 'Publish: artifacts' + + - name: Upload artifacts uses: actions/upload-artifact@v4 with: - name: artifacts + name: dotnet-artifacts path: artifacts \ No newline at end of file diff --git a/.github/workflows/python-build.yml b/.github/workflows/python-build.yml new file mode 100644 index 0000000..e160f47 --- /dev/null 
+++ b/.github/workflows/python-build.yml
@@ -0,0 +1,110 @@
+name: Python DataJam Build
+
+on:
+  push:
+    paths:
+      - 'python/**'
+      - '.github/workflows/python-build.yml'
+  pull_request:
+    paths:
+      - 'python/**'
+      - '.github/workflows/python-build.yml'
+
+permissions:
+  contents: read
+  actions: write
+
+jobs:
+  python-build:
+    name: Python Build and Test
+    runs-on: ubuntu-latest
+    strategy:
+      matrix:
+        python-version: ["3.13"]
+
+    services:
+      docker:
+        image: docker:dind
+        options: --privileged
+
+    steps:
+      - name: Checkout code
+        uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+
+      - name: Set up Python ${{ matrix.python-version }}
+        uses: actions/setup-python@v4
+        with:
+          python-version: ${{ matrix.python-version }}
+
+      - name: Cache Python dependencies
+        uses: actions/cache@v4
+        with:
+          path: |
+            ~/.cache/pip
+            python/.nox
+          key: ${{ runner.os }}-python-${{ matrix.python-version }}-${{ hashFiles('python/pyproject.toml', 'python/noxfile.py') }}
+          restore-keys: |
+            ${{ runner.os }}-python-${{ matrix.python-version }}-
+
+      - name: Install Nox and ensure consistent tool versions
+        run: |
+          python -m pip install --upgrade pip
+          python -m pip install nox
+          # Pre-install linting tools to match noxfile versions
+          python -m pip install ruff==0.8.0 black==24.10.0 mypy==1.13.0 isort==5.13.2
+
+      - name: Run linting
+        working-directory: python
+        run: nox -s lint
+
+      - name: Run unit tests
+        working-directory: python
+        run: nox -s test
+
+      - name: Run integration tests
+        working-directory: python
+        run: nox -s test-integration
+        env:
+          TESTCONTAINERS_ORACLE_PASSWORD: oracle123
+
+      - name: Build packages
+        working-directory: python
+        run: nox -s build
+
+      - name: Upload test coverage
+        if: matrix.python-version == '3.13'
+        uses: codecov/codecov-action@v3
+        with:
+          file: python/coverage.xml
+          flags: python
+          name: python-coverage
+
+      - name: Upload build artifacts
+        if: matrix.python-version == '3.13'
+        uses: actions/upload-artifact@v4
+        with:
+          name: python-packages
+          path: 
python/artifacts/ + + publish: + name: Publish Python packages + runs-on: ubuntu-latest + needs: python-build + if: github.ref == 'refs/heads/main' && github.event_name == 'push' + environment: pypi + permissions: + id-token: write # For trusted publishing to PyPI + + steps: + - name: Download build artifacts + uses: actions/download-artifact@v4 + with: + name: python-packages + path: dist/ + + - name: Publish to PyPI + uses: pypa/gh-action-pypi-publish@release/v1 + with: + packages-dir: dist/ \ No newline at end of file diff --git a/.gitignore b/.gitignore index f6ee760..4f34047 100644 --- a/.gitignore +++ b/.gitignore @@ -402,4 +402,77 @@ FodyWeavers.xsd *.bak # Nuke's temp directory -.nuke/temp/ \ No newline at end of file +.nuke/temp/ + +# Python +__pycache__/ +*.py[cod] +*$py.class +*.so +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +*.manifest +*.spec + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Virtual environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Python artifacts in root +python/artifacts/ +python/.nox/ +python/htmlcov/ +python/.coverage +python/.pytest_cache/ + +# Claude Code settings +.claude/settings.local.json \ No newline at end of file diff --git a/python/.pre-commit-config.yaml b/python/.pre-commit-config.yaml new file mode 100644 index 0000000..3798665 --- /dev/null +++ b/python/.pre-commit-config.yaml @@ -0,0 +1,33 @@ +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: trailing-whitespace + - id: end-of-file-fixer + - id: check-yaml + - id: check-added-large-files + - id: 
check-merge-conflict
+
+  - repo: https://github.com/psf/black
+    rev: 24.10.0
+    hooks:
+      - id: black
+        language_version: python3
+
+  - repo: https://github.com/pycqa/isort
+    rev: 5.13.2
+    hooks:
+      - id: isort
+
+  - repo: https://github.com/astral-sh/ruff-pre-commit
+    rev: v0.8.0
+    hooks:
+      - id: ruff
+        args: [--fix, --exit-non-zero-on-fix]
+
+  - repo: https://github.com/pre-commit/mirrors-mypy
+    rev: v1.13.0
+    hooks:
+      - id: mypy
+        additional_dependencies: []
+        args: [--strict, --ignore-missing-imports]
\ No newline at end of file
diff --git a/python/README.md b/python/README.md
new file mode 100644
index 0000000..dbefa78
--- /dev/null
+++ b/python/README.md
@@ -0,0 +1,198 @@
+# DataJam Python
+
+DataJam provides abstraction patterns for data access across different ORM technologies in Python. This is a Python port of the .NET DataJam library, following domain-driven design principles with clear separation of concerns.
+
+## Features
+
+- **Domain-Driven Architecture**: Organize data access around domains that encapsulate related entities and configuration
+- **Multi-ORM Support**: Currently supports SQLAlchemy with extensible architecture for other ORMs
+- **Command/Query Pattern**: Separate command objects for operations that don't return values
+- **Repository Pattern**: Abstract data access through repository interfaces
+- **Unit of Work Pattern**: Manage transaction boundaries and change tracking
+- **Oracle Support**: First-class support for Oracle databases with proper case sensitivity handling
+- **Async/Await Support**: Modern Python async patterns throughout the stack
+- **Type Safety**: Full type hints and mypy compatibility
+
+## Quick Start
+
+```python
+import asyncio
+from sqlalchemy.ext.asyncio import create_async_engine
+from datajam_sqlalchemy import Domain, DataContext, Repository
+from datajam import IQuery
+
+# Define your entities using SQLAlchemy
+from sqlalchemy import Column, Integer, String
+from sqlalchemy.ext.declarative import 
declarative_base + +Base = declarative_base() + +class User(Base): + __tablename__ = 'users' + id = Column(Integer, primary_key=True) + name = Column(String(50)) + +# Create your domain +class UserDomain(Domain): + def __init__(self, engine): + super().__init__( + connection_string=str(engine.url), + engine=engine, + metadata=Base.metadata + ) + +# Define queries +class GetAllUsers(IQuery[User]): + async def execute(self, data_source): + queryable = data_source.create_query(User) + return await queryable.to_list() + +# Usage +async def main(): + engine = create_async_engine("oracle+oracledb_async://user:pass@host/service") + domain = UserDomain(engine) + + # Create schema + await domain.create_tables() + + # Use repository + async with await domain.create_data_context() as context: + repository = Repository(context) + + # Add a user + user = User(name="John Doe") + context.add(user) + await context.commit() + + # Query users + users = await repository.find(GetAllUsers()) + print(f"Found {len(users)} users") + +asyncio.run(main()) +``` + +## Installation + +```bash +# Basic installation +pip install datajam + +# With SQLAlchemy support +pip install datajam[sqlalchemy] + +# For development +pip install datajam[dev] +``` + +## Development + +This project uses Nox for build automation (equivalent to NUKE in .NET): + +```bash +# Setup development environment +nox -s dev-setup + +# Run tests +nox -s test + +# Run integration tests (requires Docker) +nox -s test-integration + +# Run linting +nox -s lint + +# Format code +nox -s format + +# Full build pipeline +nox -s datajam +``` + +## Architecture + +The library follows the same architectural patterns as the .NET DataJam: + +### Core Abstractions (`datajam` package) +- `IDataContext` - Primary abstraction combining data source, unit of work, and context management +- `IRepository` - Repository pattern for command and query execution +- `IUnitOfWork` - Transaction boundary management +- `IQuery[T]` / `IScalar[T]` - Query 
pattern interfaces +- `ICommand` - Command pattern for operations without return values +- `IDomain` - Domain configuration and mapping + +### SQLAlchemy Implementation (`datajam_sqlalchemy` package) +- `DataContext` - SQLAlchemy-based implementation with async session management +- `Domain` - SQLAlchemy domain configuration +- `Repository` - Repository implementation +- `QueryableImpl` - Fluent query API implementation +- Oracle-specific utilities for case sensitivity and sequences + +### Testing Infrastructure +- TestContainers integration for database testing +- Family domain test entities (Person, Father, Mother, Child) +- Pytest fixtures and async test support + +## Oracle Database Support + +DataJam Python provides first-class Oracle support with: + +```python +from datajam_sqlalchemy import create_oracle_connection_string, OracleNamingConvention + +# Oracle connection +connection_string = create_oracle_connection_string( + host="localhost", + port=1521, + service_name="XE", + username="system", + password="oracle" +) + +# Oracle naming conventions (automatic uppercase conversion) +class OracleDomain(Domain): + def __init__(self, engine): + super().__init__( + connection_string=str(engine.url), + engine=engine, + mapping_configurator=OracleMappingConfigurator() + ) + +class OracleMappingConfigurator(IConfigureDomainMappings[MetaData]): + def configure(self, metadata): + OracleNamingConvention.configure_metadata_for_oracle(metadata) +``` + +## Testing + +Integration tests use TestContainers for database provisioning: + +```bash +# Run all tests +nox -s test-all + +# Run only Oracle integration tests +nox -s test-integration -- tests/integration/oracle/ + +# Run with coverage +nox -s test-all -- --cov-report=html +``` + +### Running Oracle Tests + +The Oracle integration tests require Docker and will automatically: +1. Start an Oracle Free container +2. Create test schema and tables +3. Run integration tests +4. Clean up containers + +## Contributing + +1. 
Fork the repository +2. Create a feature branch +3. Make your changes +4. Run the full test suite: `nox -s datajam` +5. Submit a pull request + +## License + +MIT License - see LICENSE file for details. \ No newline at end of file diff --git a/python/noxfile.py b/python/noxfile.py new file mode 100644 index 0000000..b01b3c9 --- /dev/null +++ b/python/noxfile.py @@ -0,0 +1,234 @@ +"""Build automation for DataJam Python - equivalent to NUKE build system.""" + +import os +import shutil +from pathlib import Path + +import nox + +# Python versions to test against +PYTHON_VERSIONS = ["3.13"] +PROJECT_ROOT = Path(__file__).parent +SRC_DIR = PROJECT_ROOT / "src" +TESTS_DIR = PROJECT_ROOT / "tests" +ARTIFACTS_DIR = PROJECT_ROOT / "artifacts" + +# Pinned tool versions for consistency across all environments +LINT_TOOLS = [ + "ruff==0.8.0", + "black==24.10.0", + "mypy==1.13.0", + "isort==5.13.2", +] + +nox.options.error_on_missing_interpreters = True + + +@nox.session(name="clean") +def clean(session: nox.Session) -> None: + """Clean build artifacts and temporary files.""" + session.log("Cleaning build artifacts...") + + # Clean artifacts directory + if ARTIFACTS_DIR.exists(): + shutil.rmtree(ARTIFACTS_DIR) + + # Clean Python cache files + for pattern in ["**/__pycache__", "**/*.pyc", "**/*.pyo", "**/.pytest_cache"]: + for path in PROJECT_ROOT.glob(pattern): + if path.is_dir(): + shutil.rmtree(path) + else: + path.unlink() + + # Clean build directories + for pattern in ["build", "dist", "*.egg-info"]: + for path in PROJECT_ROOT.glob(pattern): + if path.exists(): + if path.is_dir(): + shutil.rmtree(path) + else: + path.unlink() + + +def _run_lint_tools(session: nox.Session) -> None: + """Unified linting function used by all lint sessions.""" + session.install(*LINT_TOOLS) + + session.log("Running ruff...") + session.run("ruff", "check", str(SRC_DIR), str(TESTS_DIR)) + + session.log("Running black...") + session.run("black", "--check", str(SRC_DIR), str(TESTS_DIR)) + + 
session.log("Running isort...") + session.run("isort", "--check-only", str(SRC_DIR), str(TESTS_DIR)) + + session.log("Running mypy...") + session.run("mypy", str(SRC_DIR)) + + +@nox.session(name="lint") +def lint(session: nox.Session) -> None: + """Run linting and code style checks.""" + _run_lint_tools(session) + + +@nox.session(name="format") +def format_code(session: nox.Session) -> None: + """Format code using black and isort.""" + session.install(*LINT_TOOLS) + + session.log("Running ruff --fix...") + session.run("ruff", "check", "--fix", str(SRC_DIR), str(TESTS_DIR)) + + session.log("Running black...") + session.run("black", str(SRC_DIR), str(TESTS_DIR)) + + session.log("Running isort...") + session.run("isort", str(SRC_DIR), str(TESTS_DIR)) + + +@nox.session(python=PYTHON_VERSIONS, name="test") +def test(session: nox.Session) -> None: + """Run unit tests.""" + session.install("-e", ".[testing]") + + # Run unit tests only (no integration tests) + session.run( + "pytest", + str(TESTS_DIR / "unit"), + "--cov=src", + "--cov-report=term-missing", + "--cov-report=xml", + *session.posargs, + ) + + +@nox.session(python=PYTHON_VERSIONS, name="test-integration") +def test_integration(session: nox.Session) -> None: + """Run integration tests (requires Docker for TestContainers).""" + session.install("-e", ".[testing,sqlalchemy]") + + # Set environment for Oracle TestContainer + session.env["TESTCONTAINERS_ORACLE_PASSWORD"] = "oracle123" + + # Run integration tests + session.run( + "pytest", + str(TESTS_DIR / "integration"), + "--cov=src", + "--cov-report=term-missing", + "--cov-report=xml", + "-v", + *session.posargs, + ) + + +@nox.session(name="test-all") +def test_all(session: nox.Session) -> None: + """Run all tests (unit and integration).""" + session.install("-e", ".[testing,sqlalchemy]") + + # Set environment for Oracle TestContainer + session.env["TESTCONTAINERS_ORACLE_PASSWORD"] = "oracle123" + + # Run all tests + session.run( + "pytest", + str(TESTS_DIR), + 
"--cov=src", + "--cov-report=term-missing", + "--cov-report=xml", + "--cov-report=html", + *session.posargs, + ) + + +@nox.session(name="build") +def build(session: nox.Session) -> None: + """Build distribution packages.""" + session.install("build") + + # Create artifacts directory + ARTIFACTS_DIR.mkdir(exist_ok=True) + + session.log("Building distribution packages...") + session.run("python", "-m", "build", "--outdir", str(ARTIFACTS_DIR)) + + +@nox.session(name="publish") +def publish(session: nox.Session) -> None: + """Publish packages to PyPI (requires PYPI_TOKEN environment variable).""" + session.install("twine") + + pypi_token = os.getenv("PYPI_TOKEN") + if not pypi_token: + session.log("PYPI_TOKEN environment variable not set. Skipping publish.") + return + + session.log("Publishing packages to PyPI...") + session.run( + "twine", + "upload", + "--username", "__token__", + "--password", pypi_token, + f"{ARTIFACTS_DIR}/*", + ) + + +@nox.session(name="dev-setup") +def dev_setup(session: nox.Session) -> None: + """Setup development environment.""" + session.install("-e", ".[dev,testing,sqlalchemy]") + session.install("pre-commit") + + session.log("Installing pre-commit hooks...") + session.run("pre-commit", "install") + + +@nox.session(name="datajam") +def datajam_build(session: nox.Session) -> None: + """Main build target - equivalent to NUKE's main target.""" + session.log("Running DataJam Python build pipeline...") + + # Clean first + clean(session) + + # Install dependencies + session.install("-e", ".[dev,testing,sqlalchemy]") + + # Run linting using unified function + _run_lint_tools(session) + + # Run tests + test_all(session) + + # Build packages + build(session) + + # Publish if token is available + publish(session) + + session.log("DataJam Python build completed successfully!") + + +# Convenience aliases +@nox.session(name="ci") +def ci(session: nox.Session) -> None: + """CI pipeline - runs tests and builds without publishing.""" + session.log("Running 
CI pipeline...") + + # Install dependencies + session.install("-e", ".[dev,testing,sqlalchemy]") + + # Run linting using unified function + _run_lint_tools(session) + + # Run all tests + test_all(session) + + # Build packages + build(session) + + session.log("CI pipeline completed successfully!") \ No newline at end of file diff --git a/python/pyproject.toml b/python/pyproject.toml new file mode 100644 index 0000000..3cb7929 --- /dev/null +++ b/python/pyproject.toml @@ -0,0 +1,172 @@ +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[project] +name = "datajam" +dynamic = ["version"] +description = "Abstraction patterns for data access across different ORM technologies" +readme = "README.md" +requires-python = ">=3.10" +license = "MIT" +keywords = ["orm", "data-access", "domain-driven-design", "repository-pattern"] +authors = [ + { name = "DataJam Contributors" }, +] +classifiers = [ + "Development Status :: 3 - Alpha", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Database", + "Topic :: Software Development :: Libraries :: Python Modules", +] +dependencies = [] + +[project.optional-dependencies] +sqlalchemy = [ + "SQLAlchemy>=2.0.0", + "oracledb>=1.4.0", +] +testing = [ + "pytest>=7.0.0", + "pytest-asyncio>=0.21.0", + "testcontainers[oracle]>=3.7.0", + "pytest-cov>=4.0.0", +] +dev = [ + "ruff==0.8.0", + "black==24.10.0", + "mypy==1.13.0", + "isort==5.13.2", + "nox>=2023.4.22", + "pre-commit>=3.3.0", +] + +[project.urls] +Documentation = "https://github.com/datajam/datajam-python#readme" +Issues = "https://github.com/datajam/datajam-python/issues" +Source = "https://github.com/datajam/datajam-python" + +[tool.hatch.version] +path = 
"src/datajam/__about__.py" + +[tool.hatch.build.targets.wheel] +packages = ["src/datajam", "src/datajam_sqlalchemy", "src/datajam_testing"] + +[tool.ruff] +target-version = "py310" +line-length = 120 + +[tool.ruff.lint] +select = [ + "E", # pycodestyle errors + "W", # pycodestyle warnings + "F", # pyflakes + "I", # isort + "B", # flake8-bugbear + "C4", # flake8-comprehensions + "UP", # pyupgrade + "ARG", # flake8-unused-arguments + "SIM", # flake8-simplify + "TCH", # flake8-type-checking +] +ignore = [ + "E501", # line too long, handled by black + "B008", # do not perform function calls in argument defaults + "B905", # zip without explicit strict parameter + "TC002", # Move third-party import into type-checking block (we need runtime imports) +] + +[tool.ruff.lint.per-file-ignores] +"__init__.py" = ["F401"] +"tests/**/*" = ["ARG", "S", "SLF", "TID252"] + +[tool.black] +target-version = ["py310"] +line-length = 120 +skip-string-normalization = true + +[tool.isort] +profile = "black" +line_length = 120 +multi_line_output = 3 +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true +ensure_newline_before_comments = true + +[tool.mypy] +python_version = "3.10" +check_untyped_defs = true +disallow_any_generics = true +disallow_incomplete_defs = true +disallow_untyped_defs = true +no_implicit_optional = true +warn_redundant_casts = true +warn_unused_ignores = true +warn_return_any = true +strict_equality = true +show_error_codes = true + +# Third-party libraries without type stubs +[[tool.mypy.overrides]] +module = "testcontainers.*" +ignore_missing_imports = true + +# SQLAlchemy imports (when not installed in lint-only environment) +[[tool.mypy.overrides]] +module = "sqlalchemy.*" +ignore_missing_imports = true + +# SQLAlchemy has complex typing - allow some flexibility for known edge cases +[[tool.mypy.overrides]] +module = "datajam_sqlalchemy.oracle_utils" +disable_error_code = ["assignment"] + +# Allow Any return from SQLAlchemy first() method 
+[[tool.mypy.overrides]] +module = "datajam_sqlalchemy.queryable" +disable_error_code = ["no-any-return"] + +# SQLAlchemy delete() returns coroutine that varies by environment +[[tool.mypy.overrides]] +module = "datajam_sqlalchemy.data_context" +disable_error_code = ["unused-coroutine", "unused-ignore"] + +# Protocol interfaces may have variance issues with generics +[[tool.mypy.overrides]] +module = "datajam.interfaces" +disable_error_code = ["misc"] + +[tool.pytest.ini_options] +minversion = "7.0" +addopts = "-ra -q --cov=src --cov-report=term-missing --cov-report=xml" +testpaths = ["tests"] +python_files = ["test_*.py", "*_test.py"] +python_classes = ["Test*", "*Tests"] +python_functions = ["test_*"] +asyncio_mode = "auto" + +[tool.coverage.run] +source = ["src"] +branch = true + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "if self.debug:", + "if settings.DEBUG", + "raise AssertionError", + "raise NotImplementedError", + "if 0:", + "if __name__ == .__main__.:", + "class .*\\bProtocol\\):", + "@(abc\\.)?abstractmethod", +] \ No newline at end of file diff --git a/python/src/datajam/__about__.py b/python/src/datajam/__about__.py new file mode 100644 index 0000000..3ba546f --- /dev/null +++ b/python/src/datajam/__about__.py @@ -0,0 +1,3 @@ +"""Version information for DataJam.""" + +__version__ = "0.1.0" diff --git a/python/src/datajam/__init__.py b/python/src/datajam/__init__.py new file mode 100644 index 0000000..a46b44c --- /dev/null +++ b/python/src/datajam/__init__.py @@ -0,0 +1,70 @@ +"""DataJam - Abstraction patterns for data access across different ORM technologies. + +This package provides domain-driven design patterns and abstractions for data access +that work across different ORM technologies, similar to the .NET DataJam library. 
+""" + +from .__about__ import __version__ +from .exceptions import ( + CommandException, + ConcurrencyException, + ConfigurationException, + DataJamException, + DomainException, + EntityNotFoundException, + QueryException, + RepositoryException, + TransactionException, + UnitOfWorkException, +) +from .interfaces import ( + Command, + Domain, + ICommand, + IConfigureDomainMappings, + IDataContext, + IDataSource, + IDomain, + IDomainRepositoryFactory, + IQuery, + IQueryable, + IQueryWithProjection, + IRepository, + IScalar, + IUnitOfWork, + Query, + Scalar, +) + +__all__ = [ + "__version__", + # Core interfaces + "ICommand", + "IQuery", + "IScalar", + "IQueryWithProjection", + "IDataSource", + "IQueryable", + "IUnitOfWork", + "IDataContext", + "IRepository", + "IConfigureDomainMappings", + "IDomain", + "IDomainRepositoryFactory", + # Base classes + "Command", + "Query", + "Scalar", + "Domain", + # Exceptions + "DataJamException", + "ConfigurationException", + "RepositoryException", + "QueryException", + "CommandException", + "DomainException", + "UnitOfWorkException", + "EntityNotFoundException", + "ConcurrencyException", + "TransactionException", +] diff --git a/python/src/datajam/exceptions.py b/python/src/datajam/exceptions.py new file mode 100644 index 0000000..8211a3b --- /dev/null +++ b/python/src/datajam/exceptions.py @@ -0,0 +1,68 @@ +"""DataJam exceptions and error types.""" + + +class DataJamException(Exception): + """Base exception for all DataJam errors.""" + + pass + + +class ConfigurationException(DataJamException): + """Exception raised for configuration-related errors.""" + + pass + + +class RepositoryException(DataJamException): + """Exception raised for repository operation errors.""" + + pass + + +class QueryException(DataJamException): + """Exception raised for query execution errors.""" + + pass + + +class CommandException(DataJamException): + """Exception raised for command execution errors.""" + + pass + + +class 
DomainException(DataJamException): + """Exception raised for domain-related errors.""" + + pass + + +class UnitOfWorkException(DataJamException): + """Exception raised for unit of work operation errors.""" + + pass + + +class EntityNotFoundException(RepositoryException): + """Exception raised when an entity is not found.""" + + def __init__(self, entity_type: type, identifier: str | int | None = None) -> None: + if identifier is not None: + message = f"Entity of type {entity_type.__name__} with identifier '{identifier}' was not found." + else: + message = f"Entity of type {entity_type.__name__} was not found." + super().__init__(message) + self.entity_type = entity_type + self.identifier = identifier + + +class ConcurrencyException(UnitOfWorkException): + """Exception raised when a concurrency conflict occurs.""" + + pass + + +class TransactionException(UnitOfWorkException): + """Exception raised for transaction-related errors.""" + + pass diff --git a/python/src/datajam/interfaces.py b/python/src/datajam/interfaces.py new file mode 100644 index 0000000..047d756 --- /dev/null +++ b/python/src/datajam/interfaces.py @@ -0,0 +1,315 @@ +"""Core interfaces for DataJam - Python port of .NET DataJam abstractions.""" + +from __future__ import annotations + +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any, Generic, Protocol, TypeVar, runtime_checkable + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + +# Type variables for generic interfaces +T = TypeVar("T") +TEntity = TypeVar("TEntity") +TResult = TypeVar("TResult") +TSelection = TypeVar("TSelection") +TProjection = TypeVar("TProjection") +TConfigurationBinder = TypeVar("TConfigurationBinder") +TConfigurationOptions = TypeVar("TConfigurationOptions") + + +@runtime_checkable +class ICommand(Protocol): + """Command pattern interface for operations that don't return values.""" + + @abstractmethod + async def execute(self, unit_of_work: IUnitOfWork) -> None: + """Execute the command 
with the given unit of work.""" + ... + + +@runtime_checkable +class IQuery(Protocol, Generic[T]): + """Query pattern interface for operations that return collections.""" + + @abstractmethod + async def execute(self, data_source: IDataSource) -> list[T]: + """Execute the query against the data source.""" + ... + + +@runtime_checkable +class IScalar(Protocol, Generic[T]): + """Scalar query interface for operations that return single values.""" + + @abstractmethod + async def execute(self, data_source: IDataSource) -> T | None: + """Execute the scalar query against the data source.""" + ... + + +@runtime_checkable +class IQueryWithProjection(Protocol, Generic[TSelection, TProjection]): + """Query interface supporting projection/transformation.""" + + @abstractmethod + async def execute(self, data_source: IDataSource) -> list[TProjection]: + """Execute the query with projection.""" + ... + + +@runtime_checkable +class IDataSource(Protocol): + """Provides query capabilities for data access.""" + + @abstractmethod + def create_query(self, entity_type: type[T]) -> IQueryable[T]: + """Create a queryable for the specified entity type.""" + ... + + +@runtime_checkable +class IQueryable(Protocol, Generic[T]): + """Queryable interface providing fluent query API.""" + + @abstractmethod + def filter(self, predicate: Any) -> IQueryable[T]: + """Add a filter predicate to the query.""" + ... + + @abstractmethod + def order_by(self, *columns: Any) -> IQueryable[T]: + """Add ordering to the query.""" + ... + + @abstractmethod + def limit(self, count: int) -> IQueryable[T]: + """Limit the number of results.""" + ... + + @abstractmethod + def offset(self, count: int) -> IQueryable[T]: + """Skip a number of results.""" + ... + + @abstractmethod + async def to_list(self) -> list[T]: + """Execute the query and return results as a list.""" + ... + + @abstractmethod + async def first_or_none(self) -> T | None: + """Execute the query and return the first result or None.""" + ... 
+ + @abstractmethod + async def count(self) -> int: + """Execute the query and return the count of results.""" + ... + + @abstractmethod + def __aiter__(self) -> AsyncIterator[T]: + """Support async iteration over query results.""" + ... + + +@runtime_checkable +class IUnitOfWork(Protocol): + """Transaction boundary abstraction for managing entity changes.""" + + @abstractmethod + def add(self, entity: Any) -> None: + """Add an entity to be inserted.""" + ... + + @abstractmethod + def update(self, entity: Any) -> None: + """Mark an entity for update.""" + ... + + @abstractmethod + def remove(self, entity: Any) -> None: + """Mark an entity for deletion.""" + ... + + @abstractmethod + async def commit(self) -> None: + """Commit all pending changes.""" + ... + + @abstractmethod + async def rollback(self) -> None: + """Rollback all pending changes.""" + ... + + @abstractmethod + async def reload(self, entity: TEntity) -> TEntity: + """Reload an entity from the data store.""" + ... + + +@runtime_checkable +class IDataContext(IDataSource, IUnitOfWork, Protocol): + """Primary abstraction combining data source, unit of work, and context management.""" + + async def __aenter__(self) -> IDataContext: + """Enter the async context manager.""" + ... + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: Any, + ) -> None: + """Exit the async context manager.""" + ... + + +@runtime_checkable +class IRepository(Protocol): + """Repository pattern abstraction for command and query execution.""" + + @property + @abstractmethod + def context(self) -> IDataContext: + """Get the underlying data context.""" + ... + + @abstractmethod + async def execute(self, command: ICommand) -> None: + """Execute a command.""" + ... + + @abstractmethod + async def find(self, query: IQuery[T]) -> list[T]: + """Execute a query and return results.""" + ... 
+ + @abstractmethod + async def find_scalar(self, scalar: IScalar[T]) -> T | None: + """Execute a scalar query and return the result.""" + ... + + +@runtime_checkable +class IConfigureDomainMappings(Protocol, Generic[TConfigurationBinder]): + """Interface for configuring domain entity mappings.""" + + @abstractmethod + def configure(self, configuration_binder: TConfigurationBinder) -> None: + """Configure entity mappings using the configuration binder.""" + ... + + +@runtime_checkable +class IDomain(Protocol, Generic[TConfigurationBinder, TConfigurationOptions]): + """Domain configuration and mapping interface.""" + + @property + @abstractmethod + def configuration_options(self) -> TConfigurationOptions: + """Get the configuration options for this domain.""" + ... + + @property + @abstractmethod + def mapping_configurator(self) -> IConfigureDomainMappings[TConfigurationBinder] | None: + """Get the mapping configurator for this domain.""" + ... + + +@runtime_checkable +class IDomainRepositoryFactory(Protocol, Generic[TConfigurationBinder, TConfigurationOptions]): + """Factory for creating domain-specific repositories.""" + + @abstractmethod + async def create_repository(self, domain: IDomain[TConfigurationBinder, TConfigurationOptions]) -> IRepository: + """Create a repository for the specified domain.""" + ... 
+ + +# Base classes providing common functionality + + +class Command(ABC): + """Base class for command implementations.""" + + @abstractmethod + async def execute(self, unit_of_work: IUnitOfWork) -> None: + """Execute the command with the given unit of work.""" + pass + + +class Query(ABC, Generic[T]): + """Base class for query implementations with fluent API support.""" + + def __init__(self) -> None: + self._predicates: list[Any] = [] + self._ordering: list[Any] = [] + self._limit_count: int | None = None + self._offset_count: int | None = None + + def add_predicate(self, predicate: Any) -> Query[T]: + """Add a predicate to the query.""" + self._predicates.append(predicate) + return self + + def add_ordering(self, *columns: Any) -> Query[T]: + """Add ordering to the query.""" + self._ordering.extend(columns) + return self + + def set_limit(self, count: int) -> Query[T]: + """Set the limit for the query.""" + self._limit_count = count + return self + + def set_offset(self, count: int) -> Query[T]: + """Set the offset for the query.""" + self._offset_count = count + return self + + @abstractmethod + async def execute(self, data_source: IDataSource) -> list[T]: + """Execute the query against the data source.""" + pass + + +class Scalar(ABC, Generic[T]): + """Base class for scalar query implementations.""" + + def __init__(self) -> None: + self._predicates: list[Any] = [] + + def add_predicate(self, predicate: Any) -> Scalar[T]: + """Add a predicate to the scalar query.""" + self._predicates.append(predicate) + return self + + @abstractmethod + async def execute(self, data_source: IDataSource) -> T | None: + """Execute the scalar query against the data source.""" + pass + + +class Domain(ABC, Generic[TConfigurationBinder, TConfigurationOptions]): + """Base class for domain implementations.""" + + def __init__( + self, + configuration_options: TConfigurationOptions, + mapping_configurator: IConfigureDomainMappings[TConfigurationBinder] | None = None, + ) -> None: + 
self._configuration_options = configuration_options + self._mapping_configurator = mapping_configurator + + @property + def configuration_options(self) -> TConfigurationOptions: + """Get the configuration options for this domain.""" + return self._configuration_options + + @property + def mapping_configurator(self) -> IConfigureDomainMappings[TConfigurationBinder] | None: + """Get the mapping configurator for this domain.""" + return self._mapping_configurator diff --git a/python/src/datajam_sqlalchemy/__init__.py b/python/src/datajam_sqlalchemy/__init__.py new file mode 100644 index 0000000..8970a90 --- /dev/null +++ b/python/src/datajam_sqlalchemy/__init__.py @@ -0,0 +1,17 @@ +"""DataJam SQLAlchemy implementation - SQLAlchemy-specific data access patterns.""" + +from .data_context import DataContext +from .domain import Domain +from .oracle_utils import OracleNamingConvention, OracleSequenceHelper, create_oracle_connection_string +from .queryable import QueryableImpl +from .repository import Repository + +__all__ = [ + "DataContext", + "Domain", + "QueryableImpl", + "Repository", + "OracleNamingConvention", + "OracleSequenceHelper", + "create_oracle_connection_string", +] diff --git a/python/src/datajam_sqlalchemy/data_context.py b/python/src/datajam_sqlalchemy/data_context.py new file mode 100644 index 0000000..9cbc187 --- /dev/null +++ b/python/src/datajam_sqlalchemy/data_context.py @@ -0,0 +1,101 @@ +"""SQLAlchemy implementation of IDataContext.""" + +from __future__ import annotations + +from typing import Any, TypeVar + +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncSession + +from datajam import IDataContext, IQueryable + +T = TypeVar("T") + + +class DataContext(IDataContext): + """SQLAlchemy implementation of IDataContext combining data source, unit of work, and context management.""" + + def __init__(self, session: AsyncSession, metadata: MetaData | None = None) -> None: + """Initialize the data context with an async session.""" 
+ self._session = session + self._metadata = metadata or MetaData() + + @property + def session(self) -> AsyncSession: + """Get the underlying SQLAlchemy session.""" + return self._session + + @property + def metadata(self) -> MetaData: + """Get the SQLAlchemy metadata.""" + return self._metadata + + def create_query(self, entity_type: type[T]) -> IQueryable[T]: + """Create a queryable for the specified entity type.""" + from .queryable import QueryableImpl + + return QueryableImpl(entity_type, self._session) + + def add(self, entity: Any) -> None: + """Add an entity to be inserted.""" + self._session.add(entity) + + def update(self, entity: Any) -> None: + """Mark an entity for update. + + In SQLAlchemy, entities are automatically tracked for changes + when they are attached to a session, so this is typically a no-op. + """ + # SQLAlchemy automatically tracks changes for attached entities + if entity not in self._session: + self._session.add(entity) + + def remove(self, entity: Any) -> None: + """Mark an entity for deletion.""" + # Simply call delete - SQLAlchemy handles whether entity is attached + self._session.delete(entity) # type: ignore[unused-coroutine] + + async def commit(self) -> None: + """Commit all pending changes.""" + await self._session.commit() + + async def rollback(self) -> None: + """Rollback all pending changes.""" + await self._session.rollback() + + async def reload(self, entity: T) -> T: + """Reload an entity from the data store.""" + await self._session.refresh(entity) + return entity + + async def flush(self) -> None: + """Flush pending changes to the database without committing.""" + await self._session.flush() + + async def __aenter__(self) -> IDataContext: + """Enter the async context manager.""" + return self + + async def __aexit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any) -> None: + """Exit the async context manager.""" + if exc_type is not None: + await self.rollback() + else: + await 
self.commit() + await self._session.close() + + def __enter__(self) -> DataContext: + """Enter the sync context manager (not recommended - use async version).""" + return self + + def __exit__(self, exc_type: type[BaseException] | None, exc_val: BaseException | None, exc_tb: Any) -> None: + """Exit the sync context manager (not recommended - use async version).""" + # For sync context manager, we'll use the sync methods + # This is not ideal but provides compatibility + if exc_type is not None: + # Note: This would require sync SQLAlchemy session for proper implementation + pass + else: + # Note: This would require sync SQLAlchemy session for proper implementation + pass + # Note: Session closing would also need to be sync diff --git a/python/src/datajam_sqlalchemy/domain.py b/python/src/datajam_sqlalchemy/domain.py new file mode 100644 index 0000000..4e8d756 --- /dev/null +++ b/python/src/datajam_sqlalchemy/domain.py @@ -0,0 +1,77 @@ +"""SQLAlchemy implementation of IDomain.""" + +from __future__ import annotations + +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker + +from datajam import IConfigureDomainMappings, IDomain + +from .data_context import DataContext +from .repository import Repository + + +class Domain(IDomain[MetaData, str]): + """SQLAlchemy implementation of IDomain using MetaData as configuration binder and connection string as options.""" + + def __init__( + self, + connection_string: str, + engine: AsyncEngine, + mapping_configurator: IConfigureDomainMappings[MetaData] | None = None, + metadata: MetaData | None = None, + ) -> None: + """Initialize the domain with connection string, engine, and optional mapping configurator.""" + self._connection_string = connection_string + self._engine = engine + self._mapping_configurator = mapping_configurator + self._metadata = metadata or MetaData() + self._session_factory = async_sessionmaker(engine, class_=AsyncSession, 
expire_on_commit=False) + + # Configure mappings if provided + if self._mapping_configurator: + self._mapping_configurator.configure(self._metadata) + + @property + def configuration_options(self) -> str: + """Get the configuration options (connection string) for this domain.""" + return self._connection_string + + @property + def mapping_configurator(self) -> IConfigureDomainMappings[MetaData] | None: + """Get the mapping configurator for this domain.""" + return self._mapping_configurator + + @property + def metadata(self) -> MetaData: + """Get the SQLAlchemy metadata.""" + return self._metadata + + @property + def engine(self) -> AsyncEngine: + """Get the SQLAlchemy async engine.""" + return self._engine + + async def create_session(self) -> AsyncSession: + """Create a new async session.""" + return self._session_factory() + + async def create_data_context(self) -> DataContext: + """Create a new data context with a fresh session.""" + session = await self.create_session() + return DataContext(session, self._metadata) + + async def create_repository(self) -> Repository: + """Create a new repository with a fresh data context.""" + data_context = await self.create_data_context() + return Repository(data_context) + + async def create_tables(self) -> None: + """Create all tables defined in the metadata.""" + async with self._engine.begin() as conn: + await conn.run_sync(self._metadata.create_all, checkfirst=False) + + async def drop_tables(self) -> None: + """Drop all tables defined in the metadata.""" + async with self._engine.begin() as conn: + await conn.run_sync(self._metadata.drop_all) diff --git a/python/src/datajam_sqlalchemy/oracle_utils.py b/python/src/datajam_sqlalchemy/oracle_utils.py new file mode 100644 index 0000000..bfad7ad --- /dev/null +++ b/python/src/datajam_sqlalchemy/oracle_utils.py @@ -0,0 +1,74 @@ +"""Oracle-specific utilities for DataJam SQLAlchemy implementation.""" + +from __future__ import annotations + +from sqlalchemy import MetaData, 
Table + + +class OracleNamingConvention: + """Oracle naming convention handler for case sensitivity.""" + + @staticmethod + def to_oracle_identifier(name: str) -> str: + """Convert identifier to Oracle format (uppercase).""" + return name.upper() + + @staticmethod + def configure_table_for_oracle(table: Table) -> None: + """Configure a table for Oracle naming conventions.""" + # Update table name to uppercase + if table.name: + table.name = OracleNamingConvention.to_oracle_identifier(table.name) + + # Update column names to uppercase + for column in table.columns: + if column.name: + column.name = OracleNamingConvention.to_oracle_identifier(column.name) + + # Update index names + for index in table.indexes: + if index.name and isinstance(index.name, str): + index.name = OracleNamingConvention.to_oracle_identifier(index.name) + + # Update constraint names + for constraint in table.constraints: + if constraint.name and isinstance(constraint.name, str): + constraint.name = OracleNamingConvention.to_oracle_identifier(constraint.name) + + @staticmethod + def configure_metadata_for_oracle(metadata: MetaData) -> None: + """Configure all tables in metadata for Oracle naming conventions.""" + for table in metadata.tables.values(): + OracleNamingConvention.configure_table_for_oracle(table) + + +class OracleSequenceHelper: + """Helper for working with Oracle sequences.""" + + @staticmethod + def create_sequence_ddl(table_name: str, column_name: str = "ID") -> str: + """Generate DDL for creating an Oracle sequence.""" + sequence_name = f"SEQ_{table_name}_{column_name}" + return f"CREATE SEQUENCE {sequence_name} START WITH 1 INCREMENT BY 1" + + @staticmethod + def drop_sequence_ddl(table_name: str, column_name: str = "ID") -> str: + """Generate DDL for dropping an Oracle sequence.""" + sequence_name = f"SEQ_{table_name}_{column_name}" + return f"DROP SEQUENCE {sequence_name}" + + @staticmethod + def get_sequence_name(table_name: str, column_name: str = "ID") -> str: + """Get 
the sequence name for a table and column.""" + return f"SEQ_{table_name}_{column_name}" + + +def create_oracle_connection_string( + host: str = "localhost", + port: int = 1521, + service_name: str = "XE", + username: str = "system", + password: str = "oracle", +) -> str: + """Create an Oracle connection string for SQLAlchemy using python-oracledb.""" + return f"oracle+oracledb://{username}:{password}@{host}:{port}/?service_name={service_name}" diff --git a/python/src/datajam_sqlalchemy/queryable.py b/python/src/datajam_sqlalchemy/queryable.py new file mode 100644 index 0000000..38e2325 --- /dev/null +++ b/python/src/datajam_sqlalchemy/queryable.py @@ -0,0 +1,75 @@ +"""SQLAlchemy implementation of IQueryable.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING, Any, TypeVar + +from sqlalchemy import func, select +from sqlalchemy.ext.asyncio import AsyncSession + +from datajam import IQueryable + +if TYPE_CHECKING: + from collections.abc import AsyncIterator + +T = TypeVar("T") + + +class QueryableImpl(IQueryable[T]): + """SQLAlchemy implementation of IQueryable providing fluent query API.""" + + def __init__(self, entity_type: type[T], session: AsyncSession) -> None: + """Initialize the queryable with entity type and session.""" + self._entity_type = entity_type + self._session = session + self._query = select(entity_type) + + def filter(self, predicate: Any) -> IQueryable[T]: + """Add a filter predicate to the query.""" + new_query = QueryableImpl(self._entity_type, self._session) + new_query._query = self._query.where(predicate) + return new_query + + def order_by(self, *columns: Any) -> IQueryable[T]: + """Add ordering to the query.""" + new_query = QueryableImpl(self._entity_type, self._session) + new_query._query = self._query.order_by(*columns) + return new_query + + def limit(self, count: int) -> IQueryable[T]: + """Limit the number of results.""" + new_query = QueryableImpl(self._entity_type, self._session) + new_query._query = 
self._query.limit(count) + return new_query + + def offset(self, count: int) -> IQueryable[T]: + """Skip a number of results.""" + new_query = QueryableImpl(self._entity_type, self._session) + new_query._query = self._query.offset(count) + return new_query + + async def to_list(self) -> list[T]: + """Execute the query and return results as a list.""" + result = await self._session.execute(self._query) + return list(result.scalars().all()) + + async def first_or_none(self) -> T | None: + """Execute the query and return the first result or None.""" + result = await self._session.execute(self._query.limit(1)) + return result.scalars().first() + + async def count(self) -> int: + """Execute the query and return the count of results.""" + count_query = select(func.count()).select_from(self._query.subquery()) + result = await self._session.execute(count_query) + return result.scalar() or 0 + + def __aiter__(self) -> AsyncIterator[T]: + """Support async iteration over query results.""" + return self._async_iterate() + + async def _async_iterate(self) -> AsyncIterator[T]: + """Async iterator implementation.""" + result = await self._session.execute(self._query) + for row in result.scalars(): + yield row diff --git a/python/src/datajam_sqlalchemy/repository.py b/python/src/datajam_sqlalchemy/repository.py new file mode 100644 index 0000000..e42b7ba --- /dev/null +++ b/python/src/datajam_sqlalchemy/repository.py @@ -0,0 +1,34 @@ +"""SQLAlchemy implementation of IRepository.""" + +from __future__ import annotations + +from typing import TypeVar + +from datajam import ICommand, IDataContext, IQuery, IRepository, IScalar + +T = TypeVar("T") + + +class Repository(IRepository): + """SQLAlchemy implementation of IRepository for command and query execution.""" + + def __init__(self, context: IDataContext) -> None: + """Initialize the repository with a data context.""" + self._context = context + + @property + def context(self) -> IDataContext: + """Get the underlying data 
context.""" + return self._context + + async def execute(self, command: ICommand) -> None: + """Execute a command.""" + await command.execute(self._context) + + async def find(self, query: IQuery[T]) -> list[T]: + """Execute a query and return results.""" + return await query.execute(self._context) + + async def find_scalar(self, scalar: IScalar[T]) -> T | None: + """Execute a scalar query and return the result.""" + return await scalar.execute(self._context) diff --git a/python/tests/__init__.py b/python/tests/__init__.py new file mode 100644 index 0000000..551e786 --- /dev/null +++ b/python/tests/__init__.py @@ -0,0 +1 @@ +"""Test package for DataJam.""" diff --git a/python/tests/conftest.py b/python/tests/conftest.py new file mode 100644 index 0000000..49f9213 --- /dev/null +++ b/python/tests/conftest.py @@ -0,0 +1,31 @@ +"""Pytest configuration and fixtures for DataJam tests.""" + +import asyncio +from collections.abc import AsyncGenerator + +import pytest +import pytest_asyncio +from sqlalchemy.ext.asyncio import AsyncEngine + +from tests.integration.oracle.oracle_container_manager import OracleContainerManager, cleanup_oracle_container + + +@pytest_asyncio.fixture(scope="function") +async def oracle_container() -> AsyncGenerator[OracleContainerManager, None]: + """Function-scoped Oracle container fixture.""" + async with OracleContainerManager() as container: + yield container + + +@pytest_asyncio.fixture(scope="function") +async def oracle_engine(oracle_container: OracleContainerManager) -> AsyncEngine: + """Function-scoped Oracle engine fixture.""" + return oracle_container.engine + + +@pytest.fixture(scope="session", autouse=True) +def cleanup_containers(): + """Ensure containers are cleaned up after all tests.""" + yield + # This will run after all tests are complete + asyncio.run(cleanup_oracle_container()) diff --git a/python/tests/integration/__init__.py b/python/tests/integration/__init__.py new file mode 100644 index 0000000..517707c --- /dev/null 
+++ b/python/tests/integration/__init__.py @@ -0,0 +1 @@ +"""Integration tests for DataJam.""" diff --git a/python/tests/integration/oracle/__init__.py b/python/tests/integration/oracle/__init__.py new file mode 100644 index 0000000..1e04efd --- /dev/null +++ b/python/tests/integration/oracle/__init__.py @@ -0,0 +1 @@ +"""Oracle integration tests for DataJam.""" diff --git a/python/tests/integration/oracle/container_constants.py b/python/tests/integration/oracle/container_constants.py new file mode 100644 index 0000000..5ee735c --- /dev/null +++ b/python/tests/integration/oracle/container_constants.py @@ -0,0 +1,9 @@ +"""Constants for Oracle TestContainer configuration.""" + +# Oracle container configuration +ORACLE_CONTAINER_NAME = "oracle_test_container" +ORACLE_PASSWORD = "oracle123" +ORACLE_IMAGE = "gvenzl/oracle-xe:21-slim-faststart" +ORACLE_PORT = 1521 +ORACLE_SERVICE_NAME = "XE" +ORACLE_USERNAME = "system" diff --git a/python/tests/integration/oracle/family_domain.py b/python/tests/integration/oracle/family_domain.py new file mode 100644 index 0000000..8990b86 --- /dev/null +++ b/python/tests/integration/oracle/family_domain.py @@ -0,0 +1,47 @@ +"""Oracle-specific Family domain implementation for integration tests.""" + +from __future__ import annotations + +from sqlalchemy import MetaData +from sqlalchemy.ext.asyncio import AsyncEngine + +from datajam import IConfigureDomainMappings +from datajam_sqlalchemy import Domain, OracleNamingConvention +from tests.test_support.family.entities import Base + + +class FamilyMappingConfigurator(IConfigureDomainMappings[MetaData]): + """Mapping configurator for the Family domain with Oracle-specific settings.""" + + def configure(self, metadata: MetaData) -> None: + """Configure entity mappings for Oracle.""" + # Apply Oracle naming conventions to all tables + OracleNamingConvention.configure_metadata_for_oracle(metadata) + + +class OracleFamilyDomain(Domain): + """Oracle-specific Family domain implementation.""" + + 
def __init__(self, engine: AsyncEngine, connection_string: str) -> None: + """Initialize the Oracle Family domain.""" + # Create metadata from our entities + metadata = Base.metadata + + # Create mapping configurator + mapping_configurator = FamilyMappingConfigurator() + + # Initialize the domain + super().__init__( + connection_string=connection_string, + engine=engine, + mapping_configurator=mapping_configurator, + metadata=metadata, + ) + + async def create_schema(self) -> None: + """Create the database schema for the Family domain.""" + await self.create_tables() + + async def drop_schema(self) -> None: + """Drop the database schema for the Family domain.""" + await self.drop_tables() diff --git a/python/tests/integration/oracle/oracle_container_manager.py b/python/tests/integration/oracle/oracle_container_manager.py new file mode 100644 index 0000000..fc744a3 --- /dev/null +++ b/python/tests/integration/oracle/oracle_container_manager.py @@ -0,0 +1,154 @@ +"""Oracle TestContainer manager for integration tests.""" + +from __future__ import annotations + +import logging +from typing import TYPE_CHECKING + +from sqlalchemy.ext.asyncio import AsyncEngine, create_async_engine +from testcontainers.oracle import OracleDbContainer + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + +logger = logging.getLogger(__name__) + + +class OracleContainerManager: + """Manages Oracle TestContainer lifecycle for integration tests.""" + + def __init__(self) -> None: + """Initialize the Oracle container manager.""" + self._container: OracleDbContainer | None = None + self._engine: AsyncEngine | None = None + self._connection_string: str | None = None + + async def start_container(self) -> None: + """Start the Oracle container and wait for it to be ready.""" + if self._container is not None: + logger.warning("Oracle container already started") + return + + logger.info("Starting Oracle container...") + + # Create and start the Oracle container using the free image + 
self._container = OracleDbContainer(image="gvenzl/oracle-free:slim") # Use the official free image + + # Start the container (this will block until ready) + self._container.start() + + # Get connection URL directly from the container + self._connection_string = self._container.get_connection_url() + + # For async support, convert the connection string to use async driver + if self._connection_string.startswith("oracle+cx_oracle://"): + async_connection_string = self._connection_string.replace("oracle+cx_oracle://", "oracle+oracledb_async://") + elif self._connection_string.startswith("oracle+oracledb://"): + async_connection_string = self._connection_string.replace("oracle+oracledb://", "oracle+oracledb_async://") + else: + async_connection_string = self._connection_string + + self._engine = create_async_engine( + async_connection_string, + echo=True, # Enable SQL logging for debugging + future=True, + ) + + logger.info(f"Oracle container started with connection: {self._connection_string}") + + # Test the connection + await self._test_connection() + + async def _test_connection(self) -> None: + """Test the Oracle connection.""" + if not self._engine: + raise RuntimeError("Engine not initialized") + + try: + from sqlalchemy import text + + async with self._engine.begin() as conn: + result = await conn.execute(text("SELECT 1 FROM DUAL")) + row = result.fetchone() + if row and row[0] == 1: + logger.info("Oracle connection test successful") + else: + raise RuntimeError("Oracle connection test failed") + except Exception as e: + logger.error(f"Oracle connection test failed: {e}") + raise + + async def stop_container(self) -> None: + """Stop the Oracle container and clean up resources.""" + if self._engine: + await self._engine.dispose() + self._engine = None + + if self._container: + logger.info("Stopping Oracle container...") + self._container.stop() + self._container = None + + self._connection_string = None + logger.info("Oracle container stopped") + + @property + 
def connection_string(self) -> str: + """Get the Oracle connection string.""" + if not self._connection_string: + raise RuntimeError("Oracle container not started") + return self._connection_string + + @property + def async_connection_string(self) -> str: + """Get the Oracle async connection string.""" + connection_string = self.connection_string + return connection_string.replace("oracle+oracledb://", "oracle+oracledb_async://") + + @property + def engine(self) -> AsyncEngine: + """Get the SQLAlchemy async engine.""" + if not self._engine: + raise RuntimeError("Oracle container not started") + return self._engine + + async def __aenter__(self) -> OracleContainerManager: + """Enter async context manager.""" + await self.start_container() + return self + + async def __aexit__(self, exc_type, exc_val, exc_tb) -> None: + """Exit async context manager.""" + await self.stop_container() + + +# Global container manager instance +_oracle_manager: OracleContainerManager | None = None + + +async def get_oracle_container() -> OracleContainerManager: + """Get or create the global Oracle container manager.""" + global _oracle_manager + + if _oracle_manager is None: + _oracle_manager = OracleContainerManager() + await _oracle_manager.start_container() + + return _oracle_manager + + +async def cleanup_oracle_container() -> None: + """Clean up the global Oracle container manager.""" + global _oracle_manager + + if _oracle_manager is not None: + await _oracle_manager.stop_container() + _oracle_manager = None + + +# Pytest fixture for Oracle container +async def oracle_container_fixture() -> AsyncGenerator[OracleContainerManager, None]: + """Pytest fixture that provides an Oracle container for tests.""" + async with OracleContainerManager() as container: + yield container diff --git a/python/tests/integration/oracle/test_when_persisting_and_retrieving_a_child.py b/python/tests/integration/oracle/test_when_persisting_and_retrieving_a_child.py new file mode 100644 index 
0000000..ff7357b --- /dev/null +++ b/python/tests/integration/oracle/test_when_persisting_and_retrieving_a_child.py @@ -0,0 +1,78 @@ +"""Oracle integration test for persisting and retrieving a child - Python port of .NET test.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +import pytest +from sqlalchemy.ext.asyncio import AsyncEngine + +from tests.integration.oracle.family_domain import OracleFamilyDomain +from tests.test_support.family import Child, Father, GetChildren, Mother + +if TYPE_CHECKING: + from tests.integration.oracle.oracle_container_manager import OracleContainerManager + + +class TestWhenPersistingAndRetrievingAChild: + """Test class for persisting and retrieving a child entity - matches .NET test pattern.""" + + @pytest.fixture(autouse=True) + async def setup_and_teardown( + self, + oracle_container: OracleContainerManager, + oracle_engine: AsyncEngine, + ) -> None: + """Setup and teardown for each test method.""" + # Create domain + self.domain = OracleFamilyDomain( + engine=oracle_engine, + connection_string=oracle_container.async_connection_string, + ) + + # Create schema + await self.domain.create_schema() + + # Create repository and execute the test scenario + repository = await self.domain.create_repository() + + try: + # Arrange - Create family entities + father = Father(name="Dad") + mother = Mother(name="Mom") + child = Child(name="Kid") + child.add_parents(father, mother) + + # Add child to repository (which should also add parents due to relationships) + repository.context.add(child) + await repository.context.commit() + + # Act - Retrieve the child + scalar_query = GetChildren() + self.result = await repository.find_scalar(scalar_query) + + finally: + # Cleanup + await repository.context.__aexit__(None, None, None) + + yield + + # Teardown - Drop schema + await self.domain.drop_schema() + + async def test_it_should_have_the_correct_name(self) -> None: + """Test that the retrieved child has the correct 
name.""" + assert self.result is not None + assert self.result.name == "Kid" + + async def test_it_should_have_the_correct_father(self) -> None: + """Test that the retrieved child has the correct father.""" + assert self.result is not None + assert self.result.father is not None + assert self.result.father.name == "Dad" + + async def test_it_should_have_the_correct_mother(self) -> None: + """Test that the retrieved child has the correct mother.""" + assert self.result is not None + assert self.result.mother is not None + assert self.result.mother.name == "Mom" diff --git a/python/tests/test_support/__init__.py b/python/tests/test_support/__init__.py new file mode 100644 index 0000000..3347d76 --- /dev/null +++ b/python/tests/test_support/__init__.py @@ -0,0 +1 @@ +"""Test support utilities for DataJam tests.""" diff --git a/python/tests/test_support/family/__init__.py b/python/tests/test_support/family/__init__.py new file mode 100644 index 0000000..c837b10 --- /dev/null +++ b/python/tests/test_support/family/__init__.py @@ -0,0 +1,6 @@ +"""Family domain test entities for DataJam testing.""" + +from .entities import Child, Father, Mother, Person +from .queries import GetChildren + +__all__ = ["Person", "Father", "Mother", "Child", "GetChildren"] diff --git a/python/tests/test_support/family/entities.py b/python/tests/test_support/family/entities.py new file mode 100644 index 0000000..e50db7e --- /dev/null +++ b/python/tests/test_support/family/entities.py @@ -0,0 +1,88 @@ +"""Family domain entities for testing DataJam patterns.""" + +from __future__ import annotations + +from sqlalchemy import Column, ForeignKey, Integer, Sequence, String +from sqlalchemy.orm import DeclarativeBase, relationship + + +# Create base class for our entities +class Base(DeclarativeBase): + """Base class for all entities.""" + + pass + + +class Person(Base): + """Base person entity.""" + + __tablename__ = "PERSON" + + id = Column("ID", Integer, Sequence("SEQ_PERSON_ID"), 
primary_key=True) + name = Column("NAME", String(100), nullable=False) + person_type = Column("PERSON_TYPE", String(20), nullable=False) + + # Polymorphic configuration + __mapper_args__ = { + "polymorphic_identity": "person", + "polymorphic_on": person_type, + "with_polymorphic": "*", + } + + def __repr__(self) -> str: + return f"<{self.__class__.__name__}(id={self.id}, name='{self.name}')>" + + +class Father(Person): + """Father entity inheriting from Person.""" + + __tablename__ = "FATHER" + + id = Column("ID", Integer, ForeignKey("PERSON.ID"), primary_key=True) + + # Relationship to children + children = relationship("Child", back_populates="father", foreign_keys="Child.father_id") + + __mapper_args__ = { + "polymorphic_identity": "father", + } + + +class Mother(Person): + """Mother entity inheriting from Person.""" + + __tablename__ = "MOTHER" + + id = Column("ID", Integer, ForeignKey("PERSON.ID"), primary_key=True) + + # Relationship to children + children = relationship("Child", back_populates="mother", foreign_keys="Child.mother_id") + + __mapper_args__ = { + "polymorphic_identity": "mother", + } + + +class Child(Person): + """Child entity inheriting from Person.""" + + __tablename__ = "CHILD" + + id = Column("ID", Integer, ForeignKey("PERSON.ID"), primary_key=True) + father_id = Column("FATHER_ID", Integer, ForeignKey("FATHER.ID"), nullable=True) + mother_id = Column("MOTHER_ID", Integer, ForeignKey("MOTHER.ID"), nullable=True) + + # Relationships to parents + father = relationship("Father", back_populates="children", foreign_keys=[father_id]) + mother = relationship("Mother", back_populates="children", foreign_keys=[mother_id]) + + __mapper_args__ = { + "polymorphic_identity": "child", + } + + def add_parents(self, father: Father, mother: Mother) -> None: + """Add parents to this child.""" + self.father = father + self.mother = mother + self.father_id = father.id + self.mother_id = mother.id diff --git a/python/tests/test_support/family/queries.py 
b/python/tests/test_support/family/queries.py
new file mode 100644
index 0000000..e3203d5
--- /dev/null
+++ b/python/tests/test_support/family/queries.py
@@ -0,0 +1,17 @@
+"""Family domain queries for testing DataJam patterns."""
+
+from __future__ import annotations
+
+from datajam import IDataSource, Scalar
+
+from .entities import Child
+
+
+class GetChildren(Scalar[Child]):
+    """Scalar query to get a single child from the database."""
+
+    async def execute(self, data_source: IDataSource) -> Child | None:
+        """Execute the query to get a single child.
+
+        Returns the first Child found, or None when no rows exist.
+        """
+        # Create a queryable and get first result
+        queryable = data_source.create_query(Child)
+        return await queryable.first_or_none()
diff --git a/python/tests/unit/__init__.py b/python/tests/unit/__init__.py
new file mode 100644
index 0000000..45f67fc
--- /dev/null
+++ b/python/tests/unit/__init__.py
@@ -0,0 +1 @@
+"""Unit tests for DataJam."""
diff --git a/python/tests/unit/test_data_context.py b/python/tests/unit/test_data_context.py
new file mode 100644
index 0000000..8b9bb7f
--- /dev/null
+++ b/python/tests/unit/test_data_context.py
@@ -0,0 +1,78 @@
+"""Unit tests for DataContext implementation."""
+
+from unittest.mock import AsyncMock, Mock
+
+import pytest
+from sqlalchemy import MetaData
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from datajam_sqlalchemy import DataContext
+
+
+class TestDataContext:
+    """Test DataContext implementation against a fully mocked AsyncSession."""
+
+    @pytest.fixture
+    def mock_session(self):
+        """Create a mock async session.
+
+        Awaitable members (commit/rollback/close/execute and the async
+        context-manager hooks) are AsyncMocks; add/delete stay plain Mocks
+        because AsyncSession exposes them synchronously.
+        """
+        session = Mock(spec=AsyncSession)
+        session.commit = AsyncMock()
+        session.rollback = AsyncMock()
+        session.close = AsyncMock()
+        session.add = Mock()
+        session.delete = Mock()
+        session.execute = AsyncMock()
+        session.__aenter__ = AsyncMock(return_value=session)
+        session.__aexit__ = AsyncMock()
+        return session
+
+    @pytest.fixture
+    def metadata(self):
+        """Create a metadata instance."""
+        return MetaData()
+
+    @pytest.fixture
+    def data_context(self, mock_session, metadata):
+        """Create a DataContext with mock session."""
+        return DataContext(mock_session, metadata)
+
+    async def test_commit_calls_session_commit(self, data_context, mock_session):
+        """Test that commit calls session.commit()."""
+        await data_context.commit()
+        mock_session.commit.assert_called_once()
+
+    async def test_rollback_calls_session_rollback(self, data_context, mock_session):
+        """Test that rollback calls session.rollback()."""
+        await data_context.rollback()
+        mock_session.rollback.assert_called_once()
+
+    def test_add_calls_session_add(self, data_context, mock_session):
+        """Test that add calls session.add()."""
+        entity = Mock()
+        data_context.add(entity)
+        mock_session.add.assert_called_once_with(entity)
+
+    def test_remove_calls_session_delete(self, data_context, mock_session):
+        """Test that remove calls session.delete()."""
+        entity = Mock()
+        # Mock the entity being in the session
+        # NOTE(review): special-method lookup is type-based, so this
+        # instance attribute only takes effect if DataContext.remove calls
+        # session.__contains__(entity) explicitly rather than using
+        # `entity in session` -- confirm against the implementation (a
+        # MagicMock-based session would cover both forms).
+        mock_session.__contains__ = Mock(return_value=True)
+        data_context.remove(entity)
+        mock_session.delete.assert_called_once_with(entity)
+
+    async def test_context_manager_enters_and_exits(self, data_context, mock_session):
+        """Test that DataContext works as async context manager."""
+        async with data_context as ctx:
+            assert ctx == data_context
+
+        # DataContext doesn't delegate to session's context manager, it manages commit/rollback itself
+        mock_session.commit.assert_called_once()
+        mock_session.close.assert_called_once()
+
+    def test_session_property_returns_session(self, data_context, mock_session):
+        """Test that session property returns the underlying session."""
+        assert data_context.session == mock_session
+
+    def test_metadata_property_returns_metadata(self, data_context, metadata):
+        """Test that metadata property returns the metadata."""
+        assert data_context.metadata == metadata
diff --git a/python/tests/unit/test_domain.py b/python/tests/unit/test_domain.py
new file mode 100644
index 0000000..366fa01
--- /dev/null
+++ b/python/tests/unit/test_domain.py
@@ -0,0
+1,116 @@
+"""Unit tests for Domain implementation."""
+
+from unittest.mock import AsyncMock, Mock
+
+import pytest
+from sqlalchemy import MetaData
+from sqlalchemy.ext.asyncio import AsyncEngine
+
+from datajam import IConfigureDomainMappings
+from datajam_sqlalchemy import DataContext, Domain, Repository
+
+
+class MockMappingConfigurator(IConfigureDomainMappings[MetaData]):
+    """Mock mapping configurator for testing.
+
+    Records whether configure() was invoked so tests can assert that the
+    Domain constructor calls it.
+    """
+
+    def __init__(self):
+        # Flipped to True by configure(); asserted on by TestDomain.
+        self.configured = False
+
+    def configure(self, metadata: MetaData) -> None:
+        self.configured = True
+
+
+class TestDomain:
+    """Test Domain implementation against a mocked AsyncEngine."""
+
+    @pytest.fixture
+    def mock_engine(self):
+        """Create a mock async engine."""
+        engine = Mock(spec=AsyncEngine)
+
+        # Create a mock connection
+        mock_conn = Mock()
+        mock_conn.run_sync = AsyncMock()
+
+        # Create an async context manager that returns the connection
+        async_context = AsyncMock()
+        async_context.__aenter__ = AsyncMock(return_value=mock_conn)
+        async_context.__aexit__ = AsyncMock(return_value=None)
+
+        # Make engine.begin() return the async context manager
+        engine.begin.return_value = async_context
+
+        # Store the connection for test assertions
+        engine._test_connection = mock_conn
+
+        return engine
+
+    @pytest.fixture
+    def metadata(self):
+        """Create a metadata instance."""
+        return MetaData()
+
+    @pytest.fixture
+    def mapping_configurator(self):
+        """Create a mock mapping configurator."""
+        return MockMappingConfigurator()
+
+    @pytest.fixture
+    def domain(self, mock_engine, metadata, mapping_configurator):
+        """Create a Domain instance."""
+        return Domain(
+            connection_string="test://connection",
+            engine=mock_engine,
+            mapping_configurator=mapping_configurator,
+            metadata=metadata,
+        )
+
+    def test_configuration_options_returns_connection_string(self, domain):
+        """Test that configuration_options returns the connection string."""
+        assert domain.configuration_options == "test://connection"
+
+    def test_mapping_configurator_is_called_during_init(self, domain, mapping_configurator):
+        """Test that mapping configurator is called during initialization."""
+        assert mapping_configurator.configured
+
+    def test_metadata_property_returns_metadata(self, domain, metadata):
+        """Test that metadata property returns the metadata."""
+        assert domain.metadata == metadata
+
+    def test_engine_property_returns_engine(self, domain, mock_engine):
+        """Test that engine property returns the engine."""
+        assert domain.engine == mock_engine
+
+    async def test_create_data_context_returns_data_context(self, domain):
+        """Test that create_data_context returns a DataContext instance."""
+        data_context = await domain.create_data_context()
+        assert isinstance(data_context, DataContext)
+
+    async def test_create_repository_returns_repository(self, domain):
+        """Test that create_repository returns a Repository instance."""
+        repository = await domain.create_repository()
+        assert isinstance(repository, Repository)
+
+    async def test_create_tables_calls_metadata_create_all(self, domain, mock_engine):
+        """Test that create_tables calls metadata.create_all."""
+        await domain.create_tables()
+
+        # Verify that run_sync was called on the connection
+        # (run_sync is how async SQLAlchemy invokes metadata.create_all).
+        mock_engine._test_connection.run_sync.assert_called_once()
+
+    async def test_drop_tables_calls_metadata_drop_all(self, domain, mock_engine):
+        """Test that drop_tables calls metadata.drop_all."""
+        await domain.drop_tables()
+
+        # Verify that run_sync was called on the connection
+        mock_engine._test_connection.run_sync.assert_called_once()
+
+    def test_domain_without_mapping_configurator(self, mock_engine, metadata):
+        """Test that Domain can be created without mapping configurator."""
+        domain = Domain(connection_string="test://connection", engine=mock_engine, metadata=metadata)
+        assert domain.mapping_configurator is None
+
+    def test_domain_without_metadata_creates_default(self, mock_engine):
+        """Test that Domain creates default metadata if none provided."""
+        domain = Domain(connection_string="test://connection", engine=mock_engine)
+        assert isinstance(domain.metadata, MetaData)
diff --git a/python/tests/unit/test_oracle_utils.py b/python/tests/unit/test_oracle_utils.py
new file mode 100644
index 0000000..771fabf
--- /dev/null
+++ b/python/tests/unit/test_oracle_utils.py
@@ -0,0 +1,110 @@
+"""Unit tests for Oracle utilities."""
+
+from sqlalchemy import Column, Integer, MetaData, String, Table
+
+from datajam_sqlalchemy.oracle_utils import (
+    OracleNamingConvention,
+    OracleSequenceHelper,
+    create_oracle_connection_string,
+)
+
+
+class TestOracleNamingConvention:
+    """Test OracleNamingConvention utility."""
+
+    def test_to_oracle_identifier_converts_to_uppercase(self):
+        """Test that to_oracle_identifier converts names to uppercase."""
+        assert OracleNamingConvention.to_oracle_identifier("test_table") == "TEST_TABLE"
+        assert OracleNamingConvention.to_oracle_identifier("user_name") == "USER_NAME"
+        assert OracleNamingConvention.to_oracle_identifier("lowercase") == "LOWERCASE"
+
+    def test_configure_table_for_oracle_updates_names(self):
+        """Test that configure_table_for_oracle updates table and column names."""
+        metadata = MetaData()
+        table = Table(
+            "test_table",
+            metadata,
+            Column("test_column", String(50)),
+            Column("another_column", Integer),
+        )
+
+        OracleNamingConvention.configure_table_for_oracle(table)
+
+        # Table and column objects are renamed in place.
+        assert table.name == "TEST_TABLE"
+        assert "TEST_COLUMN" in [col.name for col in table.columns]
+        assert "ANOTHER_COLUMN" in [col.name for col in table.columns]
+
+    def test_configure_metadata_for_oracle_updates_all_tables(self):
+        """Test that configure_metadata_for_oracle updates all tables."""
+        metadata = MetaData()
+        table1 = Table("table_one", metadata, Column("col_one", String(50)))
+        table2 = Table("table_two", metadata, Column("col_two", Integer))
+
+        OracleNamingConvention.configure_metadata_for_oracle(metadata)
+
+        assert table1.name == "TABLE_ONE"
+        assert table2.name == "TABLE_TWO"
+        assert "COL_ONE" in [col.name for col in table1.columns]
+        assert "COL_TWO" in [col.name for col in table2.columns]
+
+
+class TestOracleSequenceHelper:
+    """Test OracleSequenceHelper utility.
+
+    Sequence names follow the SEQ_<TABLE>_<COLUMN> pattern, with the
+    column defaulting to ID (so SEQ_PERSON_ID matches the family test
+    entities).
+    """
+
+    def test_create_sequence_ddl_generates_correct_sql(self):
+        """Test that create_sequence_ddl generates correct DDL."""
+        ddl = OracleSequenceHelper.create_sequence_ddl("PERSON")
+        expected = "CREATE SEQUENCE SEQ_PERSON_ID START WITH 1 INCREMENT BY 1"
+        assert ddl == expected
+
+    def test_create_sequence_ddl_with_custom_column(self):
+        """Test create_sequence_ddl with custom column name."""
+        ddl = OracleSequenceHelper.create_sequence_ddl("USER", "USER_ID")
+        expected = "CREATE SEQUENCE SEQ_USER_USER_ID START WITH 1 INCREMENT BY 1"
+        assert ddl == expected
+
+    def test_drop_sequence_ddl_generates_correct_sql(self):
+        """Test that drop_sequence_ddl generates correct DDL."""
+        ddl = OracleSequenceHelper.drop_sequence_ddl("PERSON")
+        expected = "DROP SEQUENCE SEQ_PERSON_ID"
+        assert ddl == expected
+
+    def test_drop_sequence_ddl_with_custom_column(self):
+        """Test drop_sequence_ddl with custom column name."""
+        ddl = OracleSequenceHelper.drop_sequence_ddl("USER", "USER_ID")
+        expected = "DROP SEQUENCE SEQ_USER_USER_ID"
+        assert ddl == expected
+
+    def test_get_sequence_name_returns_correct_name(self):
+        """Test that get_sequence_name returns correct sequence name."""
+        name = OracleSequenceHelper.get_sequence_name("PERSON")
+        assert name == "SEQ_PERSON_ID"
+
+    def test_get_sequence_name_with_custom_column(self):
+        """Test get_sequence_name with custom column name."""
+        name = OracleSequenceHelper.get_sequence_name("USER", "USER_ID")
+        assert name == "SEQ_USER_USER_ID"
+
+
+class TestCreateOracleConnectionString:
+    """Test create_oracle_connection_string function."""
+
+    def test_default_parameters(self):
+        """Test connection string with default parameters."""
+        conn_str = create_oracle_connection_string()
+        expected = "oracle+oracledb://system:oracle@localhost:1521/?service_name=XE"
+        assert conn_str == expected
+
+    def test_custom_parameters(self):
+        """Test connection string with custom parameters."""
+        conn_str = create_oracle_connection_string(
+            host="oracle.example.com", port=1234, service_name="PROD", username="myuser", password="mypass"
+        )
+        expected = "oracle+oracledb://myuser:mypass@oracle.example.com:1234/?service_name=PROD"
+        assert conn_str == expected
+
+    def test_special_characters_in_password(self):
+        """Test connection string with special characters in password."""
+        # NOTE(review): this pins the current behavior -- credentials are NOT
+        # URL-encoded, so the '@' inside "p@ssw0rd!" makes the resulting URL
+        # ambiguous for standard URL parsers.  Consider percent-encoding the
+        # password in create_oracle_connection_string and updating this
+        # expectation accordingly.
+        conn_str = create_oracle_connection_string(username="user", password="p@ssw0rd!")
+        expected = "oracle+oracledb://user:p@ssw0rd!@localhost:1521/?service_name=XE"
+        assert conn_str == expected
diff --git a/python/tests/unit/test_repository.py b/python/tests/unit/test_repository.py
new file mode 100644
index 0000000..a32151f
--- /dev/null
+++ b/python/tests/unit/test_repository.py
@@ -0,0 +1,85 @@
+"""Unit tests for Repository implementation."""
+
+from unittest.mock import AsyncMock, Mock
+
+import pytest
+
+from datajam import ICommand, IQuery, IScalar
+from datajam_sqlalchemy import Repository
+
+
+class MockCommand(ICommand):
+    """Mock command for testing; records whether execute() ran."""
+
+    def __init__(self):
+        self.executed = False
+
+    async def execute(self, unit_of_work):
+        self.executed = True
+
+
+class MockQuery(IQuery[str]):
+    """Mock query for testing; returns a canned result list.
+
+    NOTE(review): declared as IQuery[str] while execute returns list[str];
+    this is consistent only if IQuery's type parameter is the element type
+    -- confirm against datajam's IQuery definition.
+    """
+
+    def __init__(self, result: list[str]):
+        self.result = result
+
+    async def execute(self, data_source):
+        return self.result
+
+
+class MockScalar(IScalar[str]):
+    """Mock scalar query for testing; returns a canned single value."""
+
+    def __init__(self, result: str):
+        self.result = result
+
+    async def execute(self, data_source):
+        return self.result
+
+
+class TestRepository:
+    """Test Repository implementation against a mocked data context."""
+
+    @pytest.fixture
+    def mock_context(self):
+        """Create a mock data context.
+
+        Only the async context-manager hooks are stubbed; everything else
+        is a bare Mock.
+        """
+        context = Mock()
+        context.__aenter__ = AsyncMock(return_value=context)
+        context.__aexit__ = AsyncMock(return_value=None)
+        return context
+
+    @pytest.fixture
+    def repository(self, mock_context):
+        """Create a repository with mock context."""
+        return Repository(mock_context)
+
+    async def test_execute_command_calls_command_execute(self, repository, mock_context):
+        """Test that execute calls command.execute with unit of work."""
+        command = MockCommand()
+
+        await repository.execute(command)
+
+        assert command.executed
+
+    async def test_find_calls_query_execute(self, repository, mock_context):
+        """Test that find calls query.execute with data source."""
+        expected_result = ["item1", "item2"]
+        query = MockQuery(expected_result)
+
+        result = await repository.find(query)
+
+        assert result == expected_result
+
+    async def test_find_scalar_calls_scalar_execute(self, repository, mock_context):
+        """Test that find_scalar calls scalar.execute with data source."""
+        expected_result = "single_item"
+        scalar = MockScalar(expected_result)
+
+        result = await repository.find_scalar(scalar)
+
+        assert result == expected_result
+
+    def test_context_property_returns_context(self, repository, mock_context):
+        """Test that context property returns the underlying context."""
+        assert repository.context == mock_context