diff --git a/.coveragerc b/.coveragerc new file mode 100644 index 0000000..f6c74c2 --- /dev/null +++ b/.coveragerc @@ -0,0 +1,20 @@ +[run] +omit = + */.local/* + */apps.py + */admin.py + */journey_tests/* + */asgi.py + */wsgi.py + */settings* + */manage.py + */streamlit_main.py + */migrations/* +source = django_google_structured_logger +relative_files = True + +[html] +directory = coverage-reports + +[xml] +output = coverage-reports/coverage.xml diff --git a/.github/workflows/pr-validation.yaml b/.github/workflows/pr-validation.yaml new file mode 100644 index 0000000..2dd8e3d --- /dev/null +++ b/.github/workflows/pr-validation.yaml @@ -0,0 +1,60 @@ +name: PR Validation + +on: + pull_request: + branches: main + push: + branches: main + +jobs: + quality-check: + runs-on: ubuntu-24.04 + name: Source code quality check + steps: + - name: Get latest SHA on the branch + id: get_sha + run: | + echo "COMMIT_SHA=${{ github.event.pull_request.head.sha || github.sha }}" >> $GITHUB_OUTPUT + + - name: Checkout Code + uses: actions/checkout@v4 + with: + ref: ${{ steps.get_sha.outputs.COMMIT_SHA }} + fetch-depth: 0 + + - uses: actions/setup-python@v5 + with: + python-version: "3.13" + + - name: Install Poetry and dependencies + run: | + pip install poetry==2.1.2 + poetry sync -vv --all-groups --no-cache --no-interaction + + - name: Run tests + run: | + echo '### tests result' >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + poetry run pytest >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Type checks + run: | + echo '### mypy checks' >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + poetry run mypy django_google_structured_logger >> $GITHUB_STEP_SUMMARY + poetry run mypy tests >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Lint + run: | + echo '### ruff linter checks' >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + poetry run ruff check >> $GITHUB_STEP_SUMMARY + echo '```' >> $GITHUB_STEP_SUMMARY + + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} + files: ./coverage-reports/coverage.xml diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 8a30af1..071e1ce 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,7 +18,7 @@ jobs: - name: Set up Python uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.10 - name: Install dependencies run: | diff --git a/.gitignore b/.gitignore index 5af3942..f3c8914 100644 --- a/.gitignore +++ b/.gitignore @@ -5,6 +5,7 @@ *.pyc /reports /.coverage +coverage-reports/ /build .idea dist @@ -183,3 +184,5 @@ cython_debug/ # PyPI configuration file .pypirc + +.vscode/ diff --git a/README.md b/README.md index 443a128..ea644b6 100644 --- a/README.md +++ b/README.md @@ -3,6 +3,10 @@ [![PyPI version](https://badge.fury.io/py/django-google-structured-logger.svg)](https://badge.fury.io/py/django-google-structured-logger) [![Python Versions](https://img.shields.io/pypi/pyversions/django-google-structured-logger)](https://pypi.org/project/django-google-structured-logger/) [![Django Versions](https://img.shields.io/pypi/djversions/django-google-structured-logger)](https://pypi.org/project/django-google-structured-logger/) +[![codecov](https://codecov.io/gh/muehlemann-popp/django-google-structured-logger/graph/badge.svg?token=2X2RMRFOZO)](https://codecov.io/gh/muehlemann-popp/django-google-structured-logger) +![Mypy 
Checked](https://img.shields.io/badge/checked%20with-mypy-blue.svg) +[![Ruff](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/astral-sh/ruff/main/assets/badge/v2.json)](https://github.com/astral-sh/ruff) +[![Taskfile](https://img.shields.io/badge/Task-Taskfile-blue?logo=data:image/svg+xml;base64,PHN2ZyB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciIHdpZHRoPSI1MDAiIGhlaWdodD0iNTAwIiB2aWV3Qm94PSIwIDAgMzc1IDM3NSI+PHBhdGggZmlsbD0iIzI5YmViMCIgZD0iTSAxODcuNTcwMzEyIDE5MC45MzM1OTQgTCAxODcuNTcwMzEyIDM3NSBMIDMwLjA3MDMxMiAyNzkuNTM1MTU2IEwgMzAuMDcwMzEyIDk1LjQ2NDg0NCBaIi8+PHBhdGggZmlsbD0iIzY5ZDJjOCIgZD0iTSAxODcuNTcwMzEyIDE5MC45MzM1OTQgTCAxODcuNTcwMzEyIDM3NSBMIDM0NS4wNzAzMTIgMjc5LjUzNTE1NiBMIDM0NS4wNzAzMTIgOTUuNDY0ODQ0IFoiLz48cGF0aCBmaWxsPSIjOTRkZmQ4IiBkPSJNIDE4Ny41NzAzMTIgMTkwLjkzMzU5NCBMIDMwLjA3MDMxMiA5NS40NjQ4NDQgTCAxODcuNTcwMzEyIDAgTCAzNDUuMDcwMzEyIDk1LjQ2NDg0NCBaIi8+PC9zdmc+)](https://taskfile.dev/) **Django Google Structured Logger** is a Django middleware designed to capture and log details from incoming requests and outgoing responses. It offers features to mask sensitive data, set default fields for Google Cloud Logging, and structure logs in a detailed and organized manner. @@ -32,15 +36,16 @@ pip install django-google-structured-logger #### Configuration -1. Add `GoogleFormatter` to your Django's `LOGGING` setting. - Example: +1. Add a formatter to your Django's `LOGGING` setting. + + **For standard JSON logging:** ```python LOGGING = { "version": 1, "disable_existing_loggers": False, "formatters": { "json": { - "()": "django_google_structured_logger.formatter.GoogleFormatter", + "()": "django_google_structured_logger.formatter.StandardJSONFormatter", }, }, "handlers": { @@ -48,11 +53,59 @@ pip install django-google-structured-logger "level": "INFO", "class": "logging.StreamHandler", }, - "google-json-handler": { + "json-handler": { "class": "logging.StreamHandler", "formatter": "json", }, }, + "root": { + "handlers": [env.str("DJANGO_LOG_HANDLER", "json-handler")], + "level": env.str("ROOT_LOG_LEVEL", "INFO"), + }, + "loggers": { + "()": { + "handlers": [env.str("DJANGO_LOG_HANDLER", "json-handler")], + "level": env.str("DJANGO_LOG_LEVEL", "INFO"), + }, + "django": { + "handlers": [env.str("DJANGO_LOG_HANDLER", "json-handler")], + "level": env.str("DJANGO_LOG_LEVEL", "INFO"), + "propagate": False, + }, + "django.server": { + "handlers": [env.str("DJANGO_LOG_HANDLER", "json-handler")], + "level": env.str("DJANGO_SERVER_LEVEL", "ERROR"), + "propagate": False, + }, + "django.request": { + "handlers": [env.str("DJANGO_LOG_HANDLER", "json-handler")], + "level": env.str("DJANGO_REQUEST_LEVEL", "ERROR"), + "propagate": False, + }, + }, + } + ``` + + **For Google Cloud Logging integration:** + ```python + LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "google": { + "()": "django_google_structured_logger.formatter.GoogleCloudFormatter", + }, + }, + "handlers": { + "console": { + "level": "INFO", + "class": "logging.StreamHandler", + }, + "google-json-handler": { + "class": "logging.StreamHandler", + "formatter": "google", + }, + }, "root": { "handlers": [env.str("DJANGO_LOG_HANDLER", "google-json-handler")], "level": env.str("ROOT_LOG_LEVEL", "INFO"), @@ -80,7 +133,17 @@ pip install django-google-structured-logger }, } ``` -2. Add `SetRequestToLoggerMiddleware` to your Django's `MIDDLEWARE` setting. 
+ + Alternatively, you can configure the formatter class via Django settings: + ```python + # For standard JSON logging (default) + LOG_FORMATTER_CLASS = "django_google_structured_logger.formatter.StandardJSONFormatter" + + # For Google Cloud Logging + LOG_FORMATTER_CLASS = "django_google_structured_logger.formatter.GoogleCloudFormatter" + ``` + +2. Add middleware to your Django's `MIDDLEWARE` setting. Django middleware: ```python @@ -104,13 +167,15 @@ pip install django-google-structured-logger ### Key Components: -#### 1. middleware.py +#### 1. middlewares.py -- **SetRequestToLoggerMiddleware**: This class contains methods to process incoming requests and outgoing responses and then log them. It supports features like abridging lengthy data and masking sensitive information. +- **SetUserContextMiddleware**: Sets user context information for logging throughout the request lifecycle. +- **LogRequestAndResponseMiddleware**: Processes incoming requests and outgoing responses and logs them. It supports features like abridging lengthy data and masking sensitive information. #### 2. formatter.py -- **GoogleFormatter**: Extends `jsonlogger.JsonFormatter` to format logs specifically for Google Cloud Logging. It sets default fields such as severity, labels, operation, and source location based on Google's logging standards. +- **StandardJSONFormatter**: A universal JSON log formatter that creates structured logs with fields like severity, source_location, labels, operation, and http_request/http_response. Suitable for any logging system that accepts JSON format. +- **GoogleCloudFormatter**: Extends `StandardJSONFormatter` to format logs specifically for Google Cloud Logging. It remaps standard fields to Google Cloud's specific field names (e.g., `logging.googleapis.com/sourceLocation`) and adds trace correlation support. #### 3. settings.py @@ -121,11 +186,12 @@ pip install django-google-structured-logger These are the settings that can be customized for the middleware: +- `LOG_FORMATTER_CLASS`: Formatter class to use. Default is `"django_google_structured_logger.formatter.StandardJSONFormatter"`. - `LOG_MAX_STR_LEN`: Maximum string length before data is abridged. Default is `200`. - `LOG_MAX_LIST_LEN`: Maximum list length before data is abridged. Default is `10`. - `LOG_EXCLUDED_ENDPOINTS`: List of endpoints to exclude from logging. Default is an `empty list`. - `LOG_SENSITIVE_KEYS`: Regex patterns for keys which contain sensitive data. Defaults `DEFAULT_SENSITIVE_KEYS`. -- `LOG_MASK_STYLE`: Style for masking sensitive data. Default is `"partially"`. +- `LOG_MASK_STYLE`: Style for masking sensitive data. Default is `"partial"`. - `LOG_MIDDLEWARE_ENABLED`: Enable or disable the logging middleware. Default is `True`. - `LOG_EXCLUDED_HEADERS`: List of request headers to exclude from logging. Defaults `DEFAULT_SENSITIVE_HEADERS`. - `LOG_USER_ID_FIELD`: Field name for user ID. Default is `"id"`. @@ -143,11 +209,11 @@ Note: ``` will be logged as structured data in the `jsonPayload` field in Google Cloud Logging. Any data passed to extra kwargs will not be abridged or masked. -- `extra` kwargs passed to logger may override any default fields set by `GoogleFormatter`. +- `extra` kwargs passed to logger may override any default fields set by the formatters. ### Conclusion: -**SetRequestToLoggerMiddleware** is a comprehensive solution for those seeking enhanced logging capabilities in their Django projects, with particular attention to sensitive data protection and compatibility with Google Cloud Logging. 
+**Django Google Structured Logger** is a comprehensive solution for those seeking enhanced logging capabilities in their Django projects, with particular attention to sensitive data protection and compatibility with Google Cloud Logging. To get started, integrate the provided middleware, formatter, and settings into your Django project, customize as needed, and enjoy advanced logging capabilities! diff --git a/Taskfile.yaml b/Taskfile.yaml new file mode 100644 index 0000000..02952ef --- /dev/null +++ b/Taskfile.yaml @@ -0,0 +1,105 @@ +# https://taskfile.dev + +version: "3" + +vars: + GITHUB_REPOSITORY_OWNER: muehlemann-popp + GITHUB_REPOSITORY: django-google-structured-logger + GIT_SHA: + sh: git rev-parse --short=8 HEAD + CURRENT_DATETIME: + sh: date +"%Y-%m-%d_%H-%M-%S" + DIR_SRC: ./django_google_structured_logger + DIR_TESTS: ./tests + +tasks: + default: + desc: Display list of all available tasks + cmds: + - task --list-all + silent: true + + # Local environment + + poetry:install: + desc: Install dependencies without changes in lock file + cmds: + - poetry install -vv --all-groups + + poetry:install:ci: + desc: Install dependencies like in CI pipeline + cmds: + - poetry sync -vv --without=dev --no-cache --no-interaction + + # Python Specific + + py:format:src: + desc: Format python files in src directory + dir: "{{.DIR_SRC}}" + cmds: + - poetry run ruff check --select I --fix + - poetry run ruff format + + py:format:tests: + desc: Format python files in tests directory + dir: "{{.DIR_TESTS}}" + cmds: + - poetry run ruff check --select I --fix + - poetry run ruff format + + py:format:all: + desc: Format all Python files in src and tests directories + cmds: + - task: py:format:src + - task: py:format:tests + + checks:mypy: + desc: Run mypy check + cmds: + - poetry run mypy {{.DIR_SRC}} + - poetry run mypy {{.DIR_TESTS}} + + checks:tests: + desc: Run tests + cmds: + - poetry run pytest + + checks:ruff: + desc: Run ruff checks + cmds: + - poetry run ruff check --fix + + checks:coverage: + desc: Run code coverage check + ignore_error: true + cmds: + - poetry run coverage run -m pytest -vv {{.CLI_ARGS}} + + precommit: + desc: Run all checks + cmds: + - task: py:format:all + - task: checks:ruff + - task: checks:mypy + - task: checks:coverage + + # GitHub CLI + + github:repository:getid: + desc: Get repository id with GitHub CLI + cmds: + - "gh api -H 'Accept: application/vnd.github+json' repos/{{ .GITHUB_REPOSITORY_OWNER }}/{{ .CLI_ARGS | default .GITHUB_REPOSITORY_NAME }} | jq .id" + + # git cli + + git:diff: + desc: Prepare git diff for review + cmds: + - mkdir -p ./var + - git diff > ./var/git_diff + + git:diff:main: + desc: Prepare comparison between current branch and main branch for review + cmds: + - mkdir -p ./var + - git diff main..HEAD > ./var/git_diff diff --git a/django_google_structured_logger/apps.py b/django_google_structured_logger/apps.py index eaf8ab0..8778d8c 100644 --- a/django_google_structured_logger/apps.py +++ b/django_google_structured_logger/apps.py @@ -1,4 +1,4 @@ -from django.apps import AppConfig # type: ignore +from django.apps import AppConfig class DjangoMaterializedViewAppConfig(AppConfig): diff --git a/django_google_structured_logger/formatter.py b/django_google_structured_logger/formatter.py index d435444..8eddae4 100644 --- a/django_google_structured_logger/formatter.py +++ b/django_google_structured_logger/formatter.py @@ -1,3 +1,4 @@ +from logging import LogRecord from typing import Dict, Optional from django.conf import settings @@ -6,82 +7,98 @@ from 
.storages import RequestStorage, get_current_request -class GoogleFormatter(jsonlogger.JsonFormatter): - google_source_location_field = "logging.googleapis.com/sourceLocation" - google_operation_field = "logging.googleapis.com/operation" - google_labels_field = "logging.googleapis.com/labels" - google_trace_field = "logging.googleapis.com/trace" - - def add_fields(self, log_record: Dict, record, message_dict: Dict): - """ - Set Google default fields. - - List of Google supported fields: - https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry +class StandardJSONFormatter(jsonlogger.JsonFormatter): + """ + A standard JSON log formatter. It creates a base set of fields, including + severity, source_location, labels, operation, and http_request/http_response. + """ - List of associated JSON fields: - https://cloud.google.com/logging/docs/structured-logging#default-parsers - - Traces and metrics: - https://cloud.google.com/trace/docs/setup/python-ot - - This method sets these fields if present: - - severity - - labels - - operation - - sourceLocation - - trace (string) - REST resource name of trace - - spanId (string) - Trace span ID - - traceSampled (boolean) - W3C trace-context sampling decision - """ + def add_fields(self, log_record: Dict, record: LogRecord, message_dict: Dict): + """Set standard fields for JSON logging.""" super().add_fields(log_record, record, message_dict) current_request: Optional[RequestStorage] = get_current_request() log_record["severity"] = record.levelname - - # Update specialized fields - self._set_trace_correlation(log_record, record) + self._set_source_location(log_record, record) self._set_labels(log_record, current_request) self._set_operation(log_record, current_request) - self._set_source_location(log_record, record) + self._set_http_context(log_record) + + def _set_source_location(self, log_record: Dict, record): + """Set the source location in the log record under the `source_location` key.""" + log_record["source_location"] = { + "file": record.pathname, + "line": record.lineno, + "function": record.funcName, + "logger_name": record.name, + } def _set_labels(self, log_record: Dict, current_request: Optional[RequestStorage]): - """Set the Google labels in the log record.""" + """Set the labels in the log record under the `labels` key.""" labels = { - "user_id": current_request.user_id() if current_request else None, - "user_display_field": current_request.user_display_field() if current_request else None, - **log_record.get(self.google_labels_field, {}), + "user_id": current_request.user_id() if current_request is not None else None, + "user_display_field": current_request.user_display_field() if current_request is not None else None, **log_record.pop("labels", {}), } self.stringify_values(labels) - log_record[self.google_labels_field] = labels + log_record["labels"] = labels def _set_operation(self, log_record: Dict, current_request: Optional[RequestStorage]): - """Set the Google operation details in the log record.""" + """Set the operation details in the log record under the `operation` key.""" operation = { "id": getattr(current_request, "uuid", None), - **log_record.get(self.google_operation_field, {}), **log_record.pop("operation", {}), } - if "first_operation" in log_record: operation["first"] = log_record.pop("first_operation") if "last_operation" in log_record: operation["last"] = log_record.pop("last_operation") + log_record["operation"] = operation - log_record[self.google_operation_field] = operation + def _set_http_context(self, 
log_record: Dict): + """Move request and response data to `http_request` and `http_response` keys.""" + if "request" in log_record: + log_record["http_request"] = log_record.pop("request") + if "response" in log_record: + log_record["http_response"] = log_record.pop("response") - def _set_source_location(self, log_record: Dict, record): - """Set the Google source location in the log record.""" - log_record[self.google_source_location_field] = { - "file": record.pathname, - "line": record.lineno, - "function": record.funcName, - "logger_name": record.name, - } + @staticmethod + def stringify_values(dict_to_convert: Dict): + for key in dict_to_convert: + dict_to_convert[key] = str(dict_to_convert[key]) + + +class GoogleCloudFormatter(StandardJSONFormatter): + """ + A log formatter for Google Cloud Logging. It inherits from StandardJSONFormatter and maps the standard fields to + Google Cloud's specific field names. + """ + + google_source_location_field = "logging.googleapis.com/sourceLocation" + google_operation_field = "logging.googleapis.com/operation" + google_labels_field = "logging.googleapis.com/labels" + google_trace_field = "logging.googleapis.com/trace" + + def add_fields(self, log_record: Dict, record: LogRecord, message_dict: Dict): + """ + Set Google default fields by extending the standard formatter. It populates + standard fields first, then remaps them to Google-specific keys. + """ + super().add_fields(log_record, record, message_dict) + + if "source_location" in log_record: + log_record[self.google_source_location_field] = log_record.pop("source_location") - def _set_trace_correlation(self, log_record: Dict, record): + if "operation" in log_record: + log_record[self.google_operation_field] = log_record.pop("operation") + + if "labels" in log_record: + log_record[self.google_labels_field] = log_record.pop("labels") + + self._set_trace_correlation(log_record, record) + + def _set_trace_correlation(self, log_record: Dict, record: LogRecord): """Set the Google trace correlation fields in the log record.""" trace_id = getattr(record, "otelTraceID", None) span_id = getattr(record, "otelSpanID", None) @@ -94,8 +111,3 @@ def _set_trace_correlation(self, log_record: Dict, record): log_record["spanId"] = span_id if trace_sampled is not None: log_record["traceSampled"] = bool(trace_sampled) - - @staticmethod - def stringify_values(dict_to_convert: Dict): - for key in dict_to_convert: - dict_to_convert[key] = str(dict_to_convert[key]) diff --git a/django_google_structured_logger/middlewares.py b/django_google_structured_logger/middlewares.py index d2cf566..00f729f 100644 --- a/django_google_structured_logger/middlewares.py +++ b/django_google_structured_logger/middlewares.py @@ -3,9 +3,10 @@ import re import uuid from copy import deepcopy -from typing import Any, Dict, List, Optional, Union +from typing import Any, Callable, Dict, List, Optional, Union -from django.http import HttpRequest, HttpResponse +from django.core.handlers.wsgi import WSGIRequest +from django.http import HttpResponse from . 
import settings from .storages import RequestStorage, _current_request @@ -14,38 +15,34 @@ class SetUserContextMiddleware: - def __init__(self, get_response): + def __init__(self, get_response: Callable): self.get_response = get_response + self.user_id_field = settings.LOG_USER_ID_FIELD + self.user_display_field = settings.LOG_USER_DISPLAY_FIELD - def __call__(self, request): + def __call__(self, request: WSGIRequest): _current_request.set( RequestStorage( uuid=str(uuid.uuid4()), - user_id=lambda: self._get_user_attribute( - request.user, settings.LOG_USER_ID_FIELD - ), - user_display_field=lambda: self._get_user_attribute( - request.user, settings.LOG_USER_DISPLAY_FIELD - ), + user_id=lambda: self._get_user_attribute(request.user, self.user_id_field), + user_display_field=lambda: self._get_user_attribute(request.user, self.user_display_field), ) ) return self.get_response(request) @staticmethod - def _get_user_attribute(user, attribute) -> Any: + def _get_user_attribute(user, attribute: str) -> Any: return getattr(user, attribute, None) class LogRequestAndResponseMiddleware: """Middleware for logging requests and responses with sensitive data masked.""" - def __init__(self, get_response): + def __init__(self, get_response: Callable): self.get_response = get_response - self.log_excluded_headers_set = set( - map(str.lower, settings.LOG_EXCLUDED_HEADERS) - ) + self.log_excluded_headers_set = set(map(str.lower, settings.LOG_EXCLUDED_HEADERS)) - def __call__(self, request: HttpRequest) -> HttpResponse: + def __call__(self, request: WSGIRequest) -> HttpResponse: if not settings.LOG_MIDDLEWARE_ENABLED: return self.get_response(request) @@ -54,7 +51,7 @@ def __call__(self, request: HttpRequest) -> HttpResponse: self.process_response(request, response) return response - def process_request(self, request): + def process_request(self, request: WSGIRequest) -> Optional[WSGIRequest]: """ Log necessary data from the incoming request. @@ -68,22 +65,16 @@ def process_request(self, request): try: path = self._empty_value_none(getattr(request, "path", None)) method = self._empty_value_none(getattr(request, "method", None)) - content_type = self._empty_value_none( - getattr(request, "content_type", None) - ) + content_type = self._empty_value_none(getattr(request, "content_type", None)) request_body = self._empty_value_none(getattr(request, "body", None)) request_data = { "request": { "body": self._get_request_body(content_type, request_body), - "query_params": self._empty_value_none( - getattr(request, "GET", None) - ), + "query_params": self._empty_value_none(getattr(request, "GET", None)), "content_type": content_type, "method": method, "path": path, - "headers": self._empty_value_none( - self._exclude_keys(getattr(request, "headers", None)) - ), + "headers": self._empty_value_none(self._exclude_keys(getattr(request, "headers", None))), }, "first_operation": True, } @@ -95,7 +86,7 @@ def process_request(self, request): except Exception as exc: logger.exception(exc) - def process_response(self, request, response): + def process_response(self, request: WSGIRequest, response: HttpResponse) -> HttpResponse: """ Log necessary data from the outgoing response. 
@@ -110,12 +101,8 @@ def process_response(self, request, response): response_data = self._abridge(getattr(response, "data", None)) if response_data is None: response_content = self._abridge(getattr(response, "content", None)) - content_type = self._empty_value_none( - getattr(request, "content_type", None) - ) - response_data = ( - self._get_request_body(content_type, response_content), - ) + content_type = self._empty_value_none(getattr(request, "content_type", None)) + response_data = (self._get_request_body(content_type, response_content),) response_status_code = getattr(response, "status_code", 0) response_headers = self._exclude_keys(getattr(response, "headers", None)) @@ -128,12 +115,8 @@ def process_response(self, request, response): "last_operation": True, } - log_message = ( - f"Response {request.method} {request.path} > {response_status_code}" - ) - logger_method = ( - logger.info if 199 < response_status_code < 300 else logger.warning - ) + log_message = f"Response {request.method} {request.path} > {response_status_code}" + logger_method = logger.info if 199 < response_status_code < 300 else logger.warning logger_method(log_message, extra=data) except Exception as exc: @@ -158,17 +141,11 @@ def _abridge(self, data: Any, current_depth: int = 0) -> Any: return "..DEPTH EXCEEDED" if isinstance(data, dict): - data = { - k: self._abridge(v, current_depth + 1) - for k, v in data.items() - if k != "meta" - } + data = {k: self._abridge(v, current_depth + 1) for k, v in data.items() if k != "meta"} elif isinstance(data, str) and max_str_len and len(data) > max_str_len: return "{value}..SHORTENED".format(value=data[:max_str_len]) elif isinstance(data, list) and max_list_len: - return [ - self._abridge(item, current_depth + 1) for item in data[:max_list_len] - ] + return [self._abridge(item, current_depth + 1) for item in data[:max_list_len]] return data @staticmethod @@ -230,9 +207,7 @@ def partial_mask(value): } _mask_style = mask_styles.get(style) if _mask_style is None: - logger.warning( - f"Invalid mask style {style}. Using default style 'partial'." - ) + logger.warning(f"Invalid mask style {style}. Using default style 'partial'.") _mask_style = partial_mask return _mask_style @@ -259,11 +234,7 @@ def _exclude_keys(self, obj: Optional[Dict]) -> Optional[Dict]: """ if obj is None: return None - return { - k: v - for k, v in obj.items() - if k.lower() not in self.log_excluded_headers_set - } + return {k: v for k, v in obj.items() if k.lower() not in self.log_excluded_headers_set} def _get_request_body(self, content_type, request_body) -> Union[str, Dict, None]: """ @@ -275,13 +246,12 @@ def _get_request_body(self, content_type, request_body) -> Union[str, Dict, None """ def decode_and_abridge(body_bytes): - body_str = body_bytes.decode("UTF-8") if body_bytes else None + body_str: str = body_bytes.decode("UTF-8") if body_bytes else "" try: return self._abridge(json.loads(body_str)) except Exception: # noqa return self._abridge(body_str) - # Using traditional conditional checks instead of `match` for Python < 3.10 compatibility if content_type == "multipart/form-data": return "The image was uploaded to the server" elif content_type == "application/json": @@ -292,7 +262,7 @@ def decode_and_abridge(body_bytes): return self._mask_sensitive_data(content_type) @staticmethod - def _is_ignored(request) -> bool: + def _is_ignored(request: WSGIRequest) -> bool: """ Determine if the request should be ignored based on path. 
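Before the settings change below, here is a minimal sketch of the formatter split introduced above. It is not taken from the diff: it assumes the package is installed, the logger names and the `team` label are illustrative, and empty Django settings are configured only so the package can be imported. It renders the same record with `StandardJSONFormatter` and with `GoogleCloudFormatter`, so the remapping to the `logging.googleapis.com/...` keys is visible on stdout.

```python
# Minimal sketch (illustrative, not part of this diff): render one record with
# each formatter to see the field remapping described in formatter.py.
import logging
import sys

from django.conf import settings as django_settings

# Bare settings are assumed to be enough here; configure before importing the package.
if not django_settings.configured:
    django_settings.configure()

from django_google_structured_logger.formatter import (
    GoogleCloudFormatter,
    StandardJSONFormatter,
)


def make_logger(name: str, formatter: logging.Formatter) -> logging.Logger:
    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(formatter)
    logger = logging.getLogger(name)
    logger.handlers = [handler]
    logger.setLevel(logging.INFO)
    logger.propagate = False
    return logger


# Emits JSON with keys such as "severity", "labels", "operation", "source_location".
make_logger("standard-demo", StandardJSONFormatter()).info(
    "request finished", extra={"labels": {"team": "checkout"}}
)

# Same record via the Google formatter: "labels", "operation" and "source_location"
# are popped and re-emitted under the "logging.googleapis.com/..." field names.
make_logger("google-demo", GoogleCloudFormatter()).info(
    "request finished", extra={"labels": {"team": "checkout"}}
)
```

With no request in flight, `SetUserContextMiddleware` has not populated the request storage, so the `user_id` and `user_display_field` entries in `labels` fall back to `None` (stringified by the formatter), alongside the explicit `team` value passed via `extra`.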
diff --git a/django_google_structured_logger/settings.py b/django_google_structured_logger/settings.py index 20d076a..012bf23 100644 --- a/django_google_structured_logger/settings.py +++ b/django_google_structured_logger/settings.py @@ -2,6 +2,9 @@ from django_google_structured_logger.constants import DEFAULT_SENSITIVE_HEADERS, DEFAULT_SENSITIVE_KEYS +LOG_FORMATTER_CLASS = getattr( + settings, "LOG_FORMATTER_CLASS", "django_google_structured_logger.formatter.StandardJSONFormatter" +) LOG_MAX_STR_LEN = getattr(settings, "LOG_MAX_STR_LEN", 200) LOG_MAX_LIST_LEN = getattr(settings, "LOG_MAX_LIST_LEN", 10) LOG_EXCLUDED_ENDPOINTS = getattr(settings, "LOG_EXCLUDED_ENDPOINTS", []) diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..58464e1 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,670 @@ +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. + +[[package]] +name = "asgiref" +version = "3.8.1" +description = "ASGI specs, helper code, and adapters" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, + {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} + +[package.extras] +tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "contextvars" +version = "2.4" +description = "PEP 567 Backport" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "contextvars-2.4.tar.gz", hash = "sha256:f38c908aaa59c14335eeea12abea5f443646216c4e29380d7bf34d2018e2c39e"}, +] + +[package.dependencies] +immutables = ">=0.9" + +[[package]] +name = "coverage" +version = "7.8.2" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "coverage-7.8.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bd8ec21e1443fd7a447881332f7ce9d35b8fbd2849e761bb290b584535636b0a"}, + {file = "coverage-7.8.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4c26c2396674816deaeae7ded0e2b42c26537280f8fe313335858ffff35019be"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1aec326ed237e5880bfe69ad41616d333712c7937bcefc1343145e972938f9b3"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5e818796f71702d7a13e50c70de2a1924f729228580bcba1607cccf32eea46e6"}, + {file = "coverage-7.8.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:546e537d9e24efc765c9c891328f30f826e3e4808e31f5d0f87c4ba12bbd1622"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = 
"sha256:ab9b09a2349f58e73f8ebc06fac546dd623e23b063e5398343c5270072e3201c"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:fd51355ab8a372d89fb0e6a31719e825cf8df8b6724bee942fb5b92c3f016ba3"}, + {file = "coverage-7.8.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0774df1e093acb6c9e4d58bce7f86656aeed6c132a16e2337692c12786b32404"}, + {file = "coverage-7.8.2-cp310-cp310-win32.whl", hash = "sha256:00f2e2f2e37f47e5f54423aeefd6c32a7dbcedc033fcd3928a4f4948e8b96af7"}, + {file = "coverage-7.8.2-cp310-cp310-win_amd64.whl", hash = "sha256:145b07bea229821d51811bf15eeab346c236d523838eda395ea969d120d13347"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b99058eef42e6a8dcd135afb068b3d53aff3921ce699e127602efff9956457a9"}, + {file = "coverage-7.8.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5feb7f2c3e6ea94d3b877def0270dff0947b8d8c04cfa34a17be0a4dc1836879"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:670a13249b957bb9050fab12d86acef7bf8f6a879b9d1a883799276e0d4c674a"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bdc8bf760459a4a4187b452213e04d039990211f98644c7292adf1e471162b5"}, + {file = "coverage-7.8.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:07a989c867986c2a75f158f03fdb413128aad29aca9d4dbce5fc755672d96f11"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:2db10dedeb619a771ef0e2949ccba7b75e33905de959c2643a4607bef2f3fb3a"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e6ea7dba4e92926b7b5f0990634b78ea02f208d04af520c73a7c876d5a8d36cb"}, + {file = "coverage-7.8.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ef2f22795a7aca99fc3c84393a55a53dd18ab8c93fb431004e4d8f0774150f54"}, + {file = "coverage-7.8.2-cp311-cp311-win32.whl", hash = "sha256:641988828bc18a6368fe72355df5f1703e44411adbe49bba5644b941ce6f2e3a"}, + {file = "coverage-7.8.2-cp311-cp311-win_amd64.whl", hash = "sha256:8ab4a51cb39dc1933ba627e0875046d150e88478dbe22ce145a68393e9652975"}, + {file = "coverage-7.8.2-cp311-cp311-win_arm64.whl", hash = "sha256:8966a821e2083c74d88cca5b7dcccc0a3a888a596a04c0b9668a891de3a0cc53"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e2f6fe3654468d061942591aef56686131335b7a8325684eda85dacdf311356c"}, + {file = "coverage-7.8.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:76090fab50610798cc05241bf83b603477c40ee87acd358b66196ab0ca44ffa1"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bd0a0a5054be160777a7920b731a0570284db5142abaaf81bcbb282b8d99279"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da23ce9a3d356d0affe9c7036030b5c8f14556bd970c9b224f9c8205505e3b99"}, + {file = "coverage-7.8.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c9392773cffeb8d7e042a7b15b82a414011e9d2b5fdbbd3f7e6a6b17d5e21b20"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:876cbfd0b09ce09d81585d266c07a32657beb3eaec896f39484b631555be0fe2"}, + {file = "coverage-7.8.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3da9b771c98977a13fbc3830f6caa85cae6c9c83911d24cb2d218e9394259c57"}, + {file = 
"coverage-7.8.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a990f6510b3292686713bfef26d0049cd63b9c7bb17e0864f133cbfd2e6167f"}, + {file = "coverage-7.8.2-cp312-cp312-win32.whl", hash = "sha256:bf8111cddd0f2b54d34e96613e7fbdd59a673f0cf5574b61134ae75b6f5a33b8"}, + {file = "coverage-7.8.2-cp312-cp312-win_amd64.whl", hash = "sha256:86a323a275e9e44cdf228af9b71c5030861d4d2610886ab920d9945672a81223"}, + {file = "coverage-7.8.2-cp312-cp312-win_arm64.whl", hash = "sha256:820157de3a589e992689ffcda8639fbabb313b323d26388d02e154164c57b07f"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ea561010914ec1c26ab4188aef8b1567272ef6de096312716f90e5baa79ef8ca"}, + {file = "coverage-7.8.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cb86337a4fcdd0e598ff2caeb513ac604d2f3da6d53df2c8e368e07ee38e277d"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26a4636ddb666971345541b59899e969f3b301143dd86b0ddbb570bd591f1e85"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5040536cf9b13fb033f76bcb5e1e5cb3b57c4807fef37db9e0ed129c6a094257"}, + {file = "coverage-7.8.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc67994df9bcd7e0150a47ef41278b9e0a0ea187caba72414b71dc590b99a108"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6e6c86888fd076d9e0fe848af0a2142bf606044dc5ceee0aa9eddb56e26895a0"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:684ca9f58119b8e26bef860db33524ae0365601492e86ba0b71d513f525e7050"}, + {file = "coverage-7.8.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8165584ddedb49204c4e18da083913bdf6a982bfb558632a79bdaadcdafd0d48"}, + {file = "coverage-7.8.2-cp313-cp313-win32.whl", hash = "sha256:34759ee2c65362163699cc917bdb2a54114dd06d19bab860725f94ef45a3d9b7"}, + {file = "coverage-7.8.2-cp313-cp313-win_amd64.whl", hash = "sha256:2f9bc608fbafaee40eb60a9a53dbfb90f53cc66d3d32c2849dc27cf5638a21e3"}, + {file = "coverage-7.8.2-cp313-cp313-win_arm64.whl", hash = "sha256:9fe449ee461a3b0c7105690419d0b0aba1232f4ff6d120a9e241e58a556733f7"}, + {file = "coverage-7.8.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:8369a7c8ef66bded2b6484053749ff220dbf83cba84f3398c84c51a6f748a008"}, + {file = "coverage-7.8.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:159b81df53a5fcbc7d45dae3adad554fdbde9829a994e15227b3f9d816d00b36"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e6fcbbd35a96192d042c691c9e0c49ef54bd7ed865846a3c9d624c30bb67ce46"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:05364b9cc82f138cc86128dc4e2e1251c2981a2218bfcd556fe6b0fbaa3501be"}, + {file = "coverage-7.8.2-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46d532db4e5ff3979ce47d18e2fe8ecad283eeb7367726da0e5ef88e4fe64740"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4000a31c34932e7e4fa0381a3d6deb43dc0c8f458e3e7ea6502e6238e10be625"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43ff5033d657cd51f83015c3b7a443287250dc14e69910577c3e03bd2e06f27b"}, + {file = "coverage-7.8.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = 
"sha256:94316e13f0981cbbba132c1f9f365cac1d26716aaac130866ca812006f662199"}, + {file = "coverage-7.8.2-cp313-cp313t-win32.whl", hash = "sha256:3f5673888d3676d0a745c3d0e16da338c5eea300cb1f4ada9c872981265e76d8"}, + {file = "coverage-7.8.2-cp313-cp313t-win_amd64.whl", hash = "sha256:2c08b05ee8d7861e45dc5a2cc4195c8c66dca5ac613144eb6ebeaff2d502e73d"}, + {file = "coverage-7.8.2-cp313-cp313t-win_arm64.whl", hash = "sha256:1e1448bb72b387755e1ff3ef1268a06617afd94188164960dba8d0245a46004b"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:496948261eaac5ac9cf43f5d0a9f6eb7a6d4cb3bedb2c5d294138142f5c18f2a"}, + {file = "coverage-7.8.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:eacd2de0d30871eff893bab0b67840a96445edcb3c8fd915e6b11ac4b2f3fa6d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b039ffddc99ad65d5078ef300e0c7eed08c270dc26570440e3ef18beb816c1ca"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e49824808d4375ede9dd84e9961a59c47f9113039f1a525e6be170aa4f5c34d"}, + {file = "coverage-7.8.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b069938961dfad881dc2f8d02b47645cd2f455d3809ba92a8a687bf513839787"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:de77c3ba8bb686d1c411e78ee1b97e6e0b963fb98b1637658dd9ad2c875cf9d7"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:1676628065a498943bd3f64f099bb573e08cf1bc6088bbe33cf4424e0876f4b3"}, + {file = "coverage-7.8.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8e1a26e7e50076e35f7afafde570ca2b4d7900a491174ca357d29dece5aacee7"}, + {file = "coverage-7.8.2-cp39-cp39-win32.whl", hash = "sha256:6782a12bf76fa61ad9350d5a6ef5f3f020b57f5e6305cbc663803f2ebd0f270a"}, + {file = "coverage-7.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:1efa4166ba75ccefd647f2d78b64f53f14fb82622bc94c5a5cb0a622f50f1c9e"}, + {file = "coverage-7.8.2-pp39.pp310.pp311-none-any.whl", hash = "sha256:ec455eedf3ba0bbdf8f5a570012617eb305c63cb9f03428d39bf544cb2b94837"}, + {file = "coverage-7.8.2-py3-none-any.whl", hash = "sha256:726f32ee3713f7359696331a18daf0c3b3a70bb0ae71141b9d3c52be7c595e32"}, + {file = "coverage-7.8.2.tar.gz", hash = "sha256:a886d531373a1f6ff9fad2a2ba4a045b68467b779ae729ee0b3b10ac20033b27"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli ; python_full_version <= \"3.11.0a6\""] + +[[package]] +name = "django" +version = "4.2.22" +description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
+optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "django-4.2.22-py3-none-any.whl", hash = "sha256:0a32773b5b7f4e774a155ee253ab24a841fed7e9e9061db08bf2ce9711da404d"}, + {file = "django-4.2.22.tar.gz", hash = "sha256:e726764b094407c313adba5e2e866ab88f00436cad85c540a5bf76dc0a912c9e"}, +] + +[package.dependencies] +asgiref = ">=3.6.0,<4" +sqlparse = ">=0.3.1" +tzdata = {version = "*", markers = "sys_platform == \"win32\""} + +[package.extras] +argon2 = ["argon2-cffi (>=19.1.0)"] +bcrypt = ["bcrypt"] + +[[package]] +name = "django-stubs" +version = "5.2.0" +description = "Mypy stubs for Django" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "django_stubs-5.2.0-py3-none-any.whl", hash = "sha256:cd52da033489afc1357d6245f49e3cc57bf49015877253fb8efc6722ea3d2d2b"}, + {file = "django_stubs-5.2.0.tar.gz", hash = "sha256:07e25c2d3cbff5be540227ff37719cc89f215dfaaaa5eb038a75b01bbfbb2722"}, +] + +[package.dependencies] +asgiref = "*" +django = "*" +django-stubs-ext = ">=5.2.0" +tomli = {version = "*", markers = "python_version < \"3.11\""} +types-PyYAML = "*" +typing-extensions = ">=4.11.0" + +[package.extras] +compatible-mypy = ["mypy (>=1.13,<1.16)"] +oracle = ["oracledb"] +redis = ["redis"] + +[[package]] +name = "django-stubs-ext" +version = "5.2.0" +description = "Monkey-patching and extensions for django-stubs" +optional = false +python-versions = ">=3.10" +groups = ["dev"] +files = [ + {file = "django_stubs_ext-5.2.0-py3-none-any.whl", hash = "sha256:b27ae0aab970af4894ba4e9b3fcd3e03421dc8731516669659ee56122d148b23"}, + {file = "django_stubs_ext-5.2.0.tar.gz", hash = "sha256:00c4ae307b538f5643af761a914c3f8e4e3f25f4e7c6d7098f1906c0d8f2aac9"}, +] + +[package.dependencies] +django = "*" +typing-extensions = "*" + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["dev"] +markers = "python_version == \"3.10\"" +files = [ + {file = "exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10"}, + {file = "exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.6.0", markers = "python_version < \"3.13\""} + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "immutables" +version = "0.21" +description = "Immutable Collections" +optional = false +python-versions = ">=3.8.0" +groups = ["main"] +files = [ + {file = "immutables-0.21-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:14cb09d4f4577ad9ab8770a340dc2158e0a5ab5775cb34c75960167a31104212"}, + {file = "immutables-0.21-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:22ba593f95044ac60d2af463f3dc86cd0e223f8c51df85dff65d663d93e19f51"}, + {file = "immutables-0.21-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25afc81a7bcf26c8364f85e52a14e0095344343e79493c73b0e9a765310a0bed"}, + {file = "immutables-0.21-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eac6e2868567289f88c6810f296940c328a1d38c9abc841eed04963102a27d12"}, + {file = "immutables-0.21-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:ba8bca21a1d034f4577ede1e9553a681dd01199c06b563f1a8316f2623b64985"}, + {file = "immutables-0.21-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:39337bfb42f83dd787a81e2d00e90efa17c4a39a9cf1210b8a50dafe32438aae"}, + {file = "immutables-0.21-cp310-cp310-win32.whl", hash = "sha256:b24aa98f6cdae4ba15baf3aa00e84223bafcd0d3fd7f0443474527ec951845e1"}, + {file = "immutables-0.21-cp310-cp310-win_amd64.whl", hash = "sha256:715f8e5f8e1c35f036f9ac62eaf8b672eec1cdc2b4f9b73864cc64eccc76661c"}, + {file = "immutables-0.21-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5d780c38067047911a2e06a86ba063ba0055618ab5573c8198ef3f368e321303"}, + {file = "immutables-0.21-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9aab9d0f0016f6e0bfe7e4a4cb831ef20063da6468b1bbc71d06ef285781ee9e"}, + {file = "immutables-0.21-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ff83390b05d3372acb9a0c928f6cc20c78e74ca20ed88eb941f84a63b65e444"}, + {file = "immutables-0.21-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d01497713e71509c4481ffccdbe3a47b94969345f4e92f814d6626f7c0a4c304"}, + {file = "immutables-0.21-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:bc7844c9fbb5bece5bfdf2bf8ea74d308f42f40b0665fd25c58abf56d7db024a"}, + {file = "immutables-0.21-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:984106fa4345efd9f96de22e9949fc97bac8598bdebee03c20b2497a88bff3b7"}, + {file = "immutables-0.21-cp311-cp311-win32.whl", hash = "sha256:1bdb5200518518601377e4877d5034e7c535e9ea8a9d601ed8b0eedef0c7becd"}, + {file = "immutables-0.21-cp311-cp311-win_amd64.whl", hash = "sha256:dd00c34f431c54c95e7b84bfdbdeacb4f039a6a24eb0c1f7aa4b168bb9a6ad0a"}, + {file = "immutables-0.21-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ef1ed262094b755903122c3c3a83ad0e0d5c3ab7887cda12b2fe878769d1ee0d"}, + {file = "immutables-0.21-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce604f81d9d8f26e60b52ebcb56bb5c0462c8ea50fb17868487d15f048a2f13e"}, + {file = "immutables-0.21-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b48b116aaca4500398058b5a87814857a60c4cb09417fecc12d7da0f5639b73d"}, + {file = "immutables-0.21-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dad7c0c74b285cc0e555ec0e97acbdc6f1862fcd16b99abd612df3243732e741"}, + {file = "immutables-0.21-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e44346e2221a5a676c880ca8e0e6429fa24d1a4ae562573f5c04d7f2e759b030"}, + {file = "immutables-0.21-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8b10139b529a460e53fe8be699ebd848c54c8a33ebe67763bcfcc809a475a26f"}, + {file = "immutables-0.21-cp312-cp312-win32.whl", hash = "sha256:fc512d808662614feb17d2d92e98f611d69669a98c7af15910acf1dc72737038"}, + {file = "immutables-0.21-cp312-cp312-win_amd64.whl", hash = "sha256:461dcb0f58a131045155e52a2c43de6ec2fe5ba19bdced6858a3abb63cee5111"}, + {file = "immutables-0.21-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:79674b51aa8dd983f9ac55f7f67b433b1df84a6b4f28ab860588389a5659485b"}, + {file = "immutables-0.21-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:93c8350f8f7d0d9693f708229d9d0578e6f3b785ce6da4bced1da97137aacfad"}, + {file = "immutables-0.21-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:583d2a63e444ce1538cc2bda56ae1f4a1a11473dbc0377c82b516bc7eec3b81e"}, + {file = "immutables-0.21-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b274a52da9b106db55eceb93fc1aea858c4e6f4740189e3548e38613eafc2021"}, + {file = 
"immutables-0.21-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:338bede057250b33716a3e4892e15df0bf5a5ddbf1d67ead996b3e680b49ef9e"}, + {file = "immutables-0.21-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8781c89583b68f604cf30f0978b722165824c3075888639fde771bf1a3e12dc0"}, + {file = "immutables-0.21-cp313-cp313-win32.whl", hash = "sha256:e97ea83befad873712f283c0cccd630f70cba753e207b4868af28d5b85e9dc54"}, + {file = "immutables-0.21-cp313-cp313-win_amd64.whl", hash = "sha256:cfcb23bd898f5a4ef88692b42c51f52ca7373a35ba4dcc215060a668639eb5da"}, + {file = "immutables-0.21-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:07a37d8699255402a10784d4d45f2bcc00ca7dba8da763207a834b15767e6c62"}, + {file = "immutables-0.21-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9139fd80bb05501216f49c4306bb80d0c1a08c3f0f621ed2939bf52d7f762661"}, + {file = "immutables-0.21-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc6fc7e917e281361ad243be1a3cb56a7633de88ee67c94cdf5651958ead30d9"}, + {file = "immutables-0.21-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6a577f55eaaf763b685eef9710edbeb7ee95e2e5f54e7e5e0fd0f60ae2eb648"}, + {file = "immutables-0.21-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:ca912c1bb35615ccbe361a6bb76e6fd43827394102467967d5599d78b50dd0f4"}, + {file = "immutables-0.21-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:586e88ca7ed44b7bb2cd7b212abd2637b51bd95bdb4856ab111b44715a62071c"}, + {file = "immutables-0.21-cp38-cp38-win32.whl", hash = "sha256:21adc6b478a58692c79c5bf316b39d3fd0552441d2b38eef1782a7555deee484"}, + {file = "immutables-0.21-cp38-cp38-win_amd64.whl", hash = "sha256:ecff5274357dc18aae053e5e10b8eee5e9b4d6cc774d34148c992cb2eb787ec3"}, + {file = "immutables-0.21-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:e2aadf3bdd90daa0e8cb9c3cde4070e1021036e3b57f571a007ce24f323e47a9"}, + {file = "immutables-0.21-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5f8f507731d4d15e0c579aa77d8482471f988dc0f451e4bf3853ec36ccd42627"}, + {file = "immutables-0.21-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb9a378a4480381d7d3d63b0d201cf610eae0bf70e26a9306e3e631c9bd64010"}, + {file = "immutables-0.21-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7b5920bbfcaf038894c8ce4ed2eff0b31c3559810a61806db751be8ab4d703"}, + {file = "immutables-0.21-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8b90702d1fe313e8273ae7abb46fc0f0a87b47c1c9a83aed9a161301146e655c"}, + {file = "immutables-0.21-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:71cbbc6fbe7e7321648047ff9273f4605f8bd5ce456841a65ef151080e9d3481"}, + {file = "immutables-0.21-cp39-cp39-win32.whl", hash = "sha256:c44f286c47dc0d4d7b5bf19fbe975e6d57c56d2878cea413e1ec7a4bfffb2727"}, + {file = "immutables-0.21-cp39-cp39-win_amd64.whl", hash = "sha256:cf15314c39484b8947a4e20c3526021272510592fb2807b5136a2fcd6ab0151b"}, + {file = "immutables-0.21.tar.gz", hash = "sha256:b55ffaf0449790242feb4c56ab799ea7af92801a0a43f9e2f4f8af2ab24dfc4a"}, +] + +[package.extras] +test = ["flake8 (>=5.0,<6.0)", "mypy (>=1.4,<2.0)", "pycodestyle (>=2.9,<3.0)", "pytest (>=7.4,<8.0)"] + +[[package]] +name = "iniconfig" +version = "2.1.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "iniconfig-2.1.0-py3-none-any.whl", hash = 
"sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760"}, + {file = "iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7"}, +] + +[[package]] +name = "mypy" +version = "1.16.0" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "mypy-1.16.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7909541fef256527e5ee9c0a7e2aeed78b6cda72ba44298d1334fe7881b05c5c"}, + {file = "mypy-1.16.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e71d6f0090c2256c713ed3d52711d01859c82608b5d68d4fa01a3fe30df95571"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:936ccfdd749af4766be824268bfe22d1db9eb2f34a3ea1d00ffbe5b5265f5491"}, + {file = "mypy-1.16.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4086883a73166631307fdd330c4a9080ce24913d4f4c5ec596c601b3a4bdd777"}, + {file = "mypy-1.16.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:feec38097f71797da0231997e0de3a58108c51845399669ebc532c815f93866b"}, + {file = "mypy-1.16.0-cp310-cp310-win_amd64.whl", hash = "sha256:09a8da6a0ee9a9770b8ff61b39c0bb07971cda90e7297f4213741b48a0cc8d93"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:9f826aaa7ff8443bac6a494cf743f591488ea940dd360e7dd330e30dd772a5ab"}, + {file = "mypy-1.16.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:82d056e6faa508501af333a6af192c700b33e15865bda49611e3d7d8358ebea2"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:089bedc02307c2548eb51f426e085546db1fa7dd87fbb7c9fa561575cf6eb1ff"}, + {file = "mypy-1.16.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6a2322896003ba66bbd1318c10d3afdfe24e78ef12ea10e2acd985e9d684a666"}, + {file = "mypy-1.16.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:021a68568082c5b36e977d54e8f1de978baf401a33884ffcea09bd8e88a98f4c"}, + {file = "mypy-1.16.0-cp311-cp311-win_amd64.whl", hash = "sha256:54066fed302d83bf5128632d05b4ec68412e1f03ef2c300434057d66866cea4b"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c5436d11e89a3ad16ce8afe752f0f373ae9620841c50883dc96f8b8805620b13"}, + {file = "mypy-1.16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f2622af30bf01d8fc36466231bdd203d120d7a599a6d88fb22bdcb9dbff84090"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d045d33c284e10a038f5e29faca055b90eee87da3fc63b8889085744ebabb5a1"}, + {file = "mypy-1.16.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b4968f14f44c62e2ec4a038c8797a87315be8df7740dc3ee8d3bfe1c6bf5dba8"}, + {file = "mypy-1.16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:eb14a4a871bb8efb1e4a50360d4e3c8d6c601e7a31028a2c79f9bb659b63d730"}, + {file = "mypy-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:bd4e1ebe126152a7bbaa4daedd781c90c8f9643c79b9748caa270ad542f12bec"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a9e056237c89f1587a3be1a3a70a06a698d25e2479b9a2f57325ddaaffc3567b"}, + {file = "mypy-1.16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0b07e107affb9ee6ce1f342c07f51552d126c32cd62955f59a7db94a51ad12c0"}, + {file = 
"mypy-1.16.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6fb60cbd85dc65d4d63d37cb5c86f4e3a301ec605f606ae3a9173e5cf34997b"}, + {file = "mypy-1.16.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a7e32297a437cc915599e0578fa6bc68ae6a8dc059c9e009c628e1c47f91495d"}, + {file = "mypy-1.16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:afe420c9380ccec31e744e8baff0d406c846683681025db3531b32db56962d52"}, + {file = "mypy-1.16.0-cp313-cp313-win_amd64.whl", hash = "sha256:55f9076c6ce55dd3f8cd0c6fff26a008ca8e5131b89d5ba6d86bd3f47e736eeb"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f56236114c425620875c7cf71700e3d60004858da856c6fc78998ffe767b73d3"}, + {file = "mypy-1.16.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:15486beea80be24ff067d7d0ede673b001d0d684d0095803b3e6e17a886a2a92"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f2ed0e0847a80655afa2c121835b848ed101cc7b8d8d6ecc5205aedc732b1436"}, + {file = "mypy-1.16.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:eb5fbc8063cb4fde7787e4c0406aa63094a34a2daf4673f359a1fb64050e9cb2"}, + {file = "mypy-1.16.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a5fcfdb7318c6a8dd127b14b1052743b83e97a970f0edb6c913211507a255e20"}, + {file = "mypy-1.16.0-cp39-cp39-win_amd64.whl", hash = "sha256:2e7e0ad35275e02797323a5aa1be0b14a4d03ffdb2e5f2b0489fa07b89c67b21"}, + {file = "mypy-1.16.0-py3-none-any.whl", hash = "sha256:29e1499864a3888bca5c1542f2d7232c6e586295183320caa95758fc84034031"}, + {file = "mypy-1.16.0.tar.gz", hash = "sha256:84b94283f817e2aa6350a14b4a8fb2a35a53c286f97c9d30f53b63620e7af8ab"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +pathspec = ">=0.9.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +description = "Type system extensions for programs checked with the mypy type checker." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505"}, + {file = "mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558"}, +] + +[[package]] +name = "packaging" +version = "25.0" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484"}, + {file = "packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f"}, +] + +[[package]] +name = "pathspec" +version = "0.12.1" +description = "Utility library for gitignore style pattern matching of file paths." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, + {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, + {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, +] + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["coverage", "pytest", "pytest-benchmark"] + +[[package]] +name = "pygments" +version = "2.19.1" +description = "Pygments is a syntax highlighting package written in Python." +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, +] + +[package.extras] +windows-terminal = ["colorama (>=0.4.6)"] + +[[package]] +name = "pytest" +version = "8.4.0" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest-8.4.0-py3-none-any.whl", hash = "sha256:f40f825768ad76c0977cbacdf1fd37c6f7a468e460ea6a0636078f8972d4517e"}, + {file = "pytest-8.4.0.tar.gz", hash = "sha256:14d920b48472ea0dbf68e45b96cd1ffda4705f33307dcc86c676c1b5104838a6"}, +] + +[package.dependencies] +colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1", markers = "python_version < \"3.11\""} +iniconfig = ">=1" +packaging = ">=20" +pluggy = ">=1.5,<2" +pygments = ">=2.7.2" +tomli = {version = ">=1", markers = "python_version < \"3.11\""} + +[package.extras] +dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "6.1.1" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "pytest_cov-6.1.1-py3-none-any.whl", hash = "sha256:bddf29ed2d0ab6f4df17b4c55b0a657287db8684af9c42ea546b21b1041b3dde"}, + {file = "pytest_cov-6.1.1.tar.gz", hash = "sha256:46935f7aaefba760e716c2ebfbe1c216240b9592966e7da99ea8292d4d3e2a0a"}, +] + +[package.dependencies] +coverage = {version = ">=7.5", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] + +[[package]] +name = "pytest-django" +version = "4.11.1" +description = "A Django plugin for pytest." 
+optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10"}, + {file = "pytest_django-4.11.1.tar.gz", hash = "sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991"}, +] + +[package.dependencies] +pytest = ">=7.0.0" + +[package.extras] +docs = ["sphinx", "sphinx_rtd_theme"] +testing = ["Django", "django-configurations (>=2.0)"] + +[[package]] +name = "pytest-mock" +version = "3.14.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "pytest_mock-3.14.1-py3-none-any.whl", hash = "sha256:178aefcd11307d874b4cd3100344e7e2d888d9791a6a1d9bfe90fbc1b74fd1d0"}, + {file = "pytest_mock-3.14.1.tar.gz", hash = "sha256:159e9edac4c451ce77a5cdb9fc5d1100708d2dd4ba3c3df572f14097351af80e"}, +] + +[package.dependencies] +pytest = ">=6.2.5" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + +[[package]] +name = "python-json-logger" +version = "2.0.7" +description = "A python library adding a json log formatter" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "python-json-logger-2.0.7.tar.gz", hash = "sha256:23e7ec02d34237c5aa1e29a070193a4ea87583bb4e7f8fd06d3de8264c4b2e1c"}, + {file = "python_json_logger-2.0.7-py3-none-any.whl", hash = "sha256:f380b826a991ebbe3de4d897aeec42760035ac760345e57b812938dc8b35e2bd"}, +] + +[[package]] +name = "ruff" +version = "0.11.13" +description = "An extremely fast Python linter and code formatter, written in Rust." +optional = false +python-versions = ">=3.7" +groups = ["dev"] +files = [ + {file = "ruff-0.11.13-py3-none-linux_armv6l.whl", hash = "sha256:4bdfbf1240533f40042ec00c9e09a3aade6f8c10b6414cf11b519488d2635d46"}, + {file = "ruff-0.11.13-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:aef9c9ed1b5ca28bb15c7eac83b8670cf3b20b478195bd49c8d756ba0a36cf48"}, + {file = "ruff-0.11.13-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53b15a9dfdce029c842e9a5aebc3855e9ab7771395979ff85b7c1dedb53ddc2b"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab153241400789138d13f362c43f7edecc0edfffce2afa6a68434000ecd8f69a"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c51f93029d54a910d3d24f7dd0bb909e31b6cd989a5e4ac513f4eb41629f0dc"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1808b3ed53e1a777c2ef733aca9051dc9bf7c99b26ece15cb59a0320fbdbd629"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:d28ce58b5ecf0f43c1b71edffabe6ed7f245d5336b17805803312ec9bc665933"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:55e4bc3a77842da33c16d55b32c6cac1ec5fb0fbec9c8c513bdce76c4f922165"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:633bf2c6f35678c56ec73189ba6fa19ff1c5e4807a78bf60ef487b9dd272cc71"}, + {file = "ruff-0.11.13-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ffbc82d70424b275b089166310448051afdc6e914fdab90e08df66c43bb5ca9"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4a9ddd3ec62a9a89578c85842b836e4ac832d4a2e0bfaad3b02243f930ceafcc"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_armv7l.whl", hash = 
"sha256:d237a496e0778d719efb05058c64d28b757c77824e04ffe8796c7436e26712b7"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_i686.whl", hash = "sha256:26816a218ca6ef02142343fd24c70f7cd8c5aa6c203bca284407adf675984432"}, + {file = "ruff-0.11.13-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:51c3f95abd9331dc5b87c47ac7f376db5616041173826dfd556cfe3d4977f492"}, + {file = "ruff-0.11.13-py3-none-win32.whl", hash = "sha256:96c27935418e4e8e77a26bb05962817f28b8ef3843a6c6cc49d8783b5507f250"}, + {file = "ruff-0.11.13-py3-none-win_amd64.whl", hash = "sha256:29c3189895a8a6a657b7af4e97d330c8a3afd2c9c8f46c81e2fc5a31866517e3"}, + {file = "ruff-0.11.13-py3-none-win_arm64.whl", hash = "sha256:b4385285e9179d608ff1d2fb9922062663c658605819a6876d8beef0c30b7f3b"}, + {file = "ruff-0.11.13.tar.gz", hash = "sha256:26fa247dc68d1d4e72c179e08889a25ac0c7ba4d78aecfc835d49cbfd60bf514"}, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922"}, + {file = "setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\"", "ruff (>=0.8.0) ; sys_platform != \"cygwin\""] +core = ["importlib_metadata (>=6) ; python_version < \"3.10\"", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1) ; python_version < \"3.11\"", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21) ; python_version >= \"3.9\" and sys_platform != \"cygwin\"", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf ; sys_platform != \"cygwin\"", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2) ; python_version < \"3.10\"", "jaraco.develop (>=7.21) ; sys_platform != \"cygwin\"", "mypy (==1.14.*)", "pytest-mypy"] + +[[package]] +name = "setuptools-scm" +version = "7.1.0" +description = "the blessed package to manage your versions by scm tags" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "setuptools_scm-7.1.0-py3-none-any.whl", hash = "sha256:73988b6d848709e2af142aa48c986ea29592bbcfca5375678064708205253d8e"}, + {file = "setuptools_scm-7.1.0.tar.gz", hash = "sha256:6c508345a771aad7d56ebff0e70628bf2b0ec7573762be9960214730de278f27"}, +] + +[package.dependencies] +packaging = ">=20.0" +setuptools = "*" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} +typing-extensions = "*" + +[package.extras] +test = ["pytest (>=6.2)", "virtualenv 
(>20)"] +toml = ["setuptools (>=42)"] + +[[package]] +name = "sqlparse" +version = "0.5.3" +description = "A non-validating SQL parser." +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, +] + +[package.extras] +dev = ["build", "hatch"] +doc = ["sphinx"] + +[[package]] +name = "tomli" +version = "2.2.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = 
"sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] +markers = {main = "python_version == \"3.10\"", dev = "python_full_version <= \"3.11.0a6\""} + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250516" +description = "Typing stubs for PyYAML" +optional = false +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "types_pyyaml-6.0.12.20250516-py3-none-any.whl", hash = "sha256:8478208feaeb53a34cb5d970c56a7cd76b72659442e733e268a94dc72b2d0530"}, + {file = "types_pyyaml-6.0.12.20250516.tar.gz", hash = "sha256:9f21a70216fc0fa1b216a8176db5f9e0af6eb35d2f2932acb87689d03a5bf6ba"}, +] + +[[package]] +name = "typing-extensions" +version = "4.14.0" +description = "Backported and Experimental Type Hints for Python 3.9+" +optional = false +python-versions = ">=3.9" +groups = ["main", "dev"] +files = [ + {file = "typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af"}, + {file = "typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4"}, +] + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main", "dev"] +markers = "sys_platform == \"win32\"" +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, +] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.10" 
+content-hash = "0a0a04a21475b99a5389793d04fa9f7be2753fc5328f7db652a7ff0e5255b28d" diff --git a/poetry.toml b/poetry.toml new file mode 100644 index 0000000..ab1033b --- /dev/null +++ b/poetry.toml @@ -0,0 +1,2 @@ +[virtualenvs] +in-project = true diff --git a/pyproject.toml b/pyproject.toml index 9b1f899..46babc1 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,9 +1,9 @@ [project] name = "django-google-structured-logger" -version = "2.8.10" +version = "3.0.0" description = "Plugin for django to support Google Structured Logger" authors = [{ name = "Farruh Sheripov", email = "sheripov.farruh@gmail.com" }] -requires-python = ">=3.9" +requires-python = ">=3.10" readme = "README.md" dependencies = [ "python-json-logger>=2.0.7,<3", @@ -21,13 +21,22 @@ include = ["django_google_structured_logger"] [tool.hatch.build.targets.wheel] include = ["django_google_structured_logger"] +[tool.poetry.group.dev.dependencies] +pytest = "^8.4.0" +pytest-django = "^4.11.1" +pytest-mock = "^3.14.1" +ruff = "^0.11.13" +mypy = "^1.16.0" +django-stubs = "^5.2.0" +pytest-cov = "^6.1.1" + [build-system] requires = ["hatchling"] build-backend = "hatchling.build" [tool.mypy] mypy_path = "./src" -python_version = "3.8" +python_version = "3.10" check_untyped_defs = true ignore_missing_imports = true warn_unused_ignores = true @@ -36,30 +45,16 @@ warn_unused_configs = true warn_no_return = false pretty = true -[tool.isort] -profile = "black" -line_length = 120 -skip_glob = ["**/migrations/*.py"] +[tool.pytest.ini_options] +DJANGO_SETTINGS_MODULE = "tests.settings" +testpaths = ["tests"] +addopts = "--cov=django_google_structured_logger --cov-report=term-missing --cov-report=xml --cov-report=html" +pythonpath = ["."] -[tool.black] +[tool.ruff] line-length = 120 -target-version = ['py38'] -exclude = ''' -( - /( - \.eggs # exclude a few common directories in the - | \.git # root of the project - | \.hg - | \.mypy_cache - | \.tox - | \.venv - | _build - | buck-out - | build - | dist - | venv - | migrations - | \.exports - )/ -) -''' + +[tool.django-stubs] +django_settings_module = "tests.settings" +strict_settings = false +ignore_missing_settings = true diff --git a/setup.cfg b/setup.cfg index 096fb50..3943c74 100644 --- a/setup.cfg +++ b/setup.cfg @@ -36,7 +36,7 @@ classifiers = [options] include_package_data = true packages = find: -python_requires = >= 3.8 +python_requires = >= 3.10 install_requires = Django >= 3, < 6 python-json-logger == 2.0.7 diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 0000000..8fba0a1 --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,246 @@ +import json +import logging +import uuid +from typing import Callable, Generator +from unittest.mock import Mock + +import pytest +from django.conf import LazySettings +from django.contrib.auth import get_user_model +from django.core.handlers.wsgi import WSGIRequest +from django.http import HttpResponse +from django.test import Client, RequestFactory + +from django_google_structured_logger.middlewares import LogRequestAndResponseMiddleware +from django_google_structured_logger.storages import RequestStorage, _current_request + +User = get_user_model() + + +@pytest.fixture +def request_factory() -> RequestFactory: + """Django request factory for creating mock requests.""" + return RequestFactory() + + +@pytest.fixture +def client() -> Client: + """Django test client.""" + return Client() + + +@pytest.fixture +def mock_user() -> Mock: 
+ """Mock authenticated user.""" + user = Mock(spec=User) + user.id = 1 + user.email = "test@example.com" + user.username = "testuser" + user.is_authenticated = True + user.is_anonymous = False + return user + + +@pytest.fixture +def anonymous_user() -> Mock: + """Mock anonymous user.""" + user = Mock(spec=User) + user.id = None + user.email = None + user.username = None + user.is_authenticated = False + user.is_anonymous = True + return user + + +@pytest.fixture +def authenticated_request(request_factory: RequestFactory, mock_user: Mock) -> WSGIRequest: + """HTTP request with authenticated user.""" + request = request_factory.get("/test/") + request.user = mock_user + return request + + +@pytest.fixture +def anonymous_request(request_factory: RequestFactory, anonymous_user: Mock) -> WSGIRequest: + """HTTP request with anonymous user.""" + request = request_factory.get("/test/") + request.user = anonymous_user + return request + + +@pytest.fixture +def post_request_with_data(request_factory: RequestFactory, mock_user: Mock) -> WSGIRequest: + """POST request with JSON data.""" + data = {"username": "test", "password": "secret123", "email": "test@example.com"} + request = request_factory.post("/api/login/", data=data, content_type="application/json") + request.user = mock_user + return request + + +@pytest.fixture +def request_with_sensitive_headers(request_factory: RequestFactory, mock_user: Mock) -> WSGIRequest: + """Request with sensitive headers.""" + request = request_factory.get( + "/api/users/", + HTTP_AUTHORIZATION="Bearer secret-token", + HTTP_X_API_KEY="api-key-123", + HTTP_COOKIE="sessionid=abc123", + ) + request.user = mock_user + return request + + +@pytest.fixture +def mock_request_storage() -> Generator[RequestStorage, None, None]: + """Mock request storage for context testing.""" + storage = RequestStorage(uuid=str(uuid.uuid4()), user_id=lambda: 1, user_display_field=lambda: "test@example.com") + _current_request.set(storage) + yield storage + _current_request.set(None) + + +@pytest.fixture +def mock_google_cloud_settings(settings: LazySettings) -> LazySettings: + """Mock Google Cloud Project settings.""" + settings.GOOGLE_CLOUD_PROJECT = "test-project-123" + return settings + + +@pytest.fixture +def middleware_settings(settings: LazySettings) -> LazySettings: + """Configure middleware settings for testing.""" + settings.LOG_MIDDLEWARE_ENABLED = True + settings.LOG_MAX_STR_LEN = 50 + settings.LOG_MAX_LIST_LEN = 5 + settings.LOG_MAX_DEPTH = 3 + settings.LOG_MASK_STYLE = "partial" + settings.LOG_USER_ID_FIELD = "id" + settings.LOG_USER_DISPLAY_FIELD = "email" + settings.LOG_EXCLUDED_ENDPOINTS = ["/health/", "/metrics/"] + return settings + + +@pytest.fixture +def mock_response() -> Mock: + """Mock HTTP response.""" + response = Mock() + response.status_code = 200 + response.content = b'{"success": true}' + response.data = {"success": True} + response.headers = {"Content-Type": "application/json"} + return response + + +@pytest.fixture(autouse=True) +def clear_context(): + """Clear context vars before each test.""" + _current_request.set(None) + yield + _current_request.set(None) + + +@pytest.fixture +def error_response() -> Mock: + """ + Mock HttpResponse object with error status code for testing logging of unsuccessful responses. 
+ """ + response = Mock(spec=LogRequestAndResponseMiddleware) + response.status_code = 500 + error_data = {"error": "Internal Server Error", "status": "failed"} + response.content = json.dumps(error_data).encode("utf-8") + response.data = error_data + response.headers = {"Content-Type": "application/json"} + return response + + +@pytest.fixture +def request_to_ignored_endpoint(request_factory: RequestFactory) -> WSGIRequest: + """ + A request to an endpoint that should be ignored according to the LOG_EXCLUDED_ENDPOINTS settings. + """ + return request_factory.get("/health/") + + +@pytest.fixture +def request_with_long_data(request_factory: RequestFactory) -> WSGIRequest: + """ + A request containing a body with a very long string, a long list, and a deeply nested dictionary to test the data reduction mechanism. + """ + long_string = "a" * 300 + long_list = list(range(20)) + deep_dict = {"a": {"b": {"c": {"d": {"e": "f"}}}}} + data = {"long_str": long_string, "long_list": long_list, "deep_dict": deep_dict} + return request_factory.post("/long-data/", data=json.dumps(data), content_type="application/json") + + +@pytest.fixture +def response_with_long_data() -> Mock: + """ + A response containing a body with a very long string, a long list, and a deeply nested dictionary to test the data reduction mechanism. + """ + response = Mock(spec=LogRequestAndResponseMiddleware) + long_string = "a" * 300 + long_list = list(range(20)) + deep_dict = {"a": {"b": {"c": {"d": {"e": "f"}}}}} + data = {"long_str": long_string, "long_list": long_list, "deep_dict": deep_dict} + + response.status_code = 200 + response.data = data + response.content = json.dumps(data).encode("utf-8") + response.headers = {"Content-Type": "application/json"} + return response + + +@pytest.fixture +def mock_graphene_info(mock_user: Mock) -> Mock: + """ + Mock of the `info` object passed to Graphene resolvers. + Contains a context with a user object. + """ + info = Mock() + info.context.user = mock_user + return info + + +@pytest.fixture +def log_record_with_trace() -> logging.LogRecord: + """ + A `logging.LogRecord` instance with OpenTelemetry tracing attributes for testing GoogleCloudFormatter. + """ + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="/app/tests.py", + lineno=10, + msg="Test message with trace", + args=(), + exc_info=None, + ) + record.otelTraceID = "a1b2c3d4e5f6a1b2c3d4e5f6a1b2c3d4" + record.otelSpanID = "a1b2c3d4e5f6a1b2" + record.otelTraceSampled = True + return record + + +@pytest.fixture +def base_log_record() -> logging.LogRecord: + """A basic logging.LogRecord instance for formatter tests.""" + return logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="/app/tests.py", + lineno=42, + msg="This is a test message", + args=(), + exc_info=None, + func="test_function", + ) + + +@pytest.fixture +def get_response_factory() -> Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]]: + def factory(response: HttpResponse) -> Callable[[WSGIRequest], HttpResponse]: + return lambda request: response + + return factory diff --git a/tests/settings.py b/tests/settings.py new file mode 100644 index 0000000..43bfc8a --- /dev/null +++ b/tests/settings.py @@ -0,0 +1,151 @@ +""" +Django settings for testing django_google_structured_logger. +""" + +import os +from typing import List + +from django_google_structured_logger.constants import DEFAULT_SENSITIVE_HEADERS, DEFAULT_SENSITIVE_KEYS + +# Build paths inside the project like this: BASE_DIR / 'subdir'. 
+BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + +# SECURITY WARNING: keep the secret key used in production secret! +SECRET_KEY = "test-secret-key-not-for-production" + +# SECURITY WARNING: don't run with debug turned on in production! +DEBUG = True + +ALLOWED_HOSTS = ["testserver", "localhost", "127.0.0.1"] + +# Application definition +INSTALLED_APPS = [ + "django.contrib.admin", + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.messages", + "django.contrib.staticfiles", + "django_google_structured_logger", +] + +MIDDLEWARE = [ + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", + "django_google_structured_logger.middlewares.SetUserContextMiddleware", + "django_google_structured_logger.middlewares.LogRequestAndResponseMiddleware", +] + +ROOT_URLCONF = "tests.urls" + +TEMPLATES = [ + { + "BACKEND": "django.template.backends.django.DjangoTemplates", + "DIRS": [], + "APP_DIRS": True, + "OPTIONS": { + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.contrib.messages.context_processors.messages", + ], + }, + }, +] + +# Database - SQLite in memory for testing +DATABASES = { + "default": { + "ENGINE": "django.db.backends.sqlite3", + "NAME": ":memory:", + } +} + +# Password validation +AUTH_PASSWORD_VALIDATORS: List[str] = [] + +# Internationalization +LANGUAGE_CODE = "en-us" +TIME_ZONE = "UTC" +USE_I18N = True +USE_TZ = True + +# Static files (CSS, JavaScript, Images) +STATIC_URL = "/static/" + +# Default primary key field type +DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField" + +# Google Cloud settings for testing +GOOGLE_CLOUD_PROJECT = "test-project-123" + +# Django Google Structured Logger settings +LOG_MAX_STR_LEN = 200 +LOG_MAX_LIST_LEN = 10 +LOG_MAX_DEPTH = 4 +LOG_EXCLUDED_ENDPOINTS = ["/health/", "/metrics/"] +LOG_SENSITIVE_KEYS = DEFAULT_SENSITIVE_KEYS +LOG_MASK_STYLE = "partial" +LOG_MIDDLEWARE_ENABLED = True +LOG_EXCLUDED_HEADERS = DEFAULT_SENSITIVE_HEADERS +LOG_USER_ID_FIELD = "id" +LOG_USER_DISPLAY_FIELD = "email" + +# Logging configuration for testing +LOGGING = { + "version": 1, + "disable_existing_loggers": False, + "formatters": { + "json": { + "()": "django_google_structured_logger.formatter.StandardJSONFormatter", + }, + "simple": { + "format": "{levelname} {name} {message}", + "style": "{", + }, + }, + "handlers": { + "console": { + "class": "logging.StreamHandler", + "formatter": "simple", + }, + "json-handler": { + "class": "logging.StreamHandler", + "formatter": "json", + }, + }, + "root": { + "handlers": ["console"], + "level": "INFO", + }, + "loggers": { + "django_google_structured_logger": { + "handlers": ["json-handler"], + "level": "DEBUG", + "propagate": False, + }, + "django": { + "handlers": ["console"], + "level": "WARNING", + "propagate": False, + }, + }, +} + + +# Disable migrations for faster testing +class DisableMigrations: + def __contains__(self, item): + return True + + def __getitem__(self, item): + return None + + +MIGRATION_MODULES = DisableMigrations() diff --git a/tests/test_formater.py b/tests/test_formater.py new 
file mode 100644 index 0000000..1fc2897 --- /dev/null +++ b/tests/test_formater.py @@ -0,0 +1,107 @@ +import json +from logging import LogRecord + +from django_google_structured_logger.formatter import GoogleCloudFormatter, StandardJSONFormatter +from django_google_structured_logger.storages import RequestStorage + + +class TestStandardJSONFormatter: + def test_correct_log_structure_with_context(self, base_log_record: LogRecord, mock_request_storage: RequestStorage): + """ + Tests that the formatter correctly structures log records and enriches them with data from the request context. + """ + formatter = StandardJSONFormatter() + # Add request as a direct attribute for the formatter to process + base_log_record.request = {"method": "GET", "url": "/test"} + formatted_log = formatter.format(base_log_record) + log_dict = json.loads(formatted_log) + + # Check for standard fields + assert log_dict["severity"] == "INFO" + assert log_dict["message"] == "This is a test message" + + # Check for source location + assert log_dict["source_location"] == { + "file": "/app/tests.py", + "line": 42, + "function": "test_function", + "logger_name": "test_logger", + } + + # Check for labels from context + assert log_dict["labels"] == {"user_id": "1", "user_display_field": "test@example.com"} + + # Check for operation from context + assert log_dict["operation"]["id"] == mock_request_storage.uuid + + # Check for http_request + assert log_dict["http_request"] == {"method": "GET", "url": "/test"} + + def test_works_without_request_context(self, base_log_record: LogRecord): + """ + Ensures the formatter does not raise errors when get_current_request() returns None and that user-dependent + fields are empty. + """ + formatter = StandardJSONFormatter() + formatted_log = formatter.format(base_log_record) + log_dict = json.loads(formatted_log) + + # Check that formatter runs without error and basic fields are present + assert log_dict["severity"] == "INFO" + assert log_dict["message"] == "This is a test message" + + # Check that user-dependent fields are string 'None' due to stringify_values + assert log_dict["labels"]["user_id"] == "None" + assert log_dict["labels"]["user_display_field"] == "None" + assert log_dict["operation"]["id"] is None + + +class TestGoogleCloudFormatter: + def test_field_remapping_for_google_cloud(self, base_log_record: LogRecord, mock_request_storage: RequestStorage): + """ + Verifies that standard fields (source_location, operation, labels) are renamed to Google-specific fields. + """ + formatter = GoogleCloudFormatter() + formatted_log = formatter.format(base_log_record) + log_dict = json.loads(formatted_log) + + # Check for renamed keys + assert "source_location" not in log_dict + assert "operation" not in log_dict + assert "labels" not in log_dict + + assert formatter.google_source_location_field in log_dict + assert formatter.google_operation_field in log_dict + assert formatter.google_labels_field in log_dict + + # Check content of renamed fields + assert log_dict[formatter.google_operation_field]["id"] == mock_request_storage.uuid + assert log_dict[formatter.google_labels_field]["user_id"] == "1" + + def test_trace_correlation(self, log_record_with_trace: LogRecord): + """ + Verifies that if otelTraceID is present, the log is enriched with the logging.googleapis.com/trace field. 
+ """ + formatter = GoogleCloudFormatter() + formatted_log = formatter.format(log_record_with_trace) + log_dict = json.loads(formatted_log) + + assert hasattr(log_record_with_trace, "otelTraceID") + assert hasattr(log_record_with_trace, "otelSpanID") + expected_trace = f"projects/test-project-123/traces/{log_record_with_trace.otelTraceID}" + assert log_dict[formatter.google_trace_field] == expected_trace + assert log_dict["spanId"] == log_record_with_trace.otelSpanID + assert log_dict["traceSampled"] is True + + def test_trace_correlation_without_project_id(self, log_record_with_trace: LogRecord, settings): + """ + Verifies that the trace field is not added if GOOGLE_CLOUD_PROJECT is not set. + """ + settings.GOOGLE_CLOUD_PROJECT = None + formatter = GoogleCloudFormatter() + formatted_log = formatter.format(log_record_with_trace) + log_dict = json.loads(formatted_log) + + assert formatter.google_trace_field not in log_dict + assert "spanId" not in log_dict + assert "traceSampled" not in log_dict diff --git a/tests/test_middlewares.py b/tests/test_middlewares.py new file mode 100644 index 0000000..436d898 --- /dev/null +++ b/tests/test_middlewares.py @@ -0,0 +1,424 @@ +import uuid +from typing import Callable +from unittest.mock import Mock, patch + +import pytest +from django.conf import LazySettings +from django.core.handlers.wsgi import WSGIRequest +from django.http import HttpResponse + +from django_google_structured_logger.graphene_middlewares import GrapheneSetUserContextMiddleware +from django_google_structured_logger.middlewares import LogRequestAndResponseMiddleware, SetUserContextMiddleware +from django_google_structured_logger.storages import RequestStorage, _current_request + + +class TestSetUserContextMiddleware: + """Tests for SetUserContextMiddleware that manages user context in request storage.""" + + @pytest.fixture + def mock_get_response(self) -> Callable[[WSGIRequest], HttpResponse]: + """Mock get_response function that returns a basic HttpResponse.""" + + def get_response(request: WSGIRequest) -> HttpResponse: + return HttpResponse("Test response") + + return get_response + + @pytest.fixture + def middleware(self, mock_get_response: Callable[[WSGIRequest], HttpResponse]) -> SetUserContextMiddleware: + """SetUserContextMiddleware instance.""" + return SetUserContextMiddleware(mock_get_response) + + def test_authenticated_user_context( + self, + middleware: SetUserContextMiddleware, + authenticated_request: WSGIRequest, + middleware_settings: LazySettings, + ): + """Test that authenticated user context is correctly set in RequestStorage.""" + assert middleware_settings.LOG_MIDDLEWARE_ENABLED + # Call middleware + response = middleware(authenticated_request) + + # Get the current request storage + storage = _current_request.get() + + # Verify storage was created + assert storage is not None + assert isinstance(storage.uuid, str) + assert uuid.UUID(storage.uuid) # Verify valid UUID format + + # Verify user data extraction + assert storage.user_id() == 1 # mock_user.id + assert storage.user_display_field() == "test@example.com" # mock_user.email + + # Verify response is returned + assert response.status_code == 200 + + def test_anonymous_user_context( + self, middleware: SetUserContextMiddleware, anonymous_request: WSGIRequest, middleware_settings: LazySettings + ): + """Test that anonymous user context sets user fields to None.""" + assert middleware_settings.LOG_MIDDLEWARE_ENABLED + # Call middleware + response = middleware(anonymous_request) + + # Get the current request 
storage + storage = _current_request.get() + + # Verify storage was created + assert storage is not None + assert isinstance(storage.uuid, str) + assert uuid.UUID(storage.uuid) # Verify valid UUID format + + # Verify anonymous user data + assert storage.user_id() is None + assert storage.user_display_field() is None + + # Verify response is returned + assert response.status_code == 200 + + def test_custom_user_fields( + self, + mock_get_response: Callable[[WSGIRequest], HttpResponse], + authenticated_request: WSGIRequest, + settings: LazySettings, + ): + """Test middleware uses custom LOG_USER_ID_FIELD and LOG_USER_DISPLAY_FIELD settings.""" + + # Configure custom user field settings + settings.LOG_USER_ID_FIELD = "username" + settings.LOG_USER_DISPLAY_FIELD = "username" + + # Ensure mock user has username attribute + assert authenticated_request.user.is_authenticated + assert hasattr(authenticated_request.user, "username") + authenticated_request.user.username = "testuser" + + # Patch the settings in the middleware module and create new instance + with ( + patch("django_google_structured_logger.middlewares.settings.LOG_USER_ID_FIELD", "username"), + patch("django_google_structured_logger.middlewares.settings.LOG_USER_DISPLAY_FIELD", "username"), + ): + middleware = SetUserContextMiddleware(mock_get_response) + response = middleware(authenticated_request) + + # Get the current request storage + storage = _current_request.get() + + # Verify storage was created + assert storage is not None + assert isinstance(storage.uuid, str) + + # Verify custom field extraction + assert storage.user_id() == "testuser" + assert storage.user_display_field() == "testuser" + + # Verify response is returned + assert response.status_code == 200 + + def test_missing_user_attributes( + self, + mock_get_response: Callable[[WSGIRequest], HttpResponse], + authenticated_request: WSGIRequest, + settings: LazySettings, + ): + """Test middleware handles missing user attributes gracefully.""" + + # Configure non-existent field + settings.LOG_USER_ID_FIELD = "nonexistent_field" + settings.LOG_USER_DISPLAY_FIELD = "another_missing_field" + + # Patch the settings in the middleware module and create new instance + with ( + patch("django_google_structured_logger.middlewares.settings.LOG_USER_ID_FIELD", "nonexistent_field"), + patch( + "django_google_structured_logger.middlewares.settings.LOG_USER_DISPLAY_FIELD", "another_missing_field" + ), + ): + middleware = SetUserContextMiddleware(mock_get_response) + middleware(authenticated_request) + + # Get the current request storage + storage = _current_request.get() + + # Verify storage was created + assert storage is not None + + # Verify missing attributes return None + assert storage.user_id() is None + assert storage.user_display_field() is None + + def test_multiple_requests_different_contexts( + self, + middleware: SetUserContextMiddleware, + authenticated_request: WSGIRequest, + anonymous_request: WSGIRequest, + middleware_settings: LazySettings, + ): + """Test that each request gets its own context storage.""" + assert middleware_settings.LOG_MIDDLEWARE_ENABLED + # Process authenticated request + middleware(authenticated_request) + auth_storage = _current_request.get() + assert auth_storage is not None + auth_uuid = auth_storage.uuid + auth_user_id = auth_storage.user_id() + + # Process anonymous request + middleware(anonymous_request) + anon_storage = _current_request.get() + assert anon_storage is not None + anon_uuid = anon_storage.uuid + anon_user_id = 
anon_storage.user_id() + + # Verify different UUIDs and user contexts + assert auth_uuid != anon_uuid + assert auth_user_id == 1 + assert anon_user_id is None + + def test_uuid_uniqueness( + self, + middleware: SetUserContextMiddleware, + authenticated_request: WSGIRequest, + middleware_settings: LazySettings, + ): + """Test that each middleware call generates a unique UUID.""" + assert middleware_settings.LOG_MIDDLEWARE_ENABLED + uuids = set() + + # Call middleware multiple times + for _ in range(5): + middleware(authenticated_request) + storage = _current_request.get() + assert storage is not None + uuids.add(storage.uuid) + + # Verify all UUIDs are unique + assert len(uuids) == 5 + + +@patch("django_google_structured_logger.middlewares.logger") +class TestLogRequestAndResponseMiddleware: + def test_basic_request_and_response_logging( + self, + mock_logger: Mock, + authenticated_request: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(authenticated_request) + + assert mock_logger.info.call_count == 2 + request_log_call = mock_logger.info.call_args_list[0] + response_log_call = mock_logger.info.call_args_list[1] + + assert "request" in request_log_call.kwargs["extra"] + assert request_log_call.kwargs["extra"]["first_operation"] is True + assert request_log_call.kwargs["extra"]["request"]["method"] == "GET" + + assert "response" in response_log_call.kwargs["extra"] + assert response_log_call.kwargs["extra"]["last_operation"] is True + assert response_log_call.kwargs["extra"]["response"]["status_code"] == 200 + + def test_error_response_logging( + self, + mock_logger: Mock, + authenticated_request: WSGIRequest, + error_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(error_response)) + middleware(authenticated_request) + + mock_logger.info.assert_called_once() + mock_logger.warning.assert_called_once() + + response_log_call = mock_logger.warning.call_args_list[0] + assert "response" in response_log_call.kwargs["extra"] + assert response_log_call.kwargs["extra"]["response"]["status_code"] == 500 + + def test_ignored_endpoint_from_settings( + self, + mock_logger: Mock, + request_to_ignored_endpoint: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(request_to_ignored_endpoint) + mock_logger.info.assert_not_called() + mock_logger.warning.assert_not_called() + + def test_excluded_headers( + self, + mock_logger: Mock, + request_with_sensitive_headers: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(request_with_sensitive_headers) + + mock_logger.info.assert_called() + request_log_call = mock_logger.info.call_args_list[0] + logged_headers = request_log_call.kwargs["extra"]["request"]["headers"] + + # All sensitive headers are excluded, making the header dict empty, which is then set to None. 
+ assert logged_headers is None + + @patch("django_google_structured_logger.middlewares.settings.LOG_MASK_STYLE", "partial") + def test_partial_masking_of_sensitive_data( + self, + mock_logger: Mock, + post_request_with_data: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(post_request_with_data) + + request_log_call = mock_logger.info.call_args_list[0] + logged_body = request_log_call.kwargs["extra"]["request"]["body"] + assert logged_body["password"] == "se.....MASKED.....23" + + @patch("django_google_structured_logger.middlewares.settings.LOG_MASK_STYLE", "complete") + def test_complete_masking_of_sensitive_data( + self, + mock_logger: Mock, + post_request_with_data: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(post_request_with_data) + + request_log_call = mock_logger.info.call_args_list[0] + logged_body = request_log_call.kwargs["extra"]["request"]["body"] + assert logged_body["password"] == "...FULL_MASKED..." + + @patch("django_google_structured_logger.middlewares.settings.LOG_MAX_STR_LEN", 50) + def test_abridging_long_strings( + self, + mock_logger: Mock, + request_with_long_data: WSGIRequest, + response_with_long_data: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(response_with_long_data)) + middleware(request_with_long_data) + + request_body = mock_logger.info.call_args_list[0].kwargs["extra"]["request"]["body"] + response_data = mock_logger.info.call_args_list[1].kwargs["extra"]["response"]["data"] + + assert request_body["long_str"].endswith("..SHORTENED") + assert len(request_body["long_str"]) == 50 + len("..SHORTENED") + assert response_data["long_str"].endswith("..SHORTENED") + assert len(response_data["long_str"]) == 50 + len("..SHORTENED") + + @patch("django_google_structured_logger.middlewares.settings.LOG_MAX_LIST_LEN", 5) + def test_abridging_long_lists( + self, + mock_logger: Mock, + request_with_long_data: WSGIRequest, + response_with_long_data: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(response_with_long_data)) + middleware(request_with_long_data) + + request_body = mock_logger.info.call_args_list[0].kwargs["extra"]["request"]["body"] + response_data = mock_logger.info.call_args_list[1].kwargs["extra"]["response"]["data"] + + assert len(request_body["long_list"]) == 5 + assert len(response_data["long_list"]) == 5 + + @patch("django_google_structured_logger.middlewares.settings.LOG_MAX_DEPTH", 3) + def test_abridging_deeply_nested_data( + self, + mock_logger: Mock, + request_with_long_data: WSGIRequest, + response_with_long_data: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(response_with_long_data)) + middleware(request_with_long_data) + + request_body = mock_logger.info.call_args_list[0].kwargs["extra"]["request"]["body"] + response_data = mock_logger.info.call_args_list[1].kwargs["extra"]["response"]["data"] 
+ + assert request_body["deep_dict"]["a"]["b"]["c"] == "..DEPTH EXCEEDED" + assert response_data["deep_dict"]["a"]["b"]["c"] == "..DEPTH EXCEEDED" + + @patch("django_google_structured_logger.middlewares.settings.LOG_MIDDLEWARE_ENABLED", False) + def test_middleware_is_disabled( + self, + mock_logger: Mock, + authenticated_request: WSGIRequest, + mock_response: HttpResponse, + get_response_factory: Callable[[HttpResponse], Callable[[WSGIRequest], HttpResponse]], + ): + middleware = LogRequestAndResponseMiddleware(get_response_factory(mock_response)) + middleware(authenticated_request) + + mock_logger.info.assert_not_called() + mock_logger.warning.assert_not_called() + mock_logger.exception.assert_not_called() + + +class TestGrapheneSetUserContextMiddleware: + """Tests for GrapheneSetUserContextMiddleware.""" + + def test_sets_context_if_not_exists(self, mock_graphene_info: Mock, mock_user: Mock): + """ + Verify that the middleware correctly creates a RequestStorage and extracts user data from info.context.user + when no context is present. + """ + middleware = GrapheneSetUserContextMiddleware() + next_middleware = Mock() + + # Ensure context is initially empty + assert _current_request.get() is None + + middleware.resolve(next_middleware, None, mock_graphene_info) + + storage = _current_request.get() + assert storage is not None + assert isinstance(storage, RequestStorage) + assert isinstance(storage.uuid, str) + assert uuid.UUID(storage.uuid) # Ensures it's a valid UUID + + # Verify user data is extracted correctly + assert storage.user_id() == mock_user.id + assert storage.user_display_field() == mock_user.email + next_middleware.assert_called_once() + + def test_updates_existing_context( + self, mock_request_storage: RequestStorage, mock_graphene_info: Mock, mock_user: Mock + ): + """ + Verify that if RequestStorage already exists (e.g., created by Django middleware), it is correctly updated + rather than replaced. + """ + assert isinstance(mock_request_storage, RequestStorage) + middleware = GrapheneSetUserContextMiddleware() + next_middleware = Mock() + + # Get the initial storage and its UUID + initial_storage = _current_request.get() + assert initial_storage is not None + initial_uuid = initial_storage.uuid + + middleware.resolve(next_middleware, None, mock_graphene_info) + + updated_storage = _current_request.get() + assert updated_storage is not None + + # Verify the storage object was updated, not replaced + assert updated_storage.uuid == initial_uuid + + # Verify user data has been updated from the Graphene context + assert updated_storage.user_id() == mock_user.id + assert updated_storage.user_display_field() == mock_user.email + next_middleware.assert_called_once()