From 4c1fc49677389e43290caf76ae745502d9c3238a Mon Sep 17 00:00:00 2001 From: wunder957 Date: Tue, 21 Nov 2023 17:10:04 +0800 Subject: [PATCH] Support jaeger analyzer (#99) * Add some log for tracking data * Add jaeger proto bindings * Add pyi files for type hint * Demo for jaeger client * Init otel collector only once * Switch to otel build proto * Switch proto * Fix import issue * Fixing typo * Drop auth of image build and only build amd64 * Building test for jaeger * Trying 127.0.0.1 in github action * Replacing all localhost to 127.0.0.1 * Add timeout and use http response for checking * Support grpc secure channel * Support self-signed creds * Extract OTelInspector for all Otel code * Extract OTelInspector * Improve service api * Switch to async function * Add jaeger analyzer docs * Add docker build test for arm64 * Support query trace * Fix import issue on 3.11 * Support brief api * Add testing for all * Add some comments. * Fix not_support_params --- .github/workflows/docker-image.yml | 6 - .github/workflows/python-package.yml | 5 +- CONTRIBUTING.md | 14 +- dev-tools/start-jaeger.sh | 16 + docs/source/analyzer/index.rst | 1 + docs/source/analyzer/jaeger.rst | 10 + duetector/analyzer/base.py | 16 +- duetector/analyzer/db.py | 6 +- duetector/analyzer/jaeger/analyzer.py | 410 ++++++ duetector/analyzer/jaeger/proto/__init__.py | 4 + .../jaeger/proto/gogoproto/gogo_pb2.py | 107 ++ .../proto/google/api/annotations_pb2.py | 29 + .../jaeger/proto/google/api/http_pb2.py | 30 + duetector/analyzer/jaeger/proto/model_pb2.py | 1 + duetector/analyzer/jaeger/proto/model_pb2.pyi | 206 +++ duetector/analyzer/jaeger/proto/query_pb2.py | 1174 +++++++++++++++++ duetector/analyzer/jaeger/proto/query_pb2.pyi | 170 +++ .../analyzer/jaeger/proto/query_pb2_grpc.py | 304 +++++ duetector/analyzer/models.py | 34 +- duetector/analyzer/register.py | 3 +- duetector/collectors/base.py | 5 +- duetector/collectors/models.py | 11 +- duetector/collectors/otel.py | 60 +- duetector/exceptions.py | 4 + duetector/otel.py | 29 + duetector/service/query/controller.py | 7 + duetector/service/query/models.py | 4 +- duetector/service/query/routes.py | 11 +- duetector/service/utils.py | 11 + duetector/static/config.toml | 17 + duetector/utils.py | 51 + pyproject.toml | 5 +- pytest.ini | 4 + tests/config.toml | 9 + .../jaeger/test_jaeger_analyzer.py | 156 +++ tests/{ => standalone}/bin/dummy_process.py | 0 tests/{ => standalone}/service/test_query.py | 0 tests/{ => standalone}/service/test_token.py | 0 tests/{ => standalone}/test_bcc_monitor.py | 0 tests/{ => standalone}/test_config.py | 0 tests/{ => standalone}/test_daemon.py | 0 tests/standalone/test_db_analyzer.py | 119 ++ tests/{ => standalone}/test_db_collector.py | 0 tests/{ => standalone}/test_filter.py | 0 tests/{ => standalone}/test_otel_collector.py | 0 tests/{ => standalone}/test_poller.py | 0 tests/{ => standalone}/test_repo.py | 4 +- tests/{ => standalone}/test_sh_monitor.py | 0 tests/{ => standalone}/test_sp_monitor.py | 0 .../{ => standalone}/test_tracer_template.py | 0 tests/test_db_analyzer.py | 119 -- 51 files changed, 3010 insertions(+), 162 deletions(-) create mode 100755 dev-tools/start-jaeger.sh create mode 100644 docs/source/analyzer/jaeger.rst create mode 100644 duetector/analyzer/jaeger/analyzer.py create mode 100644 duetector/analyzer/jaeger/proto/__init__.py create mode 100644 duetector/analyzer/jaeger/proto/gogoproto/gogo_pb2.py create mode 100644 duetector/analyzer/jaeger/proto/google/api/annotations_pb2.py create mode 100644 
duetector/analyzer/jaeger/proto/google/api/http_pb2.py
 create mode 100644 duetector/analyzer/jaeger/proto/model_pb2.py
 create mode 100644 duetector/analyzer/jaeger/proto/model_pb2.pyi
 create mode 100644 duetector/analyzer/jaeger/proto/query_pb2.py
 create mode 100644 duetector/analyzer/jaeger/proto/query_pb2.pyi
 create mode 100644 duetector/analyzer/jaeger/proto/query_pb2_grpc.py
 create mode 100644 duetector/otel.py
 create mode 100644 duetector/service/utils.py
 create mode 100644 pytest.ini
 create mode 100644 tests/integration/jaeger/test_jaeger_analyzer.py
 rename tests/{ => standalone}/bin/dummy_process.py (100%)
 rename tests/{ => standalone}/service/test_query.py (100%)
 rename tests/{ => standalone}/service/test_token.py (100%)
 rename tests/{ => standalone}/test_bcc_monitor.py (100%)
 rename tests/{ => standalone}/test_config.py (100%)
 rename tests/{ => standalone}/test_daemon.py (100%)
 create mode 100644 tests/standalone/test_db_analyzer.py
 rename tests/{ => standalone}/test_db_collector.py (100%)
 rename tests/{ => standalone}/test_filter.py (100%)
 rename tests/{ => standalone}/test_otel_collector.py (100%)
 rename tests/{ => standalone}/test_poller.py (100%)
 rename tests/{ => standalone}/test_repo.py (89%)
 rename tests/{ => standalone}/test_sh_monitor.py (100%)
 rename tests/{ => standalone}/test_sp_monitor.py (100%)
 rename tests/{ => standalone}/test_tracer_template.py (100%)
 delete mode 100644 tests/test_db_analyzer.py

diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml
index bfba132..04ce461 100644
--- a/.github/workflows/docker-image.yml
+++ b/.github/workflows/docker-image.yml
@@ -21,12 +21,6 @@ jobs:
 
       - name: Set up Docker Buildx
         uses: docker/setup-buildx-action@v2
-
-      - name: Login to DockerHub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
 
       - name: Build image
         id: docker_build_test
diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml
index b525c1c..35e97f2 100644
--- a/.github/workflows/python-package.yml
+++ b/.github/workflows/python-package.yml
@@ -27,7 +27,10 @@ jobs:
           python -m pip install -e .[test]
       - name: Test with pytest
         run: |
-          pytest -vv --cov=duetector
+          pytest -vv --cov-append --cov=duetector --cov-report=term-missing tests/standalone
+      - name: Integration Test for jaeger
+        run: |
+          pytest -vv --cov-append --cov=duetector --cov-report=term-missing tests/integration/jaeger/
       - name: Install dependencies for building
         run: |
           pip install build twine hatch
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index d981a7b..44f6fe5 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -25,17 +25,27 @@ Comment style is [Google Python Style Guide](https://google.github.io/styleguide
 pip install -e .[test]
 ```
 
-## Unittest
+## Unit-test
 
 We use pytest to write unit tests, and use pytest-cov to generate coverage reports
 
 ```bash
-pytest -v # Run unit-test
+pytest -v tests/standalone/ # Run unit-test
 pytest --cov=duetector # Generate coverage reports
 ```
 
 Run unit-test before PR, **ensure that new features are covered by unit tests**
 
+## Integration-test
+
+Integration tests are run separately from the unit tests so that they are not affected by global state left over from the unit tests.
+
+Requirement: Docker
+
+```
+pytest -vv tests/integration/jaeger
+```
+
 ## Generating config
 
 Use script to generate config after add tracer/filter...
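For reference, a full local run of the new integration suite might look like the sketch below. It assumes Docker is available, that the `dev-tools/start-jaeger.sh` helper added by this patch is used to start the backend, and that `JaegerAnalyzer` keeps its default `localhost:16685` endpoint; the final `docker stop jaeger` is only needed because the helper starts a foreground container named `jaeger`.

```bash
# Terminal 1: start the all-in-one Jaeger backend (gRPC query API on 16685, OTLP on 4317)
./dev-tools/start-jaeger.sh

# Terminal 2: run the Jaeger integration tests, mirroring the CI step above
pytest -vv --cov-append --cov=duetector --cov-report=term-missing tests/integration/jaeger/

# Stop the backend when finished; --rm in the helper script also removes the container
docker stop jaeger
```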
diff --git a/dev-tools/start-jaeger.sh b/dev-tools/start-jaeger.sh new file mode 100755 index 0000000..d8cf112 --- /dev/null +++ b/dev-tools/start-jaeger.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash + +docker run --rm --name jaeger \ + -e COLLECTOR_ZIPKIN_HOST_PORT=:9411 \ + -p 6831:6831/udp \ + -p 6832:6832/udp \ + -p 5778:5778 \ + -p 16685:16685 \ + -p 16686:16686 \ + -p 4317:4317 \ + -p 4318:4318 \ + -p 14250:14250 \ + -p 14268:14268 \ + -p 14269:14269 \ + -p 9411:9411 \ + jaegertracing/all-in-one:1.50 diff --git a/docs/source/analyzer/index.rst b/docs/source/analyzer/index.rst index ae2bcff..fa406c3 100644 --- a/docs/source/analyzer/index.rst +++ b/docs/source/analyzer/index.rst @@ -15,6 +15,7 @@ Avaliable Analyzer :maxdepth: 2 DB Analyzer + Jaeger Analyzer Data Models ----------------------------------------------- diff --git a/docs/source/analyzer/jaeger.rst b/docs/source/analyzer/jaeger.rst new file mode 100644 index 0000000..fce23ba --- /dev/null +++ b/docs/source/analyzer/jaeger.rst @@ -0,0 +1,10 @@ +JaegerAnalyzer +=============================== + +``JaegerAnalyzer`` + +.. automodule:: duetector.analyzer.jaeger.analyzer + :members: + :undoc-members: + :private-members: + :show-inheritance: diff --git a/duetector/analyzer/base.py b/duetector/analyzer/base.py index 8f49b1a..a5f1477 100644 --- a/duetector/analyzer/base.py +++ b/duetector/analyzer/base.py @@ -51,14 +51,14 @@ def get_all_collector_ids(self) -> List[str]: """ raise NotImplementedError - def query( + async def query( self, tracers: Optional[List[str]] = None, collector_ids: Optional[List[str]] = None, start_datetime: Optional[datetime] = None, end_datetime: Optional[datetime] = None, start: int = 0, - limit: int = 0, + limit: int = 20, columns: Optional[List[str]] = None, where: Optional[Dict[str, Any]] = None, distinct: bool = False, @@ -66,7 +66,11 @@ def query( order_by_desc: Optional[List[str]] = None, ) -> List[Tracking]: """ - Query all tracking records from database. + Query all tracking records from backend. + + Note: + Some storage implementations do not guarantee the correct implementation of all parameters. + Some parameters may be ignored. Args: tracers (Optional[List[str]], optional): Tracer's name. Defaults to None, all tracers will be queried. @@ -74,7 +78,7 @@ def query( start_datetime (Optional[datetime], optional): Start time. Defaults to None. end_datetime (Optional[datetime], optional): End time. Defaults to None. start (int, optional): Start index. Defaults to 0. - limit (int, optional): Limit of records. Defaults to 20. ``0`` means no limit. + limit (int, optional): Limit of records, depends on backend implementations. Defaults to 20. ``0`` means no limit. columns (Optional[List[str]], optional): Columns to query. Defaults to None, all columns will be queried. where (Optional[Dict[str, Any]], optional): Where clause. Defaults to None. distinct (bool, optional): Distinct. Defaults to False. @@ -85,7 +89,7 @@ def query( """ raise NotImplementedError - def brief( + async def brief( self, tracers: Optional[List[str]] = None, collector_ids: Optional[List[str]] = None, @@ -116,6 +120,6 @@ def brief( """ raise NotImplementedError - def analyze(self): + async def analyze(self): # TODO: Not design yet. 
pass diff --git a/duetector/analyzer/db.py b/duetector/analyzer/db.py index 83aa2f5..4020f7f 100644 --- a/duetector/analyzer/db.py +++ b/duetector/analyzer/db.py @@ -74,14 +74,14 @@ def __init__(self, config: Optional[Dict[str, Any]] = None, *args, **kwargs): # Init as a submodel self.sm: SessionManager = SessionManager(self.config._config_dict) - def query( + async def query( self, tracers: Optional[List[str]] = None, collector_ids: Optional[List[str]] = None, start_datetime: Optional[datetime] = None, end_datetime: Optional[datetime] = None, start: int = 0, - limit: int = 0, + limit: int = 20, columns: Optional[List[str]] = None, where: Optional[Dict[str, Any]] = None, distinct: bool = False, @@ -243,7 +243,7 @@ def _convert_row_to_tracking(self, columns: List[str], row: Any, tracer: str) -> return Tracking(tracer=tracer, **{k: v for k, v in zip(columns, row)}) - def brief( + async def brief( self, tracers: Optional[List[str]] = None, collector_ids: Optional[List[str]] = None, diff --git a/duetector/analyzer/jaeger/analyzer.py b/duetector/analyzer/jaeger/analyzer.py new file mode 100644 index 0000000..1909e24 --- /dev/null +++ b/duetector/analyzer/jaeger/analyzer.py @@ -0,0 +1,410 @@ +import asyncio +import functools +from datetime import datetime +from typing import Any, Callable, Dict, List, Optional, Union + +import grpc +from google.protobuf.duration_pb2 import Duration +from google.protobuf.timestamp_pb2 import Timestamp + +from duetector.analyzer.jaeger.proto import model_pb2 +from duetector.exceptions import AnalysQueryError +from duetector.utils import get_grpc_cred_from_path + +try: + from functools import cache +except ImportError: + from functools import lru_cache as cache + +from duetector.analyzer.base import Analyzer +from duetector.analyzer.jaeger.proto.model_pb2 import Span +from duetector.analyzer.jaeger.proto.query_pb2 import * +from duetector.analyzer.jaeger.proto.query_pb2_grpc import * +from duetector.analyzer.models import AnalyzerBrief, Brief, Tracking +from duetector.extension.analyzer import hookimpl +from duetector.log import logger +from duetector.otel import OTelInspector + +ChannelInitializer = Callable[[], grpc.aio.Channel] + + +class JaegerConnector(OTelInspector): + """ + Providing query method for jaeger backend + """ + + def __init__(self, channel_initializer: ChannelInitializer): + self.channel_initializer: ChannelInitializer = channel_initializer + + async def inspect_all_collector_ids(self) -> List[str]: + logger.info("Querying all collector ids...") + async with self.channel_initializer() as channel: + stub = QueryServiceStub(channel) + response = await stub.GetServices(GetServicesRequest()) + return [ + self.get_identifier(service) + for service in response.services + if self.get_identifier(service) + ] + + async def get_operation(self, service: str, span_kind: Optional[str] = None) -> List[str]: + logger.info(f"Querying operations of {service}...") + async with self.channel_initializer() as channel: + stub = QueryServiceStub(channel) + response = await stub.GetOperations( + GetOperationsRequest(service=service, span_kind=span_kind) + ) + return [operation.name for operation in response.operations] + + async def inspect_all_tracers(self) -> List[str]: + logger.info("Querying all tracers...") + ret = [] + for collector_id in await self.inspect_all_collector_ids(): + service = self.generate_service_name(collector_id) + for operation in await self.get_operation(service): + tracer_name = self.get_tracer_name(operation) + if tracer_name and tracer_name 
not in ret: + ret.append(tracer_name) + return ret + + def _datetime_to_protobuf_timestamp(self, dt: datetime) -> Timestamp: + ts = Timestamp() + ts.FromDatetime(dt) + return ts + + def _protobuf_timestamp_to_datetime(self, ts: Timestamp) -> datetime: + return ts.ToDatetime() + + def get_find_tracers_request( + self, + collector_id: str, + tracer_name: str, + tags: Optional[Dict[str, Any]] = None, + start_time_min: Optional[datetime] = None, + start_time_max: Optional[datetime] = None, + duration_min: Optional[int] = None, + duration_max: Optional[int] = None, + search_depth: int = 20, + ) -> FindTracesRequest: + if not collector_id: + raise AnalysQueryError(f"collector_id is required, current:{collector_id}") + if not tracer_name: + raise AnalysQueryError(f"tracer_name is required, current:{tracer_name}") + if search_depth < 1 or search_depth > 1500: + raise AnalysQueryError("Jaeger search_depth must be between 1 and 1500.") + + return FindTracesRequest( + query=TraceQueryParameters( + service_name=self.generate_service_name(collector_id), + operation_name=self.generate_span_name(tracer_name), + tags=tags, + start_time_min=self._datetime_to_protobuf_timestamp(start_time_min) + if start_time_min + else None, + start_time_max=self._datetime_to_protobuf_timestamp(start_time_max) + if start_time_max + else None, + duration_min=Duration(seconds=duration_min) if duration_min else None, + duration_max=Duration(seconds=duration_max) if duration_max else None, + search_depth=search_depth, + ) + ) + + async def query_trace( + self, + collector_id: str, + tracer_name: str, + tags: Optional[Dict[str, Any]] = None, + start_time_min: Optional[datetime] = None, + start_time_max: Optional[datetime] = None, + duration_min: Optional[int] = None, + duration_max: Optional[int] = None, + search_depth: int = 20, + ) -> List[Tracking]: + if not collector_id: + raise AnalysQueryError(f"collector_id is required, current:{collector_id}") + if not tracer_name: + raise AnalysQueryError(f"tracer_name is required, current:{tracer_name}") + request = self.get_find_tracers_request( + collector_id=collector_id, + tracer_name=tracer_name, + tags=tags, + start_time_min=start_time_min, + start_time_max=start_time_max, + duration_min=duration_min, + duration_max=duration_max, + search_depth=search_depth, + ) + async with self.channel_initializer() as channel: + stub = QueryServiceStub(channel) + response = stub.FindTraces(request) + ret = [] + async for chunk in response: + ret.extend([Tracking.from_jaeger_span(tracer_name, span) for span in chunk.spans]) + return ret + + def inspect_span(self, span: Span) -> Dict[str, Any]: + value_type_to_field_attr = { + model_pb2.STRING: "str", + model_pb2.BOOL: "bool", + model_pb2.INT64: "int", + model_pb2.FLOAT64: "float", + model_pb2.BINARY: "bytes", + } + + return {msg.key: value_type_to_field_attr[msg.v_type] for msg in span.tags} + + async def brief( + self, + collector_id: str, + tracer_name: str, + start_time_min: Optional[datetime] = None, + start_time_max: Optional[datetime] = None, + inspect_type=True, + ) -> Optional[Brief]: + if not collector_id: + raise AnalysQueryError(f"collector_id is required, current:{collector_id}") + if not tracer_name: + raise AnalysQueryError(f"tracer_name is required, current:{tracer_name}") + request = self.get_find_tracers_request( + collector_id=collector_id, + tracer_name=tracer_name, + start_time_min=start_time_min, + start_time_max=start_time_max, + search_depth=1500, + ) + start_span = last_span = None + count = 0 + async with 
self.channel_initializer() as channel:
+            stub = QueryServiceStub(channel)
+            response = stub.FindTraces(request)
+            async for chunk in response:
+                if not chunk.spans:
+                    break
+                spans = [span for span in chunk.spans]
+                count += len(spans)
+                start_span = spans[0]
+                last_span = spans[-1]
+
+        if not (start_span and last_span):
+            return None
+
+        return Brief(
+            tracer=tracer_name,
+            collector_id=collector_id,
+            start=self._protobuf_timestamp_to_datetime(start_span.start_time),
+            end=self._protobuf_timestamp_to_datetime(last_span.start_time),
+            fields={msg.key: None for msg in start_span.tags}
+            if not inspect_type
+            else self.inspect_span(start_span),
+            count=count,
+        )
+
+
+class JaegerAnalyzer(Analyzer):
+    default_config = {
+        "disabled": True,
+        "secure": False,
+        "root_certificates_path": "",
+        "private_key_path": "",
+        "certificate_chain_path": "",
+        "host": "localhost",
+        "port": 16685,
+    }
+
+    def __init__(self, config: Optional[Dict[str, Any]] = None, *args, **kwargs):
+        super().__init__(config, *args, **kwargs)
+
+    @property
+    @cache
+    def channel_initializer(self) -> ChannelInitializer:
+        """
+        Example:
+            async with self.channel_initializer() as channel:
+                stub = QueryServiceStub(channel)
+                response = await stub.GetServices(GetServicesRequest())
+                print(response)
+
+        """
+        kwargs = {}
+        if self.config.secure:
+            target_func = grpc.aio.secure_channel
+            kwargs["credentials"] = get_grpc_cred_from_path(
+                root_certificates_path=self.config.root_certificates_path,
+                private_key_path=self.config.private_key_path,
+                certificate_chain_path=self.config.certificate_chain_path,
+            )
+        else:
+            target_func = grpc.aio.insecure_channel
+        kwargs["target"] = f"{self.config.host}:{self.config.port}"
+
+        return functools.partial(target_func, **kwargs)
+
+    @property
+    @cache
+    def connector(self):
+        return JaegerConnector(self.channel_initializer)
+
+    async def get_all_tracers(self) -> List[str]:
+        """
+        Get all tracers from storage.
+
+        Returns:
+            List[str]: List of tracer names.
+        """
+
+        return await self.connector.inspect_all_tracers()
+
+    async def get_all_collector_ids(self) -> List[str]:
+        """
+        Get all collector ids from storage.
+
+        Returns:
+            List[str]: List of collector ids.
+        """
+        return await self.connector.inspect_all_collector_ids()
+
+    async def query(
+        self,
+        tracers: Optional[List[str]] = None,
+        collector_ids: Optional[List[str]] = None,
+        start_datetime: Optional[datetime] = None,
+        end_datetime: Optional[datetime] = None,
+        start: int = 0,
+        limit: int = 20,
+        columns: Optional[List[str]] = None,
+        where: Optional[Dict[str, Any]] = None,
+        distinct: bool = False,
+        order_by_asc: Optional[List[str]] = None,
+        order_by_desc: Optional[List[str]] = None,
+    ) -> List[Tracking]:
+        """
+        Query all tracking records from the Jaeger backend.
+
+        Args:
+            tracers (Optional[List[str]], optional): Tracer's name. Defaults to None, all tracers will be queried.
+            collector_ids (Optional[List[str]], optional): Collector ids. Defaults to None, all collector ids will be queried.
+            start_datetime (Optional[datetime], optional): Start time. Defaults to None.
+            end_datetime (Optional[datetime], optional): End time. Defaults to None.
+            start (int, optional): Not supported.
+            limit (int, optional): Limit for each tracer of each collector id. Defaults to 20.
+            columns (Optional[List[str]], optional): Not supported; all tags will be returned.
+            where (Optional[Dict[str, Any]], optional): Tags filter. Defaults to None.
+            distinct (bool, optional): Not supported.
+            order_by_asc (Optional[List[str]], optional): Not supported.
+            order_by_desc (Optional[List[str]], optional): Not supported.
+        Returns:
+            List[duetector.analyzer.models.Tracking]: List of tracking records.
+        """
+        not_support_params = {
+            "start": start,
+            "columns": columns,
+            "distinct": distinct,
+            "order_by_asc": order_by_asc,
+            "order_by_desc": order_by_desc,
+        }
+        for k, v in not_support_params.items():
+            if v:
+                logger.warning("Unsupported parameter: %s=%s", k, v)
+
+        if not collector_ids:
+            collector_ids = await self.get_all_collector_ids()
+        if not tracers:
+            tracers = await self.get_all_tracers()
+
+        return [
+            await self.connector.query_trace(
+                collector_id=collector_id,
+                tracer_name=tracer,
+                tags=where,
+                start_time_min=start_datetime,
+                start_time_max=end_datetime,
+                search_depth=limit,
+            )
+            for tracer in tracers
+            for collector_id in collector_ids
+        ]
+
+    async def brief(
+        self,
+        tracers: Optional[List[str]] = None,
+        collector_ids: Optional[List[str]] = None,
+        start_datetime: Optional[datetime] = None,
+        end_datetime: Optional[datetime] = None,
+        with_details: bool = False,
+        distinct: bool = False,
+        inspect_type: bool = True,
+    ) -> AnalyzerBrief:
+        """
+        Get a brief of this analyzer.
+
+        Args:
+            tracers (Optional[List[str]], optional):
+                Tracers. Defaults to None, all tracers will be queried.
+                If a specific tracer is not found, it will be ignored.
+            collector_ids (Optional[List[str]], optional):
+                Collector ids. Defaults to None, all collector ids will be queried.
+                If a specific collector id is not found, it will be ignored.
+            start_datetime (Optional[datetime], optional): Start time. Defaults to None.
+            end_datetime (Optional[datetime], optional): End time. Defaults to None.
+            with_details (bool, optional): With details. Not supported. Defaults to False.
+            distinct (bool, optional): Distinct. Not supported. Defaults to False.
+            inspect_type (bool, optional): Whether to report each field's type name in the brief. Defaults to True.
+
+        Returns:
+            AnalyzerBrief: A brief of this analyzer.
+        """
+        not_support_params = {
+            "with_details": with_details,
+            "distinct": distinct,
+        }
+        for k, v in not_support_params.items():
+            if v:
+                logger.warning("Unsupported parameter: %s=%s", k, v)
+
+        if tracers:
+            tracers = [t for t in tracers if t in await self.get_all_tracers()]
+        else:
+            tracers = await self.get_all_tracers()
+        if collector_ids:
+            collector_ids = [c for c in collector_ids if c in await self.get_all_collector_ids()]
+        else:
+            collector_ids = await self.get_all_collector_ids()
+
+        briefs: List[Optional[Brief]] = [
+            await self.connector.brief(
+                collector_id=collector_id,
+                tracer_name=tracer,
+                start_time_min=start_datetime,
+                start_time_max=end_datetime,
+                inspect_type=inspect_type,
+            )
+            for tracer in tracers
+            for collector_id in collector_ids
+        ]
+
+        return AnalyzerBrief(
+            tracers=set(tracers),
+            collector_ids=set(collector_ids),
+            briefs={f"{brief.tracer}@{brief.collector_id}": brief for brief in briefs if brief},
+        )
+
+    async def analyze(self):
+        # TODO: Not designed yet.
+ pass + + +@hookimpl +def init_analyzer(config): + return JaegerAnalyzer(config) + + +if __name__ == "__main__": + + async def run() -> None: + Analyzer = JaegerAnalyzer() + await Analyzer.connector.query_trace( + collector_id="demo-service", tracer_name="tcp_v4_connect" + ) + + asyncio.run(run()) diff --git a/duetector/analyzer/jaeger/proto/__init__.py b/duetector/analyzer/jaeger/proto/__init__.py new file mode 100644 index 0000000..6682ad7 --- /dev/null +++ b/duetector/analyzer/jaeger/proto/__init__.py @@ -0,0 +1,4 @@ +import sys +from os.path import dirname + +sys.path.append(dirname(__file__)) diff --git a/duetector/analyzer/jaeger/proto/gogoproto/gogo_pb2.py b/duetector/analyzer/jaeger/proto/gogoproto/gogo_pb2.py new file mode 100644 index 0000000..36d0a12 --- /dev/null +++ b/duetector/analyzer/jaeger/proto/gogoproto/gogo_pb2.py @@ -0,0 +1,107 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: gogoproto/gogo.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x14gogoproto/gogo.proto\x12\tgogoproto\x1a google/protobuf/descriptor.proto:;\n\x13goproto_enum_prefix\x12\x1c.google.protobuf.EnumOptions\x18\xb1\xe4\x03 \x01(\x08:=\n\x15goproto_enum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc5\xe4\x03 \x01(\x08:5\n\renum_stringer\x12\x1c.google.protobuf.EnumOptions\x18\xc6\xe4\x03 \x01(\x08:7\n\x0f\x65num_customname\x12\x1c.google.protobuf.EnumOptions\x18\xc7\xe4\x03 \x01(\t:0\n\x08\x65numdecl\x12\x1c.google.protobuf.EnumOptions\x18\xc8\xe4\x03 \x01(\x08:A\n\x14\x65numvalue_customname\x12!.google.protobuf.EnumValueOptions\x18\xd1\x83\x04 \x01(\t:;\n\x13goproto_getters_all\x12\x1c.google.protobuf.FileOptions\x18\x99\xec\x03 \x01(\x08:?\n\x17goproto_enum_prefix_all\x12\x1c.google.protobuf.FileOptions\x18\x9a\xec\x03 \x01(\x08:<\n\x14goproto_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\x9b\xec\x03 \x01(\x08:9\n\x11verbose_equal_all\x12\x1c.google.protobuf.FileOptions\x18\x9c\xec\x03 \x01(\x08:0\n\x08\x66\x61\x63\x65_all\x12\x1c.google.protobuf.FileOptions\x18\x9d\xec\x03 \x01(\x08:4\n\x0cgostring_all\x12\x1c.google.protobuf.FileOptions\x18\x9e\xec\x03 \x01(\x08:4\n\x0cpopulate_all\x12\x1c.google.protobuf.FileOptions\x18\x9f\xec\x03 \x01(\x08:4\n\x0cstringer_all\x12\x1c.google.protobuf.FileOptions\x18\xa0\xec\x03 \x01(\x08:3\n\x0bonlyone_all\x12\x1c.google.protobuf.FileOptions\x18\xa1\xec\x03 \x01(\x08:1\n\tequal_all\x12\x1c.google.protobuf.FileOptions\x18\xa5\xec\x03 \x01(\x08:7\n\x0f\x64\x65scription_all\x12\x1c.google.protobuf.FileOptions\x18\xa6\xec\x03 \x01(\x08:3\n\x0btestgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa7\xec\x03 \x01(\x08:4\n\x0c\x62\x65nchgen_all\x12\x1c.google.protobuf.FileOptions\x18\xa8\xec\x03 \x01(\x08:5\n\rmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xa9\xec\x03 \x01(\x08:7\n\x0funmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaa\xec\x03 \x01(\x08:<\n\x14stable_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xab\xec\x03 \x01(\x08:1\n\tsizer_all\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\x03 
\x01(\x08:A\n\x19goproto_enum_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xad\xec\x03 \x01(\x08:9\n\x11\x65num_stringer_all\x12\x1c.google.protobuf.FileOptions\x18\xae\xec\x03 \x01(\x08:<\n\x14unsafe_marshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xaf\xec\x03 \x01(\x08:>\n\x16unsafe_unmarshaler_all\x12\x1c.google.protobuf.FileOptions\x18\xb0\xec\x03 \x01(\x08:B\n\x1agoproto_extensions_map_all\x12\x1c.google.protobuf.FileOptions\x18\xb1\xec\x03 \x01(\x08:@\n\x18goproto_unrecognized_all\x12\x1c.google.protobuf.FileOptions\x18\xb2\xec\x03 \x01(\x08:8\n\x10gogoproto_import\x12\x1c.google.protobuf.FileOptions\x18\xb3\xec\x03 \x01(\x08:6\n\x0eprotosizer_all\x12\x1c.google.protobuf.FileOptions\x18\xb4\xec\x03 \x01(\x08:3\n\x0b\x63ompare_all\x12\x1c.google.protobuf.FileOptions\x18\xb5\xec\x03 \x01(\x08:4\n\x0ctypedecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb6\xec\x03 \x01(\x08:4\n\x0c\x65numdecl_all\x12\x1c.google.protobuf.FileOptions\x18\xb7\xec\x03 \x01(\x08:<\n\x14goproto_registration\x12\x1c.google.protobuf.FileOptions\x18\xb8\xec\x03 \x01(\x08:7\n\x0fmessagename_all\x12\x1c.google.protobuf.FileOptions\x18\xb9\xec\x03 \x01(\x08:=\n\x15goproto_sizecache_all\x12\x1c.google.protobuf.FileOptions\x18\xba\xec\x03 \x01(\x08:;\n\x13goproto_unkeyed_all\x12\x1c.google.protobuf.FileOptions\x18\xbb\xec\x03 \x01(\x08::\n\x0fgoproto_getters\x12\x1f.google.protobuf.MessageOptions\x18\x81\xf4\x03 \x01(\x08:;\n\x10goproto_stringer\x12\x1f.google.protobuf.MessageOptions\x18\x83\xf4\x03 \x01(\x08:8\n\rverbose_equal\x12\x1f.google.protobuf.MessageOptions\x18\x84\xf4\x03 \x01(\x08:/\n\x04\x66\x61\x63\x65\x12\x1f.google.protobuf.MessageOptions\x18\x85\xf4\x03 \x01(\x08:3\n\x08gostring\x12\x1f.google.protobuf.MessageOptions\x18\x86\xf4\x03 \x01(\x08:3\n\x08populate\x12\x1f.google.protobuf.MessageOptions\x18\x87\xf4\x03 \x01(\x08:3\n\x08stringer\x12\x1f.google.protobuf.MessageOptions\x18\xc0\x8b\x04 \x01(\x08:2\n\x07onlyone\x12\x1f.google.protobuf.MessageOptions\x18\x89\xf4\x03 \x01(\x08:0\n\x05\x65qual\x12\x1f.google.protobuf.MessageOptions\x18\x8d\xf4\x03 \x01(\x08:6\n\x0b\x64\x65scription\x12\x1f.google.protobuf.MessageOptions\x18\x8e\xf4\x03 \x01(\x08:2\n\x07testgen\x12\x1f.google.protobuf.MessageOptions\x18\x8f\xf4\x03 \x01(\x08:3\n\x08\x62\x65nchgen\x12\x1f.google.protobuf.MessageOptions\x18\x90\xf4\x03 \x01(\x08:4\n\tmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x91\xf4\x03 \x01(\x08:6\n\x0bunmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x92\xf4\x03 \x01(\x08:;\n\x10stable_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x93\xf4\x03 \x01(\x08:0\n\x05sizer\x12\x1f.google.protobuf.MessageOptions\x18\x94\xf4\x03 \x01(\x08:;\n\x10unsafe_marshaler\x12\x1f.google.protobuf.MessageOptions\x18\x97\xf4\x03 \x01(\x08:=\n\x12unsafe_unmarshaler\x12\x1f.google.protobuf.MessageOptions\x18\x98\xf4\x03 \x01(\x08:A\n\x16goproto_extensions_map\x12\x1f.google.protobuf.MessageOptions\x18\x99\xf4\x03 \x01(\x08:?\n\x14goproto_unrecognized\x12\x1f.google.protobuf.MessageOptions\x18\x9a\xf4\x03 \x01(\x08:5\n\nprotosizer\x12\x1f.google.protobuf.MessageOptions\x18\x9c\xf4\x03 \x01(\x08:2\n\x07\x63ompare\x12\x1f.google.protobuf.MessageOptions\x18\x9d\xf4\x03 \x01(\x08:3\n\x08typedecl\x12\x1f.google.protobuf.MessageOptions\x18\x9e\xf4\x03 \x01(\x08:6\n\x0bmessagename\x12\x1f.google.protobuf.MessageOptions\x18\xa1\xf4\x03 \x01(\x08:<\n\x11goproto_sizecache\x12\x1f.google.protobuf.MessageOptions\x18\xa2\xf4\x03 \x01(\x08::\n\x0fgoproto_unkeyed\x12\x1f.google.protobuf.MessageOptions\x18\xa3\xf4\x03 
\x01(\x08:1\n\x08nullable\x12\x1d.google.protobuf.FieldOptions\x18\xe9\xfb\x03 \x01(\x08:.\n\x05\x65mbed\x12\x1d.google.protobuf.FieldOptions\x18\xea\xfb\x03 \x01(\x08:3\n\ncustomtype\x12\x1d.google.protobuf.FieldOptions\x18\xeb\xfb\x03 \x01(\t:3\n\ncustomname\x12\x1d.google.protobuf.FieldOptions\x18\xec\xfb\x03 \x01(\t:0\n\x07jsontag\x12\x1d.google.protobuf.FieldOptions\x18\xed\xfb\x03 \x01(\t:1\n\x08moretags\x12\x1d.google.protobuf.FieldOptions\x18\xee\xfb\x03 \x01(\t:1\n\x08\x63\x61sttype\x12\x1d.google.protobuf.FieldOptions\x18\xef\xfb\x03 \x01(\t:0\n\x07\x63\x61stkey\x12\x1d.google.protobuf.FieldOptions\x18\xf0\xfb\x03 \x01(\t:2\n\tcastvalue\x12\x1d.google.protobuf.FieldOptions\x18\xf1\xfb\x03 \x01(\t:0\n\x07stdtime\x12\x1d.google.protobuf.FieldOptions\x18\xf2\xfb\x03 \x01(\x08:4\n\x0bstdduration\x12\x1d.google.protobuf.FieldOptions\x18\xf3\xfb\x03 \x01(\x08:3\n\nwktpointer\x12\x1d.google.protobuf.FieldOptions\x18\xf4\xfb\x03 \x01(\x08\x42\x45\n\x13\x63om.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "gogoproto.gogo_pb2", globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_prefix) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(goproto_enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_stringer) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enum_customname) + google_dot_protobuf_dot_descriptor__pb2.EnumOptions.RegisterExtension(enumdecl) + google_dot_protobuf_dot_descriptor__pb2.EnumValueOptions.RegisterExtension(enumvalue_customname) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_getters_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_prefix_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(verbose_equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(face_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gostring_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(populate_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(onlyone_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(equal_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(description_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(testgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(benchgen_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(stable_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(sizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_enum_stringer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enum_stringer_all) + 
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_marshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(unsafe_unmarshaler_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension( + goproto_extensions_map_all + ) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unrecognized_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(gogoproto_import) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(protosizer_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(compare_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(typedecl_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(enumdecl_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_registration) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(messagename_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_sizecache_all) + google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(goproto_unkeyed_all) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_getters) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(verbose_equal) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(face) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(gostring) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(populate) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stringer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(onlyone) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(equal) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(description) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(testgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(benchgen) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(stable_marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(sizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_marshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(unsafe_unmarshaler) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_extensions_map) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unrecognized) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(protosizer) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(compare) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(typedecl) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(messagename) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_sizecache) + google_dot_protobuf_dot_descriptor__pb2.MessageOptions.RegisterExtension(goproto_unkeyed) + 
google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(nullable) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(embed) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customtype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(customname) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(jsontag) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(moretags) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(casttype) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castkey) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(castvalue) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdtime) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(stdduration) + google_dot_protobuf_dot_descriptor__pb2.FieldOptions.RegisterExtension(wktpointer) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = ( + b'\n\023com.google.protobufB\nGoGoProtosZ"github.com/gogo/protobuf/gogoproto' + ) +# @@protoc_insertion_point(module_scope) diff --git a/duetector/analyzer/jaeger/proto/google/api/annotations_pb2.py b/duetector/analyzer/jaeger/proto/google/api/annotations_pb2.py new file mode 100644 index 0000000..ceddce4 --- /dev/null +++ b/duetector/analyzer/jaeger/proto/google/api/annotations_pb2.py @@ -0,0 +1,29 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: google/api/annotations.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import http_pb2 as google_dot_api_dot_http__pb2 +from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2 + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x1cgoogle/api/annotations.proto\x12\ngoogle.api\x1a\x15google/api/http.proto\x1a google/protobuf/descriptor.proto:E\n\x04http\x12\x1e.google.protobuf.MethodOptions\x18\xb0\xca\xbc" \x01(\x0b\x32\x14.google.api.HttpRuleBn\n\x0e\x63om.google.apiB\x10\x41nnotationsProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xa2\x02\x04GAPIb\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "google.api.annotations_pb2", globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + google_dot_protobuf_dot_descriptor__pb2.MethodOptions.RegisterExtension(http) + + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\020AnnotationsProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\242\002\004GAPI" +# @@protoc_insertion_point(module_scope) diff --git a/duetector/analyzer/jaeger/proto/google/api/http_pb2.py b/duetector/analyzer/jaeger/proto/google/api/http_pb2.py new file mode 100644 index 0000000..6f741cf --- /dev/null +++ b/duetector/analyzer/jaeger/proto/google/api/http_pb2.py @@ -0,0 +1,30 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! 
+# source: google/api/http.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( + b'\n\x15google/api/http.proto\x12\ngoogle.api"T\n\x04Http\x12#\n\x05rules\x18\x01 \x03(\x0b\x32\x14.google.api.HttpRule\x12\'\n\x1f\x66ully_decode_reserved_expansion\x18\x02 \x01(\x08"\x81\x02\n\x08HttpRule\x12\x10\n\x08selector\x18\x01 \x01(\t\x12\r\n\x03get\x18\x02 \x01(\tH\x00\x12\r\n\x03put\x18\x03 \x01(\tH\x00\x12\x0e\n\x04post\x18\x04 \x01(\tH\x00\x12\x10\n\x06\x64\x65lete\x18\x05 \x01(\tH\x00\x12\x0f\n\x05patch\x18\x06 \x01(\tH\x00\x12/\n\x06\x63ustom\x18\x08 \x01(\x0b\x32\x1d.google.api.CustomHttpPatternH\x00\x12\x0c\n\x04\x62ody\x18\x07 \x01(\t\x12\x15\n\rresponse_body\x18\x0c \x01(\t\x12\x31\n\x13\x61\x64\x64itional_bindings\x18\x0b \x03(\x0b\x32\x14.google.api.HttpRuleB\t\n\x07pattern"/\n\x11\x43ustomHttpPattern\x12\x0c\n\x04kind\x18\x01 \x01(\t\x12\x0c\n\x04path\x18\x02 \x01(\tBj\n\x0e\x63om.google.apiB\tHttpProtoP\x01ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\xf8\x01\x01\xa2\x02\x04GAPIb\x06proto3' +) + +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, globals()) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "google.api.http_pb2", globals()) +if _descriptor._USE_C_DESCRIPTORS == False: + DESCRIPTOR._options = None + DESCRIPTOR._serialized_options = b"\n\016com.google.apiB\tHttpProtoP\001ZAgoogle.golang.org/genproto/googleapis/api/annotations;annotations\370\001\001\242\002\004GAPI" + _HTTP._serialized_start = 37 + _HTTP._serialized_end = 121 + _HTTPRULE._serialized_start = 124 + _HTTPRULE._serialized_end = 381 + _CUSTOMHTTPPATTERN._serialized_start = 383 + _CUSTOMHTTPPATTERN._serialized_end = 430 +# @@protoc_insertion_point(module_scope) diff --git a/duetector/analyzer/jaeger/proto/model_pb2.py b/duetector/analyzer/jaeger/proto/model_pb2.py new file mode 100644 index 0000000..1fb979b --- /dev/null +++ b/duetector/analyzer/jaeger/proto/model_pb2.py @@ -0,0 +1 @@ +from opentelemetry.exporter.jaeger.proto.grpc.gen.model_pb2 import * diff --git a/duetector/analyzer/jaeger/proto/model_pb2.pyi b/duetector/analyzer/jaeger/proto/model_pb2.pyi new file mode 100644 index 0000000..c9af90f --- /dev/null +++ b/duetector/analyzer/jaeger/proto/model_pb2.pyi @@ -0,0 +1,206 @@ +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf import duration_pb2 as _duration_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ( + ClassVar as _ClassVar, + Iterable as _Iterable, + Mapping as _Mapping, + Optional as _Optional, + Union as _Union, +) + +BINARY: ValueType +BOOL: ValueType +CHILD_OF: SpanRefType +DESCRIPTOR: _descriptor.FileDescriptor +FLOAT64: ValueType +FOLLOWS_FROM: SpanRefType +INT64: ValueType +STRING: ValueType + +class Batch(_message.Message): + __slots__ = ["process", "spans"] + PROCESS_FIELD_NUMBER: _ClassVar[int] + SPANS_FIELD_NUMBER: _ClassVar[int] + process: Process + spans: _containers.RepeatedCompositeFieldContainer[Span] + def __init__( + self, + spans: 
_Optional[_Iterable[_Union[Span, _Mapping]]] = ..., + process: _Optional[_Union[Process, _Mapping]] = ..., + ) -> None: ... + +class DependencyLink(_message.Message): + __slots__ = ["call_count", "child", "parent", "source"] + CALL_COUNT_FIELD_NUMBER: _ClassVar[int] + CHILD_FIELD_NUMBER: _ClassVar[int] + PARENT_FIELD_NUMBER: _ClassVar[int] + SOURCE_FIELD_NUMBER: _ClassVar[int] + call_count: int + child: str + parent: str + source: str + def __init__( + self, + parent: _Optional[str] = ..., + child: _Optional[str] = ..., + call_count: _Optional[int] = ..., + source: _Optional[str] = ..., + ) -> None: ... + +class KeyValue(_message.Message): + __slots__ = ["key", "v_binary", "v_bool", "v_float64", "v_int64", "v_str", "v_type"] + KEY_FIELD_NUMBER: _ClassVar[int] + V_BINARY_FIELD_NUMBER: _ClassVar[int] + V_BOOL_FIELD_NUMBER: _ClassVar[int] + V_FLOAT64_FIELD_NUMBER: _ClassVar[int] + V_INT64_FIELD_NUMBER: _ClassVar[int] + V_STR_FIELD_NUMBER: _ClassVar[int] + V_TYPE_FIELD_NUMBER: _ClassVar[int] + key: str + v_binary: bytes + v_bool: bool + v_float64: float + v_int64: int + v_str: str + v_type: ValueType + def __init__( + self, + key: _Optional[str] = ..., + v_type: _Optional[_Union[ValueType, str]] = ..., + v_str: _Optional[str] = ..., + v_bool: bool = ..., + v_int64: _Optional[int] = ..., + v_float64: _Optional[float] = ..., + v_binary: _Optional[bytes] = ..., + ) -> None: ... + +class Log(_message.Message): + __slots__ = ["fields", "timestamp"] + FIELDS_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + fields: _containers.RepeatedCompositeFieldContainer[KeyValue] + timestamp: _timestamp_pb2.Timestamp + def __init__( + self, + timestamp: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + fields: _Optional[_Iterable[_Union[KeyValue, _Mapping]]] = ..., + ) -> None: ... + +class Process(_message.Message): + __slots__ = ["service_name", "tags"] + SERVICE_NAME_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + service_name: str + tags: _containers.RepeatedCompositeFieldContainer[KeyValue] + def __init__( + self, + service_name: _Optional[str] = ..., + tags: _Optional[_Iterable[_Union[KeyValue, _Mapping]]] = ..., + ) -> None: ... 
+ +class Span(_message.Message): + __slots__ = [ + "duration", + "flags", + "logs", + "operation_name", + "process", + "process_id", + "references", + "span_id", + "start_time", + "tags", + "trace_id", + "warnings", + ] + DURATION_FIELD_NUMBER: _ClassVar[int] + FLAGS_FIELD_NUMBER: _ClassVar[int] + LOGS_FIELD_NUMBER: _ClassVar[int] + OPERATION_NAME_FIELD_NUMBER: _ClassVar[int] + PROCESS_FIELD_NUMBER: _ClassVar[int] + PROCESS_ID_FIELD_NUMBER: _ClassVar[int] + REFERENCES_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + START_TIME_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + WARNINGS_FIELD_NUMBER: _ClassVar[int] + duration: _duration_pb2.Duration + flags: int + logs: _containers.RepeatedCompositeFieldContainer[Log] + operation_name: str + process: Process + process_id: str + references: _containers.RepeatedCompositeFieldContainer[SpanRef] + span_id: bytes + start_time: _timestamp_pb2.Timestamp + tags: _containers.RepeatedCompositeFieldContainer[KeyValue] + trace_id: bytes + warnings: _containers.RepeatedScalarFieldContainer[str] + def __init__( + self, + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + operation_name: _Optional[str] = ..., + references: _Optional[_Iterable[_Union[SpanRef, _Mapping]]] = ..., + flags: _Optional[int] = ..., + start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + duration: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., + tags: _Optional[_Iterable[_Union[KeyValue, _Mapping]]] = ..., + logs: _Optional[_Iterable[_Union[Log, _Mapping]]] = ..., + process: _Optional[_Union[Process, _Mapping]] = ..., + process_id: _Optional[str] = ..., + warnings: _Optional[_Iterable[str]] = ..., + ) -> None: ... + +class SpanRef(_message.Message): + __slots__ = ["ref_type", "span_id", "trace_id"] + REF_TYPE_FIELD_NUMBER: _ClassVar[int] + SPAN_ID_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + ref_type: SpanRefType + span_id: bytes + trace_id: bytes + def __init__( + self, + trace_id: _Optional[bytes] = ..., + span_id: _Optional[bytes] = ..., + ref_type: _Optional[_Union[SpanRefType, str]] = ..., + ) -> None: ... + +class Trace(_message.Message): + __slots__ = ["process_map", "spans", "warnings"] + + class ProcessMapping(_message.Message): + __slots__ = ["process", "process_id"] + PROCESS_FIELD_NUMBER: _ClassVar[int] + PROCESS_ID_FIELD_NUMBER: _ClassVar[int] + process: Process + process_id: str + def __init__( + self, + process_id: _Optional[str] = ..., + process: _Optional[_Union[Process, _Mapping]] = ..., + ) -> None: ... + PROCESS_MAP_FIELD_NUMBER: _ClassVar[int] + SPANS_FIELD_NUMBER: _ClassVar[int] + WARNINGS_FIELD_NUMBER: _ClassVar[int] + process_map: _containers.RepeatedCompositeFieldContainer[Trace.ProcessMapping] + spans: _containers.RepeatedCompositeFieldContainer[Span] + warnings: _containers.RepeatedScalarFieldContainer[str] + def __init__( + self, + spans: _Optional[_Iterable[_Union[Span, _Mapping]]] = ..., + process_map: _Optional[_Iterable[_Union[Trace.ProcessMapping, _Mapping]]] = ..., + warnings: _Optional[_Iterable[str]] = ..., + ) -> None: ... 
+ +class ValueType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] + +class SpanRefType(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = [] diff --git a/duetector/analyzer/jaeger/proto/query_pb2.py b/duetector/analyzer/jaeger/proto/query_pb2.py new file mode 100644 index 0000000..d4cd52b --- /dev/null +++ b/duetector/analyzer/jaeger/proto/query_pb2.py @@ -0,0 +1,1174 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# source: query.proto +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from google.protobuf import reflection as _reflection +from google.protobuf import symbol_database as _symbol_database + +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +import opentelemetry.exporter.jaeger.proto.grpc.gen.model_pb2 as model__pb2 +from gogoproto import gogo_pb2 as gogoproto_dot_gogo__pb2 +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.protobuf import duration_pb2 as google_dot_protobuf_dot_duration__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + +DESCRIPTOR = _descriptor.FileDescriptor( + name="query.proto", + package="jaeger.api_v2", + syntax="proto3", + serialized_options=b"\n\027io.jaegertracing.api_v2Z\006api_v2\310\342\036\001\320\342\036\001\340\342\036\001", + create_key=_descriptor._internal_create_key, + serialized_pb=b'\n\x0bquery.proto\x12\rjaeger.api_v2\x1a\x0bmodel.proto\x1a\x14gogoproto/gogo.proto\x1a\x1cgoogle/api/annotations.proto\x1a\x1fgoogle/protobuf/timestamp.proto\x1a\x1egoogle/protobuf/duration.proto"\xcf\x01\n\x0fGetTraceRequest\x12R\n\x08trace_id\x18\x01 \x01(\x0c\x42@\xc8\xde\x1f\x00\xda\xde\x1f-github.com/jaegertracing/jaeger/model.TraceID\xe2\xde\x1f\x07TraceID\x12\x34\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x04\x90\xdf\x1f\x01\x12\x32\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x04\x90\xdf\x1f\x01">\n\x12SpansResponseChunk\x12(\n\x05spans\x18\x01 \x03(\x0b\x32\x13.jaeger.api_v2.SpanB\x04\xc8\xde\x1f\x00"\xd3\x01\n\x13\x41rchiveTraceRequest\x12R\n\x08trace_id\x18\x01 \x01(\x0c\x42@\xc8\xde\x1f\x00\xda\xde\x1f-github.com/jaegertracing/jaeger/model.TraceID\xe2\xde\x1f\x07TraceID\x12\x34\n\nstart_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x04\x90\xdf\x1f\x01\x12\x32\n\x08\x65nd_time\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x04\x90\xdf\x1f\x01"\x16\n\x14\x41rchiveTraceResponse"\xb6\x03\n\x14TraceQueryParameters\x12\x14\n\x0cservice_name\x18\x01 \x01(\t\x12\x16\n\x0eoperation_name\x18\x02 \x01(\t\x12;\n\x04tags\x18\x03 \x03(\x0b\x32-.jaeger.api_v2.TraceQueryParameters.TagsEntry\x12<\n\x0estart_time_min\x18\x04 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\x90\xdf\x1f\x01\xc8\xde\x1f\x00\x12<\n\x0estart_time_max\x18\x05 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\x90\xdf\x1f\x01\xc8\xde\x1f\x00\x12\x39\n\x0c\x64uration_min\x18\x06 \x01(\x0b\x32\x19.google.protobuf.DurationB\x08\x98\xdf\x1f\x01\xc8\xde\x1f\x00\x12\x39\n\x0c\x64uration_max\x18\x07 \x01(\x0b\x32\x19.google.protobuf.DurationB\x08\x98\xdf\x1f\x01\xc8\xde\x1f\x00\x12\x14\n\x0csearch_depth\x18\x08 \x01(\x05\x1a+\n\tTagsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t:\x02\x38\x01"G\n\x11\x46indTracesRequest\x12\x32\n\x05query\x18\x01 
\x01(\x0b\x32#.jaeger.api_v2.TraceQueryParameters"\x14\n\x12GetServicesRequest"\'\n\x13GetServicesResponse\x12\x10\n\x08services\x18\x01 \x03(\t":\n\x14GetOperationsRequest\x12\x0f\n\x07service\x18\x01 \x01(\t\x12\x11\n\tspan_kind\x18\x02 \x01(\t",\n\tOperation\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x11\n\tspan_kind\x18\x02 \x01(\t"]\n\x15GetOperationsResponse\x12\x16\n\x0eoperationNames\x18\x01 \x03(\t\x12,\n\noperations\x18\x02 \x03(\x0b\x32\x18.jaeger.api_v2.Operation"\x8a\x01\n\x16GetDependenciesRequest\x12\x38\n\nstart_time\x18\x01 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\x90\xdf\x1f\x01\xc8\xde\x1f\x00\x12\x36\n\x08\x65nd_time\x18\x02 \x01(\x0b\x32\x1a.google.protobuf.TimestampB\x08\x90\xdf\x1f\x01\xc8\xde\x1f\x00"T\n\x17GetDependenciesResponse\x12\x39\n\x0c\x64\x65pendencies\x18\x01 \x03(\x0b\x32\x1d.jaeger.api_v2.DependencyLinkB\x04\xc8\xde\x1f\x00\x32\xad\x05\n\x0cQueryService\x12k\n\x08GetTrace\x12\x1e.jaeger.api_v2.GetTraceRequest\x1a!.jaeger.api_v2.SpansResponseChunk"\x1a\x82\xd3\xe4\x93\x02\x14\x12\x12/traces/{trace_id}0\x01\x12t\n\x0c\x41rchiveTrace\x12".jaeger.api_v2.ArchiveTraceRequest\x1a#.jaeger.api_v2.ArchiveTraceResponse"\x1b\x82\xd3\xe4\x93\x02\x15"\x13/archive/{trace_id}\x12g\n\nFindTraces\x12 .jaeger.api_v2.FindTracesRequest\x1a!.jaeger.api_v2.SpansResponseChunk"\x12\x82\xd3\xe4\x93\x02\x0c"\x07/search:\x01*0\x01\x12g\n\x0bGetServices\x12!.jaeger.api_v2.GetServicesRequest\x1a".jaeger.api_v2.GetServicesResponse"\x11\x82\xd3\xe4\x93\x02\x0b\x12\t/services\x12o\n\rGetOperations\x12#.jaeger.api_v2.GetOperationsRequest\x1a$.jaeger.api_v2.GetOperationsResponse"\x13\x82\xd3\xe4\x93\x02\r\x12\x0b/operations\x12w\n\x0fGetDependencies\x12%.jaeger.api_v2.GetDependenciesRequest\x1a&.jaeger.api_v2.GetDependenciesResponse"\x15\x82\xd3\xe4\x93\x02\x0f\x12\r/dependenciesB-\n\x17io.jaegertracing.api_v2Z\x06\x61pi_v2\xc8\xe2\x1e\x01\xd0\xe2\x1e\x01\xe0\xe2\x1e\x01\x62\x06proto3', + dependencies=[ + model__pb2.DESCRIPTOR, + gogoproto_dot_gogo__pb2.DESCRIPTOR, + google_dot_api_dot_annotations__pb2.DESCRIPTOR, + google_dot_protobuf_dot_timestamp__pb2.DESCRIPTOR, + google_dot_protobuf_dot_duration__pb2.DESCRIPTOR, + ], +) + + +_GETTRACEREQUEST = _descriptor.Descriptor( + name="GetTraceRequest", + full_name="jaeger.api_v2.GetTraceRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="trace_id", + full_name="jaeger.api_v2.GetTraceRequest.trace_id", + index=0, + number=1, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"", + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\310\336\037\000\332\336\037-github.com/jaegertracing/jaeger/model.TraceID\342\336\037\007TraceID", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="start_time", + full_name="jaeger.api_v2.GetTraceRequest.start_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="jaeger.api_v2.GetTraceRequest.end_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + 
default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=161, + serialized_end=368, +) + + +_SPANSRESPONSECHUNK = _descriptor.Descriptor( + name="SpansResponseChunk", + full_name="jaeger.api_v2.SpansResponseChunk", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="spans", + full_name="jaeger.api_v2.SpansResponseChunk.spans", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=370, + serialized_end=432, +) + + +_ARCHIVETRACEREQUEST = _descriptor.Descriptor( + name="ArchiveTraceRequest", + full_name="jaeger.api_v2.ArchiveTraceRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="trace_id", + full_name="jaeger.api_v2.ArchiveTraceRequest.trace_id", + index=0, + number=1, + type=12, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"", + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\310\336\037\000\332\336\037-github.com/jaegertracing/jaeger/model.TraceID\342\336\037\007TraceID", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="start_time", + full_name="jaeger.api_v2.ArchiveTraceRequest.start_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="jaeger.api_v2.ArchiveTraceRequest.end_time", + index=2, + number=3, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=435, + serialized_end=646, +) + + +_ARCHIVETRACERESPONSE = _descriptor.Descriptor( + name="ArchiveTraceResponse", + full_name="jaeger.api_v2.ArchiveTraceResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + 
is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=648, + serialized_end=670, +) + + +_TRACEQUERYPARAMETERS_TAGSENTRY = _descriptor.Descriptor( + name="TagsEntry", + full_name="jaeger.api_v2.TraceQueryParameters.TagsEntry", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="key", + full_name="jaeger.api_v2.TraceQueryParameters.TagsEntry.key", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="value", + full_name="jaeger.api_v2.TraceQueryParameters.TagsEntry.value", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=b"8\001", + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1068, + serialized_end=1111, +) + +_TRACEQUERYPARAMETERS = _descriptor.Descriptor( + name="TraceQueryParameters", + full_name="jaeger.api_v2.TraceQueryParameters", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="service_name", + full_name="jaeger.api_v2.TraceQueryParameters.service_name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="operation_name", + full_name="jaeger.api_v2.TraceQueryParameters.operation_name", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="tags", + full_name="jaeger.api_v2.TraceQueryParameters.tags", + index=2, + number=3, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="start_time_min", + full_name="jaeger.api_v2.TraceQueryParameters.start_time_min", + index=3, + number=4, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + 
name="start_time_max", + full_name="jaeger.api_v2.TraceQueryParameters.start_time_max", + index=4, + number=5, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="duration_min", + full_name="jaeger.api_v2.TraceQueryParameters.duration_min", + index=5, + number=6, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\230\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="duration_max", + full_name="jaeger.api_v2.TraceQueryParameters.duration_max", + index=6, + number=7, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\230\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="search_depth", + full_name="jaeger.api_v2.TraceQueryParameters.search_depth", + index=7, + number=8, + type=5, + cpp_type=1, + label=1, + has_default_value=False, + default_value=0, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[ + _TRACEQUERYPARAMETERS_TAGSENTRY, + ], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=673, + serialized_end=1111, +) + + +_FINDTRACESREQUEST = _descriptor.Descriptor( + name="FindTracesRequest", + full_name="jaeger.api_v2.FindTracesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="query", + full_name="jaeger.api_v2.FindTracesRequest.query", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1113, + serialized_end=1184, +) + + +_GETSERVICESREQUEST = _descriptor.Descriptor( + name="GetServicesRequest", + full_name="jaeger.api_v2.GetServicesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1186, + serialized_end=1206, +) + + +_GETSERVICESRESPONSE = _descriptor.Descriptor( + name="GetServicesResponse", + full_name="jaeger.api_v2.GetServicesResponse", + filename=None, + file=DESCRIPTOR, + 
containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="services", + full_name="jaeger.api_v2.GetServicesResponse.services", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1208, + serialized_end=1247, +) + + +_GETOPERATIONSREQUEST = _descriptor.Descriptor( + name="GetOperationsRequest", + full_name="jaeger.api_v2.GetOperationsRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="service", + full_name="jaeger.api_v2.GetOperationsRequest.service", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="span_kind", + full_name="jaeger.api_v2.GetOperationsRequest.span_kind", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1249, + serialized_end=1307, +) + + +_OPERATION = _descriptor.Descriptor( + name="Operation", + full_name="jaeger.api_v2.Operation", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="name", + full_name="jaeger.api_v2.Operation.name", + index=0, + number=1, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="span_kind", + full_name="jaeger.api_v2.Operation.span_kind", + index=1, + number=2, + type=9, + cpp_type=9, + label=1, + has_default_value=False, + default_value=b"".decode("utf-8"), + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1309, + serialized_end=1353, +) + + +_GETOPERATIONSRESPONSE = _descriptor.Descriptor( + name="GetOperationsResponse", + full_name="jaeger.api_v2.GetOperationsResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + 
create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="operationNames", + full_name="jaeger.api_v2.GetOperationsResponse.operationNames", + index=0, + number=1, + type=9, + cpp_type=9, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="operations", + full_name="jaeger.api_v2.GetOperationsResponse.operations", + index=1, + number=2, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=None, + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1355, + serialized_end=1448, +) + + +_GETDEPENDENCIESREQUEST = _descriptor.Descriptor( + name="GetDependenciesRequest", + full_name="jaeger.api_v2.GetDependenciesRequest", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="start_time", + full_name="jaeger.api_v2.GetDependenciesRequest.start_time", + index=0, + number=1, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + _descriptor.FieldDescriptor( + name="end_time", + full_name="jaeger.api_v2.GetDependenciesRequest.end_time", + index=1, + number=2, + type=11, + cpp_type=10, + label=1, + has_default_value=False, + default_value=None, + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\220\337\037\001\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1451, + serialized_end=1589, +) + + +_GETDEPENDENCIESRESPONSE = _descriptor.Descriptor( + name="GetDependenciesResponse", + full_name="jaeger.api_v2.GetDependenciesResponse", + filename=None, + file=DESCRIPTOR, + containing_type=None, + create_key=_descriptor._internal_create_key, + fields=[ + _descriptor.FieldDescriptor( + name="dependencies", + full_name="jaeger.api_v2.GetDependenciesResponse.dependencies", + index=0, + number=1, + type=11, + cpp_type=10, + label=3, + has_default_value=False, + default_value=[], + message_type=None, + enum_type=None, + containing_type=None, + is_extension=False, + extension_scope=None, + serialized_options=b"\310\336\037\000", + file=DESCRIPTOR, + create_key=_descriptor._internal_create_key, + ), + ], + extensions=[], + nested_types=[], + enum_types=[], + serialized_options=None, + is_extendable=False, + syntax="proto3", + extension_ranges=[], + oneofs=[], + serialized_start=1591, + serialized_end=1675, +) + +_GETTRACEREQUEST.fields_by_name[ + "start_time" +].message_type = 
google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GETTRACEREQUEST.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_SPANSRESPONSECHUNK.fields_by_name["spans"].message_type = model__pb2._SPAN +_ARCHIVETRACEREQUEST.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_ARCHIVETRACEREQUEST.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRACEQUERYPARAMETERS_TAGSENTRY.containing_type = _TRACEQUERYPARAMETERS +_TRACEQUERYPARAMETERS.fields_by_name["tags"].message_type = _TRACEQUERYPARAMETERS_TAGSENTRY +_TRACEQUERYPARAMETERS.fields_by_name[ + "start_time_min" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRACEQUERYPARAMETERS.fields_by_name[ + "start_time_max" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_TRACEQUERYPARAMETERS.fields_by_name[ + "duration_min" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_TRACEQUERYPARAMETERS.fields_by_name[ + "duration_max" +].message_type = google_dot_protobuf_dot_duration__pb2._DURATION +_FINDTRACESREQUEST.fields_by_name["query"].message_type = _TRACEQUERYPARAMETERS +_GETOPERATIONSRESPONSE.fields_by_name["operations"].message_type = _OPERATION +_GETDEPENDENCIESREQUEST.fields_by_name[ + "start_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GETDEPENDENCIESREQUEST.fields_by_name[ + "end_time" +].message_type = google_dot_protobuf_dot_timestamp__pb2._TIMESTAMP +_GETDEPENDENCIESRESPONSE.fields_by_name["dependencies"].message_type = model__pb2._DEPENDENCYLINK +DESCRIPTOR.message_types_by_name["GetTraceRequest"] = _GETTRACEREQUEST +DESCRIPTOR.message_types_by_name["SpansResponseChunk"] = _SPANSRESPONSECHUNK +DESCRIPTOR.message_types_by_name["ArchiveTraceRequest"] = _ARCHIVETRACEREQUEST +DESCRIPTOR.message_types_by_name["ArchiveTraceResponse"] = _ARCHIVETRACERESPONSE +DESCRIPTOR.message_types_by_name["TraceQueryParameters"] = _TRACEQUERYPARAMETERS +DESCRIPTOR.message_types_by_name["FindTracesRequest"] = _FINDTRACESREQUEST +DESCRIPTOR.message_types_by_name["GetServicesRequest"] = _GETSERVICESREQUEST +DESCRIPTOR.message_types_by_name["GetServicesResponse"] = _GETSERVICESRESPONSE +DESCRIPTOR.message_types_by_name["GetOperationsRequest"] = _GETOPERATIONSREQUEST +DESCRIPTOR.message_types_by_name["Operation"] = _OPERATION +DESCRIPTOR.message_types_by_name["GetOperationsResponse"] = _GETOPERATIONSRESPONSE +DESCRIPTOR.message_types_by_name["GetDependenciesRequest"] = _GETDEPENDENCIESREQUEST +DESCRIPTOR.message_types_by_name["GetDependenciesResponse"] = _GETDEPENDENCIESRESPONSE +_sym_db.RegisterFileDescriptor(DESCRIPTOR) + +GetTraceRequest = _reflection.GeneratedProtocolMessageType( + "GetTraceRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETTRACEREQUEST, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetTraceRequest) + }, +) +_sym_db.RegisterMessage(GetTraceRequest) + +SpansResponseChunk = _reflection.GeneratedProtocolMessageType( + "SpansResponseChunk", + (_message.Message,), + { + "DESCRIPTOR": _SPANSRESPONSECHUNK, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.SpansResponseChunk) + }, +) +_sym_db.RegisterMessage(SpansResponseChunk) + +ArchiveTraceRequest = _reflection.GeneratedProtocolMessageType( + "ArchiveTraceRequest", + (_message.Message,), + { + "DESCRIPTOR": _ARCHIVETRACEREQUEST, + "__module__": "query_pb2" + # 
@@protoc_insertion_point(class_scope:jaeger.api_v2.ArchiveTraceRequest) + }, +) +_sym_db.RegisterMessage(ArchiveTraceRequest) + +ArchiveTraceResponse = _reflection.GeneratedProtocolMessageType( + "ArchiveTraceResponse", + (_message.Message,), + { + "DESCRIPTOR": _ARCHIVETRACERESPONSE, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.ArchiveTraceResponse) + }, +) +_sym_db.RegisterMessage(ArchiveTraceResponse) + +TraceQueryParameters = _reflection.GeneratedProtocolMessageType( + "TraceQueryParameters", + (_message.Message,), + { + "TagsEntry": _reflection.GeneratedProtocolMessageType( + "TagsEntry", + (_message.Message,), + { + "DESCRIPTOR": _TRACEQUERYPARAMETERS_TAGSENTRY, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.TraceQueryParameters.TagsEntry) + }, + ), + "DESCRIPTOR": _TRACEQUERYPARAMETERS, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.TraceQueryParameters) + }, +) +_sym_db.RegisterMessage(TraceQueryParameters) +_sym_db.RegisterMessage(TraceQueryParameters.TagsEntry) + +FindTracesRequest = _reflection.GeneratedProtocolMessageType( + "FindTracesRequest", + (_message.Message,), + { + "DESCRIPTOR": _FINDTRACESREQUEST, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.FindTracesRequest) + }, +) +_sym_db.RegisterMessage(FindTracesRequest) + +GetServicesRequest = _reflection.GeneratedProtocolMessageType( + "GetServicesRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETSERVICESREQUEST, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetServicesRequest) + }, +) +_sym_db.RegisterMessage(GetServicesRequest) + +GetServicesResponse = _reflection.GeneratedProtocolMessageType( + "GetServicesResponse", + (_message.Message,), + { + "DESCRIPTOR": _GETSERVICESRESPONSE, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetServicesResponse) + }, +) +_sym_db.RegisterMessage(GetServicesResponse) + +GetOperationsRequest = _reflection.GeneratedProtocolMessageType( + "GetOperationsRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETOPERATIONSREQUEST, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetOperationsRequest) + }, +) +_sym_db.RegisterMessage(GetOperationsRequest) + +Operation = _reflection.GeneratedProtocolMessageType( + "Operation", + (_message.Message,), + { + "DESCRIPTOR": _OPERATION, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.Operation) + }, +) +_sym_db.RegisterMessage(Operation) + +GetOperationsResponse = _reflection.GeneratedProtocolMessageType( + "GetOperationsResponse", + (_message.Message,), + { + "DESCRIPTOR": _GETOPERATIONSRESPONSE, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetOperationsResponse) + }, +) +_sym_db.RegisterMessage(GetOperationsResponse) + +GetDependenciesRequest = _reflection.GeneratedProtocolMessageType( + "GetDependenciesRequest", + (_message.Message,), + { + "DESCRIPTOR": _GETDEPENDENCIESREQUEST, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetDependenciesRequest) + }, +) +_sym_db.RegisterMessage(GetDependenciesRequest) + +GetDependenciesResponse = _reflection.GeneratedProtocolMessageType( + "GetDependenciesResponse", + (_message.Message,), + { + "DESCRIPTOR": _GETDEPENDENCIESRESPONSE, + "__module__": "query_pb2" + # @@protoc_insertion_point(class_scope:jaeger.api_v2.GetDependenciesResponse) + }, +) 
+_sym_db.RegisterMessage(GetDependenciesResponse) + + +DESCRIPTOR._options = None +_GETTRACEREQUEST.fields_by_name["trace_id"]._options = None +_GETTRACEREQUEST.fields_by_name["start_time"]._options = None +_GETTRACEREQUEST.fields_by_name["end_time"]._options = None +_SPANSRESPONSECHUNK.fields_by_name["spans"]._options = None +_ARCHIVETRACEREQUEST.fields_by_name["trace_id"]._options = None +_ARCHIVETRACEREQUEST.fields_by_name["start_time"]._options = None +_ARCHIVETRACEREQUEST.fields_by_name["end_time"]._options = None +_TRACEQUERYPARAMETERS_TAGSENTRY._options = None +_TRACEQUERYPARAMETERS.fields_by_name["start_time_min"]._options = None +_TRACEQUERYPARAMETERS.fields_by_name["start_time_max"]._options = None +_TRACEQUERYPARAMETERS.fields_by_name["duration_min"]._options = None +_TRACEQUERYPARAMETERS.fields_by_name["duration_max"]._options = None +_GETDEPENDENCIESREQUEST.fields_by_name["start_time"]._options = None +_GETDEPENDENCIESREQUEST.fields_by_name["end_time"]._options = None +_GETDEPENDENCIESRESPONSE.fields_by_name["dependencies"]._options = None + +_QUERYSERVICE = _descriptor.ServiceDescriptor( + name="QueryService", + full_name="jaeger.api_v2.QueryService", + file=DESCRIPTOR, + index=0, + serialized_options=None, + create_key=_descriptor._internal_create_key, + serialized_start=1678, + serialized_end=2363, + methods=[ + _descriptor.MethodDescriptor( + name="GetTrace", + full_name="jaeger.api_v2.QueryService.GetTrace", + index=0, + containing_service=None, + input_type=_GETTRACEREQUEST, + output_type=_SPANSRESPONSECHUNK, + serialized_options=b"\202\323\344\223\002\024\022\022/traces/{trace_id}", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="ArchiveTrace", + full_name="jaeger.api_v2.QueryService.ArchiveTrace", + index=1, + containing_service=None, + input_type=_ARCHIVETRACEREQUEST, + output_type=_ARCHIVETRACERESPONSE, + serialized_options=b'\202\323\344\223\002\025"\023/archive/{trace_id}', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="FindTraces", + full_name="jaeger.api_v2.QueryService.FindTraces", + index=2, + containing_service=None, + input_type=_FINDTRACESREQUEST, + output_type=_SPANSRESPONSECHUNK, + serialized_options=b'\202\323\344\223\002\014"\007/search:\001*', + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="GetServices", + full_name="jaeger.api_v2.QueryService.GetServices", + index=3, + containing_service=None, + input_type=_GETSERVICESREQUEST, + output_type=_GETSERVICESRESPONSE, + serialized_options=b"\202\323\344\223\002\013\022\t/services", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="GetOperations", + full_name="jaeger.api_v2.QueryService.GetOperations", + index=4, + containing_service=None, + input_type=_GETOPERATIONSREQUEST, + output_type=_GETOPERATIONSRESPONSE, + serialized_options=b"\202\323\344\223\002\r\022\013/operations", + create_key=_descriptor._internal_create_key, + ), + _descriptor.MethodDescriptor( + name="GetDependencies", + full_name="jaeger.api_v2.QueryService.GetDependencies", + index=5, + containing_service=None, + input_type=_GETDEPENDENCIESREQUEST, + output_type=_GETDEPENDENCIESRESPONSE, + serialized_options=b"\202\323\344\223\002\017\022\r/dependencies", + create_key=_descriptor._internal_create_key, + ), + ], +) +_sym_db.RegisterServiceDescriptor(_QUERYSERVICE) + +DESCRIPTOR.services_by_name["QueryService"] = _QUERYSERVICE + +# @@protoc_insertion_point(module_scope) 
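The generated bindings above expose Jaeger's `QueryService` as plain protobuf messages plus a gRPC stub (added in the next file). A minimal sketch of how they can be driven over an asynchronous channel, assuming a locally reachable query endpoint and a placeholder service name (both are illustrative assumptions, not values from this patch):

```python
# Illustrative sketch only; the endpoint and service name are assumptions.
import grpc

from duetector.analyzer.jaeger.proto.query_pb2 import FindTracesRequest, TraceQueryParameters
from duetector.analyzer.jaeger.proto.query_pb2_grpc import QueryServiceStub


async def dump_operation_names(endpoint: str = "127.0.0.1:16685") -> None:
    # Jaeger's query API listens on gRPC port 16685 by default.
    async with grpc.aio.insecure_channel(endpoint) as channel:
        stub = QueryServiceStub(channel)
        query = TraceQueryParameters(service_name="duetector-demo", search_depth=20)
        # FindTraces is a server-streaming RPC; each chunk carries a batch of spans.
        async for chunk in stub.FindTraces(FindTracesRequest(query=query)):
            for span in chunk.spans:
                print(span.operation_name)
```

The `JaegerAnalyzer` added in this patch opens its channel the same way via `channel_initializer()`, as exercised by the integration test at the end of the patch.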
diff --git a/duetector/analyzer/jaeger/proto/query_pb2.pyi b/duetector/analyzer/jaeger/proto/query_pb2.pyi new file mode 100644 index 0000000..dbe57be --- /dev/null +++ b/duetector/analyzer/jaeger/proto/query_pb2.pyi @@ -0,0 +1,170 @@ +import model_pb2 as _model_pb2 +from google.api import annotations_pb2 as _annotations_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf import duration_pb2 as _duration_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from typing import ( + ClassVar as _ClassVar, + Iterable as _Iterable, + Mapping as _Mapping, + Optional as _Optional, + Union as _Union, +) + +DESCRIPTOR: _descriptor.FileDescriptor + +class ArchiveTraceRequest(_message.Message): + __slots__ = ["end_time", "start_time", "trace_id"] + END_TIME_FIELD_NUMBER: _ClassVar[int] + START_TIME_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + end_time: _timestamp_pb2.Timestamp + start_time: _timestamp_pb2.Timestamp + trace_id: bytes + def __init__( + self, + trace_id: _Optional[bytes] = ..., + start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + ) -> None: ... + +class ArchiveTraceResponse(_message.Message): + __slots__ = [] + def __init__(self) -> None: ... + +class FindTracesRequest(_message.Message): + __slots__ = ["query"] + QUERY_FIELD_NUMBER: _ClassVar[int] + query: TraceQueryParameters + def __init__(self, query: _Optional[_Union[TraceQueryParameters, _Mapping]] = ...) -> None: ... + +class GetDependenciesRequest(_message.Message): + __slots__ = ["end_time", "start_time"] + END_TIME_FIELD_NUMBER: _ClassVar[int] + START_TIME_FIELD_NUMBER: _ClassVar[int] + end_time: _timestamp_pb2.Timestamp + start_time: _timestamp_pb2.Timestamp + def __init__( + self, + start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + ) -> None: ... + +class GetDependenciesResponse(_message.Message): + __slots__ = ["dependencies"] + DEPENDENCIES_FIELD_NUMBER: _ClassVar[int] + dependencies: _containers.RepeatedCompositeFieldContainer[_model_pb2.DependencyLink] + def __init__( + self, + dependencies: _Optional[_Iterable[_Union[_model_pb2.DependencyLink, _Mapping]]] = ..., + ) -> None: ... + +class GetOperationsRequest(_message.Message): + __slots__ = ["service", "span_kind"] + SERVICE_FIELD_NUMBER: _ClassVar[int] + SPAN_KIND_FIELD_NUMBER: _ClassVar[int] + service: str + span_kind: str + def __init__(self, service: _Optional[str] = ..., span_kind: _Optional[str] = ...) -> None: ... + +class GetOperationsResponse(_message.Message): + __slots__ = ["operationNames", "operations"] + OPERATIONNAMES_FIELD_NUMBER: _ClassVar[int] + OPERATIONS_FIELD_NUMBER: _ClassVar[int] + operationNames: _containers.RepeatedScalarFieldContainer[str] + operations: _containers.RepeatedCompositeFieldContainer[Operation] + def __init__( + self, + operationNames: _Optional[_Iterable[str]] = ..., + operations: _Optional[_Iterable[_Union[Operation, _Mapping]]] = ..., + ) -> None: ... + +class GetServicesRequest(_message.Message): + __slots__ = [] + def __init__(self) -> None: ... 
+ +class GetServicesResponse(_message.Message): + __slots__ = ["services"] + SERVICES_FIELD_NUMBER: _ClassVar[int] + services: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, services: _Optional[_Iterable[str]] = ...) -> None: ... + +class GetTraceRequest(_message.Message): + __slots__ = ["end_time", "start_time", "trace_id"] + END_TIME_FIELD_NUMBER: _ClassVar[int] + START_TIME_FIELD_NUMBER: _ClassVar[int] + TRACE_ID_FIELD_NUMBER: _ClassVar[int] + end_time: _timestamp_pb2.Timestamp + start_time: _timestamp_pb2.Timestamp + trace_id: bytes + def __init__( + self, + trace_id: _Optional[bytes] = ..., + start_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + end_time: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + ) -> None: ... + +class Operation(_message.Message): + __slots__ = ["name", "span_kind"] + NAME_FIELD_NUMBER: _ClassVar[int] + SPAN_KIND_FIELD_NUMBER: _ClassVar[int] + name: str + span_kind: str + def __init__(self, name: _Optional[str] = ..., span_kind: _Optional[str] = ...) -> None: ... + +class SpansResponseChunk(_message.Message): + __slots__ = ["spans"] + SPANS_FIELD_NUMBER: _ClassVar[int] + spans: _containers.RepeatedCompositeFieldContainer[_model_pb2.Span] + def __init__( + self, spans: _Optional[_Iterable[_Union[_model_pb2.Span, _Mapping]]] = ... + ) -> None: ... + +class TraceQueryParameters(_message.Message): + __slots__ = [ + "duration_max", + "duration_min", + "operation_name", + "search_depth", + "service_name", + "start_time_max", + "start_time_min", + "tags", + ] + + class TagsEntry(_message.Message): + __slots__ = ["key", "value"] + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + DURATION_MAX_FIELD_NUMBER: _ClassVar[int] + DURATION_MIN_FIELD_NUMBER: _ClassVar[int] + OPERATION_NAME_FIELD_NUMBER: _ClassVar[int] + SEARCH_DEPTH_FIELD_NUMBER: _ClassVar[int] + SERVICE_NAME_FIELD_NUMBER: _ClassVar[int] + START_TIME_MAX_FIELD_NUMBER: _ClassVar[int] + START_TIME_MIN_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + duration_max: _duration_pb2.Duration + duration_min: _duration_pb2.Duration + operation_name: str + search_depth: int + service_name: str + start_time_max: _timestamp_pb2.Timestamp + start_time_min: _timestamp_pb2.Timestamp + tags: _containers.ScalarMap[str, str] + def __init__( + self, + service_name: _Optional[str] = ..., + operation_name: _Optional[str] = ..., + tags: _Optional[_Mapping[str, str]] = ..., + start_time_min: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + start_time_max: _Optional[_Union[_timestamp_pb2.Timestamp, _Mapping]] = ..., + duration_min: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., + duration_max: _Optional[_Union[_duration_pb2.Duration, _Mapping]] = ..., + search_depth: _Optional[int] = ..., + ) -> None: ... diff --git a/duetector/analyzer/jaeger/proto/query_pb2_grpc.py b/duetector/analyzer/jaeger/proto/query_pb2_grpc.py new file mode 100644 index 0000000..83d0417 --- /dev/null +++ b/duetector/analyzer/jaeger/proto/query_pb2_grpc.py @@ -0,0 +1,304 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
+"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +import duetector.analyzer.jaeger.proto.query_pb2 as query__pb2 + + +class QueryServiceStub(object): + """Missing associated documentation comment in .proto file.""" + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.GetTrace = channel.unary_stream( + "/jaeger.api_v2.QueryService/GetTrace", + request_serializer=query__pb2.GetTraceRequest.SerializeToString, + response_deserializer=query__pb2.SpansResponseChunk.FromString, + ) + self.ArchiveTrace = channel.unary_unary( + "/jaeger.api_v2.QueryService/ArchiveTrace", + request_serializer=query__pb2.ArchiveTraceRequest.SerializeToString, + response_deserializer=query__pb2.ArchiveTraceResponse.FromString, + ) + self.FindTraces = channel.unary_stream( + "/jaeger.api_v2.QueryService/FindTraces", + request_serializer=query__pb2.FindTracesRequest.SerializeToString, + response_deserializer=query__pb2.SpansResponseChunk.FromString, + ) + self.GetServices = channel.unary_unary( + "/jaeger.api_v2.QueryService/GetServices", + request_serializer=query__pb2.GetServicesRequest.SerializeToString, + response_deserializer=query__pb2.GetServicesResponse.FromString, + ) + self.GetOperations = channel.unary_unary( + "/jaeger.api_v2.QueryService/GetOperations", + request_serializer=query__pb2.GetOperationsRequest.SerializeToString, + response_deserializer=query__pb2.GetOperationsResponse.FromString, + ) + self.GetDependencies = channel.unary_unary( + "/jaeger.api_v2.QueryService/GetDependencies", + request_serializer=query__pb2.GetDependenciesRequest.SerializeToString, + response_deserializer=query__pb2.GetDependenciesResponse.FromString, + ) + + +class QueryServiceServicer(object): + """Missing associated documentation comment in .proto file.""" + + def GetTrace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def ArchiveTrace(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def FindTraces(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetServices(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetOperations(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + def GetDependencies(self, request, context): + """Missing associated documentation comment in .proto file.""" + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") + + +def add_QueryServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + "GetTrace": 
grpc.unary_stream_rpc_method_handler( + servicer.GetTrace, + request_deserializer=query__pb2.GetTraceRequest.FromString, + response_serializer=query__pb2.SpansResponseChunk.SerializeToString, + ), + "ArchiveTrace": grpc.unary_unary_rpc_method_handler( + servicer.ArchiveTrace, + request_deserializer=query__pb2.ArchiveTraceRequest.FromString, + response_serializer=query__pb2.ArchiveTraceResponse.SerializeToString, + ), + "FindTraces": grpc.unary_stream_rpc_method_handler( + servicer.FindTraces, + request_deserializer=query__pb2.FindTracesRequest.FromString, + response_serializer=query__pb2.SpansResponseChunk.SerializeToString, + ), + "GetServices": grpc.unary_unary_rpc_method_handler( + servicer.GetServices, + request_deserializer=query__pb2.GetServicesRequest.FromString, + response_serializer=query__pb2.GetServicesResponse.SerializeToString, + ), + "GetOperations": grpc.unary_unary_rpc_method_handler( + servicer.GetOperations, + request_deserializer=query__pb2.GetOperationsRequest.FromString, + response_serializer=query__pb2.GetOperationsResponse.SerializeToString, + ), + "GetDependencies": grpc.unary_unary_rpc_method_handler( + servicer.GetDependencies, + request_deserializer=query__pb2.GetDependenciesRequest.FromString, + response_serializer=query__pb2.GetDependenciesResponse.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + "jaeger.api_v2.QueryService", rpc_method_handlers + ) + server.add_generic_rpc_handlers((generic_handler,)) + + +# This class is part of an EXPERIMENTAL API. +class QueryService(object): + """Missing associated documentation comment in .proto file.""" + + @staticmethod + def GetTrace( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/jaeger.api_v2.QueryService/GetTrace", + query__pb2.GetTraceRequest.SerializeToString, + query__pb2.SpansResponseChunk.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def ArchiveTrace( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/jaeger.api_v2.QueryService/ArchiveTrace", + query__pb2.ArchiveTraceRequest.SerializeToString, + query__pb2.ArchiveTraceResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def FindTraces( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_stream( + request, + target, + "/jaeger.api_v2.QueryService/FindTraces", + query__pb2.FindTracesRequest.SerializeToString, + query__pb2.SpansResponseChunk.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetServices( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + 
request, + target, + "/jaeger.api_v2.QueryService/GetServices", + query__pb2.GetServicesRequest.SerializeToString, + query__pb2.GetServicesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetOperations( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/jaeger.api_v2.QueryService/GetOperations", + query__pb2.GetOperationsRequest.SerializeToString, + query__pb2.GetOperationsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) + + @staticmethod + def GetDependencies( + request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None, + ): + return grpc.experimental.unary_unary( + request, + target, + "/jaeger.api_v2.QueryService/GetDependencies", + query__pb2.GetDependenciesRequest.SerializeToString, + query__pb2.GetDependenciesResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + ) diff --git a/duetector/analyzer/models.py b/duetector/analyzer/models.py index 1093278..8163530 100644 --- a/duetector/analyzer/models.py +++ b/duetector/analyzer/models.py @@ -1,10 +1,13 @@ from __future__ import annotations from datetime import datetime -from typing import Any, Dict, List, Optional, Set +from typing import Any, Dict, Optional, Set import pydantic +from duetector.analyzer.jaeger.proto import model_pb2 as JModel +from duetector.analyzer.jaeger.proto.model_pb2 import Span as JSpan + class Tracking(pydantic.BaseModel): """ @@ -54,6 +57,35 @@ class Tracking(pydantic.BaseModel): Extended fields, will be stored in ``extended`` field as a dict """ + @classmethod + def normalize_field(cls, field, data): + if field == "timestamp": + field = "dt" + data = datetime.fromtimestamp(data) + return field, data + + @classmethod + def from_jaeger_span(cls, tracer_name, span: JSpan) -> "Tracking": + value_type_to_field_attr = { + JModel.STRING: "v_str", + JModel.BOOL: "v_bool", + JModel.INT64: "v_int64", + JModel.FLOAT64: "v_float64", + JModel.BINARY: "v_binary", + } + + t = Tracking(tracer=tracer_name) + for msg in span.tags: + field = msg.key + data = getattr(msg, value_type_to_field_attr[msg.v_type]) + field, data = Tracking.normalize_field(field, data) + if field in Tracking.model_fields: + setattr(t, field, data) + else: + t.extended[field] = data + + return t + class Brief(pydantic.BaseModel): """ diff --git a/duetector/analyzer/register.py b/duetector/analyzer/register.py index 2692569..7822d49 100644 --- a/duetector/analyzer/register.py +++ b/duetector/analyzer/register.py @@ -1,4 +1,5 @@ # Expose for plugin system from . 
import db +from .jaeger import analyzer -registers = [db] +registers = [db, analyzer] diff --git a/duetector/collectors/base.py b/duetector/collectors/base.py index 7d8ff5f..58d120b 100644 --- a/duetector/collectors/base.py +++ b/duetector/collectors/base.py @@ -5,6 +5,7 @@ from duetector.config import Config, Configuable from duetector.extension.collector import hookimpl +from duetector.log import logger from .models import Tracking @@ -75,7 +76,10 @@ def emit(self, tracer, data: NamedTuple): if self.disabled: return - self._backend.submit(self._emit, Tracking.from_namedtuple(tracer, data)) + if not tracer: + logger.warning("Empty tracer, skip emit") + return + self._backend.submit(self._emit, Tracking.from_namedtuple(tracer, data)) def _emit(self, t: Tracking): """ diff --git a/duetector/collectors/models.py b/duetector/collectors/models.py index 865e0a5..c40a296 100644 --- a/duetector/collectors/models.py +++ b/duetector/collectors/models.py @@ -5,6 +5,7 @@ import pydantic +from duetector.log import logger from duetector.utils import get_boot_time_duration_ns @@ -111,8 +112,11 @@ def from_namedtuple(tracer, data: NamedTuple) -> Tracking: # type: ignore except Exception: # Process may already exit pass - - return Tracking(**args) + try: + return Tracking(**args) + except ValueError as e: + logger.error("Failed to create Tracking instance: %s", e) + logger.exception(e) def set_span(self, collector, span): for k in self.model_fields: @@ -126,9 +130,6 @@ def set_span(self, collector, span): span.set_attribute(k, v) span.set_attribute("collector_id", collector.id) - def span_name(self, collector): - return f"{self.tracer}@{collector.id}" - if __name__ == "__main__": Tracking(tracer="test", dt=datetime.now()) diff --git a/duetector/collectors/otel.py b/duetector/collectors/otel.py index 15e9b2e..461c924 100644 --- a/duetector/collectors/otel.py +++ b/duetector/collectors/otel.py @@ -22,9 +22,12 @@ from duetector.collectors.base import Collector from duetector.collectors.models import Tracking from duetector.extension.collector import hookimpl +from duetector.log import logger +from duetector.otel import OTelInspector +from duetector.utils import Singleton, get_grpc_cred_from_path -class OTelInitiator: +class OTelInitiator(metaclass=Singleton): """ Host the OpenTelemetry SDK and initialize the provider and exporter. @@ -79,8 +82,10 @@ def initialize( provider_kwargs: Optional[Dict[str, Any]] = None, exporter="console", exporter_kwargs: Optional[Dict[str, Any]] = None, + processor_kwargs: Optional[Dict[str, Any]] = None, ) -> None: if self._initialized: + logger.info("Already initiated. Skip...") return if not resource_kwargs: @@ -95,7 +100,11 @@ def initialize( if not exporter_kwargs: exporter_kwargs = {} - processor = BatchSpanProcessor(self.exporter_cls[exporter](**exporter_kwargs)) + if not processor_kwargs: + processor_kwargs = {} + processor = BatchSpanProcessor( + self.exporter_cls[exporter](**exporter_kwargs), **processor_kwargs + ) provider.add_span_processor(processor) trace.set_tracer_provider(provider) @@ -108,7 +117,7 @@ def shutdown(self): self.provider = None -class OTelCollector(Collector): +class OTelCollector(Collector, OTelInspector): """ A collector using OpenTelemetry SDK.
@@ -121,11 +130,21 @@ class OTelCollector(Collector): """ + service_prefix = "duetector" + service_sep = "-" + default_config = { **Collector.default_config, "disabled": True, "exporter": "console", "exporter_kwargs": {}, + "grpc_exporter_kwargs": { + "secure": False, + "root_certificates_path": "", + "private_key_path": "", + "certificate_chain_path": "", + }, + "processor_kwargs": {}, } @property @@ -138,24 +157,51 @@ def endpoint(self) -> Optional[str]: @property def exporter_kwargs(self) -> Dict[str, Any]: - return self.config.exporter_kwargs + return self.config.exporter_kwargs._config_dict + + @property + def processor_kwargs(self) -> Dict[str, Any]: + return self.config.processor_kwargs._config_dict @property def service_name(self) -> str: - return f"duetector-{self.id}" + return self.generate_service_name(self.id) + + @property + def grpc_exporter_kwargs(self) -> Dict[str, Any]: + kwargs = self.config.grpc_exporter_kwargs._config_dict + wrapped_kwargs = {} + if kwargs.get("secure"): + creds = get_grpc_cred_from_path( + root_certificates_path=kwargs.get("root_certificates_path"), + private_key_path=kwargs.get("private_key_path"), + certificate_chain_path=kwargs.get("certificate_chain_path"), + ) + wrapped_kwargs = { + "insecure": False, + "credentials": creds, + } + + return wrapped_kwargs def __init__(self, config: Optional[Dict[str, Any]] = None, *args, **kwargs): super().__init__(config, *args, **kwargs) + + if "grpc" in self.exporter: + logger.info("Merge grpc kwargs into exporter_kwargs") + self.exporter_kwargs.update(self.grpc_exporter_kwargs) + self.otel = OTelInitiator() self.otel.initialize( service_name=self.service_name, exporter=self.exporter, - exporter_kwargs=self.exporter_kwargs._config_dict, + exporter_kwargs=self.exporter_kwargs, + processor_kwargs=self.processor_kwargs, ) def _emit(self, t: Tracking): tracer = trace.get_tracer(self.id) - with tracer.start_as_current_span(t.span_name(self)) as span: + with tracer.start_as_current_span(self.generate_span_name(t)) as span: t.set_span(self, span) def summary(self) -> Dict: diff --git a/duetector/exceptions.py b/duetector/exceptions.py index 7d5627f..c84758d 100644 --- a/duetector/exceptions.py +++ b/duetector/exceptions.py @@ -12,3 +12,7 @@ class ConfigError(Exception): class ConfigFileNotFoundError(ConfigError): pass + + +class AnalysQueryError(ValueError): + pass diff --git a/duetector/otel.py b/duetector/otel.py new file mode 100644 index 0000000..58dd7df --- /dev/null +++ b/duetector/otel.py @@ -0,0 +1,29 @@ +from typing import Optional, Union + +from duetector.collectors.models import Tracking as CTracking + + +class OTelInspector: + service_prefix = "duetector" + service_sep = "-" + + @classmethod + def generate_service_name(cls, identifier: str) -> str: + return cls.service_sep.join([f"{cls.service_prefix}", f"{identifier}"]) + + @classmethod + def get_identifier(cls, service_name: str) -> Optional[str]: + if not service_name.startswith(cls.service_prefix): + return None + + return service_name.replace(cls.service_prefix + cls.service_sep, "") + + @classmethod + def generate_span_name(cls, t: Union[CTracking, str]) -> str: + if isinstance(t, str): + return t + return t.tracer + + @classmethod + def get_tracer_name(cls, span_name: str) -> str: + return span_name diff --git a/duetector/service/query/controller.py b/duetector/service/query/controller.py index 1736508..d64272d 100644 --- a/duetector/service/query/controller.py +++ b/duetector/service/query/controller.py @@ -4,6 +4,7 @@ from 
duetector.managers.analyzer import AnalyzerManager from duetector.service.base import Controller from duetector.service.exceptions import NotFoundError +from duetector.service.query.models import QueryBody class AnalyzerController(Controller): @@ -49,3 +50,9 @@ def get_analyzer(self, analyzer_name: str) -> Analyzer: if not a: raise NotFoundError(analyzer_name) return a + + def wrap_query_param(self, query_param: QueryBody) -> Dict[str, Any]: + model = query_param.model_dump() + + model["collector_ids"] = [model.pop("collector_id")] + return model diff --git a/duetector/service/query/models.py b/duetector/service/query/models.py index c74df63..26530ed 100644 --- a/duetector/service/query/models.py +++ b/duetector/service/query/models.py @@ -11,8 +11,8 @@ class AvaliableAnalyzers(BaseModel): class QueryBody(BaseModel): + collector_id: str tracers: Optional[List[str]] = None - collector_ids: Optional[List[str]] = None start_datetime: Optional[datetime] = None end_datetime: Optional[datetime] = None start: int = 0 @@ -28,7 +28,7 @@ class QueryBody(BaseModel): "examples": [ { "tracers": [], - "collector_ids": [], + "collector_id": "test-service", "start_datetime": datetime.fromtimestamp(0), "end_datetime": datetime.now(), "start": 0, diff --git a/duetector/service/query/routes.py b/duetector/service/query/routes.py index f25b889..060dc08 100644 --- a/duetector/service/query/routes.py +++ b/duetector/service/query/routes.py @@ -1,7 +1,4 @@ -from asyncio import sleep - from fastapi import APIRouter, Body, Depends -from fastapi.concurrency import run_in_threadpool from duetector.service.base import get_controller from duetector.service.query.controller import AnalyzerController @@ -11,6 +8,7 @@ QueryBody, QueryResult, ) +from duetector.service.utils import ensure_async r = APIRouter( prefix="/query", @@ -31,14 +29,15 @@ async def root( @r.post("/{analyzer_name}", response_model=QueryResult) async def query( analyzer_name: str, - query_param: QueryBody = Body(default=QueryBody()), + query_param: QueryBody = Body(default=QueryBody(collector_id="unknown-collector-id")), controller: AnalyzerController = Depends(get_controller(AnalyzerController)), ): """ Query data from analyzer """ analyzer = controller.get_analyzer(analyzer_name) - trackings = await run_in_threadpool(analyzer.query, **query_param.model_dump()) + query_param = controller.wrap_query_param(query_param) + trackings = await ensure_async(analyzer.query, **query_param) return QueryResult( trackings=trackings, @@ -53,7 +52,7 @@ async def query_brief( ): # type is not serializable, so we need to get analyzer without inspect type analyzer = controller.get_analyzer(analyzer_name) - brief = await run_in_threadpool(analyzer.brief, inspect_type=False) + brief = await ensure_async(analyzer.brief, inspect_type=False) return BriefResult( brief=brief, analyzer_name=analyzer_name, diff --git a/duetector/service/utils.py b/duetector/service/utils.py new file mode 100644 index 0000000..2da94ee --- /dev/null +++ b/duetector/service/utils.py @@ -0,0 +1,11 @@ +import asyncio + +from fastapi.concurrency import run_in_threadpool + + +async def ensure_async(f: callable, *args, **kwargs): + # await async function, run sync function in thread pool + if asyncio.iscoroutinefunction(f): + return await f(*args, **kwargs) + + return await run_in_threadpool(f, *args, **kwargs) diff --git a/duetector/static/config.toml b/duetector/static/config.toml index d053cdf..179c0ec 100644 --- a/duetector/static/config.toml +++ b/duetector/static/config.toml @@ -73,6 +73,14 @@ 
max_workers = 10 [collector.otelcollector.exporter_kwargs] +[collector.otelcollector.grpc_exporter_kwargs] +secure = false +root_certificates_path = "" +private_key_path = "" +certificate_chain_path = "" + +[collector.otelcollector.processor_kwargs] + [collector.dbcollector] disabled = false statis_id = "" @@ -98,6 +106,15 @@ max_workers = 10 disabled = false include_extension = true +[analyzer.jaegeranalyzer] +disabled = true +secure = false +root_certificates_path = "" +private_key_path = "" +certificate_chain_path = "" +host = "localhost" +port = 16685 + [analyzer.dbanalyzer] disabled = false diff --git a/duetector/utils.py b/duetector/utils.py index 1074773..5d3649c 100644 --- a/duetector/utils.py +++ b/duetector/utils.py @@ -1,11 +1,18 @@ +import os import threading from datetime import datetime, timedelta +from pathlib import Path +from typing import Optional, Union try: from functools import cache except ImportError: from functools import lru_cache as cache +import grpc + +from duetector.log import logger + class Singleton(type): _instances = {} @@ -43,6 +50,50 @@ def get_boot_time_duration_ns(ns) -> datetime: return get_boot_time() + timedelta(microseconds=ns / 1000) +@cache +def get_grpc_cred_from_path( + root_certificates_path: Optional[Union[str, Path]], + private_key_path: Optional[Union[str, Path]], + certificate_chain_path: Optional[Union[str, Path]], +) -> grpc.ChannelCredentials: + def _read_content(path: Optional[Union[str, Path]]) -> Optional[bytes]: + if not path: + return None + if isinstance(path, str): + path = Path(path) + if not path.exists(): + return None + with path.open("rb") as f: + return f.read() + + root_certificates = _read_content(root_certificates_path) + if not root_certificates: + # Support GRPC_DEFAULT_SSL_ROOTS_FILE_PATH env + if "GRPC_DEFAULT_SSL_ROOTS_FILE_PATH" in os.environ: + logger.debug( + "Using GRPC_DEFAULT_SSL_ROOTS_FILE_PATH env for root_certificates: %s", + os.environ["GRPC_DEFAULT_SSL_ROOTS_FILE_PATH"], + ) + + root_certificates = _read_content(os.environ["GRPC_DEFAULT_SSL_ROOTS_FILE_PATH"]) + + private_key = _read_content(private_key_path) + certificate_chain = _read_content(certificate_chain_path) + + logger.debug( + "root_certificates: %s, private_key: %s, certificate_chain: %s", + root_certificates, + private_key, + certificate_chain, + ) + + return grpc.ssl_channel_credentials( + root_certificates=root_certificates, + private_key=private_key, + certificate_chain=certificate_chain, + ) + + if __name__ == "__main__": print(get_boot_time()) print(get_boot_time_duration_ns("13205215231927")) diff --git a/pyproject.toml b/pyproject.toml index 7b01bdc..e33488a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,6 +25,9 @@ dependencies = [ "opentelemetry-exporter-jaeger", "opentelemetry-exporter-zipkin-proto-http", "opentelemetry-exporter-zipkin-json", + # Proto for jaeger and other backends + 'grpcio', + 'protobuf', # Web server "fastapi", "uvicorn[standard]", @@ -39,7 +42,7 @@ classifiers = [ 'Programming Language :: Python :: 3.11', ] [project.optional-dependencies] -test = ["pytest", "pytest-cov", "httpx"] +test = ["pytest", "pytest-cov", "pytest-asyncio", "pytest-timeout", "httpx", "docker"] docs = ["Sphinx<=7.2.4", "sphinx-rtd-theme", "sphinx-click", "autodoc_pydantic"] [project.scripts] diff --git a/pytest.ini b/pytest.ini new file mode 100644 index 0000000..aab86ce --- /dev/null +++ b/pytest.ini @@ -0,0 +1,4 @@ +[pytest] +asyncio_mode = auto + +timeout = 180 diff --git a/tests/config.toml b/tests/config.toml index 
bf5b8d4..2ffa7fe 100644 --- a/tests/config.toml +++ b/tests/config.toml @@ -112,6 +112,15 @@ interval_ms = 500 disabled = false include_extension = true +[analyzer.jaegeranalyzer] +disabled = true +secure = false +root_certificates_path = "" +private_key_path = "" +certificate_chain_path = "" +host = "localhost" +port = 16685 + [analyzer.dbanalyzer] disabled = false diff --git a/tests/integration/jaeger/test_jaeger_analyzer.py b/tests/integration/jaeger/test_jaeger_analyzer.py new file mode 100644 index 0000000..690e3cb --- /dev/null +++ b/tests/integration/jaeger/test_jaeger_analyzer.py @@ -0,0 +1,156 @@ +import socket +import time +from collections import namedtuple + +import httpx +import pytest + +import docker +from duetector.analyzer.jaeger.analyzer import JaegerAnalyzer +from duetector.collectors.models import Tracking +from duetector.collectors.otel import OTelCollector + + +def get_port(): + # Get an unoccupied port + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(("127.0.0.1", 0)) + return s.getsockname()[1] + + +from duetector.utils import get_boot_time_duration_ns + +timestamp = 13205215231927 +datetime = get_boot_time_duration_ns(timestamp) + + +@pytest.fixture(scope="session") +def data_t(): + d = namedtuple("Tracking", ["pid", "uid", "gid", "comm", "fname", "timestamp", "custom"]) + + yield d( + pid=9999, + uid=9999, + gid=9999, + comm="dummy", + fname="dummy.file", + timestamp=timestamp, + custom="dummy-xargs", + ) + + +@pytest.fixture(scope="session") +def service_id(): + yield "unittest-service" + + +@pytest.fixture(scope="session") +def docker_client(): + try: + client = docker.from_env() + client.ping() + return client + except: + pytest.skip("Docker is not available") + + +@pytest.fixture(scope="session") +def jaeger_container(docker_client: docker.DockerClient, service_id, data_t): + query_port = get_port() + otel_grpc_port = get_port() + ui_port = get_port() + try: + """ + docker run --rm --name jaeger \ + -p {random_query_port}:16685 \ + -p {random_otel_port}:4317 \ + jaegertracing/all-in-one:1.50 + """ + container = docker_client.containers.run( + "jaegertracing/all-in-one:1.50", + detach=True, + ports={"16685": query_port, "4317": otel_grpc_port, "16686": ui_port}, + ) + # Waiting for the container to start by query ui_port + while True: + try: + response = httpx.get(f"http://127.0.0.1:{ui_port}") + if response.status_code == 200: + break + except: + time.sleep(0.5) + + # Generate testing data + config = { + "otelcollector": { + "disabled": False, + "statis_id": service_id, + "exporter": "otlp-grpc", + "exporter_kwargs": { + "endpoint": f"127.0.0.1:{otel_grpc_port}", + "insecure": True, + }, + "processor_kwargs": { + "max_queue_size": 1, + "schedule_delay_millis": 100.0, + "max_export_batch_size": 1, + }, + } + } + collector = OTelCollector(config) + collector.emit("dummy", data_t) + collector.shutdown() + + yield query_port + finally: + container.stop() + + +@pytest.fixture +def jaeger_analyzer(jaeger_container): + config = { + "jaegeranalyzer": { + "disabled": False, + "host": "127.0.0.1", + "port": jaeger_container, + } + } + yield JaegerAnalyzer(config) + + +async def test_jaeger_channel(jaeger_analyzer: JaegerAnalyzer, service_id): + from duetector.analyzer.jaeger.proto.query_pb2 import GetServicesRequest + from duetector.analyzer.jaeger.proto.query_pb2_grpc import QueryServiceStub + + async with jaeger_analyzer.channel_initializer() as channel: + stub = QueryServiceStub(channel) + response = await stub.GetServices(GetServicesRequest()) 
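+        # The fixture's OTel collector exports with statis_id=service_id, so Jaeger
+        # should list it as service "duetector-<service_id>" (checked below).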
+ assert f"duetector-{service_id}" in response.services + + +async def test_jaeger_connector(jaeger_analyzer: JaegerAnalyzer, service_id): + connector = jaeger_analyzer.connector + assert service_id in await connector.inspect_all_collector_ids() + assert "dummy" in await connector.inspect_all_tracers() + assert await connector.query_trace(collector_id=service_id, tracer_name="dummy") + + brief = await connector.brief(collector_id=service_id, tracer_name="dummy") + assert "dummy" == brief.tracer + assert service_id == brief.collector_id + assert brief.start == brief.end + assert brief.count == 1 + assert brief.fields.keys() + assert any(brief.fields.values()) + + brief = await connector.brief(collector_id=service_id, tracer_name="dummy", inspect_type=False) + assert brief.fields.keys() + assert not any(brief.fields.values()) + + +async def test_jaeger_analyzer(jaeger_analyzer: JaegerAnalyzer): + assert await jaeger_analyzer.query() + assert await jaeger_analyzer.brief() + + +if __name__ == "__main__": + pytest.main(["-vv", "-s", __file__]) diff --git a/tests/bin/dummy_process.py b/tests/standalone/bin/dummy_process.py similarity index 100% rename from tests/bin/dummy_process.py rename to tests/standalone/bin/dummy_process.py diff --git a/tests/service/test_query.py b/tests/standalone/service/test_query.py similarity index 100% rename from tests/service/test_query.py rename to tests/standalone/service/test_query.py diff --git a/tests/service/test_token.py b/tests/standalone/service/test_token.py similarity index 100% rename from tests/service/test_token.py rename to tests/standalone/service/test_token.py diff --git a/tests/test_bcc_monitor.py b/tests/standalone/test_bcc_monitor.py similarity index 100% rename from tests/test_bcc_monitor.py rename to tests/standalone/test_bcc_monitor.py diff --git a/tests/test_config.py b/tests/standalone/test_config.py similarity index 100% rename from tests/test_config.py rename to tests/standalone/test_config.py diff --git a/tests/test_daemon.py b/tests/standalone/test_daemon.py similarity index 100% rename from tests/test_daemon.py rename to tests/standalone/test_daemon.py diff --git a/tests/standalone/test_db_analyzer.py b/tests/standalone/test_db_analyzer.py new file mode 100644 index 0000000..edbc654 --- /dev/null +++ b/tests/standalone/test_db_analyzer.py @@ -0,0 +1,119 @@ +from datetime import datetime, timedelta + +import pytest + +from duetector.analyzer.db import DBAnalyzer +from duetector.analyzer.models import Tracking as AT +from duetector.collectors.models import Tracking as CT +from duetector.managers.analyzer import AnalyzerManager + +now = datetime.now() + +tracking_kwargs = dict( + tracer="db_analyzer_tests", + pid=9999, + uid=9999, + gid=9999, + comm="dummy", + cwd=None, + fname="dummy.file", + dt=datetime.now(), + extended={"custom": "dummy-xargs"}, +) + + +@pytest.fixture +def tracer_name(): + return "db_analyzer_tests" + + +@pytest.fixture +def collector_id(): + return "db_analyzer_tests_collector" + + +@pytest.fixture +def c_tracking(): + yield CT(**tracking_kwargs) + + +@pytest.fixture +def a_tracking(): + yield AT(**tracking_kwargs) + + +@pytest.fixture +def config(full_config): + yield AnalyzerManager(full_config).config._config_dict + + +@pytest.fixture +def db_analyzer(config, c_tracking, collector_id): + db_analyzer = DBAnalyzer(config) + sessionmanager = db_analyzer.sm + + m = sessionmanager.get_tracking_model(c_tracking.tracer, collector_id) + + with sessionmanager.begin() as session: + 
session.add(m(**c_tracking.model_dump(exclude=["tracer"]))) + session.add(m(**c_tracking.model_dump(exclude=["tracer"]))) + session.commit() + + assert sessionmanager.inspect_all_tables() == [ + sessionmanager.get_table_names(c_tracking.tracer, collector_id) + ] + assert sessionmanager.inspect_all_tables("not-exist") == [] + yield db_analyzer + + +async def test_query(db_analyzer: DBAnalyzer, a_tracking, collector_id): + assert a_tracking in await db_analyzer.query() + assert a_tracking in await db_analyzer.query(tracers=[a_tracking.tracer]) + assert a_tracking in await db_analyzer.query(collector_ids=[collector_id]) + assert a_tracking in await db_analyzer.query( + tracers=[a_tracking.tracer], collector_ids=[collector_id] + ) + assert a_tracking in await db_analyzer.query(start_datetime=now - timedelta(days=1)) + assert a_tracking in await db_analyzer.query(end_datetime=now + timedelta(days=1)) + assert a_tracking in await db_analyzer.query(order_by_asc=["pid"]) + assert a_tracking in await db_analyzer.query(order_by_desc=["pid"]) + + assert len(await db_analyzer.query()) == 2 + assert len(await db_analyzer.query(distinct=True)) == 1 + + assert AT( + tracer=a_tracking.tracer, + pid=a_tracking.pid, + fname=a_tracking.fname, + ) in await db_analyzer.query(columns=["pid", "fname"]) + + assert not await db_analyzer.query(tracers=["not-exist"]) + assert not await db_analyzer.query(collector_ids=["not-exist"]) + assert not await db_analyzer.query(start_datetime=now + timedelta(days=1)) + assert not await db_analyzer.query(end_datetime=now - timedelta(days=1)) + assert not await db_analyzer.query(start=100) + assert not await db_analyzer.query(where={"pid": 1}) + + +async def test_brief(db_analyzer: DBAnalyzer, a_tracking, collector_id): + assert await db_analyzer.brief() + assert await db_analyzer.brief(tracers=[a_tracking.tracer]) + assert await db_analyzer.brief(collector_ids=[collector_id]) + assert await db_analyzer.brief(tracers=[a_tracking.tracer], collector_ids=[collector_id]) + assert await db_analyzer.brief(start_datetime=now - timedelta(days=1)) + assert await db_analyzer.brief(end_datetime=now + timedelta(days=1)) + assert await db_analyzer.brief(with_details=False) + assert await db_analyzer.brief(distinct=True) + + assert not (await db_analyzer.brief(tracers=["not-exist"])).tracers + assert not (await db_analyzer.brief(collector_ids=["not-exist"])).collector_ids + assert not list( + (await db_analyzer.brief(start_datetime=now + timedelta(days=1))).briefs.values() + )[0].count + assert not list( + (await db_analyzer.brief(end_datetime=now - timedelta(days=1))).briefs.values() + )[0].count + + +if __name__ == "__main__": + pytest.main(["-vv", "-s", __file__]) diff --git a/tests/test_db_collector.py b/tests/standalone/test_db_collector.py similarity index 100% rename from tests/test_db_collector.py rename to tests/standalone/test_db_collector.py diff --git a/tests/test_filter.py b/tests/standalone/test_filter.py similarity index 100% rename from tests/test_filter.py rename to tests/standalone/test_filter.py diff --git a/tests/test_otel_collector.py b/tests/standalone/test_otel_collector.py similarity index 100% rename from tests/test_otel_collector.py rename to tests/standalone/test_otel_collector.py diff --git a/tests/test_poller.py b/tests/standalone/test_poller.py similarity index 100% rename from tests/test_poller.py rename to tests/standalone/test_poller.py diff --git a/tests/test_repo.py b/tests/standalone/test_repo.py similarity index 89% rename from tests/test_repo.py rename 
to tests/standalone/test_repo.py index cf47522..859717c 100644 --- a/tests/test_repo.py +++ b/tests/standalone/test_repo.py @@ -8,10 +8,12 @@ _HERE = Path(__file__).parent.absolute() +import duetector + def test_repo_config_uptodate(tmpdir): # Check default config in repo is up to date - CONFIG_IN_REPO = _HERE / ".." / "duetector/static/config.toml" + CONFIG_IN_REPO = Path(duetector.__path__[0]).resolve() / "static/config.toml" GENERATED_CONFIG = tmpdir.join("g-default-config.toml") config_generator = ConfigGenerator(load=False, load_env=False) config_generator.generate(GENERATED_CONFIG) diff --git a/tests/test_sh_monitor.py b/tests/standalone/test_sh_monitor.py similarity index 100% rename from tests/test_sh_monitor.py rename to tests/standalone/test_sh_monitor.py diff --git a/tests/test_sp_monitor.py b/tests/standalone/test_sp_monitor.py similarity index 100% rename from tests/test_sp_monitor.py rename to tests/standalone/test_sp_monitor.py diff --git a/tests/test_tracer_template.py b/tests/standalone/test_tracer_template.py similarity index 100% rename from tests/test_tracer_template.py rename to tests/standalone/test_tracer_template.py diff --git a/tests/test_db_analyzer.py b/tests/test_db_analyzer.py deleted file mode 100644 index 12b0823..0000000 --- a/tests/test_db_analyzer.py +++ /dev/null @@ -1,119 +0,0 @@ -from datetime import datetime, timedelta - -import pytest - -from duetector.analyzer.db import DBAnalyzer -from duetector.analyzer.models import Tracking as AT -from duetector.collectors.models import Tracking as CT -from duetector.managers.analyzer import AnalyzerManager - -now = datetime.now() - -tracking_kwargs = dict( - tracer="db_analyzer_tests", - pid=9999, - uid=9999, - gid=9999, - comm="dummy", - cwd=None, - fname="dummy.file", - dt=datetime.now(), - extended={"custom": "dummy-xargs"}, -) - - -@pytest.fixture -def tracer_name(): - return "db_analyzer_tests" - - -@pytest.fixture -def collector_id(): - return "db_analyzer_tests_collector" - - -@pytest.fixture -def c_tracking(): - yield CT(**tracking_kwargs) - - -@pytest.fixture -def a_tracking(): - yield AT(**tracking_kwargs) - - -@pytest.fixture -def config(full_config): - yield AnalyzerManager(full_config).config._config_dict - - -@pytest.fixture -def db_analyzer(config, c_tracking, collector_id): - db_analyzer = DBAnalyzer(config) - sessionmanager = db_analyzer.sm - - m = sessionmanager.get_tracking_model(c_tracking.tracer, collector_id) - - with sessionmanager.begin() as session: - session.add(m(**c_tracking.model_dump(exclude=["tracer"]))) - session.add(m(**c_tracking.model_dump(exclude=["tracer"]))) - session.commit() - - assert sessionmanager.inspect_all_tables() == [ - sessionmanager.get_table_names(c_tracking.tracer, collector_id) - ] - assert sessionmanager.inspect_all_tables("not-exist") == [] - yield db_analyzer - - -def test_query(db_analyzer: DBAnalyzer, a_tracking, collector_id): - assert a_tracking in db_analyzer.query() - assert a_tracking in db_analyzer.query(tracers=[a_tracking.tracer]) - assert a_tracking in db_analyzer.query(collector_ids=[collector_id]) - assert a_tracking in db_analyzer.query( - tracers=[a_tracking.tracer], collector_ids=[collector_id] - ) - assert a_tracking in db_analyzer.query(start_datetime=now - timedelta(days=1)) - assert a_tracking in db_analyzer.query(end_datetime=now + timedelta(days=1)) - assert a_tracking in db_analyzer.query(order_by_asc=["pid"]) - assert a_tracking in db_analyzer.query(order_by_desc=["pid"]) - - assert len(db_analyzer.query()) == 2 - assert 
len(db_analyzer.query(distinct=True)) == 1 - - assert AT( - tracer=a_tracking.tracer, - pid=a_tracking.pid, - fname=a_tracking.fname, - ) in db_analyzer.query(columns=["pid", "fname"]) - - assert not db_analyzer.query(tracers=["not-exist"]) - assert not db_analyzer.query(collector_ids=["not-exist"]) - assert not db_analyzer.query(start_datetime=now + timedelta(days=1)) - assert not db_analyzer.query(end_datetime=now - timedelta(days=1)) - assert not db_analyzer.query(start=100) - assert not db_analyzer.query(where={"pid": 1}) - - -def test_brief(db_analyzer: DBAnalyzer, a_tracking, collector_id): - assert db_analyzer.brief() - assert db_analyzer.brief(tracers=[a_tracking.tracer]) - assert db_analyzer.brief(collector_ids=[collector_id]) - assert db_analyzer.brief(tracers=[a_tracking.tracer], collector_ids=[collector_id]) - assert db_analyzer.brief(start_datetime=now - timedelta(days=1)) - assert db_analyzer.brief(end_datetime=now + timedelta(days=1)) - assert db_analyzer.brief(with_details=False) - assert db_analyzer.brief(distinct=True) - - assert not db_analyzer.brief(tracers=["not-exist"]).tracers - assert not db_analyzer.brief(collector_ids=["not-exist"]).collector_ids - assert not list(db_analyzer.brief(start_datetime=now + timedelta(days=1)).briefs.values())[ - 0 - ].count - assert not list(db_analyzer.brief(end_datetime=now - timedelta(days=1)).briefs.values())[ - 0 - ].count - - -if __name__ == "__main__": - pytest.main(["-vv", "-s", __file__])
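
Usage sketch for the reworked query endpoint: `QueryBody` now carries a single `collector_id`, and `AnalyzerController.wrap_query_param` expands it into the `collector_ids` list that `Analyzer.query` expects. The snippet below is illustrative only; the service address, analyzer route name, and the absence of any auth token are assumptions, not values taken from this patch.

```python
import httpx

BASE_URL = "http://127.0.0.1:8120"  # assumed service address, adjust to your deployment
ANALYZER = "dbanalyzer"             # assumed analyzer route name, see GET /query/ for the real list

# QueryBody fields (see duetector/service/query/models.py): a single collector_id
# plus optional tracers / start_datetime / end_datetime / start.
body = {
    "collector_id": "test-service",
    "tracers": [],
    "start": 0,
}

response = httpx.post(f"{BASE_URL}/query/{ANALYZER}", json=body)
response.raise_for_status()
# QueryResult carries the matched trackings plus the analyzer name.
print(response.json()["trackings"])
```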
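The new `duetector/service/utils.ensure_async` helper is what lets the query routes serve both the synchronous `DBAnalyzer` API and the coroutine-based `JaegerAnalyzer` API through one call site: coroutine functions are awaited directly, plain functions are pushed to the thread pool. A minimal, self-contained sketch (the two dummy functions are illustrative, not part of the patch):

```python
import asyncio

from duetector.service.utils import ensure_async


def sync_brief() -> str:
    # Plain function: ensure_async runs it in the thread pool via run_in_threadpool.
    return "sync result"


async def async_brief() -> str:
    # Coroutine function: ensure_async awaits it directly.
    return "async result"


async def main() -> None:
    print(await ensure_async(sync_brief))
    print(await ensure_async(async_brief))


asyncio.run(main())
```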
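`get_grpc_cred_from_path` backs the new `secure` / `*_path` options in `[collector.otelcollector.grpc_exporter_kwargs]` and `[analyzer.jaegeranalyzer]`: it reads the PEM files (falling back to the `GRPC_DEFAULT_SSL_ROOTS_FILE_PATH` environment variable for the root certificate) and returns `grpc.ChannelCredentials`. How the Jaeger analyzer actually wires this up lives in `duetector/analyzer/jaeger/analyzer.py`; the sketch below only illustrates the helper with assumed certificate paths and the default query address from the config.

```python
import grpc

from duetector.utils import get_grpc_cred_from_path

# Paths are illustrative; missing or empty paths simply yield None for that component,
# in which case grpc falls back to its own defaults.
credentials = get_grpc_cred_from_path(
    root_certificates_path="/etc/duetector/ca.pem",
    private_key_path="/etc/duetector/client.key",
    certificate_chain_path="/etc/duetector/client.pem",
)

# With secure = true one would open a TLS channel to the Jaeger query port;
# host and port mirror the defaults in [analyzer.jaegeranalyzer].
channel = grpc.secure_channel("localhost:16685", credentials)
```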