From 869f6b867f8bf935a5001617690f338e049e65b5 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 9 Oct 2024 19:10:47 +0000 Subject: [PATCH 1/7] WIP: Integrate with Tempo HA + test relay support of tracing traffic through grafana-agent --- .../data_platform_libs/v0/data_interfaces.py | 21 +- .../v0}/charm_tracing.py | 48 +- lib/charms/tempo_k8s/v2/tracing.py | 996 ------------------ poetry.lock | 217 +++- src/abstract_charm.py | 11 +- src/machine_charm.py | 2 +- src/relations/cos.py | 12 +- tests/unit/conftest.py | 2 +- 8 files changed, 228 insertions(+), 1081 deletions(-) rename lib/charms/{tempo_k8s/v1 => tempo_coordinator_k8s/v0}/charm_tracing.py (95%) delete mode 100644 lib/charms/tempo_k8s/v2/tracing.py diff --git a/lib/charms/data_platform_libs/v0/data_interfaces.py b/lib/charms/data_platform_libs/v0/data_interfaces.py index aaed2e52..3bc2dd85 100644 --- a/lib/charms/data_platform_libs/v0/data_interfaces.py +++ b/lib/charms/data_platform_libs/v0/data_interfaces.py @@ -331,7 +331,7 @@ def _on_topic_requested(self, event: TopicRequestedEvent): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 39 +LIBPATCH = 40 PYDEPS = ["ops>=2.0.0"] @@ -391,6 +391,10 @@ class IllegalOperationError(DataInterfacesError): """To be used when an operation is not allowed to be performed.""" +class PrematureDataAccessError(DataInterfacesError): + """To be raised when the Relation Data may be accessed (written) before protocol init complete.""" + + ############################################################################## # Global helpers / utilities ############################################################################## @@ -1453,6 +1457,8 @@ def _on_relation_changed_event(self, event: RelationChangedEvent) -> None: class ProviderData(Data): """Base provides-side of the data products relation.""" + RESOURCE_FIELD = "database" + def __init__( self, model: Model, @@ -1618,6 +1624,15 @@ def _fetch_my_specific_relation_data( def _update_relation_data(self, relation: Relation, data: Dict[str, str]) -> None: """Set values for fields not caring whether it's a secret or not.""" req_secret_fields = [] + + keys = set(data.keys()) + if self.fetch_relation_field(relation.id, self.RESOURCE_FIELD) is None and ( + keys - {"endpoints", "read-only-endpoints", "replset"} + ): + raise PrematureDataAccessError( + "Premature access to relation data, update is forbidden before the connection is initialized." 
+ ) + if relation.app: req_secret_fields = get_encoded_list(relation, relation.app, REQ_SECRET_FIELDS) @@ -3290,6 +3305,8 @@ class KafkaRequiresEvents(CharmEvents): class KafkaProviderData(ProviderData): """Provider-side of the Kafka relation.""" + RESOURCE_FIELD = "topic" + def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) @@ -3539,6 +3556,8 @@ class OpenSearchRequiresEvents(CharmEvents): class OpenSearchProvidesData(ProviderData): """Provider-side of the OpenSearch relation.""" + RESOURCE_FIELD = "index" + def __init__(self, model: Model, relation_name: str) -> None: super().__init__(model, relation_name) diff --git a/lib/charms/tempo_k8s/v1/charm_tracing.py b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py similarity index 95% rename from lib/charms/tempo_k8s/v1/charm_tracing.py rename to lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py index 2dbdddd6..1e7ff840 100644 --- a/lib/charms/tempo_k8s/v1/charm_tracing.py +++ b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py @@ -12,15 +12,15 @@ # Quickstart Fetch the following charm libs (and ensure the minimum version/revision numbers are satisfied): - charmcraft fetch-lib charms.tempo_k8s.v2.tracing # >= 1.10 - charmcraft fetch-lib charms.tempo_k8s.v1.charm_tracing # >= 2.7 + charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.tracing # >= 1.10 + charmcraft fetch-lib charms.tempo_coordinator_k8s.v0.charm_tracing # >= 2.7 Then edit your charm code to include: ```python # import the necessary charm libs -from charms.tempo_k8s.v2.tracing import TracingEndpointRequirer, charm_tracing_config -from charms.tempo_k8s.v1.charm_tracing import charm_tracing +from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer, charm_tracing_config +from charms.tempo_coordinator_k8s.v0.charm_tracing import charm_tracing # decorate your charm class with charm_tracing: @charm_tracing( @@ -51,7 +51,7 @@ def __init__(self, ...): 2) add to your charm a "my_tracing_endpoint" (you can name this attribute whatever you like) **property**, **method** or **instance attribute** that returns an otlp http/https endpoint url. 
-If you are using the ``charms.tempo_k8s.v2.tracing.TracingEndpointRequirer`` as +If you are using the ``charms.tempo_coordinator_k8s.v0.tracing.TracingEndpointRequirer`` as ``self.tracing = TracingEndpointRequirer(self)``, the implementation could be: ``` @@ -80,7 +80,7 @@ def my_tracing_endpoint(self) -> Optional[str]: For example: ``` -from charms.tempo_k8s.v1.charm_tracing import trace_charm +from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm @trace_charm( tracing_endpoint="my_tracing_endpoint", server_cert="_server_cert" @@ -129,7 +129,7 @@ def get_tracer(self) -> opentelemetry.trace.Tracer: For example: ``` - from charms.tempo_k8s.v0.charm_tracing import trace_charm + from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm @trace_charm( tracing_endpoint="my_tracing_endpoint", @@ -150,7 +150,7 @@ def my_tracing_endpoint(self) -> Optional[str]: needs to be replaced with: ``` - from charms.tempo_k8s.v1.charm_tracing import trace_charm + from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm @trace_charm( tracing_endpoint="my_tracing_endpoint", @@ -249,28 +249,27 @@ def _remove_stale_otel_sdk_packages(): from opentelemetry.sdk.resources import Resource from opentelemetry.sdk.trace import Span, TracerProvider from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.trace import INVALID_SPAN, Tracer +from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( - INVALID_SPAN, - Tracer, get_tracer, get_tracer_provider, set_span_in_context, set_tracer_provider, ) -from opentelemetry.trace import get_current_span as otlp_get_current_span from ops.charm import CharmBase from ops.framework import Framework # The unique Charmhub library identifier, never change it -LIBID = "cb1705dcd1a14ca09b2e60187d1215c7" +LIBID = "01780f1e588c42c3976d26780fdf9b89" # Increment this major API version when introducing breaking changes -LIBAPI = 1 +LIBAPI = 0 # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 15 +LIBPATCH = 2 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -332,7 +331,7 @@ def _get_tracer() -> Optional[Tracer]: return tracer.get() except LookupError: # fallback: this course-corrects for a user error where charm_tracing symbols are imported - # from different paths (typically charms.tempo_k8s... and lib.charms.tempo_k8s...) + # from different paths (typically charms.tempo_coordinator_k8s... and lib.charms.tempo_coordinator_k8s...) try: ctx: Context = copy_context() if context_tracer := _get_tracer_from_context(ctx): @@ -372,10 +371,6 @@ class UntraceableObjectError(TracingError): """Raised when an object you're attempting to instrument cannot be autoinstrumented.""" -class TLSError(TracingError): - """Raised when the tracing endpoint is https but we don't have a cert yet.""" - - def _get_tracing_endpoint( tracing_endpoint_attr: str, charm_instance: object, @@ -485,10 +480,15 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): ) if tracing_endpoint.startswith("https://") and not server_cert: - raise TLSError( + logger.error( "Tracing endpoint is https, but no server_cert has been passed." - "Please point @trace_charm to a `server_cert` attr." + "Please point @trace_charm to a `server_cert` attr. " + "This might also mean that the tracing provider is related to a " + "certificates provider, but this application is not (yet). 
" + "In that case, you might just have to wait a bit for the certificates " + "integration to settle. " ) + return exporter = OTLPSpanExporter( endpoint=tracing_endpoint, @@ -562,8 +562,8 @@ def trace_charm( method calls on instances of this class. Usage: - >>> from charms.tempo_k8s.v1.charm_tracing import trace_charm - >>> from charms.tempo_k8s.v1.tracing import TracingEndpointRequirer + >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm + >>> from charms.tempo_coordinator_k8s.v0.tracing import TracingEndpointRequirer >>> from ops import CharmBase >>> >>> @trace_charm( @@ -626,7 +626,7 @@ def _autoinstrument( Usage: - >>> from charms.tempo_k8s.v1.charm_tracing import _autoinstrument + >>> from charms.tempo_coordinator_k8s.v0.charm_tracing import _autoinstrument >>> from ops.main import main >>> _autoinstrument( >>> MyCharm, diff --git a/lib/charms/tempo_k8s/v2/tracing.py b/lib/charms/tempo_k8s/v2/tracing.py deleted file mode 100644 index 81bf1f1f..00000000 --- a/lib/charms/tempo_k8s/v2/tracing.py +++ /dev/null @@ -1,996 +0,0 @@ -# Copyright 2024 Canonical Ltd. -# See LICENSE file for licensing details. -"""## Overview. - -This document explains how to integrate with the Tempo charm for the purpose of pushing traces to a -tracing endpoint provided by Tempo. It also explains how alternative implementations of the Tempo charm -may maintain the same interface and be backward compatible with all currently integrated charms. - -## Requirer Library Usage - -Charms seeking to push traces to Tempo, must do so using the `TracingEndpointRequirer` -object from this charm library. For the simplest use cases, using the `TracingEndpointRequirer` -object only requires instantiating it, typically in the constructor of your charm. The -`TracingEndpointRequirer` constructor requires the name of the relation over which a tracing endpoint - is exposed by the Tempo charm, and a list of protocols it intends to send traces with. - This relation must use the `tracing` interface. - The `TracingEndpointRequirer` object may be instantiated as follows - - from charms.tempo_k8s.v2.tracing import TracingEndpointRequirer - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointRequirer(self, - protocols=['otlp_grpc', 'otlp_http', 'jaeger_http_thrift'] - ) - # ... - -Note that the first argument (`self`) to `TracingEndpointRequirer` is always a reference to the -parent charm. - -Alternatively to providing the list of requested protocols at init time, the charm can do it at -any point in time by calling the -`TracingEndpointRequirer.request_protocols(*protocol:str, relation:Optional[Relation])` method. -Using this method also allows you to use per-relation protocols. - -Units of provider charms obtain the tempo endpoint to which they will push their traces by calling -`TracingEndpointRequirer.get_endpoint(protocol: str)`, where `protocol` is, for example: -- `otlp_grpc` -- `otlp_http` -- `zipkin` -- `tempo` - -If the `protocol` is not in the list of protocols that the charm requested at endpoint set-up time, -the library will raise an error. - -## Requirer Library Usage - -The `TracingEndpointProvider` object may be used by charms to manage relations with their -trace sources. For this purposes a Tempo-like charm needs to do two things - -1. Instantiate the `TracingEndpointProvider` object by providing it a -reference to the parent (Tempo) charm and optionally the name of the relation that the Tempo charm -uses to interact with its trace sources. 
This relation must conform to the `tracing` interface -and it is strongly recommended that this relation be named `tracing` which is its -default value. - -For example a Tempo charm may instantiate the `TracingEndpointProvider` in its constructor as -follows - - from charms.tempo_k8s.v2.tracing import TracingEndpointProvider - - def __init__(self, *args): - super().__init__(*args) - # ... - self.tracing = TracingEndpointProvider(self) - # ... - - - -""" # noqa: W505 -import enum -import json -import logging -from pathlib import Path -from typing import ( - TYPE_CHECKING, - Any, - Dict, - List, - Literal, - MutableMapping, - Optional, - Sequence, - Tuple, - Union, - cast, -) - -import pydantic -from ops.charm import ( - CharmBase, - CharmEvents, - RelationBrokenEvent, - RelationEvent, - RelationRole, -) -from ops.framework import EventSource, Object -from ops.model import ModelError, Relation -from pydantic import BaseModel, Field - -# The unique Charmhub library identifier, never change it -LIBID = "12977e9aa0b34367903d8afeb8c3d85d" - -# Increment this major API version when introducing breaking changes -LIBAPI = 2 - -# Increment this PATCH version before using `charmcraft publish-lib` or reset -# to 0 if you are raising the major API version -LIBPATCH = 10 - -PYDEPS = ["pydantic"] - -logger = logging.getLogger(__name__) - -DEFAULT_RELATION_NAME = "tracing" -RELATION_INTERFACE_NAME = "tracing" - -# Supported list rationale https://github.com/canonical/tempo-coordinator-k8s-operator/issues/8 -ReceiverProtocol = Literal[ - "zipkin", - "otlp_grpc", - "otlp_http", - "jaeger_grpc", - "jaeger_thrift_http", -] - -RawReceiver = Tuple[ReceiverProtocol, str] -"""Helper type. A raw receiver is defined as a tuple consisting of the protocol name, and the (external, if available), -(secured, if available) resolvable server url. -""" - -BUILTIN_JUJU_KEYS = {"ingress-address", "private-address", "egress-subnets"} - - -class TransportProtocolType(str, enum.Enum): - """Receiver Type.""" - - http = "http" - grpc = "grpc" - - -receiver_protocol_to_transport_protocol: Dict[ReceiverProtocol, TransportProtocolType] = { - "zipkin": TransportProtocolType.http, - "otlp_grpc": TransportProtocolType.grpc, - "otlp_http": TransportProtocolType.http, - "jaeger_thrift_http": TransportProtocolType.http, - "jaeger_grpc": TransportProtocolType.grpc, -} -"""A mapping between telemetry protocols and their corresponding transport protocol. 
-""" - - -class TracingError(Exception): - """Base class for custom errors raised by this library.""" - - -class NotReadyError(TracingError): - """Raised by the provider wrapper if a requirer hasn't published the required data (yet).""" - - -class ProtocolNotRequestedError(TracingError): - """Raised if the user attempts to obtain an endpoint for a protocol it did not request.""" - - -class DataValidationError(TracingError): - """Raised when data validation fails on IPU relation data.""" - - -class AmbiguousRelationUsageError(TracingError): - """Raised when one wrongly assumes that there can only be one relation on an endpoint.""" - - -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class DatabagModel(BaseModel): # type: ignore - """Base databag model.""" - - class Config: - """Pydantic config.""" - - # ignore any extra fields in the databag - extra = "ignore" - """Ignore any extra fields in the databag.""" - allow_population_by_field_name = True - """Allow instantiating this class by field name (instead of forcing alias).""" - - _NEST_UNDER = None - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - if cls._NEST_UNDER: - return cls.parse_obj(json.loads(databag[cls._NEST_UNDER])) - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {f.alias for f in cls.__fields__.values()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. {databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.parse_raw(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - - if self._NEST_UNDER: - databag[self._NEST_UNDER] = self.json(by_alias=True) - return databag - - dct = self.dict() - for key, field in self.__fields__.items(): # type: ignore - value = dct[key] - databag[field.alias or key] = json.dumps(value) - - return databag - -else: - from pydantic import ConfigDict - - class DatabagModel(BaseModel): - """Base databag model.""" - - model_config = ConfigDict( - # ignore any extra fields in the databag - extra="ignore", - # Allow instantiating this class by field name (instead of forcing alias). - populate_by_name=True, - # Custom config key: whether to nest the whole datastructure (as json) - # under a field or spread it out at the toplevel. - _NEST_UNDER=None, # type: ignore - ) - """Pydantic config.""" - - @classmethod - def load(cls, databag: MutableMapping): - """Load this model from a Juju databag.""" - nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore - if nest_under: - return cls.model_validate(json.loads(databag[nest_under])) # type: ignore - - try: - data = { - k: json.loads(v) - for k, v in databag.items() - # Don't attempt to parse model-external values - if k in {(f.alias or n) for n, f in cls.__fields__.items()} - } - except json.JSONDecodeError as e: - msg = f"invalid databag contents: expecting json. 
{databag}" - logger.error(msg) - raise DataValidationError(msg) from e - - try: - return cls.model_validate_json(json.dumps(data)) # type: ignore - except pydantic.ValidationError as e: - msg = f"failed to validate databag: {databag}" - logger.debug(msg, exc_info=True) - raise DataValidationError(msg) from e - - def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True): - """Write the contents of this model to Juju databag. - - :param databag: the databag to write the data to. - :param clear: ensure the databag is cleared before writing it. - """ - if clear and databag: - databag.clear() - - if databag is None: - databag = {} - nest_under = self.model_config.get("_NEST_UNDER") - if nest_under: - databag[nest_under] = self.model_dump_json( # type: ignore - by_alias=True, - # skip keys whose values are default - exclude_defaults=True, - ) - return databag - - dct = self.model_dump() # type: ignore - for key, field in self.model_fields.items(): # type: ignore - value = dct[key] - if value == field.default: - continue - databag[field.alias or key] = json.dumps(value) - - return databag - - -# todo use models from charm-relation-interfaces -if int(pydantic.version.VERSION.split(".")[0]) < 2: - - class ProtocolType(BaseModel): # type: ignore - """Protocol Type.""" - - class Config: - """Pydantic config.""" - - use_enum_values = True - """Allow serializing enum values.""" - - name: str = Field( - ..., - description="Receiver protocol name. What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - -else: - - class ProtocolType(BaseModel): - """Protocol Type.""" - - model_config = ConfigDict( # type: ignore - # Allow serializing enum values. - use_enum_values=True - ) - """Pydantic config.""" - - name: str = Field( - ..., - description="Receiver protocol name. What protocols are supported (and what they are called) " - "may differ per provider.", - examples=["otlp_grpc", "otlp_http", "tempo_http"], - ) - - type: TransportProtocolType = Field( - ..., - description="The transport protocol used by this receiver.", - examples=["http", "grpc"], - ) - - -class Receiver(BaseModel): - """Specification of an active receiver.""" - - protocol: ProtocolType = Field(..., description="Receiver protocol name and type.") - url: str = Field( - ..., - description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL. - Otherwise, it would be the service's fqdn or internal IP. - If the protocol type is grpc, the url will not contain a scheme.""", - examples=[ - "http://traefik_address:2331", - "https://traefik_address:2331", - "http://tempo_public_ip:2331", - "https://tempo_public_ip:2331", - "tempo_public_ip:2331", - ], - ) - - -class TracingProviderAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing provider.""" - - receivers: List[Receiver] = Field( - ..., - description="List of all receivers enabled on the tracing provider.", - ) - - -class TracingRequirerAppData(DatabagModel): # noqa: D101 - """Application databag model for the tracing requirer.""" - - receivers: List[ReceiverProtocol] - """Requested receivers.""" - - -class _AutoSnapshotEvent(RelationEvent): - __args__: Tuple[str, ...] 
= () - __optional_kwargs__: Dict[str, Any] = {} - - @classmethod - def __attrs__(cls): - return cls.__args__ + tuple(cls.__optional_kwargs__.keys()) - - def __init__(self, handle, relation, *args, **kwargs): - super().__init__(handle, relation) - - if not len(self.__args__) == len(args): - raise TypeError("expected {} args, got {}".format(len(self.__args__), len(args))) - - for attr, obj in zip(self.__args__, args): - setattr(self, attr, obj) - for attr, default in self.__optional_kwargs__.items(): - obj = kwargs.get(attr, default) - setattr(self, attr, obj) - - def snapshot(self) -> dict: - dct = super().snapshot() - for attr in self.__attrs__(): - obj = getattr(self, attr) - try: - dct[attr] = obj - except ValueError as e: - raise ValueError( - "cannot automagically serialize {}: " - "override this method and do it " - "manually.".format(obj) - ) from e - - return dct - - def restore(self, snapshot: dict) -> None: - super().restore(snapshot) - for attr, obj in snapshot.items(): - setattr(self, attr, obj) - - -class RelationNotFoundError(Exception): - """Raised if no relation with the given name is found.""" - - def __init__(self, relation_name: str): - self.relation_name = relation_name - self.message = "No relation named '{}' found".format(relation_name) - super().__init__(self.message) - - -class RelationInterfaceMismatchError(Exception): - """Raised if the relation with the given name has an unexpected interface.""" - - def __init__( - self, - relation_name: str, - expected_relation_interface: str, - actual_relation_interface: str, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_interface - self.actual_relation_interface = actual_relation_interface - self.message = ( - "The '{}' relation has '{}' as interface rather than the expected '{}'".format( - relation_name, actual_relation_interface, expected_relation_interface - ) - ) - - super().__init__(self.message) - - -class RelationRoleMismatchError(Exception): - """Raised if the relation with the given name has a different role than expected.""" - - def __init__( - self, - relation_name: str, - expected_relation_role: RelationRole, - actual_relation_role: RelationRole, - ): - self.relation_name = relation_name - self.expected_relation_interface = expected_relation_role - self.actual_relation_role = actual_relation_role - self.message = "The '{}' relation has role '{}' rather than the expected '{}'".format( - relation_name, repr(actual_relation_role), repr(expected_relation_role) - ) - - super().__init__(self.message) - - -def _validate_relation_by_interface_and_direction( - charm: CharmBase, - relation_name: str, - expected_relation_interface: str, - expected_relation_role: RelationRole, -): - """Validate a relation. - - Verifies that the `relation_name` provided: (1) exists in metadata.yaml, - (2) declares as interface the interface name passed as `relation_interface` - and (3) has the right "direction", i.e., it is a relation that `charm` - provides or requires. - - Args: - charm: a `CharmBase` object to scan for the matching relation. - relation_name: the name of the relation to be verified. - expected_relation_interface: the interface name to be matched by the - relation named `relation_name`. - expected_relation_role: whether the `relation_name` must be either - provided or required by `charm`. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. 
- RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the same relation interface - as specified via the `expected_relation_interface` argument. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the same role as specified - via the `expected_relation_role` argument. - """ - if relation_name not in charm.meta.relations: - raise RelationNotFoundError(relation_name) - - relation = charm.meta.relations[relation_name] - - # fixme: why do we need to cast here? - actual_relation_interface = cast(str, relation.interface_name) - - if actual_relation_interface != expected_relation_interface: - raise RelationInterfaceMismatchError( - relation_name, expected_relation_interface, actual_relation_interface - ) - - if expected_relation_role is RelationRole.provides: - if relation_name not in charm.meta.provides: - raise RelationRoleMismatchError( - relation_name, RelationRole.provides, RelationRole.requires - ) - elif expected_relation_role is RelationRole.requires: - if relation_name not in charm.meta.requires: - raise RelationRoleMismatchError( - relation_name, RelationRole.requires, RelationRole.provides - ) - else: - raise TypeError("Unexpected RelationDirection: {}".format(expected_relation_role)) - - -class RequestEvent(RelationEvent): - """Event emitted when a remote requests a tracing endpoint.""" - - @property - def requested_receivers(self) -> List[ReceiverProtocol]: - """List of receiver protocols that have been requested.""" - relation = self.relation - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - return TracingRequirerAppData.load(relation.data[app]).receivers - - -class BrokenEvent(RelationBrokenEvent): - """Event emitted when a relation on tracing is broken.""" - - -class TracingEndpointProviderEvents(CharmEvents): - """TracingEndpointProvider events.""" - - request = EventSource(RequestEvent) - broken = EventSource(BrokenEvent) - - -class TracingEndpointProvider(Object): - """Class representing a trace receiver service.""" - - on = TracingEndpointProviderEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - external_url: Optional[str] = None, - relation_name: str = DEFAULT_RELATION_NAME, - ): - """Initialize. - - Args: - charm: a `CharmBase` instance that manages this instance of the Tempo service. - external_url: external address of the node hosting the tempo server, - if an ingress is present. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.requires` - role. 
- """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.provides - ) - - super().__init__(charm, relation_name + "tracing-provider") - self._charm = charm - self._external_url = external_url - self._relation_name = relation_name - self.framework.observe( - self._charm.on[relation_name].relation_joined, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_created, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_changed, self._on_relation_event - ) - self.framework.observe( - self._charm.on[relation_name].relation_broken, self._on_relation_broken_event - ) - - def _on_relation_broken_event(self, e: RelationBrokenEvent): - """Handle relation broken events.""" - self.on.broken.emit(e.relation) - - def _on_relation_event(self, e: RelationEvent): - """Handle relation created/joined/changed events.""" - if self.is_requirer_ready(e.relation): - self.on.request.emit(e.relation) - - def is_requirer_ready(self, relation: Relation): - """Attempt to determine if requirer has already populated app data.""" - try: - self._get_requested_protocols(relation) - except NotReadyError: - return False - return True - - @staticmethod - def _get_requested_protocols(relation: Relation): - app = relation.app - if not app: - raise NotReadyError("relation.app is None") - - try: - databag = TracingRequirerAppData.load(relation.data[app]) - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"relation {relation} is not ready to talk tracing") - raise NotReadyError() - return databag.receivers - - def requested_protocols(self): - """All receiver protocols that have been requested by our related apps.""" - requested_protocols = set() - for relation in self.relations: - try: - protocols = self._get_requested_protocols(relation) - except NotReadyError: - continue - requested_protocols.update(protocols) - return requested_protocols - - @property - def relations(self) -> List[Relation]: - """All relations active on this endpoint.""" - return self._charm.model.relations[self._relation_name] - - def publish_receivers(self, receivers: Sequence[RawReceiver]): - """Let all requirers know that these receivers are active and listening.""" - if not self._charm.unit.is_leader(): - raise RuntimeError("only leader can do this") - - for relation in self.relations: - try: - TracingProviderAppData( - receivers=[ - Receiver( - url=url, - protocol=ProtocolType( - name=protocol, - type=receiver_protocol_to_transport_protocol[protocol], - ), - ) - for protocol, url in receivers - ], - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to update_relation_data." - f"The relation must be gone." 
- ) - continue - raise - - -class EndpointRemovedEvent(RelationBrokenEvent): - """Event representing a change in one of the receiver endpoints.""" - - -class EndpointChangedEvent(_AutoSnapshotEvent): - """Event representing a change in one of the receiver endpoints.""" - - __args__ = ("_receivers",) - - if TYPE_CHECKING: - _receivers = [] # type: List[dict] - - @property - def receivers(self) -> List[Receiver]: - """Cast receivers back from dict.""" - return [Receiver(**i) for i in self._receivers] - - -class TracingEndpointRequirerEvents(CharmEvents): - """TracingEndpointRequirer events.""" - - endpoint_changed = EventSource(EndpointChangedEvent) - endpoint_removed = EventSource(EndpointRemovedEvent) - - -class TracingEndpointRequirer(Object): - """A tracing endpoint for Tempo.""" - - on = TracingEndpointRequirerEvents() # type: ignore - - def __init__( - self, - charm: CharmBase, - relation_name: str = DEFAULT_RELATION_NAME, - protocols: Optional[List[ReceiverProtocol]] = None, - ): - """Construct a tracing requirer for a Tempo charm. - - If your application supports pushing traces to a distributed tracing backend, the - `TracingEndpointRequirer` object enables your charm to easily access endpoint information - exchanged over a `tracing` relation interface. - - Args: - charm: a `CharmBase` object that manages this - `TracingEndpointRequirer` object. Typically, this is `self` in the instantiating - class. - relation_name: an optional string name of the relation between `charm` - and the Tempo charmed service. The default is "tracing". It is strongly - advised not to change the default, so that people deploying your charm will have a - consistent experience with all other charms that provide tracing endpoints. - protocols: optional list of protocols that the charm intends to send traces with. - The provider will enable receivers for these and only these protocols, - so be sure to enable all protocols the charm or its workload are going to need. - - Raises: - RelationNotFoundError: If there is no relation in the charm's metadata.yaml - with the same name as provided via `relation_name` argument. - RelationInterfaceMismatchError: The relation with the same name as provided - via `relation_name` argument does not have the `tracing` relation - interface. - RelationRoleMismatchError: If the relation with the same name as provided - via `relation_name` argument does not have the `RelationRole.provides` - role. - """ - _validate_relation_by_interface_and_direction( - charm, relation_name, RELATION_INTERFACE_NAME, RelationRole.requires - ) - - super().__init__(charm, relation_name) - - self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1 - - self._charm = charm - self._relation_name = relation_name - - events = self._charm.on[self._relation_name] - self.framework.observe(events.relation_changed, self._on_tracing_relation_changed) - self.framework.observe(events.relation_broken, self._on_tracing_relation_broken) - - if protocols: - self.request_protocols(protocols) - - def request_protocols( - self, protocols: Sequence[ReceiverProtocol], relation: Optional[Relation] = None - ): - """Publish the list of protocols which the provider should activate.""" - # todo: should we check if _is_single_endpoint and len(self.relations) > 1 and raise, here? - relations = [relation] if relation else self.relations - - if not protocols: - # empty sequence - raise ValueError( - "You need to pass a nonempty sequence of protocols to `request_protocols`." 
- ) - - try: - if self._charm.unit.is_leader(): - for relation in relations: - TracingRequirerAppData( - receivers=list(protocols), - ).dump(relation.data[self._charm.app]) - - except ModelError as e: - # args are bytes - msg = e.args[0] - if isinstance(msg, bytes): - if msg.startswith( - b"ERROR cannot read relation application settings: permission denied" - ): - logger.error( - f"encountered error {e} while attempting to request_protocols." - f"The relation must be gone." - ) - return - raise - - @property - def relations(self) -> List[Relation]: - """The tracing relations associated with this endpoint.""" - return self._charm.model.relations[self._relation_name] - - @property - def _relation(self) -> Optional[Relation]: - """If this wraps a single endpoint, the relation bound to it, if any.""" - if not self._is_single_endpoint: - objname = type(self).__name__ - raise AmbiguousRelationUsageError( - f"This {objname} wraps a {self._relation_name} endpoint that has " - "limit != 1. We can't determine what relation, of the possibly many, you are " - f"talking about. Please pass a relation instance while calling {objname}, " - "or set limit=1 in the charm metadata." - ) - relations = self.relations - return relations[0] if relations else None - - def is_ready(self, relation: Optional[Relation] = None): - """Is this endpoint ready?""" - relation = relation or self._relation - if not relation: - logger.debug(f"no relation on {self._relation_name !r}: tracing not ready") - return False - if relation.data is None: - logger.error(f"relation data is None for {relation}") - return False - if not relation.app: - logger.error(f"{relation} event received but there is no relation.app") - return False - try: - databag = dict(relation.data[relation.app]) - TracingProviderAppData.load(databag) - - except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError): - logger.info(f"failed validating relation data for {relation}") - return False - return True - - def _on_tracing_relation_changed(self, event): - """Notify the providers that there is new endpoint information available.""" - relation = event.relation - if not self.is_ready(relation): - self.on.endpoint_removed.emit(relation) # type: ignore - return - - data = TracingProviderAppData.load(relation.data[relation.app]) - self.on.endpoint_changed.emit(relation, [i.dict() for i in data.receivers]) # type: ignore - - def _on_tracing_relation_broken(self, event: RelationBrokenEvent): - """Notify the providers that the endpoint is broken.""" - relation = event.relation - self.on.endpoint_removed.emit(relation) # type: ignore - - def get_all_endpoints( - self, relation: Optional[Relation] = None - ) -> Optional[TracingProviderAppData]: - """Unmarshalled relation data.""" - relation = relation or self._relation - if not self.is_ready(relation): - return - return TracingProviderAppData.load(relation.data[relation.app]) # type: ignore - - def _get_endpoint( - self, relation: Optional[Relation], protocol: ReceiverProtocol - ) -> Optional[str]: - app_data = self.get_all_endpoints(relation) - if not app_data: - return None - receivers: List[Receiver] = list( - filter(lambda i: i.protocol.name == protocol, app_data.receivers) - ) - if not receivers: - logger.error(f"no receiver found with protocol={protocol!r}") - return - if len(receivers) > 1: - logger.error( - f"too many receivers with protocol={protocol!r}; using first one. 
Found: {receivers}" - ) - return - - receiver = receivers[0] - return receiver.url - - def get_endpoint( - self, protocol: ReceiverProtocol, relation: Optional[Relation] = None - ) -> Optional[str]: - """Receiver endpoint for the given protocol. - - It could happen that this function gets called before the provider publishes the endpoints. - In such a scenario, if a non-leader unit calls this function, a permission denied exception will be raised due to - restricted access. To prevent this, this function needs to be guarded by the `is_ready` check. - - Raises: - ProtocolNotRequestedError: - If the charm unit is the leader unit and attempts to obtain an endpoint for a protocol it did not request. - """ - endpoint = self._get_endpoint(relation or self._relation, protocol=protocol) - if not endpoint: - requested_protocols = set() - relations = [relation] if relation else self.relations - for relation in relations: - try: - databag = TracingRequirerAppData.load(relation.data[self._charm.app]) - except DataValidationError: - continue - - requested_protocols.update(databag.receivers) - - if protocol not in requested_protocols: - raise ProtocolNotRequestedError(protocol, relation) - - return None - return endpoint - - -def charm_tracing_config( - endpoint_requirer: TracingEndpointRequirer, cert_path: Optional[Union[Path, str]] -) -> Tuple[Optional[str], Optional[str]]: - """Return the charm_tracing config you likely want. - - If no endpoint is provided: - disable charm tracing. - If https endpoint is provided but cert_path is not found on disk: - disable charm tracing. - If https endpoint is provided and cert_path is None: - ERROR - Else: - proceed with charm tracing (with or without tls, as appropriate) - - Usage: - If you are using charm_tracing >= v1.9: - >>> from lib.charms.tempo_k8s.v1.charm_tracing import trace_charm - >>> from lib.charms.tempo_k8s.v2.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self.my_endpoint, self.cert_path = charm_tracing_config( - ... self.tracing, self._cert_path) - - If you are using charm_tracing < v1.9: - >>> from lib.charms.tempo_k8s.v1.charm_tracing import trace_charm - >>> from lib.charms.tempo_k8s.v2.tracing import charm_tracing_config - >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path") - >>> class MyCharm(...): - >>> _cert_path = "/path/to/cert/on/charm/container.crt" - >>> def __init__(self, ...): - >>> self.tracing = TracingEndpointRequirer(...) - >>> self._my_endpoint, self._cert_path = charm_tracing_config( - ... 
self.tracing, self._cert_path) - >>> @property - >>> def my_endpoint(self): - >>> return self._my_endpoint - >>> @property - >>> def cert_path(self): - >>> return self._cert_path - - """ - if not endpoint_requirer.is_ready(): - return None, None - - endpoint = endpoint_requirer.get_endpoint("otlp_http") - if not endpoint: - return None, None - - is_https = endpoint.startswith("https://") - - if is_https: - if cert_path is None: - raise TracingError("Cannot send traces to an https endpoint without a certificate.") - elif not Path(cert_path).exists(): - # if endpoint is https BUT we don't have a server_cert yet: - # disable charm tracing until we do to prevent tls errors - return None, None - return endpoint, str(cert_path) - else: - return endpoint, None diff --git a/poetry.lock b/poetry.lock index 2d8d3809..ab5da8d6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,5 +1,27 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. +[[package]] +name = "anyio" +version = "4.5.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.8" +files = [ + {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, + {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} + +[package.extras] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +trio = ["trio (>=0.26.1)"] + [[package]] name = "appnope" version = "0.1.4" @@ -325,18 +347,21 @@ files = [ [[package]] name = "cosl" -version = "0.0.12" +version = "0.0.39" description = "Utils for COS Lite charms" optional = false python-versions = ">=3.8" files = [ - {file = "cosl-0.0.12-py3-none-any.whl", hash = "sha256:4efa647c251c0a5e53016833ccffbba3899c0a64f0a81ba0e8e8a5f8e080032a"}, - {file = "cosl-0.0.12.tar.gz", hash = "sha256:6c6eefb3025dd49e526e99d09cde574a235ac6d0563e80c271d21cf50dd510bf"}, + {file = "cosl-0.0.39-py3-none-any.whl", hash = "sha256:c4e733a203e3d8a53a9b2d35e298d393f36bc2bce0ad306ac6707b3e12f2365a"}, + {file = "cosl-0.0.39.tar.gz", hash = "sha256:1b7e8575d53167643f924a31f29a99b241a0e31c463c39568fbbe778fe00fe55"}, ] [package.dependencies] +lightkube = ">=v0.15.4" ops = "*" +pydantic = "*" pyyaml = "*" +tenacity = "*" typing-extensions = "*" [[package]] @@ -565,6 +590,63 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "httpcore" +version = "1.0.6" +description = "A minimal low-level HTTP client." 
+optional = false +python-versions = ">=3.8" +files = [ + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, +] + +[package.dependencies] +certifi = "*" +h11 = ">=0.13,<0.15" + +[package.extras] +asyncio = ["anyio (>=4.0,<5.0)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +trio = ["trio (>=0.22.0,<1.0)"] + +[[package]] +name = "httpx" +version = "0.27.2" +description = "The next generation HTTP client." +optional = false +python-versions = ">=3.8" +files = [ + {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, + {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, +] + +[package.dependencies] +anyio = "*" +certifi = "*" +httpcore = "==1.*" +idna = "*" +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (==1.*)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "hvac" version = "2.3.0" @@ -819,6 +901,36 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] +[[package]] +name = "lightkube" +version = "0.15.4" +description = "Lightweight kubernetes client library" +optional = false +python-versions = "*" +files = [ + {file = "lightkube-0.15.4-py3-none-any.whl", hash = "sha256:7dde49694f2933b757ee6dfee0e028a56d7a13f47476bf54a21ec7cee343b09b"}, + {file = "lightkube-0.15.4.tar.gz", hash = "sha256:fe7939f8da5b68d80809243c9abf601fca2e5425228bb02c2ad23ed3e56401a8"}, +] + +[package.dependencies] +httpx = ">=0.24.0" +lightkube-models = ">=1.15.12.0" +PyYAML = "*" + +[package.extras] +dev = ["pytest", "pytest-asyncio (<0.17.0)", "respx"] + +[[package]] +name = "lightkube-models" +version = "1.31.1.8" +description = "Models and Resources for lightkube module" +optional = false +python-versions = "*" +files = [ + {file = "lightkube-models-1.31.1.8.tar.gz", hash = "sha256:14fbfa990b4d3393fa4ac3e9e46d67514c4d659508e296b30f1a5d254eecc097"}, + {file = "lightkube_models-1.31.1.8-py3-none-any.whl", hash = "sha256:50c0e2dd2c125cd9b50e93269e2d212bcbec19f7b00de91aa66a5ec320772fae"}, +] + [[package]] name = "macaroonbakery" version = "1.3.4" @@ -1332,54 +1444,54 @@ files = [ [[package]] name = "pydantic" -version = "1.10.17" +version = "1.10.18" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, - {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, - {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, - {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, - {file = 
"pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, - {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, - {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, - {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, - {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, - {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, - {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, - {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, - {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, - {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, - {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, - {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, - {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, - {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, - {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, - {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, - {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, - {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, - {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, - {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = "sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, - {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, - {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, - {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, - {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, - {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, - {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, + {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, + {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, + {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, + {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, + {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, + {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, + {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, + {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, + {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, + {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, + {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, + {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, + {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, + {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, + {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, + {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, + {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, + {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, + {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, + {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, + {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, + {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, + {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, + {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, + {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, + {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, + {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, + {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, + {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, + {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, + {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, + {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, + {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, + {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, + {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, + {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, + {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, + {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, + {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, + {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, + {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, + {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, + {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, ] [package.dependencies] @@ -1923,6 +2035,17 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + [[package]] name = "stack-data" version = "0.6.3" diff --git a/src/abstract_charm.py b/src/abstract_charm.py index bc4eac49..becf8bb5 100644 --- a/src/abstract_charm.py +++ b/src/abstract_charm.py @@ -8,7 +8,6 @@ import typing import ops -from charms.tempo_k8s.v2.tracing import TracingEndpointRequirer import container import lifecycle @@ -33,9 +32,6 @@ class MySQLRouterCharm(ops.CharmBase, abc.ABC): _READ_WRITE_X_PORT = 6448 _READ_ONLY_X_PORT = 6449 - _TRACING_RELATION_NAME = "tracing" - _TRACING_PROTOCOL = "otlp_http" - def __init__(self, *args) -> None: super().__init__(*args) # Instantiate before registering other event observers @@ -71,10 +67,6 @@ def __init__(self, *args) -> None: ) self.tls = relations.tls.RelationEndpoint(self) - self.tracing = TracingEndpointRequirer( - self, relation_name=self._TRACING_RELATION_NAME, protocols=[self._TRACING_PROTOCOL] - ) - @property 
    @abc.abstractmethod
    def _subordinate_relation_endpoint_names(self) -> typing.Optional[typing.Iterable[str]]:
@@ -148,8 +140,7 @@ def _tls_certificate(self) -> typing.Optional[str]:

     @property
     def tracing_endpoint(self) -> typing.Optional[str]:
         """Otlp http endpoint for charm instrumentation."""
-        if self.tracing.is_ready():
-            return self.tracing.get_endpoint(self._TRACING_PROTOCOL)
+        self._cos_relation.tracing_endpoint

     def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]:
         """Returns the exporter config for MySQLRouter exporter if cos relation exists"""
diff --git a/src/machine_charm.py b/src/machine_charm.py
index 788823b5..cdc2bdc6 100755
--- a/src/machine_charm.py
+++ b/src/machine_charm.py
@@ -12,7 +12,7 @@
 import ops
 import tenacity
-from charms.tempo_k8s.v1.charm_tracing import trace_charm
+from charms.tempo_coordinator_k8s.v0.charm_tracing import trace_charm

 import abstract_charm
 import logrotate
diff --git a/src/relations/cos.py b/src/relations/cos.py
index de4907cb..d25dfcc9 100644
--- a/src/relations/cos.py
+++ b/src/relations/cos.py
@@ -8,7 +8,7 @@
 from dataclasses import dataclass

 import ops
-from charms.grafana_agent.v0.cos_agent import COSAgentProvider
+from charms.grafana_agent.v0.cos_agent import COSAgentProvider, charm_tracing_config

 import container
 import relations.secrets
@@ -42,6 +42,8 @@ class COSRelation:
     MONITORING_USERNAME = "monitoring"
     _MONITORING_PASSWORD_KEY = "monitoring-password"

+    _TRACING_PROTOCOL = "otlp_http"
+
     def __init__(self, charm_: "abstract_charm.MySQLRouterCharm", container_: container.Container):
         self._interface = COSAgentProvider(
             charm_,
@@ -52,6 +54,7 @@ def __init__(self, charm_: "abstract_charm.MySQLRouterCharm", container_: contai
                 }
             ],
             log_slots=[f"{_SNAP_NAME}:logs"],
+            tracing_protocols=[self._TRACING_PROTOCOL],
         )
         self._charm = charm_
         self._container = container_
@@ -71,6 +74,8 @@ def __init__(self, charm_: "abstract_charm.MySQLRouterCharm", container_: contai
             unit_secret_fields=[self._MONITORING_PASSWORD_KEY],
         )

+        self._tracing_endpoint, _ = charm_tracing_config(self._interface, None)
+
     @property
     def exporter_user_config(self) -> ExporterConfig:
         """Returns user config needed for the router exporter service."""
@@ -86,6 +91,11 @@ def relation_exists(self) -> bool:
         """Whether relation with cos exists."""
         return len(self._charm.model.relations.get(self._NAME, [])) == 1

+    @property
+    def tracing_endpoint(self) -> typing.Optional[str]:
+        """The otlp_http tracing endpoint, if available."""
+        return self._tracing_endpoint
+
     def get_monitoring_password(self) -> str:
         """Gets the monitoring password from unit peer data, or generate and cache it."""
         monitoring_password = self._secrets.get_value(
diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py
index 0c9b77f2..fc7e2af4 100644
--- a/tests/unit/conftest.py
+++ b/tests/unit/conftest.py
@@ -5,7 +5,7 @@
 import ops
 import pytest
-from charms.tempo_k8s.v1.charm_tracing import charm_tracing_disabled
+from charms.tempo_coordinator_k8s.v0.charm_tracing import charm_tracing_disabled
 from pytest_mock import MockerFixture

 import snap

From b048093d77cbd2fe868a22ff150e8c26101b029e Mon Sep 17 00:00:00 2001
From: Shayan Patel
Date: Tue, 22 Oct 2024 12:56:53 +0000
Subject: [PATCH 2/7] Add missing return statement

---
 src/abstract_charm.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/src/abstract_charm.py b/src/abstract_charm.py
index becf8bb5..eed9e086 100644
--- a/src/abstract_charm.py
+++ b/src/abstract_charm.py
@@ -140,7 +140,7 @@ def _tls_certificate(self) -> typing.Optional[str]:
     @property
     def tracing_endpoint(self) -> typing.Optional[str]:
         """Otlp http endpoint for charm instrumentation."""
-        self._cos_relation.tracing_endpoint
+        return self._cos_relation.tracing_endpoint

     def _cos_exporter_config(self, event) -> typing.Optional[relations.cos.ExporterConfig]:
         """Returns the exporter config for MySQLRouter exporter if cos relation exists"""

From f5e9f11901a8e7d5806fe8fe183f26c5063b2fc8 Mon Sep 17 00:00:00 2001
From: Shayan Patel
Date: Tue, 22 Oct 2024 13:03:20 +0000
Subject: [PATCH 3/7] Remove comment about deleted charm tracing lib

---
 pyproject.toml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/pyproject.toml b/pyproject.toml
index d8460909..92e2a8c6 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -25,7 +25,6 @@ ops = ">=2.0.0"
 cryptography = ">=42.0.5"
 jsonschema = "*"
 # grafana_agent/v0/cos_agent.py
-# tempo_coordinator_k8s/v/tracing.py supports both pydantic v1 and v2
 pydantic = "<2"
 cosl = "*"
 # tempo_coordinator_k8s/v0/charm_tracing.py

From 27d87298903dbe3970dc4c94f94c56fe85fdf018 Mon Sep 17 00:00:00 2001
From: Shayan Patel
Date: Tue, 22 Oct 2024 13:06:28 +0000
Subject: [PATCH 4/7] Update outdated charm_tracing charm lib

---
 .../tempo_coordinator_k8s/v0/charm_tracing.py | 389 ++++++++++++++++--
 1 file changed, 359 insertions(+), 30 deletions(-)

diff --git a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py
index 1e7ff840..cf8def11 100644
--- a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py
+++ b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py
@@ -69,6 +69,9 @@ def my_tracing_endpoint(self) -> Optional[str]:
 - every event as a span (including custom events)
 - every charm method call (except dunders) as a span

+We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests
+go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down.
+
 ## TLS support

 If your charm integrates with a TLS provider which is also trusted by the tracing provider (the Tempo charm),
@@ -114,6 +117,57 @@ def get_tracer(self) -> opentelemetry.trace.Tracer:
 See the official opentelemetry Python SDK documentation for usage:
 https://opentelemetry-python.readthedocs.io/en/latest/

+
+## Caching traces
+The `trace_charm` machinery will buffer any traces collected during charm execution and store them
+to a file on the charm container until a tracing backend becomes available. At that point, it will
+flush them to the tracing receiver.
+
+By default, the buffer is configured to start dropping old traces if any of these conditions apply:
+
+- the storage size exceeds 10 MiB
+- the number of buffered events exceeds 100
+
+You can configure this by, for example:
+
+```python
+@trace_charm(
+    tracing_endpoint="my_tracing_endpoint",
+    server_cert="_server_cert",
+    # only cache up to 42 events
+    buffer_max_events=42,
+    # only cache up to 42 MiB
+    buffer_max_size_mib=42,  # minimum 10!
+)
+class MyCharm(CharmBase):
+    ...
+```
+
+Note that setting `buffer_max_events` to 0 will effectively disable the buffer.
+
+The path of the buffer file is by default in the charm's execution root, which for k8s charms means
+that in case of pod churn, the cache will be lost. 
The recommended solution is to use an existing storage +(or add a new one) such as: + +```yaml +storage: + data: + type: filesystem + location: /charm-traces +``` + +and then configure the `@trace_charm` decorator to use it as path for storing the buffer: +```python +@trace_charm( + tracing_endpoint="my_tracing_endpoint", + server_cert="_server_cert", + # store traces to a PVC so they're not lost on pod restart. + buffer_path="/charm-traces/buffer.file", +) +class MyCharm(CharmBase): + ... +``` + ## Upgrading from `v0` If you are upgrading from `charm_tracing` v0, you need to take the following steps (assuming you already @@ -171,6 +225,12 @@ def my_tracing_endpoint(self) -> Optional[str]: 3) If you were passing a certificate (str) using `server_cert`, you need to change it to provide an *absolute* path to the certificate file instead. """ +import typing + +from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import ( + encode_spans, +) +from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter def _remove_stale_otel_sdk_packages(): @@ -222,6 +282,9 @@ def _remove_stale_otel_sdk_packages(): otel_logger.debug("Successfully applied _remove_stale_otel_sdk_packages patch. ") +# apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. +# it could be trouble if someone ever decides to implement their own tracer parallel to +# ours and before the charm has inited. We assume they won't. _remove_stale_otel_sdk_packages() import functools @@ -235,6 +298,7 @@ def _remove_stale_otel_sdk_packages(): Any, Callable, Generator, + List, Optional, Sequence, Type, @@ -247,8 +311,12 @@ def _remove_stale_otel_sdk_packages(): import ops from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import Span, TracerProvider -from opentelemetry.sdk.trace.export import BatchSpanProcessor +from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider +from opentelemetry.sdk.trace.export import ( + BatchSpanProcessor, + SpanExporter, + SpanExportResult, +) from opentelemetry.trace import INVALID_SPAN, Tracer from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( @@ -269,7 +337,7 @@ def _remove_stale_otel_sdk_packages(): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 2 +LIBPATCH = 4 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -277,7 +345,7 @@ def _remove_stale_otel_sdk_packages(): dev_logger = logging.getLogger("tracing-dev") # set this to 0 if you are debugging/developing this library source -dev_logger.setLevel(logging.CRITICAL) +dev_logger.setLevel(logging.ERROR) _CharmType = Type[CharmBase] # the type CharmBase and any subclass thereof _C = TypeVar("_C", bound=_CharmType) @@ -287,6 +355,186 @@ def _remove_stale_otel_sdk_packages(): _GetterType = Union[Callable[[_CharmType], Optional[str]], property] CHARM_TRACING_ENABLED = "CHARM_TRACING_ENABLED" +BUFFER_DEFAULT_CACHE_FILE_NAME = ".charm_tracing_buffer.raw" +# we store the buffer as raw otlp-native protobuf (bytes) since it's hard to serialize/deserialize it in +# any portable format. Json dumping is supported, but loading isn't. 
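+# (hence the _Buffer class below stores serialized span batches joined by a bespoke byte separator)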
+# cfr: https://github.com/open-telemetry/opentelemetry-python/issues/1003 + +BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB = 10 +_BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN = 10 +BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH = 100 +_MiB_TO_B = 2**20 # megabyte to byte conversion rate +_OTLP_SPAN_EXPORTER_TIMEOUT = 1 +"""Timeout in seconds that the OTLP span exporter has to push traces to the backend.""" + + +class _Buffer: + """Handles buffering for spans emitted while no tracing backend is configured or available. + + Use the max_event_history_length_buffering param of @trace_charm to tune + the amount of memory that this will hog on your units. + + The buffer is formatted as a bespoke byte dump (protobuf limitation). + We cannot store them as json because that is not well-supported by the sdk + (see https://github.com/open-telemetry/opentelemetry-python/issues/3364). + """ + + _SPANSEP = b"__CHARM_TRACING_BUFFER_SPAN_SEP__" + + def __init__(self, db_file: Path, max_event_history_length: int, max_buffer_size_mib: int): + self._db_file = db_file + self._max_event_history_length = max_event_history_length + self._max_buffer_size_mib = max(max_buffer_size_mib, _BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN) + + # set by caller + self.exporter: Optional[OTLPSpanExporter] = None + + def save(self, spans: typing.Sequence[ReadableSpan]): + """Save the spans collected by this exporter to the cache file. + + This method should be as fail-safe as possible. + """ + if self._max_event_history_length < 1: + dev_logger.debug("buffer disabled: max history length < 1") + return + + current_history_length = len(self.load()) + new_history_length = current_history_length + len(spans) + if (diff := self._max_event_history_length - new_history_length) < 0: + self.drop(diff) + self._save(spans) + + def _serialize(self, spans: Sequence[ReadableSpan]) -> bytes: + # encode because otherwise we can't json-dump them + return encode_spans(spans).SerializeToString() + + def _save(self, spans: Sequence[ReadableSpan], replace: bool = False): + dev_logger.debug(f"saving {len(spans)} new spans to buffer") + old = [] if replace else self.load() + new = self._serialize(spans) + + try: + # if the buffer exceeds the size limit, we start dropping old spans until it does + + while len((new + self._SPANSEP.join(old))) > (self._max_buffer_size_mib * _MiB_TO_B): + if not old: + # if we've already dropped all spans and still we can't get under the + # size limit, we can't save this span + logger.error( + f"span exceeds total buffer size limit ({self._max_buffer_size_mib}MiB); " + f"buffering FAILED" + ) + return + + old = old[1:] + logger.warning( + f"buffer size exceeds {self._max_buffer_size_mib}MiB; dropping older spans... " + f"Please increase the buffer size, disable buffering, or ensure the spans can be flushed." + ) + + self._db_file.write_bytes(new + self._SPANSEP.join(old)) + except Exception: + logger.exception("error buffering spans") + + def load(self) -> List[bytes]: + """Load currently buffered spans from the cache file. + + This method should be as fail-safe as possible. + """ + if not self._db_file.exists(): + dev_logger.debug("buffer file not found. 
buffer empty.") + return [] + try: + spans = self._db_file.read_bytes().split(self._SPANSEP) + except Exception: + logger.exception(f"error parsing {self._db_file}") + return [] + return spans + + def drop(self, n_spans: Optional[int] = None): + """Drop some currently buffered spans from the cache file.""" + current = self.load() + if n_spans: + dev_logger.debug(f"dropping {n_spans} spans from buffer") + new = current[n_spans:] + else: + dev_logger.debug("emptying buffer") + new = [] + + self._db_file.write_bytes(self._SPANSEP.join(new)) + + def flush(self) -> Optional[bool]: + """Export all buffered spans to the given exporter, then clear the buffer. + + Returns whether the flush was successful, and None if there was nothing to flush. + """ + if not self.exporter: + dev_logger.debug("no exporter set; skipping buffer flush") + return False + + buffered_spans = self.load() + if not buffered_spans: + dev_logger.debug("nothing to flush; buffer empty") + return None + + errors = False + for span in buffered_spans: + try: + out = self.exporter._export(span) # type: ignore + if not (200 <= out.status_code < 300): + # take any 2xx status code as a success + errors = True + except ConnectionError: + dev_logger.debug( + "failed exporting buffered span; backend might be down or still starting" + ) + errors = True + except Exception: + logger.exception("unexpected error while flushing span batch from buffer") + errors = True + + if not errors: + self.drop() + else: + logger.error("failed flushing spans; buffer preserved") + return not errors + + @property + def is_empty(self): + """Utility to check whether the buffer has any stored spans. + + This is more efficient than attempting a load() given how large the buffer might be. + """ + return (not self._db_file.exists()) or (self._db_file.stat().st_size == 0) + + +class _OTLPSpanExporter(OTLPSpanExporter): + """Subclass of OTLPSpanExporter to configure the max retry timeout, so that it fails a bit faster.""" + + # The issue we're trying to solve is that the model takes AGES to settle if e.g. tls is misconfigured, + # as every hook of a charm_tracing-instrumented charm takes about a minute to exit, as the charm can't + # flush the traces and keeps retrying for 'too long' + + _MAX_RETRY_TIMEOUT = 4 + # we give the exporter 4 seconds in total to succeed pushing the traces to tempo + # if it fails, we'll be caching the data in the buffer and flush it the next time, so there's no data loss risk. + # this means 2/3 retries (hard to guess from the implementation) and up to ~7 seconds total wait + + +class _BufferedExporter(InMemorySpanExporter): + def __init__(self, buffer: _Buffer) -> None: + super().__init__() + self._buffer = buffer + + def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult: + self._buffer.save(spans) + return super().export(spans) + + def force_flush(self, timeout_millis: int = 0) -> bool: + # parent implementation is fake, so the timeout_millis arg is not doing anything. 
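+        # all we can do here is persist whatever spans the in-memory parent has collected into the buffer file.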
+ result = super().force_flush(timeout_millis) + self._buffer.save(self.get_finished_spans()) + return result def is_enabled() -> bool: @@ -423,7 +671,10 @@ def _setup_root_span_initializer( charm_type: _CharmType, tracing_endpoint_attr: str, server_cert_attr: Optional[str], - service_name: Optional[str] = None, + service_name: Optional[str], + buffer_path: Optional[Path], + buffer_max_events: int, + buffer_max_size_mib: int, ): """Patch the charm's initializer.""" original_init = charm_type.__init__ @@ -442,18 +693,11 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): logger.info("Tracing DISABLED: skipping root span initialization") return - # already init some attrs that will be reinited later by calling original_init: - # self.framework = framework - # self.handle = Handle(None, self.handle_kind, None) - original_event_context = framework._event_context # default service name isn't just app name because it could conflict with the workload service name _service_name = service_name or f"{self.app.name}-charm" unit_name = self.unit.name - # apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. - # it could be trouble if someone ever decides to implement their own tracer parallel to - # ours and before the charm has inited. We assume they won't. resource = Resource.create( attributes={ "service.name": _service_name, @@ -471,33 +715,60 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): # if anything goes wrong with retrieving the endpoint, we let the exception bubble up. tracing_endpoint = _get_tracing_endpoint(tracing_endpoint_attr, self, charm_type) + buffer_only = False + # whether we're only exporting to buffer, or also to the otlp exporter. + if not tracing_endpoint: # tracing is off if tracing_endpoint is None - return + # however we can buffer things until tracing comes online + buffer_only = True server_cert: Optional[Union[str, Path]] = ( _get_server_cert(server_cert_attr, self, charm_type) if server_cert_attr else None ) - if tracing_endpoint.startswith("https://") and not server_cert: + if (tracing_endpoint and tracing_endpoint.startswith("https://")) and not server_cert: logger.error( "Tracing endpoint is https, but no server_cert has been passed." "Please point @trace_charm to a `server_cert` attr. " "This might also mean that the tracing provider is related to a " "certificates provider, but this application is not (yet). " "In that case, you might just have to wait a bit for the certificates " - "integration to settle. " + "integration to settle. This span will be buffered." 
) - return + buffer_only = True - exporter = OTLPSpanExporter( - endpoint=tracing_endpoint, - certificate_file=str(Path(server_cert).absolute()) if server_cert else None, - timeout=2, + buffer = _Buffer( + db_file=buffer_path or Path() / BUFFER_DEFAULT_CACHE_FILE_NAME, + max_event_history_length=buffer_max_events, + max_buffer_size_mib=buffer_max_size_mib, ) + previous_spans_buffered = not buffer.is_empty + + exporters: List[SpanExporter] = [] + if buffer_only: + # we have to buffer because we're missing necessary backend configuration + dev_logger.debug("buffering mode: ON") + exporters.append(_BufferedExporter(buffer)) + + else: + dev_logger.debug("buffering mode: FALLBACK") + # in principle, we have the right configuration to be pushing traces, + # but if we fail for whatever reason, we will put everything in the buffer + # and retry the next time + otlp_exporter = _OTLPSpanExporter( + endpoint=tracing_endpoint, + certificate_file=str(Path(server_cert).absolute()) if server_cert else None, + timeout=_OTLP_SPAN_EXPORTER_TIMEOUT, # give individual requests 1 second to succeed + ) + exporters.append(otlp_exporter) + exporters.append(_BufferedExporter(buffer)) + buffer.exporter = otlp_exporter + + for exporter in exporters: + processor = BatchSpanProcessor(exporter) + provider.add_span_processor(processor) - processor = BatchSpanProcessor(exporter) - provider.add_span_processor(processor) set_tracer_provider(provider) _tracer = get_tracer(_service_name) # type: ignore _tracer_token = tracer.set(_tracer) @@ -521,7 +792,7 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): @contextmanager def wrap_event_context(event_name: str): - dev_logger.info(f"entering event context: {event_name}") + dev_logger.debug(f"entering event context: {event_name}") # when the framework enters an event context, we create a span. with _span("event: " + event_name) as event_context_span: if event_context_span: @@ -535,12 +806,50 @@ def wrap_event_context(event_name: str): @functools.wraps(original_close) def wrap_close(): - dev_logger.info("tearing down tracer and flushing traces") + dev_logger.debug("tearing down tracer and flushing traces") span.end() opentelemetry.context.detach(span_token) # type: ignore tracer.reset(_tracer_token) tp = cast(TracerProvider, get_tracer_provider()) - tp.force_flush(timeout_millis=1000) # don't block for too long + flush_successful = tp.force_flush(timeout_millis=1000) # don't block for too long + + if buffer_only: + # if we're in buffer_only mode, it means we couldn't even set up the exporter for + # tempo as we're missing some data. + # so attempting to flush the buffer doesn't make sense + dev_logger.debug("tracing backend unavailable: all spans pushed to buffer") + + else: + dev_logger.debug("tracing backend found: attempting to flush buffer...") + + # if we do have an exporter for tempo, and we could send traces to it, + # we can attempt to flush the buffer as well. 
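+                # (flush_successful refers to the tracer provider's force_flush of this event's spans, above)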
+ if not flush_successful: + logger.error("flushing FAILED: unable to push traces to backend.") + else: + dev_logger.debug("flush succeeded.") + + # the backend has accepted the spans generated during this event, + if not previous_spans_buffered: + # if the buffer was empty to begin with, any spans we collected now can be discarded + buffer.drop() + dev_logger.debug("buffer dropped: this trace has been sent already") + else: + # if the buffer was nonempty, we can attempt to flush it + dev_logger.debug("attempting buffer flush...") + buffer_flush_successful = buffer.flush() + if buffer_flush_successful: + dev_logger.debug("buffer flush OK") + elif buffer_flush_successful is None: + # TODO is this even possible? + dev_logger.debug("buffer flush OK; empty: nothing to flush") + else: + # this situation is pretty weird, I'm not even sure it can happen, + # because it would mean that we did manage + # to push traces directly to the tempo exporter (flush_successful), + # but the buffer flush failed to push to the same exporter! + logger.error("buffer flush FAILED") + tp.shutdown() original_close() @@ -555,6 +864,9 @@ def trace_charm( server_cert: Optional[str] = None, service_name: Optional[str] = None, extra_types: Sequence[type] = (), + buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, + buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, + buffer_path: Optional[Union[str, Path]] = None, ) -> Callable[[_T], _T]: """Autoinstrument the decorated charm with tracing telemetry. @@ -596,6 +908,10 @@ def trace_charm( Defaults to the juju application name this charm is deployed under. :param extra_types: pass any number of types that you also wish to autoinstrument. For example, charm libs, relation endpoint wrappers, workload abstractions, ... + :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. + :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. + Minimum 10MiB. + :param buffer_path: path to buffer file to use for saving buffered spans. """ def _decorator(charm_type: _T) -> _T: @@ -606,6 +922,9 @@ def _decorator(charm_type: _T) -> _T: server_cert_attr=server_cert, service_name=service_name, extra_types=extra_types, + buffer_path=Path(buffer_path) if buffer_path else None, + buffer_max_size_mib=buffer_max_size_mib, + buffer_max_events=buffer_max_events, ) return charm_type @@ -618,6 +937,9 @@ def _autoinstrument( server_cert_attr: Optional[str] = None, service_name: Optional[str] = None, extra_types: Sequence[type] = (), + buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, + buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, + buffer_path: Optional[Path] = None, ) -> _T: """Set up tracing on this charm class. @@ -650,13 +972,20 @@ def _autoinstrument( Defaults to the juju application name this charm is deployed under. :param extra_types: pass any number of types that you also wish to autoinstrument. For example, charm libs, relation endpoint wrappers, workload abstractions, ... + :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. + :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. + Minimum 10MiB. + :param buffer_path: path to buffer file to use for saving buffered spans. 
""" - dev_logger.info(f"instrumenting {charm_type}") + dev_logger.debug(f"instrumenting {charm_type}") _setup_root_span_initializer( charm_type, tracing_endpoint_attr, server_cert_attr=server_cert_attr, service_name=service_name, + buffer_path=buffer_path, + buffer_max_events=buffer_max_events, + buffer_max_size_mib=buffer_max_size_mib, ) trace_type(charm_type) for type_ in extra_types: @@ -672,12 +1001,12 @@ def trace_type(cls: _T) -> _T: It assumes that this class is only instantiated after a charm type decorated with `@trace_charm` has been instantiated. """ - dev_logger.info(f"instrumenting {cls}") + dev_logger.debug(f"instrumenting {cls}") for name, method in inspect.getmembers(cls, predicate=inspect.isfunction): - dev_logger.info(f"discovered {method}") + dev_logger.debug(f"discovered {method}") if method.__name__.startswith("__"): - dev_logger.info(f"skipping {method} (dunder)") + dev_logger.debug(f"skipping {method} (dunder)") continue # the span title in the general case should be: @@ -723,7 +1052,7 @@ def trace_function(function: _F, name: Optional[str] = None) -> _F: def _trace_callable(callable: _F, qualifier: str, name: Optional[str] = None) -> _F: - dev_logger.info(f"instrumenting {callable}") + dev_logger.debug(f"instrumenting {callable}") # sig = inspect.signature(callable) @functools.wraps(callable) From c720a5bb55e4c591dce0cee7cfcc8f0586323460 Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Tue, 22 Oct 2024 16:19:37 +0000 Subject: [PATCH 5/7] Revert accidental changes to the poetry lock file --- poetry.lock | 217 ++++++++++++---------------------------------------- 1 file changed, 47 insertions(+), 170 deletions(-) diff --git a/poetry.lock b/poetry.lock index ab5da8d6..2d8d3809 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,27 +1,5 @@ # This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand. 
-[[package]] -name = "anyio" -version = "4.5.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.8" -files = [ - {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, - {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] -trio = ["trio (>=0.26.1)"] - [[package]] name = "appnope" version = "0.1.4" @@ -347,21 +325,18 @@ files = [ [[package]] name = "cosl" -version = "0.0.39" +version = "0.0.12" description = "Utils for COS Lite charms" optional = false python-versions = ">=3.8" files = [ - {file = "cosl-0.0.39-py3-none-any.whl", hash = "sha256:c4e733a203e3d8a53a9b2d35e298d393f36bc2bce0ad306ac6707b3e12f2365a"}, - {file = "cosl-0.0.39.tar.gz", hash = "sha256:1b7e8575d53167643f924a31f29a99b241a0e31c463c39568fbbe778fe00fe55"}, + {file = "cosl-0.0.12-py3-none-any.whl", hash = "sha256:4efa647c251c0a5e53016833ccffbba3899c0a64f0a81ba0e8e8a5f8e080032a"}, + {file = "cosl-0.0.12.tar.gz", hash = "sha256:6c6eefb3025dd49e526e99d09cde574a235ac6d0563e80c271d21cf50dd510bf"}, ] [package.dependencies] -lightkube = ">=v0.15.4" ops = "*" -pydantic = "*" pyyaml = "*" -tenacity = "*" typing-extensions = "*" [[package]] @@ -590,63 +565,6 @@ protobuf = ">=3.20.2,<4.21.1 || >4.21.1,<4.21.2 || >4.21.2,<4.21.3 || >4.21.3,<4 [package.extras] grpc = ["grpcio (>=1.44.0,<2.0.0.dev0)"] -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.6" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -files = [ - {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, - {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.27.2" -description = "The next generation HTTP client." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "httpx-0.27.2-py3-none-any.whl", hash = "sha256:7bb2708e112d8fdd7829cd4243970f0c223274051cb35ee80c03301ee29a3df0"}, - {file = "httpx-0.27.2.tar.gz", hash = "sha256:f7c2be1d2f3c3c3160d441802406b206c2b76f5947b11115e6df10c6c65e66c2"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" -sniffio = "*" - -[package.extras] -brotli = ["brotli", "brotlicffi"] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - [[package]] name = "hvac" version = "2.3.0" @@ -901,36 +819,6 @@ websocket-client = ">=0.32.0,<0.40.0 || >0.40.0,<0.41.dev0 || >=0.43.dev0" [package.extras] adal = ["adal (>=1.0.2)"] -[[package]] -name = "lightkube" -version = "0.15.4" -description = "Lightweight kubernetes client library" -optional = false -python-versions = "*" -files = [ - {file = "lightkube-0.15.4-py3-none-any.whl", hash = "sha256:7dde49694f2933b757ee6dfee0e028a56d7a13f47476bf54a21ec7cee343b09b"}, - {file = "lightkube-0.15.4.tar.gz", hash = "sha256:fe7939f8da5b68d80809243c9abf601fca2e5425228bb02c2ad23ed3e56401a8"}, -] - -[package.dependencies] -httpx = ">=0.24.0" -lightkube-models = ">=1.15.12.0" -PyYAML = "*" - -[package.extras] -dev = ["pytest", "pytest-asyncio (<0.17.0)", "respx"] - -[[package]] -name = "lightkube-models" -version = "1.31.1.8" -description = "Models and Resources for lightkube module" -optional = false -python-versions = "*" -files = [ - {file = "lightkube-models-1.31.1.8.tar.gz", hash = "sha256:14fbfa990b4d3393fa4ac3e9e46d67514c4d659508e296b30f1a5d254eecc097"}, - {file = "lightkube_models-1.31.1.8-py3-none-any.whl", hash = "sha256:50c0e2dd2c125cd9b50e93269e2d212bcbec19f7b00de91aa66a5ec320772fae"}, -] - [[package]] name = "macaroonbakery" version = "1.3.4" @@ -1444,54 +1332,54 @@ files = [ [[package]] name = "pydantic" -version = "1.10.18" +version = "1.10.17" description = "Data validation and settings management using python type hints" optional = false python-versions = ">=3.7" files = [ - {file = "pydantic-1.10.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e405ffcc1254d76bb0e760db101ee8916b620893e6edfbfee563b3c6f7a67c02"}, - {file = "pydantic-1.10.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e306e280ebebc65040034bff1a0a81fd86b2f4f05daac0131f29541cafd80b80"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11d9d9b87b50338b1b7de4ebf34fd29fdb0d219dc07ade29effc74d3d2609c62"}, - {file = "pydantic-1.10.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b661ce52c7b5e5f600c0c3c5839e71918346af2ef20062705ae76b5c16914cab"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:c20f682defc9ef81cd7eaa485879ab29a86a0ba58acf669a78ed868e72bb89e0"}, - {file = "pydantic-1.10.18-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c5ae6b7c8483b1e0bf59e5f1843e4fd8fd405e11df7de217ee65b98eb5462861"}, - {file = "pydantic-1.10.18-cp310-cp310-win_amd64.whl", hash = "sha256:74fe19dda960b193b0eb82c1f4d2c8e5e26918d9cda858cbf3f41dd28549cb70"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:72fa46abace0a7743cc697dbb830a41ee84c9db8456e8d77a46d79b537efd7ec"}, - {file = "pydantic-1.10.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ef0fe7ad7cbdb5f372463d42e6ed4ca9c443a52ce544472d8842a0576d830da5"}, - {file = 
"pydantic-1.10.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a00e63104346145389b8e8f500bc6a241e729feaf0559b88b8aa513dd2065481"}, - {file = "pydantic-1.10.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae6fa2008e1443c46b7b3a5eb03800121868d5ab6bc7cda20b5df3e133cde8b3"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:9f463abafdc92635da4b38807f5b9972276be7c8c5121989768549fceb8d2588"}, - {file = "pydantic-1.10.18-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3445426da503c7e40baccefb2b2989a0c5ce6b163679dd75f55493b460f05a8f"}, - {file = "pydantic-1.10.18-cp311-cp311-win_amd64.whl", hash = "sha256:467a14ee2183bc9c902579bb2f04c3d3dac00eff52e252850509a562255b2a33"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:efbc8a7f9cb5fe26122acba1852d8dcd1e125e723727c59dcd244da7bdaa54f2"}, - {file = "pydantic-1.10.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:24a4a159d0f7a8e26bf6463b0d3d60871d6a52eac5bb6a07a7df85c806f4c048"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b74be007703547dc52e3c37344d130a7bfacca7df112a9e5ceeb840a9ce195c7"}, - {file = "pydantic-1.10.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fcb20d4cb355195c75000a49bb4a31d75e4295200df620f454bbc6bdf60ca890"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:46f379b8cb8a3585e3f61bf9ae7d606c70d133943f339d38b76e041ec234953f"}, - {file = "pydantic-1.10.18-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:cbfbca662ed3729204090c4d09ee4beeecc1a7ecba5a159a94b5a4eb24e3759a"}, - {file = "pydantic-1.10.18-cp312-cp312-win_amd64.whl", hash = "sha256:c6d0a9f9eccaf7f438671a64acf654ef0d045466e63f9f68a579e2383b63f357"}, - {file = "pydantic-1.10.18-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3d5492dbf953d7d849751917e3b2433fb26010d977aa7a0765c37425a4026ff1"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe734914977eed33033b70bfc097e1baaffb589517863955430bf2e0846ac30f"}, - {file = "pydantic-1.10.18-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15fdbe568beaca9aacfccd5ceadfb5f1a235087a127e8af5e48df9d8a45ae85c"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:c3e742f62198c9eb9201781fbebe64533a3bbf6a76a91b8d438d62b813079dbc"}, - {file = "pydantic-1.10.18-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:19a3bd00b9dafc2cd7250d94d5b578edf7a0bd7daf102617153ff9a8fa37871c"}, - {file = "pydantic-1.10.18-cp37-cp37m-win_amd64.whl", hash = "sha256:2ce3fcf75b2bae99aa31bd4968de0474ebe8c8258a0110903478bd83dfee4e3b"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:335a32d72c51a313b33fa3a9b0fe283503272ef6467910338e123f90925f0f03"}, - {file = "pydantic-1.10.18-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:34a3613c7edb8c6fa578e58e9abe3c0f5e7430e0fc34a65a415a1683b9c32d9a"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e9ee4e6ca1d9616797fa2e9c0bfb8815912c7d67aca96f77428e316741082a1b"}, - {file = "pydantic-1.10.18-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:23e8ec1ce4e57b4f441fc91e3c12adba023fedd06868445a5b5f1d48f0ab3682"}, - {file = 
"pydantic-1.10.18-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:44ae8a3e35a54d2e8fa88ed65e1b08967a9ef8c320819a969bfa09ce5528fafe"}, - {file = "pydantic-1.10.18-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d5389eb3b48a72da28c6e061a247ab224381435256eb541e175798483368fdd3"}, - {file = "pydantic-1.10.18-cp38-cp38-win_amd64.whl", hash = "sha256:069b9c9fc645474d5ea3653788b544a9e0ccd3dca3ad8c900c4c6eac844b4620"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:80b982d42515632eb51f60fa1d217dfe0729f008e81a82d1544cc392e0a50ddf"}, - {file = "pydantic-1.10.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:aad8771ec8dbf9139b01b56f66386537c6fe4e76c8f7a47c10261b69ad25c2c9"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:941a2eb0a1509bd7f31e355912eb33b698eb0051730b2eaf9e70e2e1589cae1d"}, - {file = "pydantic-1.10.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65f7361a09b07915a98efd17fdec23103307a54db2000bb92095457ca758d485"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:6951f3f47cb5ca4da536ab161ac0163cab31417d20c54c6de5ddcab8bc813c3f"}, - {file = "pydantic-1.10.18-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:7a4c5eec138a9b52c67f664c7d51d4c7234c5ad65dd8aacd919fb47445a62c86"}, - {file = "pydantic-1.10.18-cp39-cp39-win_amd64.whl", hash = "sha256:49e26c51ca854286bffc22b69787a8d4063a62bf7d83dc21d44d2ff426108518"}, - {file = "pydantic-1.10.18-py3-none-any.whl", hash = "sha256:06a189b81ffc52746ec9c8c007f16e5167c8b0a696e1a726369327e3db7b2a82"}, - {file = "pydantic-1.10.18.tar.gz", hash = "sha256:baebdff1907d1d96a139c25136a9bb7d17e118f133a76a2ef3b845e831e3403a"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fa51175313cc30097660b10eec8ca55ed08bfa07acbfe02f7a42f6c242e9a4b"}, + {file = "pydantic-1.10.17-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c7e8988bb16988890c985bd2093df9dd731bfb9d5e0860db054c23034fab8f7a"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:371dcf1831f87c9e217e2b6a0c66842879a14873114ebb9d0861ab22e3b5bb1e"}, + {file = "pydantic-1.10.17-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4866a1579c0c3ca2c40575398a24d805d4db6cb353ee74df75ddeee3c657f9a7"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:543da3c6914795b37785703ffc74ba4d660418620cc273490d42c53949eeeca6"}, + {file = "pydantic-1.10.17-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7623b59876f49e61c2e283551cc3647616d2fbdc0b4d36d3d638aae8547ea681"}, + {file = "pydantic-1.10.17-cp310-cp310-win_amd64.whl", hash = "sha256:409b2b36d7d7d19cd8310b97a4ce6b1755ef8bd45b9a2ec5ec2b124db0a0d8f3"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fa43f362b46741df8f201bf3e7dff3569fa92069bcc7b4a740dea3602e27ab7a"}, + {file = "pydantic-1.10.17-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2a72d2a5ff86a3075ed81ca031eac86923d44bc5d42e719d585a8eb547bf0c9b"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ad32aed3bf5eea5ca5decc3d1bbc3d0ec5d4fbcd72a03cdad849458decbc63"}, + {file = "pydantic-1.10.17-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aeb4e741782e236ee7dc1fb11ad94dc56aabaf02d21df0e79e0c21fe07c95741"}, + {file = 
"pydantic-1.10.17-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:d2f89a719411cb234105735a520b7c077158a81e0fe1cb05a79c01fc5eb59d3c"}, + {file = "pydantic-1.10.17-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:db3b48d9283d80a314f7a682f7acae8422386de659fffaba454b77a083c3937d"}, + {file = "pydantic-1.10.17-cp311-cp311-win_amd64.whl", hash = "sha256:9c803a5113cfab7bbb912f75faa4fc1e4acff43e452c82560349fff64f852e1b"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:820ae12a390c9cbb26bb44913c87fa2ff431a029a785642c1ff11fed0a095fcb"}, + {file = "pydantic-1.10.17-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c1e51d1af306641b7d1574d6d3307eaa10a4991542ca324f0feb134fee259815"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e53fb834aae96e7b0dadd6e92c66e7dd9cdf08965340ed04c16813102a47fab"}, + {file = "pydantic-1.10.17-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e2495309b1266e81d259a570dd199916ff34f7f51f1b549a0d37a6d9b17b4dc"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:098ad8de840c92ea586bf8efd9e2e90c6339d33ab5c1cfbb85be66e4ecf8213f"}, + {file = "pydantic-1.10.17-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:525bbef620dac93c430d5d6bdbc91bdb5521698d434adf4434a7ef6ffd5c4b7f"}, + {file = "pydantic-1.10.17-cp312-cp312-win_amd64.whl", hash = "sha256:6654028d1144df451e1da69a670083c27117d493f16cf83da81e1e50edce72ad"}, + {file = "pydantic-1.10.17-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c87cedb4680d1614f1d59d13fea353faf3afd41ba5c906a266f3f2e8c245d655"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11289fa895bcbc8f18704efa1d8020bb9a86314da435348f59745473eb042e6b"}, + {file = "pydantic-1.10.17-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94833612d6fd18b57c359a127cbfd932d9150c1b72fea7c86ab58c2a77edd7c7"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:d4ecb515fa7cb0e46e163ecd9d52f9147ba57bc3633dca0e586cdb7a232db9e3"}, + {file = "pydantic-1.10.17-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:7017971ffa7fd7808146880aa41b266e06c1e6e12261768a28b8b41ba55c8076"}, + {file = "pydantic-1.10.17-cp37-cp37m-win_amd64.whl", hash = "sha256:e840e6b2026920fc3f250ea8ebfdedf6ea7a25b77bf04c6576178e681942ae0f"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bfbb18b616abc4df70591b8c1ff1b3eabd234ddcddb86b7cac82657ab9017e33"}, + {file = "pydantic-1.10.17-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ebb249096d873593e014535ab07145498957091aa6ae92759a32d40cb9998e2e"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8c209af63ccd7b22fba94b9024e8b7fd07feffee0001efae50dd99316b27768"}, + {file = "pydantic-1.10.17-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4b40c9e13a0b61583e5599e7950490c700297b4a375b55b2b592774332798b7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:c31d281c7485223caf6474fc2b7cf21456289dbaa31401844069b77160cab9c7"}, + {file = "pydantic-1.10.17-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:ae5184e99a060a5c80010a2d53c99aee76a3b0ad683d493e5f0620b5d86eeb75"}, + {file = "pydantic-1.10.17-cp38-cp38-win_amd64.whl", hash = 
"sha256:ad1e33dc6b9787a6f0f3fd132859aa75626528b49cc1f9e429cdacb2608ad5f0"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e17c0ee7192e54a10943f245dc79e36d9fe282418ea05b886e1c666063a7b54"}, + {file = "pydantic-1.10.17-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:cafb9c938f61d1b182dfc7d44a7021326547b7b9cf695db5b68ec7b590214773"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95ef534e3c22e5abbdbdd6f66b6ea9dac3ca3e34c5c632894f8625d13d084cbe"}, + {file = "pydantic-1.10.17-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d96b8799ae3d782df7ec9615cb59fc32c32e1ed6afa1b231b0595f6516e8ab"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ab2f976336808fd5d539fdc26eb51f9aafc1f4b638e212ef6b6f05e753c8011d"}, + {file = "pydantic-1.10.17-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8ad363330557beac73159acfbeed220d5f1bfcd6b930302a987a375e02f74fd"}, + {file = "pydantic-1.10.17-cp39-cp39-win_amd64.whl", hash = "sha256:48db882e48575ce4b39659558b2f9f37c25b8d348e37a2b4e32971dd5a7d6227"}, + {file = "pydantic-1.10.17-py3-none-any.whl", hash = "sha256:e41b5b973e5c64f674b3b4720286ded184dcc26a691dd55f34391c62c6934688"}, + {file = "pydantic-1.10.17.tar.gz", hash = "sha256:f434160fb14b353caf634149baaf847206406471ba70e64657c1e8330277a991"}, ] [package.dependencies] @@ -2035,17 +1923,6 @@ files = [ {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - [[package]] name = "stack-data" version = "0.6.3" From 9e3e75add5f1d60d2c6dbadd7ea0a18bd15b8f5c Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Wed, 4 Dec 2024 21:19:09 +0000 Subject: [PATCH 6/7] Attempt reverting the charm_tracing charm lib --- .../tempo_coordinator_k8s/v0/charm_tracing.py | 389 ++---------------- 1 file changed, 30 insertions(+), 359 deletions(-) diff --git a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py index cf8def11..1e7ff840 100644 --- a/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py +++ b/lib/charms/tempo_coordinator_k8s/v0/charm_tracing.py @@ -69,9 +69,6 @@ def my_tracing_endpoint(self) -> Optional[str]: - every event as a span (including custom events) - every charm method call (except dunders) as a span -We recommend that you scale up your tracing provider and relate it to an ingress so that your tracing requests -go through the ingress and get load balanced across all units. Otherwise, if the provider's leader goes down, your tracing goes down. 
- ## TLS support If your charm integrates with a TLS provider which is also trusted by the tracing provider (the Tempo charm), @@ -117,57 +114,6 @@ def get_tracer(self) -> opentelemetry.trace.Tracer: See the official opentelemetry Python SDK documentation for usage: https://opentelemetry-python.readthedocs.io/en/latest/ - -## Caching traces -The `trace_charm` machinery will buffer any traces collected during charm execution and store them -to a file on the charm container until a tracing backend becomes available. At that point, it will -flush them to the tracing receiver. - -By default, the buffer is configured to start dropping old traces if any of these conditions apply: - -- the storage size exceeds 10 MiB -- the number of buffered events exceeds 100 - -You can configure this by, for example: - -```python -@trace_charm( - tracing_endpoint="my_tracing_endpoint", - server_cert="_server_cert", - # only cache up to 42 events - buffer_max_events=42, - # only cache up to 42 MiB - buffer_max_size_mib=42, # minimum 10! -) -class MyCharm(CharmBase): - ... -``` - -Note that setting `buffer_max_events` to 0 will effectively disable the buffer. - -The path of the buffer file is by default in the charm's execution root, which for k8s charms means -that in case of pod churn, the cache will be lost. The recommended solution is to use an existing storage -(or add a new one) such as: - -```yaml -storage: - data: - type: filesystem - location: /charm-traces -``` - -and then configure the `@trace_charm` decorator to use it as path for storing the buffer: -```python -@trace_charm( - tracing_endpoint="my_tracing_endpoint", - server_cert="_server_cert", - # store traces to a PVC so they're not lost on pod restart. - buffer_path="/charm-traces/buffer.file", -) -class MyCharm(CharmBase): - ... -``` - ## Upgrading from `v0` If you are upgrading from `charm_tracing` v0, you need to take the following steps (assuming you already @@ -225,12 +171,6 @@ def my_tracing_endpoint(self) -> Optional[str]: 3) If you were passing a certificate (str) using `server_cert`, you need to change it to provide an *absolute* path to the certificate file instead. """ -import typing - -from opentelemetry.exporter.otlp.proto.common._internal.trace_encoder import ( - encode_spans, -) -from opentelemetry.sdk.trace.export.in_memory_span_exporter import InMemorySpanExporter def _remove_stale_otel_sdk_packages(): @@ -282,9 +222,6 @@ def _remove_stale_otel_sdk_packages(): otel_logger.debug("Successfully applied _remove_stale_otel_sdk_packages patch. ") -# apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. -# it could be trouble if someone ever decides to implement their own tracer parallel to -# ours and before the charm has inited. We assume they won't. 
_remove_stale_otel_sdk_packages() import functools @@ -298,7 +235,6 @@ def _remove_stale_otel_sdk_packages(): Any, Callable, Generator, - List, Optional, Sequence, Type, @@ -311,12 +247,8 @@ def _remove_stale_otel_sdk_packages(): import ops from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter from opentelemetry.sdk.resources import Resource -from opentelemetry.sdk.trace import ReadableSpan, Span, TracerProvider -from opentelemetry.sdk.trace.export import ( - BatchSpanProcessor, - SpanExporter, - SpanExportResult, -) +from opentelemetry.sdk.trace import Span, TracerProvider +from opentelemetry.sdk.trace.export import BatchSpanProcessor from opentelemetry.trace import INVALID_SPAN, Tracer from opentelemetry.trace import get_current_span as otlp_get_current_span from opentelemetry.trace import ( @@ -337,7 +269,7 @@ def _remove_stale_otel_sdk_packages(): # Increment this PATCH version before using `charmcraft publish-lib` or reset # to 0 if you are raising the major API version -LIBPATCH = 4 +LIBPATCH = 2 PYDEPS = ["opentelemetry-exporter-otlp-proto-http==1.21.0"] @@ -345,7 +277,7 @@ def _remove_stale_otel_sdk_packages(): dev_logger = logging.getLogger("tracing-dev") # set this to 0 if you are debugging/developing this library source -dev_logger.setLevel(logging.ERROR) +dev_logger.setLevel(logging.CRITICAL) _CharmType = Type[CharmBase] # the type CharmBase and any subclass thereof _C = TypeVar("_C", bound=_CharmType) @@ -355,186 +287,6 @@ def _remove_stale_otel_sdk_packages(): _GetterType = Union[Callable[[_CharmType], Optional[str]], property] CHARM_TRACING_ENABLED = "CHARM_TRACING_ENABLED" -BUFFER_DEFAULT_CACHE_FILE_NAME = ".charm_tracing_buffer.raw" -# we store the buffer as raw otlp-native protobuf (bytes) since it's hard to serialize/deserialize it in -# any portable format. Json dumping is supported, but loading isn't. -# cfr: https://github.com/open-telemetry/opentelemetry-python/issues/1003 - -BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB = 10 -_BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN = 10 -BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH = 100 -_MiB_TO_B = 2**20 # megabyte to byte conversion rate -_OTLP_SPAN_EXPORTER_TIMEOUT = 1 -"""Timeout in seconds that the OTLP span exporter has to push traces to the backend.""" - - -class _Buffer: - """Handles buffering for spans emitted while no tracing backend is configured or available. - - Use the max_event_history_length_buffering param of @trace_charm to tune - the amount of memory that this will hog on your units. - - The buffer is formatted as a bespoke byte dump (protobuf limitation). - We cannot store them as json because that is not well-supported by the sdk - (see https://github.com/open-telemetry/opentelemetry-python/issues/3364). - """ - - _SPANSEP = b"__CHARM_TRACING_BUFFER_SPAN_SEP__" - - def __init__(self, db_file: Path, max_event_history_length: int, max_buffer_size_mib: int): - self._db_file = db_file - self._max_event_history_length = max_event_history_length - self._max_buffer_size_mib = max(max_buffer_size_mib, _BUFFER_CACHE_FILE_SIZE_LIMIT_MiB_MIN) - - # set by caller - self.exporter: Optional[OTLPSpanExporter] = None - - def save(self, spans: typing.Sequence[ReadableSpan]): - """Save the spans collected by this exporter to the cache file. - - This method should be as fail-safe as possible. 
- """ - if self._max_event_history_length < 1: - dev_logger.debug("buffer disabled: max history length < 1") - return - - current_history_length = len(self.load()) - new_history_length = current_history_length + len(spans) - if (diff := self._max_event_history_length - new_history_length) < 0: - self.drop(diff) - self._save(spans) - - def _serialize(self, spans: Sequence[ReadableSpan]) -> bytes: - # encode because otherwise we can't json-dump them - return encode_spans(spans).SerializeToString() - - def _save(self, spans: Sequence[ReadableSpan], replace: bool = False): - dev_logger.debug(f"saving {len(spans)} new spans to buffer") - old = [] if replace else self.load() - new = self._serialize(spans) - - try: - # if the buffer exceeds the size limit, we start dropping old spans until it does - - while len((new + self._SPANSEP.join(old))) > (self._max_buffer_size_mib * _MiB_TO_B): - if not old: - # if we've already dropped all spans and still we can't get under the - # size limit, we can't save this span - logger.error( - f"span exceeds total buffer size limit ({self._max_buffer_size_mib}MiB); " - f"buffering FAILED" - ) - return - - old = old[1:] - logger.warning( - f"buffer size exceeds {self._max_buffer_size_mib}MiB; dropping older spans... " - f"Please increase the buffer size, disable buffering, or ensure the spans can be flushed." - ) - - self._db_file.write_bytes(new + self._SPANSEP.join(old)) - except Exception: - logger.exception("error buffering spans") - - def load(self) -> List[bytes]: - """Load currently buffered spans from the cache file. - - This method should be as fail-safe as possible. - """ - if not self._db_file.exists(): - dev_logger.debug("buffer file not found. buffer empty.") - return [] - try: - spans = self._db_file.read_bytes().split(self._SPANSEP) - except Exception: - logger.exception(f"error parsing {self._db_file}") - return [] - return spans - - def drop(self, n_spans: Optional[int] = None): - """Drop some currently buffered spans from the cache file.""" - current = self.load() - if n_spans: - dev_logger.debug(f"dropping {n_spans} spans from buffer") - new = current[n_spans:] - else: - dev_logger.debug("emptying buffer") - new = [] - - self._db_file.write_bytes(self._SPANSEP.join(new)) - - def flush(self) -> Optional[bool]: - """Export all buffered spans to the given exporter, then clear the buffer. - - Returns whether the flush was successful, and None if there was nothing to flush. - """ - if not self.exporter: - dev_logger.debug("no exporter set; skipping buffer flush") - return False - - buffered_spans = self.load() - if not buffered_spans: - dev_logger.debug("nothing to flush; buffer empty") - return None - - errors = False - for span in buffered_spans: - try: - out = self.exporter._export(span) # type: ignore - if not (200 <= out.status_code < 300): - # take any 2xx status code as a success - errors = True - except ConnectionError: - dev_logger.debug( - "failed exporting buffered span; backend might be down or still starting" - ) - errors = True - except Exception: - logger.exception("unexpected error while flushing span batch from buffer") - errors = True - - if not errors: - self.drop() - else: - logger.error("failed flushing spans; buffer preserved") - return not errors - - @property - def is_empty(self): - """Utility to check whether the buffer has any stored spans. - - This is more efficient than attempting a load() given how large the buffer might be. 
- """ - return (not self._db_file.exists()) or (self._db_file.stat().st_size == 0) - - -class _OTLPSpanExporter(OTLPSpanExporter): - """Subclass of OTLPSpanExporter to configure the max retry timeout, so that it fails a bit faster.""" - - # The issue we're trying to solve is that the model takes AGES to settle if e.g. tls is misconfigured, - # as every hook of a charm_tracing-instrumented charm takes about a minute to exit, as the charm can't - # flush the traces and keeps retrying for 'too long' - - _MAX_RETRY_TIMEOUT = 4 - # we give the exporter 4 seconds in total to succeed pushing the traces to tempo - # if it fails, we'll be caching the data in the buffer and flush it the next time, so there's no data loss risk. - # this means 2/3 retries (hard to guess from the implementation) and up to ~7 seconds total wait - - -class _BufferedExporter(InMemorySpanExporter): - def __init__(self, buffer: _Buffer) -> None: - super().__init__() - self._buffer = buffer - - def export(self, spans: typing.Sequence[ReadableSpan]) -> SpanExportResult: - self._buffer.save(spans) - return super().export(spans) - - def force_flush(self, timeout_millis: int = 0) -> bool: - # parent implementation is fake, so the timeout_millis arg is not doing anything. - result = super().force_flush(timeout_millis) - self._buffer.save(self.get_finished_spans()) - return result def is_enabled() -> bool: @@ -671,10 +423,7 @@ def _setup_root_span_initializer( charm_type: _CharmType, tracing_endpoint_attr: str, server_cert_attr: Optional[str], - service_name: Optional[str], - buffer_path: Optional[Path], - buffer_max_events: int, - buffer_max_size_mib: int, + service_name: Optional[str] = None, ): """Patch the charm's initializer.""" original_init = charm_type.__init__ @@ -693,11 +442,18 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): logger.info("Tracing DISABLED: skipping root span initialization") return + # already init some attrs that will be reinited later by calling original_init: + # self.framework = framework + # self.handle = Handle(None, self.handle_kind, None) + original_event_context = framework._event_context # default service name isn't just app name because it could conflict with the workload service name _service_name = service_name or f"{self.app.name}-charm" unit_name = self.unit.name + # apply hacky patch to remove stale opentelemetry sdk packages on upgrade-charm. + # it could be trouble if someone ever decides to implement their own tracer parallel to + # ours and before the charm has inited. We assume they won't. resource = Resource.create( attributes={ "service.name": _service_name, @@ -715,60 +471,33 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): # if anything goes wrong with retrieving the endpoint, we let the exception bubble up. tracing_endpoint = _get_tracing_endpoint(tracing_endpoint_attr, self, charm_type) - buffer_only = False - # whether we're only exporting to buffer, or also to the otlp exporter. - if not tracing_endpoint: # tracing is off if tracing_endpoint is None - # however we can buffer things until tracing comes online - buffer_only = True + return server_cert: Optional[Union[str, Path]] = ( _get_server_cert(server_cert_attr, self, charm_type) if server_cert_attr else None ) - if (tracing_endpoint and tracing_endpoint.startswith("https://")) and not server_cert: + if tracing_endpoint.startswith("https://") and not server_cert: logger.error( "Tracing endpoint is https, but no server_cert has been passed." 
"Please point @trace_charm to a `server_cert` attr. " "This might also mean that the tracing provider is related to a " "certificates provider, but this application is not (yet). " "In that case, you might just have to wait a bit for the certificates " - "integration to settle. This span will be buffered." + "integration to settle. " ) - buffer_only = True + return - buffer = _Buffer( - db_file=buffer_path or Path() / BUFFER_DEFAULT_CACHE_FILE_NAME, - max_event_history_length=buffer_max_events, - max_buffer_size_mib=buffer_max_size_mib, + exporter = OTLPSpanExporter( + endpoint=tracing_endpoint, + certificate_file=str(Path(server_cert).absolute()) if server_cert else None, + timeout=2, ) - previous_spans_buffered = not buffer.is_empty - - exporters: List[SpanExporter] = [] - if buffer_only: - # we have to buffer because we're missing necessary backend configuration - dev_logger.debug("buffering mode: ON") - exporters.append(_BufferedExporter(buffer)) - - else: - dev_logger.debug("buffering mode: FALLBACK") - # in principle, we have the right configuration to be pushing traces, - # but if we fail for whatever reason, we will put everything in the buffer - # and retry the next time - otlp_exporter = _OTLPSpanExporter( - endpoint=tracing_endpoint, - certificate_file=str(Path(server_cert).absolute()) if server_cert else None, - timeout=_OTLP_SPAN_EXPORTER_TIMEOUT, # give individual requests 1 second to succeed - ) - exporters.append(otlp_exporter) - exporters.append(_BufferedExporter(buffer)) - buffer.exporter = otlp_exporter - - for exporter in exporters: - processor = BatchSpanProcessor(exporter) - provider.add_span_processor(processor) + processor = BatchSpanProcessor(exporter) + provider.add_span_processor(processor) set_tracer_provider(provider) _tracer = get_tracer(_service_name) # type: ignore _tracer_token = tracer.set(_tracer) @@ -792,7 +521,7 @@ def wrap_init(self: CharmBase, framework: Framework, *args, **kwargs): @contextmanager def wrap_event_context(event_name: str): - dev_logger.debug(f"entering event context: {event_name}") + dev_logger.info(f"entering event context: {event_name}") # when the framework enters an event context, we create a span. with _span("event: " + event_name) as event_context_span: if event_context_span: @@ -806,50 +535,12 @@ def wrap_event_context(event_name: str): @functools.wraps(original_close) def wrap_close(): - dev_logger.debug("tearing down tracer and flushing traces") + dev_logger.info("tearing down tracer and flushing traces") span.end() opentelemetry.context.detach(span_token) # type: ignore tracer.reset(_tracer_token) tp = cast(TracerProvider, get_tracer_provider()) - flush_successful = tp.force_flush(timeout_millis=1000) # don't block for too long - - if buffer_only: - # if we're in buffer_only mode, it means we couldn't even set up the exporter for - # tempo as we're missing some data. - # so attempting to flush the buffer doesn't make sense - dev_logger.debug("tracing backend unavailable: all spans pushed to buffer") - - else: - dev_logger.debug("tracing backend found: attempting to flush buffer...") - - # if we do have an exporter for tempo, and we could send traces to it, - # we can attempt to flush the buffer as well. 
- if not flush_successful: - logger.error("flushing FAILED: unable to push traces to backend.") - else: - dev_logger.debug("flush succeeded.") - - # the backend has accepted the spans generated during this event, - if not previous_spans_buffered: - # if the buffer was empty to begin with, any spans we collected now can be discarded - buffer.drop() - dev_logger.debug("buffer dropped: this trace has been sent already") - else: - # if the buffer was nonempty, we can attempt to flush it - dev_logger.debug("attempting buffer flush...") - buffer_flush_successful = buffer.flush() - if buffer_flush_successful: - dev_logger.debug("buffer flush OK") - elif buffer_flush_successful is None: - # TODO is this even possible? - dev_logger.debug("buffer flush OK; empty: nothing to flush") - else: - # this situation is pretty weird, I'm not even sure it can happen, - # because it would mean that we did manage - # to push traces directly to the tempo exporter (flush_successful), - # but the buffer flush failed to push to the same exporter! - logger.error("buffer flush FAILED") - + tp.force_flush(timeout_millis=1000) # don't block for too long tp.shutdown() original_close() @@ -864,9 +555,6 @@ def trace_charm( server_cert: Optional[str] = None, service_name: Optional[str] = None, extra_types: Sequence[type] = (), - buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, - buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, - buffer_path: Optional[Union[str, Path]] = None, ) -> Callable[[_T], _T]: """Autoinstrument the decorated charm with tracing telemetry. @@ -908,10 +596,6 @@ def trace_charm( Defaults to the juju application name this charm is deployed under. :param extra_types: pass any number of types that you also wish to autoinstrument. For example, charm libs, relation endpoint wrappers, workload abstractions, ... - :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. - :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. - Minimum 10MiB. - :param buffer_path: path to buffer file to use for saving buffered spans. """ def _decorator(charm_type: _T) -> _T: @@ -922,9 +606,6 @@ def _decorator(charm_type: _T) -> _T: server_cert_attr=server_cert, service_name=service_name, extra_types=extra_types, - buffer_path=Path(buffer_path) if buffer_path else None, - buffer_max_size_mib=buffer_max_size_mib, - buffer_max_events=buffer_max_events, ) return charm_type @@ -937,9 +618,6 @@ def _autoinstrument( server_cert_attr: Optional[str] = None, service_name: Optional[str] = None, extra_types: Sequence[type] = (), - buffer_max_events: int = BUFFER_DEFAULT_MAX_EVENT_HISTORY_LENGTH, - buffer_max_size_mib: int = BUFFER_DEFAULT_CACHE_FILE_SIZE_LIMIT_MiB, - buffer_path: Optional[Path] = None, ) -> _T: """Set up tracing on this charm class. @@ -972,20 +650,13 @@ def _autoinstrument( Defaults to the juju application name this charm is deployed under. :param extra_types: pass any number of types that you also wish to autoinstrument. For example, charm libs, relation endpoint wrappers, workload abstractions, ... - :param buffer_max_events: max number of events to save in the buffer. Set to 0 to disable buffering. - :param buffer_max_size_mib: max size of the buffer file. When exceeded, spans will be dropped. - Minimum 10MiB. - :param buffer_path: path to buffer file to use for saving buffered spans. 
""" - dev_logger.debug(f"instrumenting {charm_type}") + dev_logger.info(f"instrumenting {charm_type}") _setup_root_span_initializer( charm_type, tracing_endpoint_attr, server_cert_attr=server_cert_attr, service_name=service_name, - buffer_path=buffer_path, - buffer_max_events=buffer_max_events, - buffer_max_size_mib=buffer_max_size_mib, ) trace_type(charm_type) for type_ in extra_types: @@ -1001,12 +672,12 @@ def trace_type(cls: _T) -> _T: It assumes that this class is only instantiated after a charm type decorated with `@trace_charm` has been instantiated. """ - dev_logger.debug(f"instrumenting {cls}") + dev_logger.info(f"instrumenting {cls}") for name, method in inspect.getmembers(cls, predicate=inspect.isfunction): - dev_logger.debug(f"discovered {method}") + dev_logger.info(f"discovered {method}") if method.__name__.startswith("__"): - dev_logger.debug(f"skipping {method} (dunder)") + dev_logger.info(f"skipping {method} (dunder)") continue # the span title in the general case should be: @@ -1052,7 +723,7 @@ def trace_function(function: _F, name: Optional[str] = None) -> _F: def _trace_callable(callable: _F, qualifier: str, name: Optional[str] = None) -> _F: - dev_logger.debug(f"instrumenting {callable}") + dev_logger.info(f"instrumenting {callable}") # sig = inspect.signature(callable) @functools.wraps(callable) From 482876330d1ff3119c7b69a38c84db4709000aaf Mon Sep 17 00:00:00 2001 From: Shayan Patel Date: Thu, 5 Dec 2024 11:43:58 +0000 Subject: [PATCH 7/7] Update data-platform-workflows to v23.1.1 --- .github/workflows/ci.yaml | 2 +- .github/workflows/release.yaml | 4 ++-- .github/workflows/sync_docs.yaml | 2 +- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 6dedba14..5179fbf9 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -66,7 +66,7 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v22.0.0 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v23.1.1 with: # Use of cache blocked by https://github.com/canonical/charmcraft/issues/1456 # Details: https://github.com/canonical/charmcraftcache/issues/3 diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index c6f78636..3eca85dd 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -15,14 +15,14 @@ jobs: build: name: Build charm - uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v22.0.0 + uses: canonical/data-platform-workflows/.github/workflows/build_charm.yaml@v23.1.1 release: name: Release charm needs: - ci-tests - build - uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v22.0.0 + uses: canonical/data-platform-workflows/.github/workflows/release_charm.yaml@v23.1.1 with: channel: dpe/edge artifact-prefix: ${{ needs.build.outputs.artifact-prefix }} diff --git a/.github/workflows/sync_docs.yaml b/.github/workflows/sync_docs.yaml index 4e48da8e..14aa1c26 100644 --- a/.github/workflows/sync_docs.yaml +++ b/.github/workflows/sync_docs.yaml @@ -10,7 +10,7 @@ on: jobs: sync-docs: name: Sync docs from Discourse - uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v22.0.0 + uses: canonical/data-platform-workflows/.github/workflows/sync_docs.yaml@v23.1.1 with: reviewers: a-velasco permissions: