diff --git a/lib/charms/grafana_agent/v0/cos_agent.py b/lib/charms/grafana_agent/v0/cos_agent.py
index 870ba62a1..565cca58a 100644
--- a/lib/charms/grafana_agent/v0/cos_agent.py
+++ b/lib/charms/grafana_agent/v0/cos_agent.py
@@ -22,7 +22,7 @@
Using the `COSAgentProvider` object only requires instantiating it,
typically in the `__init__` method of your charm (the one which sends telemetry).
-The constructor of `COSAgentProvider` has only one required and nine optional parameters:
+The constructor of `COSAgentProvider` has only one required and ten optional parameters:
```python
def __init__(
@@ -36,6 +36,7 @@ def __init__(
log_slots: Optional[List[str]] = None,
dashboard_dirs: Optional[List[str]] = None,
refresh_events: Optional[List] = None,
+ tracing_protocols: Optional[List[str]] = None,
scrape_configs: Optional[Union[List[Dict], Callable]] = None,
):
```
@@ -65,6 +66,8 @@ def __init__(
- `refresh_events`: List of events on which to refresh relation data.
+- `tracing_protocols`: List of requested tracing protocols that the charm requires to send traces.
+
- `scrape_configs`: List of standard scrape_configs dicts or a callable that returns the list in
case the configs need to be generated dynamically. The contents of this list will be merged
with the configs from `metrics_endpoints`.
@@ -108,6 +111,7 @@ def __init__(self, *args):
log_slots=["my-app:slot"],
dashboard_dirs=["./src/dashboards_1", "./src/dashboards_2"],
refresh_events=["update-status", "upgrade-charm"],
+ tracing_protocols=["otlp_http", "otlp_grpc"],
scrape_configs=[
{
"job_name": "custom_job",
@@ -206,19 +210,34 @@ def __init__(self, *args):
```
"""
+import enum
import json
import logging
+import socket
from collections import namedtuple
from itertools import chain
from pathlib import Path
-from typing import TYPE_CHECKING, Any, Callable, ClassVar, Dict, List, Optional, Set, Tuple, Union
+from typing import (
+ TYPE_CHECKING,
+ Any,
+ Callable,
+ ClassVar,
+ Dict,
+ List,
+ Literal,
+ MutableMapping,
+ Optional,
+ Set,
+ Tuple,
+ Union,
+)
import pydantic
from cosl import GrafanaDashboard, JujuTopology
from cosl.rules import AlertRules
from ops.charm import RelationChangedEvent
from ops.framework import EventBase, EventSource, Object, ObjectEvents
-from ops.model import Relation
+from ops.model import ModelError, Relation
from ops.testing import CharmType
if TYPE_CHECKING:
@@ -234,9 +253,9 @@ class _MetricsEndpointDict(TypedDict):
LIBID = "dc15fa84cef84ce58155fb84f6c6213a"
LIBAPI = 0
-LIBPATCH = 8
+LIBPATCH = 11
-PYDEPS = ["cosl", "pydantic < 2"]
+PYDEPS = ["cosl", "pydantic"]
DEFAULT_RELATION_NAME = "cos-agent"
DEFAULT_PEER_RELATION_NAME = "peers"
@@ -249,7 +268,207 @@ class _MetricsEndpointDict(TypedDict):
SnapEndpoint = namedtuple("SnapEndpoint", "owner, name")
-class CosAgentProviderUnitData(pydantic.BaseModel):
+# Note: MutableMapping is imported from the typing module and not collections.abc
+# because subscripting collections.abc.MutableMapping was added in python 3.9, but
+# most of our charms are based on 20.04, which has python 3.8.
+
+_RawDatabag = MutableMapping[str, str]
+
+
+class TransportProtocolType(str, enum.Enum):
+ """Receiver Type."""
+
+ http = "http"
+ grpc = "grpc"
+
+
+receiver_protocol_to_transport_protocol = {
+    "zipkin": TransportProtocolType.http,
+    "kafka": TransportProtocolType.http,
+    "tempo_http": TransportProtocolType.http,
+    "tempo_grpc": TransportProtocolType.grpc,
+    "otlp_grpc": TransportProtocolType.grpc,
+    "otlp_http": TransportProtocolType.http,
+    "jaeger_thrift_http": TransportProtocolType.http, "jaeger_grpc": TransportProtocolType.grpc,
+}
+
+_tracing_receivers_ports = {
+ # OTLP receiver: see
+ # https://github.com/open-telemetry/opentelemetry-collector/tree/v0.96.0/receiver/otlpreceiver
+ "otlp_http": 4318,
+ "otlp_grpc": 4317,
+ # Jaeger receiver: see
+ # https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/v0.96.0/receiver/jaegerreceiver
+ "jaeger_grpc": 14250,
+ "jaeger_thrift_http": 14268,
+ # Zipkin receiver: see
+ # https://github.com/open-telemetry/opentelemetry-collector-contrib/tree/v0.96.0/receiver/zipkinreceiver
+ "zipkin": 9411,
+}
+
+ReceiverProtocol = Literal["otlp_grpc", "otlp_http", "zipkin", "jaeger_thrift_http", "jaeger_grpc"]
+
+
+class TracingError(Exception):
+ """Base class for custom errors raised by tracing."""
+
+
+class NotReadyError(TracingError):
+ """Raised by the provider wrapper if a requirer hasn't published the required data (yet)."""
+
+
+class ProtocolNotRequestedError(TracingError):
+ """Raised if the user attempts to obtain an endpoint for a protocol it did not request."""
+
+
+class DataValidationError(TracingError):
+ """Raised when data validation fails on IPU relation data."""
+
+
+class AmbiguousRelationUsageError(TracingError):
+ """Raised when one wrongly assumes that there can only be one relation on an endpoint."""
+
+
+# TODO we want to eventually use `DatabagModel` from cosl but it likely needs a move to common package first
+if int(pydantic.version.VERSION.split(".")[0]) < 2: # type: ignore
+
+ class DatabagModel(pydantic.BaseModel): # type: ignore
+ """Base databag model."""
+
+ class Config:
+ """Pydantic config."""
+
+ # ignore any extra fields in the databag
+ extra = "ignore"
+ """Ignore any extra fields in the databag."""
+ allow_population_by_field_name = True
+ """Allow instantiating this class by field name (instead of forcing alias)."""
+
+ _NEST_UNDER = None
+
+ @classmethod
+ def load(cls, databag: MutableMapping):
+ """Load this model from a Juju databag."""
+ if cls._NEST_UNDER:
+ return cls.parse_obj(json.loads(databag[cls._NEST_UNDER]))
+
+ try:
+ data = {
+ k: json.loads(v)
+ for k, v in databag.items()
+ # Don't attempt to parse model-external values
+ if k in {f.alias for f in cls.__fields__.values()}
+ }
+ except json.JSONDecodeError as e:
+ msg = f"invalid databag contents: expecting json. {databag}"
+ logger.error(msg)
+ raise DataValidationError(msg) from e
+
+ try:
+            return cls.parse_raw(json.dumps(data))  # type: ignore
+ except pydantic.ValidationError as e:
+ msg = f"failed to validate databag: {databag}"
+ logger.debug(msg, exc_info=True)
+ raise DataValidationError(msg) from e
+
+ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
+ """Write the contents of this model to Juju databag.
+
+ :param databag: the databag to write the data to.
+ :param clear: ensure the databag is cleared before writing it.
+ """
+ if clear and databag:
+ databag.clear()
+
+ if databag is None:
+ databag = {}
+
+ if self._NEST_UNDER:
+            databag[self._NEST_UNDER] = self.json(by_alias=True)
+ return databag
+
+ dct = self.dict()
+ for key, field in self.__fields__.items(): # type: ignore
+ value = dct[key]
+ databag[field.alias or key] = json.dumps(value)
+
+ return databag
+
+else:
+ from pydantic import ConfigDict
+
+ class DatabagModel(pydantic.BaseModel):
+ """Base databag model."""
+
+ model_config = ConfigDict(
+ # ignore any extra fields in the databag
+ extra="ignore",
+ # Allow instantiating this class by field name (instead of forcing alias).
+ populate_by_name=True,
+ # Custom config key: whether to nest the whole datastructure (as json)
+ # under a field or spread it out at the toplevel.
+ _NEST_UNDER=None, # type: ignore
+ arbitrary_types_allowed=True,
+ )
+ """Pydantic config."""
+
+ @classmethod
+ def load(cls, databag: MutableMapping):
+ """Load this model from a Juju databag."""
+ nest_under = cls.model_config.get("_NEST_UNDER") # type: ignore
+ if nest_under:
+ return cls.model_validate(json.loads(databag[nest_under])) # type: ignore
+
+ try:
+ data = {
+ k: json.loads(v)
+ for k, v in databag.items()
+ # Don't attempt to parse model-external values
+ if k in {(f.alias or n) for n, f in cls.__fields__.items()}
+ }
+ except json.JSONDecodeError as e:
+ msg = f"invalid databag contents: expecting json. {databag}"
+ logger.error(msg)
+ raise DataValidationError(msg) from e
+
+ try:
+ return cls.model_validate_json(json.dumps(data)) # type: ignore
+ except pydantic.ValidationError as e:
+ msg = f"failed to validate databag: {databag}"
+ logger.debug(msg, exc_info=True)
+ raise DataValidationError(msg) from e
+
+ def dump(self, databag: Optional[MutableMapping] = None, clear: bool = True):
+ """Write the contents of this model to Juju databag.
+
+ :param databag: the databag to write the data to.
+ :param clear: ensure the databag is cleared before writing it.
+ """
+ if clear and databag:
+ databag.clear()
+
+ if databag is None:
+ databag = {}
+ nest_under = self.model_config.get("_NEST_UNDER")
+ if nest_under:
+ databag[nest_under] = self.model_dump_json( # type: ignore
+ by_alias=True,
+ # skip keys whose values are default
+ exclude_defaults=True,
+ )
+ return databag
+
+ dct = self.model_dump() # type: ignore
+ for key, field in self.model_fields.items(): # type: ignore
+ value = dct[key]
+ if value == field.default:
+ continue
+ databag[field.alias or key] = json.dumps(value)
+
+ return databag
+
+
+class CosAgentProviderUnitData(DatabagModel):
"""Unit databag model for `cos-agent` relation."""
# The following entries are the same for all units of the same principal.
@@ -267,13 +486,16 @@ class CosAgentProviderUnitData(pydantic.BaseModel):
metrics_scrape_jobs: List[Dict]
log_slots: List[str]
+ # Requested tracing protocols.
+ tracing_protocols: Optional[List[str]] = None
+
# when this whole datastructure is dumped into a databag, it will be nested under this key.
# while not strictly necessary (we could have it 'flattened out' into the databag),
# this simplifies working with the model.
KEY: ClassVar[str] = "config"
-class CosAgentPeersUnitData(pydantic.BaseModel):
+class CosAgentPeersUnitData(DatabagModel):
"""Unit databag model for `peers` cos-agent machine charm peer relation."""
# We need the principal unit name and relation metadata to be able to render identifiers
@@ -304,6 +526,83 @@ def app_name(self) -> str:
return self.unit_name.split("/")[0]
+if int(pydantic.version.VERSION.split(".")[0]) < 2: # type: ignore
+
+ class ProtocolType(pydantic.BaseModel): # type: ignore
+ """Protocol Type."""
+
+ class Config:
+ """Pydantic config."""
+
+ use_enum_values = True
+ """Allow serializing enum values."""
+
+ name: str = pydantic.Field(
+ ...,
+ description="Receiver protocol name. What protocols are supported (and what they are called) "
+ "may differ per provider.",
+ examples=["otlp_grpc", "otlp_http", "tempo_http"],
+ )
+
+ type: TransportProtocolType = pydantic.Field(
+ ...,
+ description="The transport protocol used by this receiver.",
+ examples=["http", "grpc"],
+ )
+
+else:
+
+ class ProtocolType(pydantic.BaseModel):
+ """Protocol Type."""
+
+ model_config = pydantic.ConfigDict(
+ # Allow serializing enum values.
+ use_enum_values=True
+ )
+ """Pydantic config."""
+
+ name: str = pydantic.Field(
+ ...,
+ description="Receiver protocol name. What protocols are supported (and what they are called) "
+ "may differ per provider.",
+ examples=["otlp_grpc", "otlp_http", "tempo_http"],
+ )
+
+ type: TransportProtocolType = pydantic.Field(
+ ...,
+ description="The transport protocol used by this receiver.",
+ examples=["http", "grpc"],
+ )
+
+
+class Receiver(pydantic.BaseModel):
+ """Specification of an active receiver."""
+
+ protocol: ProtocolType = pydantic.Field(..., description="Receiver protocol name and type.")
+ url: str = pydantic.Field(
+ ...,
+ description="""URL at which the receiver is reachable. If there's an ingress, it would be the external URL.
+ Otherwise, it would be the service's fqdn or internal IP.
+ If the protocol type is grpc, the url will not contain a scheme.""",
+ examples=[
+ "http://traefik_address:2331",
+ "https://traefik_address:2331",
+ "http://tempo_public_ip:2331",
+ "https://tempo_public_ip:2331",
+ "tempo_public_ip:2331",
+ ],
+ )
+
+
+class CosAgentRequirerUnitData(DatabagModel):
+    """Unit databag model for the COS-agent requirer."""
+
+ receivers: List[Receiver] = pydantic.Field(
+ ...,
+ description="List of all receivers enabled on the tracing provider.",
+ )
+
+
class COSAgentProvider(Object):
"""Integration endpoint wrapper for the provider side of the cos_agent interface."""
@@ -318,6 +617,7 @@ def __init__(
log_slots: Optional[List[str]] = None,
dashboard_dirs: Optional[List[str]] = None,
refresh_events: Optional[List] = None,
+ tracing_protocols: Optional[List[str]] = None,
*,
scrape_configs: Optional[Union[List[dict], Callable]] = None,
):
@@ -336,6 +636,7 @@ def __init__(
in the form ["snap-name:slot", ...].
dashboard_dirs: Directory where the dashboards are stored.
refresh_events: List of events on which to refresh relation data.
+ tracing_protocols: List of protocols that the charm will be using for sending traces.
scrape_configs: List of standard scrape_configs dicts or a callable
that returns the list in case the configs need to be generated dynamically.
The contents of this list will be merged with the contents of `metrics_endpoints`.
@@ -353,6 +654,8 @@ def __init__(
self._log_slots = log_slots or []
self._dashboard_dirs = dashboard_dirs
self._refresh_events = refresh_events or [self._charm.on.config_changed]
+ self._tracing_protocols = tracing_protocols
+ self._is_single_endpoint = charm.meta.relations[relation_name].limit == 1
events = self._charm.on[relation_name]
self.framework.observe(events.relation_joined, self._on_refresh)
@@ -377,8 +680,9 @@ def _on_refresh(self, event):
dashboards=self._dashboards,
metrics_scrape_jobs=self._scrape_jobs,
log_slots=self._log_slots,
+ tracing_protocols=self._tracing_protocols,
)
- relation.data[self._charm.unit][data.KEY] = data.json()
+ relation.data[self._charm.unit][data.KEY] = data.model_dump_json()
except (
pydantic.ValidationError,
json.decoder.JSONDecodeError,
@@ -441,6 +745,103 @@ def _dashboards(self) -> List[GrafanaDashboard]:
dashboards.append(dashboard)
return dashboards
+ @property
+ def relations(self) -> List[Relation]:
+ """The tracing relations associated with this endpoint."""
+ return self._charm.model.relations[self._relation_name]
+
+ @property
+ def _relation(self) -> Optional[Relation]:
+ """If this wraps a single endpoint, the relation bound to it, if any."""
+ if not self._is_single_endpoint:
+ objname = type(self).__name__
+ raise AmbiguousRelationUsageError(
+ f"This {objname} wraps a {self._relation_name} endpoint that has "
+ "limit != 1. We can't determine what relation, of the possibly many, you are "
+ f"referring to. Please pass a relation instance while calling {objname}, "
+ "or set limit=1 in the charm metadata."
+ )
+ relations = self.relations
+ return relations[0] if relations else None
+
+ def is_ready(self, relation: Optional[Relation] = None):
+ """Is this endpoint ready?"""
+ relation = relation or self._relation
+ if not relation:
+ logger.debug(f"no relation on {self._relation_name !r}: tracing not ready")
+ return False
+ if relation.data is None:
+ logger.error(f"relation data is None for {relation}")
+ return False
+ if not relation.app:
+ logger.error(f"{relation} event received but there is no relation.app")
+ return False
+ try:
+ unit = next(iter(relation.units), None)
+ if not unit:
+ return False
+ databag = dict(relation.data[unit])
+ CosAgentRequirerUnitData.load(databag)
+
+ except (json.JSONDecodeError, pydantic.ValidationError, DataValidationError):
+ logger.info(f"failed validating relation data for {relation}")
+ return False
+ return True
+
+ def get_all_endpoints(
+ self, relation: Optional[Relation] = None
+ ) -> Optional[CosAgentRequirerUnitData]:
+ """Unmarshalled relation data."""
+ relation = relation or self._relation
+ if not relation or not self.is_ready(relation):
+ return None
+ unit = next(iter(relation.units), None)
+ if not unit:
+ return None
+ return CosAgentRequirerUnitData.load(relation.data[unit]) # type: ignore
+
+ def _get_tracing_endpoint(
+ self, relation: Optional[Relation], protocol: ReceiverProtocol
+ ) -> Optional[str]:
+ unit_data = self.get_all_endpoints(relation)
+ if not unit_data:
+ return None
+ receivers: List[Receiver] = [i for i in unit_data.receivers if i.protocol.name == protocol]
+ if not receivers:
+ logger.error(f"no receiver found with protocol={protocol!r}")
+ return None
+ if len(receivers) > 1:
+ logger.error(
+                f"too many receivers with protocol={protocol!r}; expected exactly one. Found: {receivers}"
+ )
+ return None
+
+ receiver = receivers[0]
+ return receiver.url
+
+ def get_tracing_endpoint(
+ self, protocol: ReceiverProtocol, relation: Optional[Relation] = None
+ ) -> Optional[str]:
+ """Receiver endpoint for the given protocol."""
+ endpoint = self._get_tracing_endpoint(relation or self._relation, protocol=protocol)
+ if not endpoint:
+ requested_protocols = set()
+ relations = [relation] if relation else self.relations
+ for relation in relations:
+                try:
+                    databag = CosAgentProviderUnitData.model_validate_json(relation.data[self._charm.unit][CosAgentProviderUnitData.KEY])
+                except (KeyError, pydantic.ValidationError, DataValidationError):
+                    continue
+
+ if databag.tracing_protocols:
+ requested_protocols.update(databag.tracing_protocols)
+
+ if protocol not in requested_protocols:
+ raise ProtocolNotRequestedError(protocol, relation)
+
+ return None
+ return endpoint
+
class COSAgentDataChanged(EventBase):
"""Event emitted by `COSAgentRequirer` when relation data changes."""
@@ -554,6 +955,12 @@ def _on_relation_data_changed(self, event: RelationChangedEvent):
if not (provider_data := self._validated_provider_data(raw)):
return
+ # write enabled receivers to cos-agent relation
+        # NOTE: no extra exception handling is needed here;
+        # update_tracing_receivers already swallows the recoverable
+        # "permission denied" ModelError and re-raises anything else.
+        self.update_tracing_receivers()
+
# Copy data from the cos_agent relation to the peer relation, so the leader could
# follow up.
# Save the originating unit name, so it could be used for topology later on by the leader.
@@ -567,13 +974,44 @@ def _on_relation_data_changed(self, event: RelationChangedEvent):
)
self.peer_relation.data[self._charm.unit][
f"{CosAgentPeersUnitData.KEY}-{event.unit.name}"
- ] = data.json()
+ ] = data.model_dump_json()
# We can't easily tell if the data that was changed is limited to only the data
# that goes into peer relation (in which case, if this is not a leader unit, we wouldn't
# need to emit `on.data_changed`), so we're emitting `on.data_changed` either way.
self.on.data_changed.emit() # pyright: ignore
+ def update_tracing_receivers(self):
+ """Updates the list of exposed tracing receivers in all relations."""
+ try:
+ for relation in self._charm.model.relations[self._relation_name]:
+ CosAgentRequirerUnitData(
+ receivers=[
+ Receiver(
+ url=f"{self._get_tracing_receiver_url(protocol)}",
+ protocol=ProtocolType(
+ name=protocol,
+ type=receiver_protocol_to_transport_protocol[protocol],
+ ),
+ )
+ for protocol in self.requested_tracing_protocols()
+ ],
+ ).dump(relation.data[self._charm.unit])
+
+ except ModelError as e:
+ # args are bytes
+ msg = e.args[0]
+ if isinstance(msg, bytes):
+ if msg.startswith(
+ b"ERROR cannot read relation application settings: permission denied"
+ ):
+ logger.error(
+ f"encountered error {e} while attempting to update_relation_data."
+ f"The relation must be gone."
+ )
+ return
+ raise
+
def _validated_provider_data(self, raw) -> Optional[CosAgentProviderUnitData]:
try:
return CosAgentProviderUnitData(**json.loads(raw))
@@ -586,6 +1024,55 @@ def trigger_refresh(self, _):
# FIXME: Figure out what we should do here
self.on.data_changed.emit() # pyright: ignore
+ def _get_requested_protocols(self, relation: Relation):
+ # Coherence check
+ units = relation.units
+ if len(units) > 1:
+ # should never happen
+ raise ValueError(
+ f"unexpected error: subordinate relation {relation} "
+ f"should have exactly one unit"
+ )
+
+ unit = next(iter(units), None)
+
+ if not unit:
+ return None
+
+ if not (raw := relation.data[unit].get(CosAgentProviderUnitData.KEY)):
+ return None
+
+ if not (provider_data := self._validated_provider_data(raw)):
+ return None
+
+ return provider_data.tracing_protocols
+
+ def requested_tracing_protocols(self):
+ """All receiver protocols that have been requested by our related apps."""
+ requested_protocols = set()
+ for relation in self._charm.model.relations[self._relation_name]:
+ try:
+ protocols = self._get_requested_protocols(relation)
+ except NotReadyError:
+ continue
+ if protocols:
+ requested_protocols.update(protocols)
+ return requested_protocols
+
+ def _get_tracing_receiver_url(self, protocol: str):
+ scheme = "http"
+ try:
+ if self._charm.cert.enabled: # type: ignore
+ scheme = "https"
+ # not only Grafana Agent can implement cos_agent. If the charm doesn't have the `cert` attribute
+ # using our cert_handler, it won't have the `enabled` parameter. In this case, we pass and assume http.
+ except AttributeError:
+ pass
+ # the assumption is that a subordinate charm will always be accessible to its principal charm under its fqdn
+ if receiver_protocol_to_transport_protocol[protocol] == TransportProtocolType.grpc:
+ return f"{socket.getfqdn()}:{_tracing_receivers_ports[protocol]}"
+ return f"{scheme}://{socket.getfqdn()}:{_tracing_receivers_ports[protocol]}"
+
@property
def _remote_data(self) -> List[Tuple[CosAgentProviderUnitData, JujuTopology]]:
"""Return a list of remote data from each of the related units.
@@ -721,8 +1208,18 @@ def metrics_jobs(self) -> List[Dict]:
@property
def snap_log_endpoints(self) -> List[SnapEndpoint]:
"""Fetch logging endpoints exposed by related snaps."""
+ endpoints = []
+ endpoints_with_topology = self.snap_log_endpoints_with_topology
+ for endpoint, _ in endpoints_with_topology:
+ endpoints.append(endpoint)
+
+ return endpoints
+
+ @property
+ def snap_log_endpoints_with_topology(self) -> List[Tuple[SnapEndpoint, JujuTopology]]:
+ """Fetch logging endpoints and charm topology for each related snap."""
plugs = []
- for data, _ in self._remote_data:
+ for data, topology in self._remote_data:
targets = data.log_slots
if targets:
for target in targets:
@@ -733,15 +1230,16 @@ def snap_log_endpoints(self) -> List[SnapEndpoint]:
"endpoints; this should not happen."
)
else:
- plugs.append(target)
+ plugs.append((target, topology))
endpoints = []
- for plug in plugs:
+ for plug, topology in plugs:
if ":" not in plug:
logger.error(f"invalid plug definition received: {plug}. Ignoring...")
else:
endpoint = SnapEndpoint(*plug.split(":"))
- endpoints.append(endpoint)
+ endpoints.append((endpoint, topology))
+
return endpoints
@property
@@ -804,3 +1302,67 @@ def dashboards(self) -> List[Dict[str, str]]:
)
return dashboards
+
+
+def charm_tracing_config(
+ endpoint_requirer: COSAgentProvider, cert_path: Optional[Union[Path, str]]
+) -> Tuple[Optional[str], Optional[str]]:
+ """Utility function to determine the charm_tracing config you will likely want.
+
+ If no endpoint is provided:
+ disable charm tracing.
+ If https endpoint is provided but cert_path is not found on disk:
+ disable charm tracing.
+ If https endpoint is provided and cert_path is None:
+ ERROR
+ Else:
+ proceed with charm tracing (with or without tls, as appropriate)
+
+ Usage:
+ If you are using charm_tracing >= v1.9:
+ >>> from lib.charms.tempo_k8s.v1.charm_tracing import trace_charm
+    >>> from lib.charms.grafana_agent.v0.cos_agent import charm_tracing_config
+ >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path")
+ >>> class MyCharm(...):
+ >>> _cert_path = "/path/to/cert/on/charm/container.crt"
+ >>> def __init__(self, ...):
+ >>> self.cos_agent = COSAgentProvider(...)
+ >>> self.my_endpoint, self.cert_path = charm_tracing_config(
+ ... self.cos_agent, self._cert_path)
+
+ If you are using charm_tracing < v1.9:
+ >>> from lib.charms.tempo_k8s.v1.charm_tracing import trace_charm
+ >>> from lib.charms.tempo_k8s.v2.tracing import charm_tracing_config
+ >>> @trace_charm(tracing_endpoint="my_endpoint", cert_path="cert_path")
+ >>> class MyCharm(...):
+ >>> _cert_path = "/path/to/cert/on/charm/container.crt"
+ >>> def __init__(self, ...):
+ >>> self.cos_agent = COSAgentProvider(...)
+ >>> self.my_endpoint, self.cert_path = charm_tracing_config(
+ ... self.cos_agent, self._cert_path)
+ >>> @property
+ >>> def my_endpoint(self):
+ >>> return self._my_endpoint
+ >>> @property
+ >>> def cert_path(self):
+ >>> return self._cert_path
+
+ """
+ if not endpoint_requirer.is_ready():
+ return None, None
+
+ endpoint = endpoint_requirer.get_tracing_endpoint("otlp_http")
+ if not endpoint:
+ return None, None
+
+ is_https = endpoint.startswith("https://")
+
+ if is_https:
+ if cert_path is None:
+ raise TracingError("Cannot send traces to an https endpoint without a certificate.")
+ if not Path(cert_path).exists():
+ # if endpoint is https BUT we don't have a server_cert yet:
+ # disable charm tracing until we do to prevent tls errors
+ return None, None
+ return endpoint, str(cert_path)
+ return endpoint, None
diff --git a/requirements.txt b/requirements.txt
index 3bed85a6d..5b99ad1ff 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,9 +7,9 @@ pylxd @ git+https://github.com/canonical/pylxd
requests
typing-extensions
cryptography <=43.0.1
-pydantic ==1.10.18
+pydantic ==2.8.2
cosl ==0.0.15
# juju 3.1.2.0 depends on pyyaml<=6.0 and >=5.1.2
PyYAML ==6.0.*
pyOpenSSL==24.2.1
-github_runner_manager @ git+https://github.com/canonical/github-runner-manager.git@33bbaff42d7cc0f250006fdd08d24659cef364c9
+github_runner_manager @ git+https://github.com/canonical/github-runner-manager.git@feat-pydantic-v2
diff --git a/src-docs/charm_state.py.md b/src-docs/charm_state.py.md
index 354e0416a..ab52b857b 100644
--- a/src-docs/charm_state.py.md
+++ b/src-docs/charm_state.py.md
@@ -38,21 +38,6 @@ State of the Charm.
- **LTS_IMAGE_VERSION_TAG_MAP**
----
-
-## class `AnyHttpsUrl`
-Represents an HTTPS URL.
-
-
-
-**Attributes:**
-
- - `allowed_schemes`: Allowed schemes for the URL.
-
-
-
-
-
---
## class `Arch`
@@ -106,11 +91,33 @@ Some charm configurations are grouped into other configuration models.
- `token`: GitHub personal access token for GitHub API.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
---
-
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
+
+
+---
+
+
### classmethod `check_reconcile_interval`
@@ -139,7 +146,7 @@ Validate the general charm configuration.
---
-
+
### classmethod `from_charm`
@@ -178,7 +185,7 @@ Raised when charm config is invalid.
- `msg`: Explanation of the error.
-
+
### function `__init__`
@@ -221,7 +228,7 @@ The charm state.
---
-
+
### classmethod `from_charm`
@@ -267,7 +274,7 @@ Charm configuration related to GitHub.
---
-
+
### classmethod `from_charm`
@@ -300,7 +307,7 @@ Get github related charm configuration values from charm.
## class `ImmutableConfigChangedError`
Represents an error when changing immutable charm state.
-
+
### function `__init__`
@@ -351,11 +358,33 @@ Runner configurations for local LXD instances.
- `runner_storage`: Storage to be used as disk for the runner.
+---
+
+#### property model_extra
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
---
-
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
+
+
+---
+
+
### classmethod `check_virtual_machine_resources`
@@ -386,7 +415,7 @@ Validate the virtual_machine_resources field values.
---
-
+
### classmethod `check_virtual_machines`
@@ -415,7 +444,7 @@ Validate the virtual machines configuration value.
---
-
+
### classmethod `from_charm`
@@ -471,11 +500,33 @@ OpenstackImage from image builder relation data.
- `tags`: Image tags, e.g. jammy
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
---
-
+
### classmethod `from_charm`
@@ -514,11 +565,33 @@ Runner configuration for OpenStack Instances.
- `openstack_image`: Openstack image to use for virtual machines.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
---
-
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
+
+
+---
+
+
### classmethod `from_charm`
@@ -556,10 +629,13 @@ Proxy configuration.
**Attributes:**
- `aproxy_address`: The address of aproxy snap instance if use_aproxy is enabled.
- - `http`: HTTP proxy address.
- - `https`: HTTPS proxy address.
+ - `http`: HTTP proxy address string.
+ - `http_url`: HTTP proxy address url.
+ - `https`: HTTPS proxy address string.
+ - `https_url`: HTTPS proxy address url.
- `no_proxy`: Comma-separated list of hosts that should not be proxied.
- `use_aproxy`: Whether aproxy should be used for the runners.
+ - `model_config`: Config for the pydantic model
---
@@ -568,26 +644,53 @@ Proxy configuration.
Return the aproxy address.
+---
+#### property http
+
+Return string version of http url.
---
-
+#### property https
-### classmethod `check_use_aproxy`
+Return string version of https url.
-```python
-check_use_aproxy(use_aproxy: bool, values: dict) → bool
-```
+---
-Validate the proxy configuration.
+#### property model_extra
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
-**Args:**
-
- - `use_aproxy`: Value of use_aproxy variable.
- - `values`: Values in the pydantic model.
+
+---
+
+
+
+### function `check_use_aproxy`
+
+```python
+check_use_aproxy(self: 'ProxyConfig') → ProxyConfig
+```
+
+Validate the proxy configuration.
@@ -598,11 +701,11 @@ Validate the proxy configuration.
**Returns:**
- Validated use_aproxy value.
+ Validated ProxyConfig instance.
---
-
+
### classmethod `from_charm`
@@ -636,11 +739,33 @@ Represents the configuration for reactive scheduling.
- `mq_uri`: The URI of the MQ to use to spawn runners reactively.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
---
-
+
### classmethod `from_database`
@@ -681,11 +806,33 @@ Configuration for the repo policy compliance service.
- `url`: URL of the repo policy compliance service.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
---
-
+
### classmethod `from_charm`
@@ -744,11 +891,33 @@ SSH connection information for debug workflow.
- `ed25519_fingerprint`: The host SSH server public ed25519 key fingerprint.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
---
-
+
### classmethod `from_charm`
@@ -781,7 +950,7 @@ Raised when given machine charm architecture is unsupported.
- `arch`: The current machine architecture.
-
+
### function `__init__`
diff --git a/src-docs/logrotate.py.md b/src-docs/logrotate.py.md
index fecc13634..9c4395ae7 100644
--- a/src-docs/logrotate.py.md
+++ b/src-docs/logrotate.py.md
@@ -47,6 +47,28 @@ Configuration for logrotate.
- `frequency`: The frequency of log rotation.
+---
+
+#### property model_extra
+
+Get extra fields set during validation.
+
+
+
+**Returns:**
+ A dictionary of extra fields, or `None` if `config.extra` is not set to `"allow"`.
+
+---
+
+#### property model_fields_set
+
+Returns the set of fields that have been explicitly set on this model instance.
+
+
+
+**Returns:**
+ A set of strings representing the fields that have been set, i.e. that were not filled from defaults.
+
diff --git a/src-docs/runner_type.py.md b/src-docs/runner_type.py.md
index e3a7025aa..8b1378959 100644
--- a/src-docs/runner_type.py.md
+++ b/src-docs/runner_type.py.md
@@ -16,6 +16,7 @@ Represent HTTP-related proxy settings.
**Attributes:**
+ - `use_aproxy`: Whether aproxy should be used for the runners.
- `no_proxy`: The comma separated URLs to not go through proxy.
- `http`: HTTP proxy URL.
- `https`: HTTPS proxy URL.
diff --git a/src/charm_state.py b/src/charm_state.py
index afe174388..8b4aeacdb 100644
--- a/src/charm_state.py
+++ b/src/charm_state.py
@@ -14,7 +14,7 @@
import re
from enum import Enum
from pathlib import Path
-from typing import NamedTuple, Optional, TypedDict, cast
+from typing import Annotated, NamedTuple, Optional, cast
from urllib.parse import urlsplit
import yaml
@@ -26,13 +26,18 @@
from pydantic import (
AnyHttpUrl,
BaseModel,
+ ConfigDict,
Field,
IPvAnyAddress,
MongoDsn,
+ TypeAdapter,
+ UrlConstraints,
ValidationError,
- create_model_from_typeddict,
- validator,
+ field_validator,
+ model_validator,
)
+from pydantic_core import Url
+from typing_extensions import TypedDict
from errors import MissingMongoDBError
from firewall import FirewallEntry
@@ -84,14 +89,7 @@
"""Representation of storage size with KiB, MiB, GiB, TiB, PiB, EiB as unit."""
-class AnyHttpsUrl(AnyHttpUrl):
- """Represents an HTTPS URL.
-
- Attributes:
- allowed_schemes: Allowed schemes for the URL.
- """
-
- allowed_schemes = {"https"}
+AnyHttpsUrl = Annotated[Url, UrlConstraints(allowed_schemes=["https"])]
@dataclasses.dataclass
@@ -427,9 +425,8 @@ def _parse_openstack_clouds_config(cls, charm: CharmBase) -> OpenStackCloudsYAML
openstack_clouds_yaml: OpenStackCloudsYAML = yaml.safe_load(
cast(str, openstack_clouds_yaml_str)
)
- # use Pydantic to validate TypedDict.
- create_model_from_typeddict(OpenStackCloudsYAML)(**openstack_clouds_yaml)
- except (yaml.YAMLError, TypeError) as exc:
+ TypeAdapter(OpenStackCloudsYAML).validate_python(openstack_clouds_yaml)
+ except (yaml.YAMLError, TypeError, ValidationError) as exc:
logger.error(f"Invalid {OPENSTACK_CLOUDS_YAML_CONFIG_NAME} config: %s.", exc)
raise CharmConfigInvalidError(
f"Invalid {OPENSTACK_CLOUDS_YAML_CONFIG_NAME} config. Invalid yaml."
@@ -445,7 +442,7 @@ def _parse_openstack_clouds_config(cls, charm: CharmBase) -> OpenStackCloudsYAML
return openstack_clouds_yaml
- @validator("reconcile_interval")
+ @field_validator("reconcile_interval")
@classmethod
def check_reconcile_interval(cls, reconcile_interval: int) -> int:
"""Validate the general charm configuration.
@@ -720,7 +717,7 @@ def from_charm(cls, charm: CharmBase) -> "LocalLxdRunnerConfig":
runner_storage=runner_storage,
)
- @validator("virtual_machines")
+ @field_validator("virtual_machines")
@classmethod
def check_virtual_machines(cls, virtual_machines: int) -> int:
"""Validate the virtual machines configuration value.
@@ -742,7 +739,7 @@ def check_virtual_machines(cls, virtual_machines: int) -> int:
return virtual_machines
- @validator("virtual_machine_resources")
+ @field_validator("virtual_machine_resources")
@classmethod
def check_virtual_machine_resources(
cls, vm_resources: VirtualMachineResources
@@ -783,22 +780,36 @@ class ProxyConfig(BaseModel):
Attributes:
aproxy_address: The address of aproxy snap instance if use_aproxy is enabled.
- http: HTTP proxy address.
- https: HTTPS proxy address.
+ http: HTTP proxy address string.
+ http_url: HTTP proxy address url.
+ https: HTTPS proxy address string.
+ https_url: HTTPS proxy address url.
no_proxy: Comma-separated list of hosts that should not be proxied.
use_aproxy: Whether aproxy should be used for the runners.
+ model_config: Config for the pydantic model
"""
- http: Optional[AnyHttpUrl]
- https: Optional[AnyHttpUrl]
- no_proxy: Optional[str]
+ http_url: Optional[AnyHttpUrl] = None
+ https_url: Optional[AnyHttpUrl] = None
+ no_proxy: Optional[str] = None
use_aproxy: bool = False
+ model_config = ConfigDict(frozen=True)
+
+ @property
+ def http(self) -> Optional[str]:
+ """Return string version of http url."""
+ return str(self.http_url) if self.http_url else None
+
+ @property
+ def https(self) -> Optional[str]:
+ """Return string version of https url."""
+ return str(self.https_url) if self.https_url else None
@property
def aproxy_address(self) -> Optional[str]:
"""Return the aproxy address."""
if self.use_aproxy:
- proxy_address = self.http or self.https
+ proxy_address = self.http_url or self.https_url
# assert is only used to make mypy happy
assert (
proxy_address is not None and proxy_address.host is not None
@@ -812,25 +823,20 @@ def aproxy_address(self) -> Optional[str]:
aproxy_address = None
return aproxy_address
- @validator("use_aproxy")
- @classmethod
- def check_use_aproxy(cls, use_aproxy: bool, values: dict) -> bool:
+ @model_validator(mode="after")
+ def check_use_aproxy(self: "ProxyConfig") -> "ProxyConfig":
"""Validate the proxy configuration.
- Args:
- use_aproxy: Value of use_aproxy variable.
- values: Values in the pydantic model.
-
Raises:
ValueError: if use_aproxy was set but no http/https was passed.
Returns:
- Validated use_aproxy value.
+ Validated ProxyConfig instance.
"""
- if use_aproxy and not (values.get("http") or values.get("https")):
+ if self.use_aproxy and not (self.http_url or self.https_url):
raise ValueError("aproxy requires http or https to be set")
- return use_aproxy
+ return self
def __bool__(self) -> bool:
"""Return whether the proxy config is set.
@@ -838,7 +844,7 @@ def __bool__(self) -> bool:
Returns:
Whether the proxy config is set.
"""
- return bool(self.http or self.https)
+ return bool(self.http_url or self.https_url)
@classmethod
def from_charm(cls, charm: CharmBase) -> "ProxyConfig":
@@ -860,21 +866,12 @@ def from_charm(cls, charm: CharmBase) -> "ProxyConfig":
no_proxy = None
return cls(
- http=http_proxy,
- https=https_proxy,
+ http_url=http_proxy,
+ https_url=https_proxy,
no_proxy=no_proxy,
use_aproxy=use_aproxy,
)
- class Config: # pylint: disable=too-few-public-methods
- """Pydantic model configuration.
-
- Attributes:
- allow_mutation: Whether the model is mutable.
- """
-
- allow_mutation = False
-
class UnsupportedArchitectureError(Exception):
"""Raised when given machine charm architecture is unsupported.
@@ -953,11 +950,9 @@ def from_charm(cls, charm: CharmBase) -> list["SSHDebugConnection"]:
)
continue
ssh_debug_connections.append(
- # pydantic allows string to be passed as IPvAnyAddress and as int,
- # mypy complains about it
SSHDebugConnection(
- host=host, # type: ignore
- port=port, # type: ignore
+ host=IPvAnyAddress(host),
+ port=int(port),
rsa_fingerprint=rsa_fingerprint,
ed25519_fingerprint=ed25519_fingerprint,
)
@@ -972,7 +967,7 @@ class ReactiveConfig(BaseModel):
mq_uri: The URI of the MQ to use to spawn runners reactively.
"""
- mq_uri: MongoDsn
+ mq_uri: Annotated[str, MongoDsn]
@classmethod
def from_database(cls, database: DatabaseRequires) -> "ReactiveConfig | None":
@@ -1055,13 +1050,15 @@ def _store_state(cls, state: "CharmState") -> None:
"""
state_dict = dataclasses.asdict(state)
# Convert pydantic object to python object serializable by json module.
- state_dict["proxy_config"] = json.loads(state_dict["proxy_config"].json())
- state_dict["charm_config"] = json.loads(state_dict["charm_config"].json())
+ state_dict["proxy_config"] = json.loads(state_dict["proxy_config"].model_dump_json())
+ state_dict["charm_config"] = json.loads(state_dict["charm_config"].model_dump_json())
if state.reactive_config:
- state_dict["reactive_config"] = json.loads(state_dict["reactive_config"].json())
- state_dict["runner_config"] = json.loads(state_dict["runner_config"].json())
+ state_dict["reactive_config"] = json.loads(
+ state_dict["reactive_config"].model_dump_json()
+ )
+ state_dict["runner_config"] = json.loads(state_dict["runner_config"].model_dump_json())
state_dict["ssh_debug_connections"] = [
- debug_info.json() for debug_info in state_dict["ssh_debug_connections"]
+ debug_info.model_dump_json() for debug_info in state_dict["ssh_debug_connections"]
]
json_data = json.dumps(state_dict, ensure_ascii=False)
CHARM_STATE_PATH.write_text(json_data, encoding="utf-8")
diff --git a/src/runner_type.py b/src/runner_type.py
index eec8793ee..3c25fc8ca 100644
--- a/src/runner_type.py
+++ b/src/runner_type.py
@@ -31,16 +31,18 @@ class ProxySetting:
"""Represent HTTP-related proxy settings.
Attributes:
+ use_aproxy: Whether aproxy should be used for the runners.
no_proxy: The comma separated URLs to not go through proxy.
http: HTTP proxy URL.
https: HTTPS proxy URL.
aproxy_address: Aproxy URL.
"""
- no_proxy: Optional[str]
- http: Optional[str]
- https: Optional[str]
- aproxy_address: Optional[str]
+ use_aproxy: bool = False
+ no_proxy: str | None = None
+ http: str | None = None
+ https: str | None = None
+ aproxy_address: str | None = None
@dataclass
diff --git a/tests/integration/test_reactive.py b/tests/integration/test_reactive.py
index 167bded81..69eeeeedf 100644
--- a/tests/integration/test_reactive.py
+++ b/tests/integration/test_reactive.py
@@ -75,7 +75,7 @@ async def test_reactive_mode_spawns_runner(
# label validation in the reactive consumer.
job = _create_job_details(run=run, labels=labels)
_add_to_queue(
- json.dumps(json.loads(job.json()) | {"ignored_noise": "foobar"}),
+ json.dumps(job.dict() | {"ignored_noise": "foobar"}, default=lambda x: str(x)),
mongodb_uri,
app.name,
)
diff --git a/tests/integration/test_runner_manager_openstack.py b/tests/integration/test_runner_manager_openstack.py
index 582ebca00..53043aea5 100644
--- a/tests/integration/test_runner_manager_openstack.py
+++ b/tests/integration/test_runner_manager_openstack.py
@@ -88,8 +88,8 @@ def openstack_proxy_config_fixture(
http_proxy = openstack_http_proxy if openstack_http_proxy else None
https_proxy = openstack_https_proxy if openstack_https_proxy else None
return ProxyConfig(
- http=http_proxy,
- https=https_proxy,
+ http_url=http_proxy,
+ https_url=https_proxy,
no_proxy=openstack_no_proxy,
use_aproxy=use_aproxy,
)
diff --git a/tests/unit/test_charm.py b/tests/unit/test_charm.py
index a28fc9743..ddaa18cd4 100644
--- a/tests/unit/test_charm.py
+++ b/tests/unit/test_charm.py
@@ -184,8 +184,8 @@ def test_proxy_setting(harness: Harness):
assert: The proxy configuration are set.
"""
state = harness.charm._setup_state()
- assert state.proxy_config.https == TEST_PROXY_SERVER_URL
- assert state.proxy_config.http == TEST_PROXY_SERVER_URL
+ assert state.proxy_config.https == f"{TEST_PROXY_SERVER_URL}/"
+ assert state.proxy_config.http == f"{TEST_PROXY_SERVER_URL}/"
assert state.proxy_config.no_proxy == "127.0.0.1,localhost"
@@ -277,7 +277,7 @@ def test_get_runner_manager(harness: Harness):
assert runner_manager is not None
assert runner_manager.config.token == "mocktoken"
assert runner_manager.proxies == ProxyConfig(
- http=None, https=None, no_proxy=None, use_aproxy=False
+ http_url=None, https_url=None, no_proxy=None, use_aproxy=False
)
@@ -358,7 +358,7 @@ def test__refresh_firewall(monkeypatch, harness: Harness, runner_binary_path: Pa
"host": test_unit_ip_addresses[0],
"port": "10022",
"rsa_fingerprint": "SHA256:abcd",
- "ed25519_fingerprint": "abcd",
+ "ed25519_fingerprint": "SHA256:abcd",
},
)
harness.update_relation_data(
@@ -368,7 +368,7 @@ def test__refresh_firewall(monkeypatch, harness: Harness, runner_binary_path: Pa
"host": test_unit_ip_addresses[1],
"port": "10022",
"rsa_fingerprint": "SHA256:abcd",
- "ed25519_fingerprint": "abcd",
+ "ed25519_fingerprint": "SHA256:abcd",
},
)
harness.update_relation_data(
@@ -378,7 +378,7 @@ def test__refresh_firewall(monkeypatch, harness: Harness, runner_binary_path: Pa
"host": test_unit_ip_addresses[2],
"port": "10022",
"rsa_fingerprint": "SHA256:abcd",
- "ed25519_fingerprint": "abcd",
+ "ed25519_fingerprint": "SHA256:abcd",
},
)
diff --git a/tests/unit/test_charm_state.py b/tests/unit/test_charm_state.py
index 52b19781c..f674236f0 100644
--- a/tests/unit/test_charm_state.py
+++ b/tests/unit/test_charm_state.py
@@ -13,8 +13,7 @@
import yaml
from charms.data_platform_libs.v0.data_interfaces import DatabaseRequires
from github_runner_manager.types_.github import GitHubOrg, GitHubRepo
-from pydantic import BaseModel
-from pydantic.error_wrappers import ValidationError
+from pydantic import AnyHttpUrl, BaseModel
from pydantic.networks import IPv4Address
import charm_state
@@ -506,7 +505,7 @@ def test_charm_config_from_charm_valid():
FirewallEntry(ip_range="192.168.1.1"),
FirewallEntry(ip_range="192.168.1.2"),
]
- assert result.dockerhub_mirror == "https://example.com"
+ assert result.dockerhub_mirror == AnyHttpUrl("https://example.com")
assert result.openstack_clouds_yaml == test_openstack_config
assert result.labels == ("label1", "label2", "label3")
assert result.token == "abc123"
@@ -805,8 +804,9 @@ def test_runner_charm_config_from_charm_valid():
@pytest.mark.parametrize(
"http, https, use_aproxy, expected_address",
[
- ("http://proxy.example.com", None, True, "proxy.example.com"),
- (None, "https://secureproxy.example.com", True, "secureproxy.example.com"),
+ ("http://proxy.example.com", None, True, "proxy.example.com:80"),
+ ("http://squid.internal:3128", None, True, "squid.internal:3128"),
+ (None, "https://secureproxy.example.com", True, "secureproxy.example.com:443"),
(None, None, False, None),
("http://proxy.example.com", None, False, None),
],
@@ -819,7 +819,7 @@ def test_apropy_address(
act: Access the aproxy_address property of the ProxyConfig instance.
assert: Verify that the property returns the expected apropy address.
"""
- proxy_config = ProxyConfig(http=http, https=https, use_aproxy=use_aproxy)
+ proxy_config = ProxyConfig(http_url=http, https_url=https, use_aproxy=use_aproxy)
result = proxy_config.aproxy_address
@@ -833,13 +833,10 @@ def test_check_use_aproxy():
act: Call the check_use_aproxy method with the provided values.
assert: Verify that the method raises a ValueError with the expected message.
"""
- values = {"http": None, "https": None}
- use_aproxy = True
-
with pytest.raises(ValueError) as exc_info:
- ProxyConfig.check_use_aproxy(use_aproxy, values)
+ ProxyConfig(use_aproxy=True)
- assert str(exc_info.value) == "aproxy requires http or https to be set"
+ assert "aproxy requires http or https to be set" in str(exc_info.value)
@pytest.mark.parametrize(
@@ -861,7 +858,7 @@ def test___bool__(http: str | None, https: str | None, expected_result: bool):
act: Call the __bool__ method on the instance.
assert: Verify that the method returns the expected boolean value.
"""
- proxy_instance = ProxyConfig(http=http, https=https)
+ proxy_instance = ProxyConfig(http_url=http, https_url=https)
result = bool(proxy_instance)
@@ -1014,7 +1011,6 @@ def test_reactive_config_from_charm():
)
connection_info = charm_state.ReactiveConfig.from_database(database)
-
assert isinstance(connection_info, charm_state.ReactiveConfig)
assert connection_info.mq_uri == mongodb_uri
@@ -1213,7 +1209,7 @@ class MockModel(BaseModel):
(
ProxyConfig,
"from_charm",
- ValidationError([], MockModel),
+ ValueError,
),
(ProxyConfig, "from_charm", ValueError),
(
@@ -1221,10 +1217,10 @@ class MockModel(BaseModel):
"_check_immutable_config_change",
ImmutableConfigChangedError("Immutable config changed"),
),
- (CharmConfig, "from_charm", ValidationError([], MockModel)),
+ (CharmConfig, "from_charm", ValueError),
(CharmConfig, "from_charm", ValueError),
(charm_state, "_get_supported_arch", UnsupportedArchitectureError(arch="testarch")),
- (SSHDebugConnection, "from_charm", ValidationError([], MockModel)),
+ (SSHDebugConnection, "from_charm", CharmConfigInvalidError("Invalid SSH Debug info")),
],
)
def test_charm_state_from_charm_invalid_cases(
diff --git a/tests/unit/test_lxd_runner_manager.py b/tests/unit/test_lxd_runner_manager.py
index 2c5aca46c..5b6d696a8 100644
--- a/tests/unit/test_lxd_runner_manager.py
+++ b/tests/unit/test_lxd_runner_manager.py
@@ -28,7 +28,7 @@
from runner_type import RunnerNameByHealth
from tests.unit.mock import TEST_BINARY, MockLxdImageManager
-FAKE_MONGODB_URI = "mongodb://example.com/db"
+FAKE_MONGODB_URI = "mongodb://example.com:27017/db"
IMAGE_NAME = "jammy"
@@ -68,8 +68,8 @@ def charm_state_fixture(charm_config: MagicMock):
GitHubRepo("test_owner", "test_repo"),
ProxyConfig(
no_proxy="test_no_proxy",
- http=TEST_PROXY_SERVER_URL,
- https=TEST_PROXY_SERVER_URL,
+ http_url=TEST_PROXY_SERVER_URL,
+ https_url=TEST_PROXY_SERVER_URL,
use_aproxy=False,
),
),